from django.contrib import admin
from .models import Women_Warriors
#PART 6.4 The import above matters: admin.py has to know which module the model comes from. Forgetting it leads to confusing errors.
admin.site.register(Women_Warriors)
#PART 6.5 This tells the admin site to manage the Women_Warriors model defined in this app's models.py. Remember, you are doing this from the app's admin.py.
|
import random
random_integer = random.randint(1, 10)
print(random_integer)
|
'''
Created on 11/mar/2014
@author: isiu
'''
from xml.dom.minidom import parseString
#this import is part of the standard library in modern Python implementations
def readXml(filename):
#open the xml file for reading:
file = open(filename,'r')
#convert to string:
data = file.read()
file.close()
#parse the xml you got from the file
dom = parseString(data)
    #collect every <partenza> and <ingresso> element the parser finds:
xmlTagP = dom.getElementsByTagName('partenza')
xmlTagP += dom.getElementsByTagName('ingresso')
dict = {}
for partenza in xmlTagP:
prevcdb = partenza.getAttribute('prevcdb')
name = partenza.getAttribute('name')
#print name
#print prevcdb
xmlTagPCDB = partenza.getElementsByTagName('cdb')
listcdb = []
for cdb in xmlTagPCDB:
value = cdb.firstChild.data
qdev = cdb.getAttribute('q_dev')
listcdb.append((value,qdev))
#print value
        if prevcdb not in dict:
dict[prevcdb] = [listcdb]
else:
lis = dict[prevcdb]
lis.append(listcdb)
#print dict
return dict
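# Example usage (assumes an XML file whose <partenza>/<ingresso> elements
# carry 'prevcdb' and 'name' attributes and contain <cdb> children;
# 'impianto.xml' is only a placeholder name):
#   mapping = readXml('impianto.xml')
#   for prevcdb, groups in mapping.items():
#       print(prevcdb, groups)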
|
'''
A unit fraction contains 1 in the numerator.
The decimal representation of the unit fractions with denominators 2 to 10 are given:
1/2 = 0.5
1/3 = 0.(3)
1/4 = 0.25
1/5 = 0.2
1/6 = 0.1(6)
1/7 = 0.(142857)
1/8 = 0.125
1/9 = 0.(1)
1/10 = 0.1
Where 0.1(6) means 0.166666..., and has a 1-digit recurring cycle. It can be seen that 1/7 has a 6-digit recurring cycle.
Find the value of d < 1000 for which 1/d contains the longest recurring cycle in its decimal fraction part.
'''
# -*- coding: utf-8 -*-
def f(n):
_len = 0
d = 0
for i in range(1, n):
r = recurringCycles(i)
if r > _len:
_len = r
d = i
print(d)
def recurringCycles(n):
remainders = [1]
numerator = 1
while True:
remainder = numerator * 10 % n
numerator = remainder
if remainder == 0:
return 0
elif remainder in remainders:
return len(remainders) - remainders.index(remainder)
else:
remainders.append(remainder)
f(1000)
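# Note: for d coprime to 10, the cycle length of 1/d is the multiplicative
# order of 10 modulo d. The call above prints 983, a full reptend prime whose
# decimal expansion repeats with a 982-digit cycle.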
|
# Generated by Django 2.2.4 on 2020-12-26 09:17
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('whcapp', '0003_comment'),
]
operations = [
migrations.RenameField(
model_name='comment',
old_name='user_comment',
new_name='user',
),
migrations.RemoveField(
model_name='comment',
name='post_comment',
),
migrations.AddField(
model_name='comment',
name='post',
field=models.ForeignKey(default=1, on_delete=django.db.models.deletion.CASCADE, related_name='m_comments', to='whcapp.Post'),
preserve_default=False,
),
]
|
def NumberChooser(number):
if number == 0:
return "ゼロ"
elif len(str(number)) == 1:
return Unit(number)
elif len(str(number)) == 2:
return Dozens(number)
elif len(str(number)) == 3:
return Hundreds(number)
elif len(str(number)) == 4:
return Thousands(number)
elif len(str(number)) == 5:
return TenOfThousands(number)
elif len(str(number)) == 6:
return HundredThousands(number)
elif len(str(number)) == 7:
return Million(number)
elif len(str(number)) == 8:
return TensOfMillions(number)
elif len(str(number)) == 9:
return HundredMillions(number)
elif len(str(number)) == 10:
return Billions(number)
elif len(str(number)) == 11:
return TensOfBillions(number)
elif len(str(number)) == 12:
return HundredBillions(number)
def Unit(number):
if number == 0:
return ""
elif number == 1:
return "いち"
elif number == 2:
return "に"
elif number == 3:
return "さん"
elif number == 4:
return "よん"
elif number == 5:
return "ご"
elif number == 6:
return "ろく"
elif number == 7:
return "なな"
elif number == 8:
return "はち"
elif number == 9:
return "きゅう"
def Dozens(number):
if number < 10:
return Unit(number)
BackDigit = number % 10
FrontDigit = number//10
if FrontDigit == 1:
return "じゅう" + Unit(BackDigit)
else:
return Unit(FrontDigit) + "じゅう" + Unit(BackDigit)
def Hundreds(number):
if number < 100:
return Dozens(number)
BackDigit = number % 100
FrontDigit = number//100
if FrontDigit == 1:
return "ひゃく" + Dozens(BackDigit)
elif FrontDigit == 3:
return "さんびゃく" + Dozens(BackDigit)
elif FrontDigit == 6:
return "ろっぴゃく" + Dozens(BackDigit)
elif FrontDigit == 8:
return "はっぴゃく" + Dozens(BackDigit)
else:
return Unit(FrontDigit) + "ひゃく" + Dozens(BackDigit)
def Thousands(number):
if number < 1000:
return Hundreds(number)
BackDigit = number % 1000
FrontDigit = number//1000
if FrontDigit == 1:
return "せん" + Hundreds(BackDigit)
elif FrontDigit == 3:
return "さんぜん" + Hundreds(BackDigit)
elif FrontDigit == 8:
return "はっせん" + Hundreds(BackDigit)
else:
return Unit(FrontDigit) + "せん" + Hundreds(BackDigit)
def TenOfThousands(number):
if number < 10000:
return Thousands(number)
BackDigit = number % 10000
FrontDigit = number//10000
return Unit(FrontDigit) + "まん" + Thousands(BackDigit)
def HundredThousands(number):
if number < 100000:
return TenOfThousands(number)
BackDigit = number % 10000
FrontDigit = number//10000
return Dozens(FrontDigit) + "まん" + Thousands(BackDigit)
def Million(number):
if number < 1000000:
return HundredThousands(number)
BackDigit = number % 10000
FrontDigit = number//10000
return Hundreds(FrontDigit) + "まん" + Thousands(BackDigit)
def TensOfMillions(number):
if number < 10000000:
return Million(number)
BackDigit = number % 10000
FrontDigit = number//10000
return Thousands(FrontDigit) + "まん" + Thousands(BackDigit)
def HundredMillions(number):
if number < 100000000:
return TensOfMillions(number)
BackDigit = number % 100000000
FrontDigit = number//100000000
return Unit(FrontDigit) + "おく" + TensOfMillions(BackDigit)
def Billions(number):
if number < 100000000:
return TensOfMillions(number)
BackDigit = number % 100000000
FrontDigit = number//100000000
return Dozens(FrontDigit) + "おく" + TensOfMillions(BackDigit)
def TensOfBillions(number):
if number < 100000000:
return TensOfMillions(number)
BackDigit = number % 100000000
FrontDigit = number//100000000
return Hundreds(FrontDigit) + "おく" + TensOfMillions(BackDigit)
def HundredBillions(number):
if number < 100000000:
return TensOfMillions(number)
BackDigit = number % 100000000
FrontDigit = number//100000000
return Thousands(FrontDigit) + "おく" + TensOfMillions(BackDigit)
print("Enter number to convert: ")
integer = int(input())
print(NumberChooser(integer))
# for i in range(1,1000000):
# print(i, NumberChooser(i))
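# A few sample conversions, worked through the functions above:
#   NumberChooser(86)   -> "はちじゅうろく"
#   NumberChooser(300)  -> "さんびゃく"
#   NumberChooser(2020) -> "にせんにじゅう"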
|
from django.contrib import admin
from .models import Topic, Video, Question, Pdf, Query, Comment
class TopicAdmin(admin.ModelAdmin):
list_display = ('id', 'topicName')
list_display_links = ('id', 'topicName')
list_filter = ('id',)
search_fields = ('topicName',)
list_per_page = 10
class VideoAdmin(admin.ModelAdmin):
list_display = ('id', 'title', 'key', 'topic')
list_display_links = ('id', 'title')
list_filter = ('topic', )
search_fields = ('title', 'key', 'channelName', 'id')
list_per_page = 25
class QuestionAdmin(admin.ModelAdmin):
list_display = ('id', 'question', 'topic')
list_display_links = ('id', 'question')
list_filter = ('topic',)
search_fields = ('question', 'id')
list_per_page = 25
class PdfAdmin(admin.ModelAdmin):
list_display = ('id', 'topic', 'file')
list_display_links = ('id', 'topic')
list_filter = ('topic',)
search_fields = ('file', 'id')
class QueryAdmin(admin.ModelAdmin):
list_display = ('id', 'question', 'topic', 'user', 'date')
list_display_links = ('id', 'question')
list_filter = ('topic', 'date')
search_fields = ('question', 'id')
class CommentAdmin(admin.ModelAdmin):
list_display = ('id', 'comment', 'query', 'user', 'date')
list_display_links = ('id', 'comment')
list_filter = ('query', 'date')
search_fields = ('comment', 'id')
admin.site.register(Topic, TopicAdmin)
admin.site.register(Video, VideoAdmin)
admin.site.register(Question, QuestionAdmin)
admin.site.register(Pdf, PdfAdmin)
admin.site.register(Query, QueryAdmin)
admin.site.register(Comment, CommentAdmin)
|
from django.urls import path
from .views import create_user,activate,forgot_password_mail,reset_password,change_password,teacher_details
from django.conf.urls import url
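# Note: on Django 2.0+ the regex routes below can equivalently use
# django.urls.re_path; django.conf.urls.url is a deprecated alias for it.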
app_name = 'contacts'
urlpatterns = [
    # path() routes must not start with a slash, otherwise they never match
    path('signup', create_user, name='signup'),
    url(r'^activate/(?P<uidb64>[0-9A-Za-z_\-]+)/(?P<token>[0-9A-Za-z]{1,13}-[0-9A-Za-z]{1,20})/$',
        activate, name='activate'),
    path('resetmail', forgot_password_mail, name='resetmail'),
    url(r'^resetmail/(?P<uidb64>[0-9A-Za-z_\-]+)/(?P<token>[0-9A-Za-z]{1,13}-[0-9A-Za-z]{1,20})/$',
        reset_password, name='reset'),
    path('changepassword', change_password, name='change_password'),
    path('teacher/<int:pk>', teacher_details, name="teacher"),
]
|
# -----------------------------------------------------------------------------
# Copyright (c) 2014--, The Qiita Development Team.
#
# Distributed under the terms of the BSD 3-clause License.
#
# The full license is in the file LICENSE, distributed with this software.
# -----------------------------------------------------------------------------
from unittest import TestCase, main
from os import close, remove
from os.path import basename, join
from tempfile import mkstemp
from qiita_core.util import qiita_test_checker
import qiita_db as qdb
@qiita_test_checker()
class ReferenceTests(TestCase):
def setUp(self):
self.name = "Fake Greengenes"
self.version = "13_8"
fd, self.seqs_fp = mkstemp(suffix="_seqs.fna")
close(fd)
fd, self.tax_fp = mkstemp(suffix="_tax.txt")
close(fd)
fd, self.tree_fp = mkstemp(suffix="_tree.tre")
close(fd)
_, self.db_dir = qdb.util.get_mountpoint('reference')[0]
self._clean_up_files = []
def tearDown(self):
for f in self._clean_up_files:
remove(f)
def test_create(self):
"""Correctly creates the rows in the DB for the reference"""
# Check that the returned object has the correct id
obs = qdb.reference.Reference.create(
self.name, self.version, self.seqs_fp, self.tax_fp, self.tree_fp)
self.assertEqual(obs.id, 3)
# Check that the information on the database is correct
with qdb.sql_connection.TRN:
qdb.sql_connection.TRN.add(
"SELECT * FROM qiita.reference WHERE reference_id=3")
obs = qdb.sql_connection.TRN.execute_fetchindex()
self.assertEqual(obs[0][1], self.name)
self.assertEqual(obs[0][2], self.version)
seqs_id = obs[0][3]
tax_id = obs[0][4]
tree_id = obs[0][5]
# Check that the filepaths have been correctly added to the DB
with qdb.sql_connection.TRN:
sql = """SELECT * FROM qiita.filepath
WHERE filepath_id=%s OR filepath_id=%s
OR filepath_id=%s"""
qdb.sql_connection.TRN.add(sql, [seqs_id, tax_id, tree_id])
obs = qdb.sql_connection.TRN.execute_fetchindex()
exp_seq = "%s_%s_%s" % (self.name, self.version,
basename(self.seqs_fp))
exp_tax = "%s_%s_%s" % (self.name, self.version,
basename(self.tax_fp))
exp_tree = "%s_%s_%s" % (self.name, self.version,
basename(self.tree_fp))
exp = [[seqs_id, exp_seq, 10, '0', 1, 6, 0],
[tax_id, exp_tax, 11, '0', 1, 6, 0],
[tree_id, exp_tree, 12, '0', 1, 6, 0]]
self.assertEqual(obs, exp)
def test_sequence_fp(self):
ref = qdb.reference.Reference(1)
exp = join(self.db_dir, "GreenGenes_13_8_97_otus.fasta")
self.assertEqual(ref.sequence_fp, exp)
def test_taxonomy_fp(self):
ref = qdb.reference.Reference(1)
exp = join(self.db_dir, "GreenGenes_13_8_97_otu_taxonomy.txt")
self.assertEqual(ref.taxonomy_fp, exp)
def test_tree_fp(self):
ref = qdb.reference.Reference(1)
exp = join(self.db_dir, "GreenGenes_13_8_97_otus.tree")
self.assertEqual(ref.tree_fp, exp)
def test_tree_fp_empty(self):
ref = qdb.reference.Reference(2)
self.assertEqual(ref.tree_fp, '')
if __name__ == '__main__':
main()
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
class Clock:
    def __init__(self, hour):
        self.hour = hour  # go through the setter so _ampm is always consistent
    @property
    def hour(self):
        return self._hour
    @hour.setter
    def hour(self, value):
        value = value % 24
        self._hour = value % 12 or 12  # 0 and 12 both read as 12 on a 12-hour clock
        self._ampm = "am" if value < 12 else "pm"
    @property
    def ampm(self):
        return self._ampm
obj = Clock(11)
print(obj.hour, obj.ampm)
obj.hour = 13
print(obj.hour, obj.ampm)
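# Edge cases the setter now handles: obj.hour = 12 -> 12 pm (noon),
# obj.hour = 0 -> 12 am (midnight), obj.hour = 23 -> 11 pm.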
|
import pickle
from scipy import sparse as sp
def matrix_equal(m1: sp.csr_matrix, m2: sp.csr_matrix):
    # comparing sparse matrices of different shapes raises, so check shapes first
    if m1.shape != m2.shape:
        return False
    return (m1 != m2).nnz == 0
def _structure_equal(s1, s2):
for m1, m2 in zip(s1, s2):
yield matrix_equal(m1, m2)
def structure_equal(s1, s2):
if len(s1) != len(s2):
return False
return all(_structure_equal(s1, s2))
def is_structure_known(known_structures, structure):
for idx, (s2, usages) in enumerate(known_structures):
if structure_equal(structure, s2):
known_structures[idx] = (s2, usages + 1)
return True
return False
DATA_SET_PATH = 'C:\\workspace\\ucca-4-bpm\\ucca4bpm\\data\\transformed\\ucca-output.pickle'
with open(DATA_SET_PATH, 'rb') as f:
data = pickle.load(f)
structures = data['adjacencies']
known_structures = []
for i, structure in enumerate(structures):
    print(f'Analyzing structure {i + 1}/{len(structures)}')
if not is_structure_known(known_structures, structure):
known_structures.append((structure, 1))
print(f'Got {len(known_structures)} types of matrices, usage below:')
for structure, usage in known_structures:
print(f'{usage}')
|
from skimage import io, img_as_ubyte
import numpy as np
from os import listdir
from os.path import isfile, join
from utilities import *
import warnings
import png
mypath = './data_relabelled/'
outputdir = './img_nodupes_test'
onlyfiles = [f for f in listdir(mypath) if isfile(join(mypath, f))]
with warnings.catch_warnings():
warnings.simplefilter("ignore")
io.use_plugin('freeimage')
# open the exceptions log once, outside the loop, so that mode 'w' does not
# truncate it again on every file
with open("{}/exceptions.txt".format(outputdir), 'w') as exceptfile:
    for f in onlyfiles:
        if f == "labels.csv": continue
        print("Loading image: {}".format(f))
        arr = np.load(mypath + f, mmap_mode='r')
        try:
            io.imsave('{}/{}.png'.format(outputdir, f[:-4]), arr)
            #with open('{}/{}.png'.format(outputdir, f[:-4]), 'wb') as pngfile:
            #    writer = png.Writer(width=arr.shape[1], height=arr.shape[0], bitdepth=16, greyscale=True)
            #    arr2list = arr.tolist()
            #    writer.write(pngfile, arr2list)
        except Exception:
            err_msg = "Error when converting file {}. File not processed.".format(f)
            print(err_msg)
            exceptfile.write(err_msg + "\n")
|
class Solution(object):
def convert(self, s, numRows):
"""
:type s: str
:type numRows: int
:rtype: str
"""
        if numRows == 1:
            return s  # with one row the strides below are 0 and would loop forever
        result = []
for row in range(numRows):
go_down = False
next_index = row
while next_index < len(s):
result.append(s[next_index])
if (go_down and row != 0) or (not go_down and row != numRows - 1):
go_down = not go_down
if go_down:
next_index += (numRows - row - 1) * 2
else:
next_index += row * 2
        return ''.join(result)
import unittest
class TestSolution(unittest.TestCase):
    def test_solution(self):
        self.assertEqual(Solution().convert('A', 1), 'A')
        self.assertEqual(Solution().convert('PAYPALISHIRING', 3), 'PAHNAPLSIIGYIR')
|
from bfimpl.bfunc import generateId
PATTERN = """//Start:Declarations
int sub_%ID%(%ARGS%);
//Stop:Declarations
//Start:Definitions
int sub_%ID%(%ARGS%) {
int retval;
retval = %EXPRESSION%;
return retval;
}
//Stop:Definitions
"""
def generate(n):
identification = generateId()
arguments = ""
subtraction = ""
for i in range(0, n):
arguments += "int arg%d, " % (i + 1)
subtraction += "arg%d - " % (i + 1)
    subtraction = subtraction[:-3]  # strip the trailing " - "
    arguments = arguments[:-2]  # strip the trailing ", "
code = PATTERN
code = code.replace("%ID%", identification)
code = code.replace("%ARGS%", arguments)
code = code.replace("%EXPRESSION%", subtraction)
return "sub_%s" % identification, code
|
# -*- coding: utf-8 -*-
from Itinerary import Itinerary
from AirportAtlas import AirportAtlas
from AircraftList import AircraftList
from CurrencyList import CurrencyList
from CurrencyRateList import CurrencyRateList
from itertools import permutations
import sys
from tkinter import messagebox
class ItineraryList:
def __init__(self,Data):
self.__routelist=self.loadData(Data)
def loadData(self,Data):
numofairports=len(Data)-1 #minus one because last index is aircraftcode
airportlist=[]
        for i in range(numofairports): #separates the airports from the aircraft
airportlist.append(Data[i])
aircraftcode=Data[numofairports]
        cheapestroutelist,mincost,cheapestroutedist,shortestroutelist,shortestroutecost,shortestroutedist=self.calculationForFiveRoutes(airportlist,aircraftcode)
self.obj1=Itinerary(cheapestroutelist,mincost,cheapestroutedist,shortestroutelist,shortestroutecost,shortestroutedist,Data[5])
def getCheapestRouteList(self):
return self.obj1.getCheapestRouteList()
def getMinCost(self):
return self.obj1.getMinCost()
def getCheapestRouteDist(self):
return self.obj1.getCheapestRouteDist()
def getShortestRouteList(self):
return self.obj1.getShortestRouteList()
def getShortestRouteCost(self):
return self.obj1.getShortestRouteCost()
def getShortestRouteDist(self):
return self.obj1.getShortestRouteDist()
def getAircraft(self):
return self.obj1.getAircraft()
    @staticmethod
    def permutations(homeairport,airportlist):
possairportlist=list(permutations(airportlist))
routelist=[]
for row in possairportlist:
row=list(row)
row.insert(0,homeairport)
row.insert(len(row),homeairport)
routelist.append(row)
return routelist
def calculationForFiveRoutes(self,airportlist,aircraftcode):
"""
The main code of the project. Takes the list of the airports and aircraft
given by the user, eliminates all the impossible routes and with trial and
error finds the optimum route. Returns the cheapest route, minimum cost,
distance of the optimum route and the impossible routes distance.
cheapestroutelist,mincost,routedist,impossible_routes_distancefuel
**Parameters**\n
----------\n
airportlist: A list consists of 5 airports (IATA codes) taken as an input
from the user.
aircraftcode: The aircraft's code which will carry on the itinerary.
**Returns**\n
-------\n
cheapestroutelist: The sequence of airports regarding to the optimum route.
mincost: The cost of the itinerary.
routedist: THe total distance of the itinerary.
impossible_routes_distancefuel: The list of the impossible routes.
"""
homeairport=airportlist.pop(0)
routelist=ItineraryList.permutations(homeairport,airportlist)
cheapestroutelist,mincost,routedist,bestroutelist,cost1,bestroute,impossible_routes_fullrefuel=ItineraryList.fullrefuel(airportlist,routelist,aircraftcode)
cheapestroutelistopt,mincostopt,routedistopt,impossible_routes_distancefuel=ItineraryList.distancerefuel(airportlist,routelist,aircraftcode)
if mincostopt<mincost:
mincost=mincostopt
cheapestroutelist=cheapestroutelistopt
routedist=routedistopt
        if len(impossible_routes_fullrefuel)==24 and len(impossible_routes_distancefuel)==24: #all (4!)*2 candidate routes are impossible
            messagebox.showinfo("Impossible route", "This plane cannot complete the specified routes; please change the plane or the routes")
            return cheapestroutelist,mincost,routedist,bestroutelist,cost1,bestroute
        elif len(impossible_routes_fullrefuel)==24:
            mincost=mincostopt
            cheapestroutelist=cheapestroutelistopt
            routedist=routedistopt
            return cheapestroutelist,mincost,routedist,bestroutelist,cost1,bestroute
        else:
            return cheapestroutelist,mincost,routedist,bestroutelist,cost1,bestroute
    @staticmethod
    def fullrefuel(airportlist,routelist,aircraftcode): #finds the shortest and cheapest routes with a full refuel at every stop
        try:
            aircraft=AircraftList('aircraft.csv')
        except FileNotFoundError:
            messagebox.showinfo("Loading error aircraft.csv file", "Unable to load 'aircraft.csv'; please make sure it is in the same directory as this script and is named 'aircraft.csv'")
            sys.exit(0)
        try:
            airport=AirportAtlas('airport.csv')
        except FileNotFoundError:
            messagebox.showinfo("Loading error airport.csv file", "Unable to load 'airport.csv'; please make sure it is in the same directory as this script and is named 'airport.csv'")
            sys.exit(0)
        try:
            countrycurr=CurrencyList('countrycurrency.csv')
        except FileNotFoundError:
            messagebox.showinfo("Loading error countrycurrency.csv file", "Unable to load 'countrycurrency.csv'; please make sure it is in the same directory as this script and is named 'countrycurrency.csv'")
            sys.exit(0)
        try:
            currencyrate=CurrencyRateList('currencyrates.csv')
        except FileNotFoundError:
            messagebox.showinfo("Loading error currencyrates.csv file", "Unable to load 'currencyrates.csv'; please make sure it is in the same directory as this script and is named 'currencyrates.csv'")
            sys.exit(0)
# some initial assignments
initialaircraftrange=aircraft.getAircraftRange(aircraftcode)
aircraftrange=initialaircraftrange
bestroute=10**10
mincost=10**10
tobefueled=0
cost=0
indx=0
currrate=0
impossible_routes_fullrefuel=[]
        control=0 #for controlling the impossible routes
for j in range(len(routelist)):
totalcost=0
totalroute=0
control=0 #sets control back to zero
for k in range(len(airportlist)+1):
distance=airport.distanceBetweenAirports(routelist[j][k],routelist[j][k+1])
totalroute+=distance
                if routelist[j][k]==routelist[j][k+1]: #the plane may visit an airport twice, but a leg's departure and arrival can't be the same airport
control=1
if distance>aircraftrange:
tobefueled=initialaircraftrange-aircraftrange
where=airport.getAirportCountry(routelist[j][k])
iseuro=countrycurr.getCurrencyCode(where)
currrate=currencyrate.getCurrencyRate(iseuro)
cost=tobefueled*currrate
aircraftrange+=tobefueled
totalcost+=cost
if distance>aircraftrange: #still distance is bigger than range even when gas tank is full
control=1 #if control is 1 bestroute and cheapest is not calculated
indx+=1
aircraftrange=aircraftrange-distance
totalroute1=totalroute
aircraftrange=initialaircraftrange
#================COMPARING===============================
if control == 0:
if bestroute>totalroute:
bestroute=totalroute
bestroutelist=routelist[j]
cost1=totalcost
if mincost>totalcost:
mincost=totalcost
cheapestroutelist=routelist[j]
routedist=totalroute1
#=========================================================
else:
impossible_routes_fullrefuel.append(routelist[j]) # for counting impossible routes
bestroutelist=0
totalroute=0
cost1=0
if len(impossible_routes_fullrefuel)==24:
cheapestroutelist=0
mincost=10**10
routedist=0
bestroutelist=0
cost1=10**10
bestroute=0
return cheapestroutelist,mincost,routedist,bestroutelist,cost1,bestroute,impossible_routes_fullrefuel
else:
return cheapestroutelist,mincost,routedist,bestroutelist,cost1,bestroute,impossible_routes_fullrefuel
    @staticmethod
    def distancerefuel(airportlist,routelist,aircraftcode): #small optimisation: check whether a full refuel or refuelling just enough for the next leg is cheaper
        try:
            aircraft=AircraftList('aircraft.csv')
        except FileNotFoundError:
            messagebox.showinfo("Loading error aircraft.csv file", "Unable to load 'aircraft.csv'; please make sure it is in the same directory as this script and is named 'aircraft.csv'")
            sys.exit(0)
        try:
            airport=AirportAtlas('airport.csv')
        except FileNotFoundError:
            messagebox.showinfo("Loading error airport.csv file", "Unable to load 'airport.csv'; please make sure it is in the same directory as this script and is named 'airport.csv'")
            sys.exit(0)
        try:
            countrycurr=CurrencyList('countrycurrency.csv')
        except FileNotFoundError:
            messagebox.showinfo("Loading error countrycurrency.csv file", "Unable to load 'countrycurrency.csv'; please make sure it is in the same directory as this script and is named 'countrycurrency.csv'")
            sys.exit(0)
        try:
            currencyrate=CurrencyRateList('currencyrates.csv')
        except FileNotFoundError:
            messagebox.showinfo("Loading error currencyrates.csv file", "Unable to load 'currencyrates.csv'; please make sure it is in the same directory as this script and is named 'currencyrates.csv'")
            sys.exit(0)
initialaircraftrange=aircraft.getAircraftRange(aircraftcode)
aircraftrange=initialaircraftrange
mincost=10**10
totalcostopt=0
indx=0
currrate=0
impossible_routes_distancefuel=[]
control=0
for j in range(len(routelist)):
totalroute=0
totalcostopt=0
control=0
for k in range(len(airportlist)+1):
distance=airport.distanceBetweenAirports(routelist[j][k],routelist[j][k+1])
totalroute+=distance
if routelist[j][k]==routelist[j][k+1]:
control=1
if distance>aircraftrange:# refuel enough to complete this leg
where=airport.getAirportCountry(routelist[j][k])
iseuro=countrycurr.getCurrencyCode(where)
currrate=currencyrate.getCurrencyRate(iseuro)
if distance<initialaircraftrange:
tobefueledopt=distance-aircraftrange
costopt=tobefueledopt*currrate
aircraftrange+=tobefueledopt
totalcostopt+=costopt
if distance>aircraftrange: #still distance is bigger than range even when gas tank is full
control=1 #if control is 1 bestroute and cheapest is not calculated
indx+=1
aircraftrange=aircraftrange-distance
totalroute1=totalroute
aircraftrange=initialaircraftrange
if control == 0:
#=======================COMPARING======================
if mincost>totalcostopt:
mincost=totalcostopt
cheapestroutelist=routelist[j]
routedist=totalroute1
#==================================================
else:
impossible_routes_distancefuel.append(routelist[j]) # for counting impossible routes
bestroutelist=0
totalroute=0
cost1=0
if len(impossible_routes_distancefuel)==24:
cheapestroutelist=0
mincost=10**10
routedist=0
return cheapestroutelist,mincost,routedist,impossible_routes_distancefuel
else:
return cheapestroutelist,mincost,routedist,impossible_routes_distancefuel
|
import os
from dotenv import load_dotenv
load_dotenv()
DISPLAY_NAME = os.getenv('display_name')
SENDER_EMAIL = os.getenv('sender_email')
PASSWORD = os.getenv('password')
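# Example .env file consumed by load_dotenv() above (placeholder values):
#   display_name=Jane Doe
#   sender_email=jane@example.com
#   password=app-specific-password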
try:
assert DISPLAY_NAME
assert SENDER_EMAIL
assert PASSWORD
except AssertionError:
print('Please set up credentials!')
else:
print('Credentials loaded successfully')
|
#!/usr/bin/env python3
"""Processes errors for pyincapsula
Handles all error that might occur with pyincapsula and returns a
JSON breakdown of what the error is. See github for more information
byond the description and details the error message returns
error -- Exception thrown by the script
data -- Extra data needed to diagnose the error (Default: None)
"""
import json
import requests
def errorProcess(error, data=None):
if type(error) is NameError:
outError = {
'error':0,
'description':'Required argument not provided',
'details':str(data)+' was not provided'
}
elif type(error) is AssertionError:
outError = {
'error':0,
'description':'Required argument not provided',
'details':str(data)+' was not provided or is incorrect'
}
    elif type(error) is ValueError:
        if data == 'int':
            outError = {
                'error':5,
                'description':'A non-integer value was passed when an integer '
                'was required',
                'details':error
            }
        elif data == 'str':
            outError = {
                'error':5,
                'description':'A non-string value was passed when a string '
                'was required',
                'details':error
            }
        else:
            outError = {
                'error':5,
                'description':'An invalid value was passed',
                'details':error
            }
elif type(error) is ConnectionError:
outError = {
'error':1,
'description':'Connection error.',
'details':error
}
elif type(error) is TimeoutError:
outError = {
'error':2,
'description':'Connection timed-out.',
'details':error
}
elif type(error) is requests.exceptions.HTTPError:
outError = {
'error':3,
'description':'HTTP Exception',
'details':error
}
elif type(error) is EnvironmentError:
outError = {
'error':4,
'description':data,
'details':error
}
else:
outError = {
'error':99,
'description':'Unknown Error',
'details':error
}
    return json.dumps(outError, default=str)  # exceptions are not JSON-serializable, so fall back to str()
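# Example:
#   try:
#       int("abc")
#   except ValueError as e:
#       print(errorProcess(e, 'int'))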
|
'''
File defining the global constants
'''
FRAMES_PER_SAMPLE = 100 # number of frames forming a chunk of data
SAMPLING_RATE = 8000
FRAME_SIZE = 256
NEFF = 129 # effective FFT points
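# NEFF = FRAME_SIZE // 2 + 1: the number of non-redundant rfft bins of a 256-sample frame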
# amplification factor of the waveform signal
AMP_FAC = 10000
MIN_AMP = 10000
# TF bins smaller than THRESHOLD will be
# considered inactive
THRESHOLD = 40
# embedding dimension
EMBBEDDING_D = 40
# params for pre-whitening
GLOBAL_MEAN = 44
GLOBAL_STD = 15.5
# feed forward dropout prob
P_DROPOUT_FF = 0.5
# recurrent dropout prob
P_DROPOUT_RC = 0.2
N_HIDDEN = 300
LEARNING_RATE = 1e-3
MAX_STEP = 2000000
TRAIN_BATCH_SIZE = 128
|
import tensorflow as tf
from tensorflow import keras
from tensorflow.keras import backend as K
class FullyConvNets(keras.layers.Layer):
def __init__(self, nb_filters=128, **kwargs):
super(FullyConvNets, self).__init__(**kwargs)
self.conv1 = keras.Sequential([
keras.layers.Conv1D(nb_filters, 8, 1, padding='same', kernel_initializer='he_normal'),
# keras.layers.BatchNormalization(),
keras.layers.Activation('relu')
])
self.conv2 = keras.Sequential([
keras.layers.Conv1D(nb_filters*2, 5, 1, padding='same', kernel_initializer='he_normal'),
# keras.layers.BatchNormalization(),
keras.layers.Activation('relu')
])
self.conv3 = keras.Sequential([
keras.layers.Conv1D(nb_filters, 3, 1, padding='same', kernel_initializer='he_normal'),
# keras.layers.BatchNormalization(),
keras.layers.Activation('relu')
])
def call(self, inputs, **kwargs):
x = self.conv1(inputs)
x = self.conv2(x)
x = self.conv3(x)
return x
def FCN(init, input_shape):
# input_shape: [batch_size, time_steps, nb_time_series]
x = keras.layers.Input(input_shape)
y = FullyConvNets(init.CNNFilters)(x)
y = keras.layers.GlobalAveragePooling1D()(y)
# y = keras.layers.Dropout(0.1)(y)
y = keras.layers.Dense(init.FeatDims)(y)
if init.task == 'classification':
y = keras.layers.Activation('softmax')(y)
model = keras.models.Model(inputs=x, outputs=y)
return model
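# A minimal usage sketch. The config object is an assumption that only mirrors
# the attributes FCN reads (CNNFilters, FeatDims, task):
#   class Cfg:
#       CNNFilters, FeatDims, task = 128, 10, 'classification'
#   model = FCN(Cfg, input_shape=(100, 1))  # 100 time steps, 1 series
#   model.summary()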
|
import unittest
import cupy
from cupy import testing
from cupyx.scipy import sparse
import numpy
import pytest
@testing.parameterize(*testing.product({
'format': ['csr', 'csc'],
'density': [0.1, 0.4, 0.9],
'dtype': ['float32', 'float64', 'complex64', 'complex128'],
'n_rows': [25, 150],
'n_cols': [25, 150]
}))
@testing.with_requires('scipy>=1.4.0')
@testing.gpu
class TestIndexing(unittest.TestCase):
def _run(self, maj, min=None, flip_for_csc=True,
compare_dense=False):
a = sparse.random(self.n_rows, self.n_cols,
format=self.format,
density=self.density)
        if self.format == 'csc' and flip_for_csc:
            maj, min = min, maj
# None is not valid for major when minor is not None
maj = slice(None) if maj is None else maj
# sparse.random doesn't support complex types
# so we need to cast
a = a.astype(self.dtype)
expected = a.get()
maj_h = maj.get() if isinstance(maj, cupy.ndarray) else maj
min_h = min.get() if isinstance(min, cupy.ndarray) else min
if min is not None:
actual = a[maj, min]
expected = expected[maj_h, min_h]
else:
actual = a[maj]
expected = expected[maj_h]
if compare_dense:
actual = actual.toarray()
expected = expected.toarray()
if sparse.isspmatrix(actual):
actual.sort_indices()
expected.sort_indices()
testing.assert_array_equal(
actual.indptr, expected.indptr)
testing.assert_array_equal(
actual.indices, expected.indices)
testing.assert_array_equal(
actual.data, expected.data)
actual = actual.toarray()
expected = expected.toarray()
testing.assert_array_equal(actual, expected)
@staticmethod
    def _get_index_combos(idx):
        # 'combo' avoids shadowing the built-in dict
        return [combo['arr_fn'](idx, dtype=combo['dtype'])
                for combo in testing.product({
                    "arr_fn": [numpy.array, cupy.array],
                    "dtype": [numpy.int32, numpy.int64]
                })]
# 2D Slicing
def test_major_slice(self):
self._run(slice(5, 9))
self._run(slice(9, 5))
def test_major_all(self):
self._run(slice(None))
def test_major_scalar(self):
self._run(10)
self._run(-10)
self._run(numpy.array(10))
self._run(numpy.array(-10))
self._run(cupy.array(10))
self._run(cupy.array(-10))
def test_major_slice_minor_slice(self):
self._run(slice(1, 5), slice(1, 5))
self._run(slice(1, 20, 2), slice(1, 5, 1))
self._run(slice(20, 1, 2), slice(1, 5, 1))
self._run(slice(1, 15, 2), slice(1, 5, 1))
self._run(slice(15, 1, 5), slice(1, 5, 1))
self._run(slice(1, 15, 5), slice(1, 5, 1))
self._run(slice(20, 1, 5), slice(None))
self._run(slice(1, 20, 5), slice(None))
self._run(slice(1, 5, 1), slice(1, 20, 2))
self._run(slice(1, 5, 1), slice(20, 1, 2))
self._run(slice(1, 5, 1), slice(1, 15, 2))
self._run(slice(1, 5, 1), slice(15, 1, 5))
self._run(slice(1, 5, 1), slice(1, 15, 5))
self._run(slice(None), slice(20, 1, 5))
self._run(slice(None), slice(1, 20, 5))
def test_major_slice_minor_all(self):
self._run(slice(1, 5), slice(None))
self._run(slice(5, 1), slice(None))
def test_major_slice_minor_scalar(self):
self._run(slice(1, 5), 5)
self._run(slice(5, 1), 5)
self._run(slice(5, 1, -1), 5)
self._run(5, slice(5, 1, -1))
def test_major_scalar_minor_slice(self):
self._run(5, slice(1, 5))
self._run(numpy.array(5), slice(1, 5))
self._run(cupy.array(5), slice(1, 5))
def test_major_scalar_minor_all(self):
self._run(5, slice(None))
self._run(numpy.array(5), slice(None))
def test_major_scalar_minor_scalar(self):
self._run(5, 5)
self._run(numpy.array(5), numpy.array(5))
self._run(cupy.array(5), cupy.array(5))
def test_major_all_minor_scalar(self):
self._run(slice(None), 5)
def test_major_all_minor_slice(self):
self._run(slice(None), slice(5, 10))
def test_major_all_minor_all(self):
self._run(slice(None), slice(None))
def test_ellipsis(self):
self._run(Ellipsis, flip_for_csc=False)
self._run(Ellipsis, 1, flip_for_csc=False)
self._run(1, Ellipsis, flip_for_csc=False)
self._run(Ellipsis, slice(None), flip_for_csc=False)
self._run(slice(None), Ellipsis, flip_for_csc=False)
self._run(Ellipsis, slice(1, None), flip_for_csc=False)
self._run(slice(1, None), Ellipsis, flip_for_csc=False)
# Major Indexing
def test_major_bool_fancy(self):
size = self.n_rows if self.format == 'csr' else self.n_cols
a = numpy.random.random(size)
        self._run(cupy.array(a).astype(bool))  # CuPy
        self._run(a.astype(bool))  # NumPy
@testing.with_requires('scipy>=1.5.0')
def test_major_bool_list_fancy(self):
# In older environments (e.g., py35, scipy 1.4), scipy sparse arrays
# are crashing when indexed with native Python boolean list.
size = self.n_rows if self.format == 'csr' else self.n_cols
a = numpy.random.random(size)
        self._run(a.astype(bool).tolist())  # List
def test_major_fancy_minor_all(self):
self._run([1, 5, 4, 2, 5, 1], slice(None))
for idx in self._get_index_combos([1, 5, 4, 2, 5, 1]):
self._run(idx, slice(None))
def test_major_fancy_minor_scalar(self):
self._run([1, 5, 4, 5, 1], 5)
for idx in self._get_index_combos([1, 5, 4, 2, 5, 1]):
self._run(idx, 5)
def test_major_fancy_minor_slice(self):
self._run([1, 5, 4, 5, 1], slice(1, 5))
self._run([1, 5, 4, 5, 1], slice(5, 1, 1))
for idx in self._get_index_combos([1, 5, 4, 5, 1]):
self._run(idx, slice(5, 1, 1))
for idx in self._get_index_combos([1, 5, 4, 5, 1]):
self._run(idx, slice(1, 5))
# Minor Indexing
def test_major_all_minor_bool(self):
size = self.n_cols if self.format == 'csr' else self.n_rows
a = numpy.random.random(size)
        self._run(slice(None), cupy.array(a).astype(bool))  # CuPy
        self._run(slice(None), a.astype(bool))  # NumPy
@testing.with_requires('scipy>=1.5.0')
def test_major_all_minor_bool_list(self):
# In older environments (e.g., py35, scipy 1.4), scipy sparse arrays
# are crashing when indexed with native Python boolean list.
size = self.n_cols if self.format == 'csr' else self.n_rows
a = numpy.random.random(size)
        self._run(slice(None), a.astype(bool).tolist())  # List
def test_major_slice_minor_bool(self):
size = self.n_cols if self.format == 'csr' else self.n_rows
a = numpy.random.random(size)
        self._run(slice(1, 10, 2), cupy.array(a).astype(bool))  # CuPy
        self._run(slice(1, 10, 2), a.astype(bool))  # NumPy
@testing.with_requires('scipy>=1.5.0')
def test_major_slice_minor_bool_list(self):
# In older environments (e.g., py35, scipy 1.4), scipy sparse arrays
# are crashing when indexed with native Python boolean list.
size = self.n_cols if self.format == 'csr' else self.n_rows
a = numpy.random.random(size)
        self._run(slice(1, 10, 2), a.astype(bool).tolist())  # List
def test_major_all_minor_fancy(self):
self._run(slice(None), [1, 5, 2, 3, 4, 5, 4, 1, 5])
self._run(slice(None), [0, 3, 4, 1, 1, 5, 5, 2, 3, 4, 5, 4, 1, 5])
self._run(slice(None), [1, 5, 4, 5, 2, 4, 1])
for idx in self._get_index_combos([1, 5, 4, 5, 2, 4, 1]):
self._run(slice(None), idx, compare_dense=True)
def test_major_slice_minor_fancy(self):
self._run(slice(1, 10, 2), [1, 5, 4, 5, 2, 4, 1], compare_dense=True)
for idx in self._get_index_combos([1, 5, 4, 5, 2, 4, 1]):
self._run(slice(1, 10, 2), idx, compare_dense=True)
def test_major_scalar_minor_fancy(self):
self._run(5, [1, 5, 4, 1, 2], compare_dense=True)
for idx in self._get_index_combos([1, 5, 4, 1, 2]):
self._run(5, idx, compare_dense=True)
# Inner Indexing
def test_major_fancy_minor_fancy(self):
for idx in self._get_index_combos([1, 5, 4]):
self._run(idx, idx)
self._run([1, 5, 4], [1, 5, 4])
maj = self._get_index_combos([2, 0, 10, 0, 2])
min = self._get_index_combos([9, 2, 1, 0, 2])
for (idx1, idx2) in zip(maj, min):
self._run(idx1, idx2)
self._run([2, 0, 10, 0], [9, 2, 1, 0])
maj = self._get_index_combos([2, 0, 2])
min = self._get_index_combos([2, 1, 1])
for (idx1, idx2) in zip(maj, min):
self._run(idx1, idx2)
self._run([2, 0, 2], [2, 1, 2])
# Bad Indexing
def test_bad_indexing(self):
with pytest.raises(IndexError):
self._run("foo")
with pytest.raises(IndexError):
self._run(2, "foo")
with pytest.raises(ValueError):
self._run([1, 2, 3], [1, 2, 3, 4])
with pytest.raises(IndexError):
self._run([[0, 0], [1, 1]])
|
from flask_restful import Resource
from flask_restful import abort
from flask_restful import marshal_with, marshal
from flask_restful import fields
from flask_restful import reqparse
from app.db import dbs
from app.models.user import User
import re
from datetime import datetime
from app.resources.auth import validate_protected_action_permission_jwt, validate_login_jwt, get_login_jwt
user_fields = {
'id': fields.Integer,
'firstname': fields.String,
'lastname': fields.String,
'nickname': fields.String,
'birthday': fields.DateTime,
'size': fields.Float,
'sex': fields.String,
'email': fields.String,
'administrator': fields.Boolean,
'moderator': fields.Boolean,
'uri': fields.Url('area', absolute=True),
'time_created': fields.DateTime,
'time_updated': fields.DateTime
}
parser = reqparse.RequestParser()
parser.add_argument('firstname')
parser.add_argument('lastname')
parser.add_argument('nickname')
parser.add_argument('birthday')
parser.add_argument('password')
parser.add_argument('size', type=float)
parser.add_argument('sex')
parser.add_argument('email', required=True, help="Email cannot be blank!")
parser.add_argument('installAdmin', type=bool)
promotion_parser = reqparse.RequestParser()
promotion_parser.add_argument('promoteToAdmin', type=bool)
promotion_parser.add_argument('promoteToMod', type=bool)
sexes = [None, 'male', 'female']
class UserResource(Resource):
@marshal_with(user_fields, envelope='data')
def get(self, id):
user = dbs.query(User).filter(User.id == id).first()
if not user:
abort(404, message="(Code 001) User {} doesn't exist".format(id))
if user.birthday:
            user.birthday = datetime.combine(user.birthday, datetime.min.time())  # Can't marshal a date, only a datetime
return user
@validate_protected_action_permission_jwt
def delete(self, id, **kwargs):
if kwargs['protected_action_permission'] != 'delete':
abort(401, message='(Code 025) Wrong permissions!')
user = dbs.query(User).filter(User.id == id).first()
if not user:
abort(404, message="(Code 002) User {} doesn't exist".format(id))
dbs.delete(user)
dbs.commit()
return {}, 204
@validate_protected_action_permission_jwt
@validate_login_jwt
def put(self, id, **kwargs):
parsed_args = parser.parse_args()
if not (parsed_args['nickname'] or (parsed_args['firstname'] and parsed_args['lastname'])):
abort(400, message="(Code 003) Either a nickname or a firstname and lastname need to be given!")
if not re.match(r"[^@]+@[^@]+\.[^@]+", parsed_args['email']):
abort(400, message="(Code 004) Email field is invalid!")
if not parsed_args['sex'] in sexes:
abort(400, message="(Code 027) Invalid sex!")
user = dbs.query(User).filter(User.id == id).first()
if not user:
abort(404, message="(Code 080) User {} doesn't exist".format(id))
if kwargs['user'].email != user.email:
abort(401, message="(Code 005) Unauthorized!")
user.firstname = parsed_args['firstname']
user.lastname = parsed_args['lastname']
user.nickname = parsed_args['nickname']
user.size = parsed_args['size']
user.sex = parsed_args['sex']
user.birthday = datetime.strptime(parsed_args['birthday'], '%Y-%m-%d') if parsed_args[
'birthday'] else None
if user.email != parsed_args['email']: # Changing the email address needs special permissions
if kwargs['protected_action_permission'] != 'put':
abort(401, message='(Code 006) Unauthorized!')
else:
user.email = parsed_args['email']
generate_refreshed_jwt = True
else:
generate_refreshed_jwt = False
if parsed_args['password']: # So does changing the password
if kwargs['protected_action_permission'] != 'put':
abort(401, message='(Code 007) Unauthorized!')
else:
user.password = parsed_args['password']
dbs.add(user)
dbs.commit()
if user.birthday:
            user.birthday = datetime.combine(user.birthday, datetime.min.time())  # Can't marshal a date, only a datetime
marshalled_response = marshal(user, user_fields, envelope='data')
# When email changed, the login JWT is now invalid and a new one has to be sent
if generate_refreshed_jwt:
marshalled_response['refreshedJWT'] = get_login_jwt(user.email)
return marshalled_response, 201
class UserListResource(Resource):
@marshal_with(user_fields, envelope='data')
def get(self):
users = dbs.query(User).all()
for user in users:
if user.birthday:
user.birthday = datetime.combine(user.birthday, datetime.min.time())
return users
@marshal_with(user_fields, envelope='data')
def post(self):
parsed_args = parser.parse_args()
        # If there is no admin in the system yet, an admin can be created this way. This only works for the
        # first admin in the system!
is_admin = False
if parsed_args['installAdmin']:
if not dbs.query(User).filter(User.administrator).all():
is_admin = True
else:
abort(401, message="(Code 036) Falsely attempted to create initial administrator!")
if not (parsed_args['nickname'] or (parsed_args['firstname'] and parsed_args['lastname'])):
abort(400, message="(Code 008) Either a nickname or a firstname and lastname need to be given!")
if not re.match(r"[^@]+@[^@]+\.[^@]+", parsed_args['email']):
abort(400, message="(Code 009) Email field is invalid!")
if not parsed_args['password']:
abort(400, message="(Code 010) Password cannot be blank!")
if not parsed_args['sex'] in sexes:
abort(400, message="(Code 026) Invalid sex!")
birthday = datetime.strptime(parsed_args['birthday'], '%Y-%m-%d') if parsed_args[
'birthday'] else None
user = User(firstname=parsed_args['firstname'],
lastname=parsed_args['lastname'],
nickname=parsed_args['nickname'],
birthday=birthday,
size=parsed_args['size'],
sex=parsed_args['sex'],
email=parsed_args['email'],
password=parsed_args['password'],
administrator=is_admin,
moderator=is_admin)
dbs.add(user)
dbs.commit()
if user.birthday:
            user.birthday = datetime.combine(user.birthday, datetime.min.time())  # Can't marshal a date, only a datetime
return user, 201
class PromotionResource(Resource):
@marshal_with(user_fields, envelope='data')
@validate_login_jwt
def put(self, id, **kwargs):
parsed_args = promotion_parser.parse_args()
user = dbs.query(User).filter(User.id == id).first()
if not user:
abort(404, message="(Code 037) User {} doesn't exist".format(id))
if parsed_args['promoteToAdmin']:
if user.administrator:
abort(404, message="(Code 040) User is already an administrator!")
else:
if kwargs['user'].administrator:
user.administrator = True
user.moderator = True
else:
abort(401, message="(Code 038) Unauthorized!")
if parsed_args['promoteToMod']:
if user.administrator:
abort(404, message="(Code 041) User is already an administrator!")
elif user.moderator:
abort(404, message="(Code 042) User is already a moderator!")
else:
if kwargs['user'].moderator or kwargs['user'].administrator:
user.moderator = True
else:
abort(401, message="(Code 039) Unauthorized!")
dbs.add(user)
dbs.commit()
return user, 201
|
# Copyright (c) 2012--2014 King's College London
# Created by the Software Development Team <http://soft-dev.org/>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to
# deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
from __future__ import print_function
try:
    import cPickle as pickle
except ImportError:
    import pickle
import time, os
from grammar_parser.gparser import Parser, Nonterminal, Terminal, Epsilon, IndentationTerminal
from syntaxtable import SyntaxTable, FinishSymbol, Reduce, Accept, Shift
from stategraph import StateGraph
from constants import LR0, LALR
from astree import AST, TextNode, BOS, EOS
from ip_plugins.plugin import PluginManager
import logging
Node = TextNode
def printc(text, color):
print("\033[%sm%s\033[0m" % (color, text))
def printline(start):
start = start.next_term
l = []
while True:
l.append(start.symbol.name)
if start.lookup == "<return>" or isinstance(start, EOS):
break
start = start.next_term
return "".join(l)
class IncParser(object):
"""
The incremental parser
"""
def __init__(self, grammar=None, lr_type=LR0, whitespaces=False, startsymbol=None):
if grammar:
logging.debug("Parsing Grammar")
parser = Parser(grammar, whitespaces)
parser.parse()
filename = "".join([os.path.dirname(__file__), "/../pickle/", str(hash(grammar) ^ hash(whitespaces)), ".pcl"])
try:
logging.debug("Try to unpickle former stategraph")
f = open(filename, "r")
start = time.time()
self.graph = pickle.load(f)
end = time.time()
logging.debug("unpickling done in %s", end-start)
except IOError:
logging.debug("could not unpickle old graph")
logging.debug("Creating Stategraph")
self.graph = StateGraph(parser.start_symbol, parser.rules, lr_type)
logging.debug("Building Stategraph")
self.graph.build()
logging.debug("Pickling")
                pickle.dump(self.graph, open(filename, "wb"))
if lr_type == LALR:
self.graph.convert_lalr()
logging.debug("Creating Syntaxtable")
self.syntaxtable = SyntaxTable(lr_type)
self.syntaxtable.build(self.graph)
self.stack = []
self.ast_stack = []
self.all_changes = []
self.undo = []
self.last_shift_state = 0
self.validating = False
self.last_status = False
self.error_node = None
self.whitespaces = whitespaces
self.status_by_version = {}
self.errornode_by_version = {}
self.indentation_based = False
self.pm = PluginManager()
self.pm.loadplugins(self)
self.pm.do_incparse_init()
self.previous_version = None
logging.debug("Incremental parser done")
def from_dict(self, rules, startsymbol, lr_type, whitespaces, pickle_id, precedences):
self.graph = None
self.syntaxtable = None
if pickle_id:
filename = "".join([os.path.dirname(__file__), "/../pickle/", str(pickle_id ^ hash(whitespaces)), ".pcl"])
try:
f = open(filename, "r")
self.syntaxtable = pickle.load(f)
except IOError:
pass
if self.syntaxtable is None:
self.graph = StateGraph(startsymbol, rules, lr_type)
self.graph.build()
self.syntaxtable = SyntaxTable(lr_type)
self.syntaxtable.build(self.graph, precedences)
if pickle_id:
                pickle.dump(self.syntaxtable, open(filename, "wb"))
self.whitespaces = whitespaces
self.pm.do_incparse_from_dict(rules)
def init_ast(self, magic_parent=None):
bos = BOS(Terminal(""), 0, [])
eos = EOS(FinishSymbol(), 0, [])
bos.magic_parent = magic_parent
eos.magic_parent = magic_parent
bos.next_term = eos
eos.prev_term = bos
root = Node(Nonterminal("Root"), 0, [bos, eos])
self.previous_version = AST(root)
root.save(0)
bos.save(0)
eos.save(0)
def reparse(self):
self.inc_parse([], True)
def inc_parse(self, line_indents=[], reparse=False):
logging.debug("============ NEW INCREMENTAL PARSE ================= ")
self.validating = False
self.error_node = None
self.stack = []
self.undo = []
self.current_state = 0
self.stack.append(Node(FinishSymbol(), 0, []))
bos = self.previous_version.parent.children[0]
self.loopcount = 0
USE_OPT = True
self.pm.do_incparse_inc_parse_top()
la = self.pop_lookahead(bos)
while(True):
logging.debug("\x1b[35mProcessing\x1b[0m %s %s %s %s", la, la.changed, id(la), la.indent)
self.loopcount += 1
if isinstance(la.symbol, Terminal) or isinstance(la.symbol, FinishSymbol) or la.symbol == Epsilon():
if la.changed:
assert False # with prelexing you should never end up here!
else:
lookup_symbol = self.get_lookup(la)
result = self.parse_terminal(la, lookup_symbol)
if result == "Accept":
self.last_status = True
return True
elif result == "Error":
self.last_status = False
return False
elif result != None:
la = result
else: # Nonterminal
if la.changed or reparse:
                    # deconstruct the changed nonterminal
                    #la.changed = False # as all nonterminals that have changed are being rebuilt, there is no need to change this flag (this also solves problems with comments)
self.undo.append((la, 'changed', True))
la = self.left_breakdown(la)
else:
if USE_OPT:
#Follow parsing/syntax table
goto = self.syntaxtable.lookup(self.current_state, la.symbol)
if goto: # can we shift this Nonterminal in the current state?
logging.debug("OPTShift: %s in state %s -> %s", la.symbol, self.current_state, goto)
self.pm.do_incparse_optshift(la)
follow_id = goto.action
self.stack.append(la)
la.state = follow_id #XXX this fixed goto error (I should think about storing the states on the stack instead of inside the elements)
self.current_state = follow_id
logging.debug("USE_OPT: set state to %s", self.current_state)
la = self.pop_lookahead(la)
self.validating = True
continue
else:
#XXX can be made faster by providing more information in syntax tables
first_term = la.find_first_terminal()
lookup_symbol = self.get_lookup(first_term)
element = self.syntaxtable.lookup(self.current_state, lookup_symbol)
if isinstance(element, Reduce):
self.reduce(element)
else:
la = self.left_breakdown(la)
else:
# PARSER WITHOUT OPTIMISATION
if la.lookup != "":
lookup_symbol = Terminal(la.lookup)
else:
lookup_symbol = la.symbol
element = self.syntaxtable.lookup(self.current_state, lookup_symbol)
if self.shiftable(la):
logging.debug("\x1b[37mis shiftable\x1b[0m")
self.stack.append(la)
self.current_state = la.state
self.right_breakdown()
la = self.pop_lookahead(la)
else:
la = self.left_breakdown(la)
logging.debug("============ INCREMENTAL PARSE END ================= ")
def parse_terminal(self, la, lookup_symbol):
"""
Take in one terminal and set it's state to the state the parsing is in at the moment this terminal
has been read.
:param la: lookahead
:param lookup_symbol:
:return: "Accept" is the code was accepted as valid, "Error" if the syntax table does not provide a next state
"""
element = None
if isinstance(la, EOS):
element = self.syntaxtable.lookup(self.current_state, Terminal("<eos>"))
if isinstance(element, Shift):
self.current_state = element.action
return la
if element is None:
element = self.syntaxtable.lookup(self.current_state, lookup_symbol)
logging.debug("\x1b[34mparse_terminal\x1b[0m: %s in %s -> %s", lookup_symbol, self.current_state, element)
if isinstance(element, Accept):
#XXX change parse so that stack is [bos, startsymbol, eos]
bos = self.previous_version.parent.children[0]
eos = self.previous_version.parent.children[-1]
self.previous_version.parent.set_children([bos, self.stack[1], eos])
logging.debug("loopcount: %s", self.loopcount)
logging.debug ("\x1b[32mAccept\x1b[0m")
return "Accept"
elif isinstance(element, Shift):
self.validating = False
self.shift(la, element)
return self.pop_lookahead(la)
elif isinstance(element, Reduce):
logging.debug("\x1b[33mReduce\x1b[0m: %s -> %s", la, element)
self.reduce(element)
return self.parse_terminal(la, lookup_symbol)
elif element is None:
if self.validating:
logging.debug("Was validating: Right breakdown and return to normal")
logging.debug("Before breakdown: %s", self.stack[-1])
self.right_breakdown()
logging.debug("After breakdown: %s", self.stack[-1])
self.validating = False
else:
return self.do_undo(la)
def get_lookup(self, la):
"""
Retrurn the lookup of a node as Terminal. The lookup is name of the regular expression that mached the
token in the lexing phase.
Note: indentation terminals are handled in a special manner
:param la: node to find lookup of
:return: the lookup of the node wraped in a Terminal
"""
if la.lookup != "":
lookup_symbol = Terminal(la.lookup)
else:
lookup_symbol = la.symbol
if isinstance(lookup_symbol, IndentationTerminal):
#XXX hack: change parsing table to accept IndentationTerminals
lookup_symbol = Terminal(lookup_symbol.name)
return lookup_symbol
def do_undo(self, la):
"""
Restore changes
Loop over self.undo and for the tupel (a,b,c) do a.b = c
:param la:
:return:
"""
while len(self.undo) > 0:
node, attribute, value = self.undo.pop(-1)
setattr(node, attribute, value)
self.error_node = la
logging.debug ("\x1b[31mError\x1b[0m: %s %s %s", la, la.prev_term, la.next_term)
logging.debug("loopcount: %s", self.loopcount)
return "Error"
def reduce(self, element):
"""
Execute the reduction given on the current stack.
Reduces elements from the stack to a Nonterminal subtree. special:
COMMENT subtrees that are found on the stack during reduction are
added "silently" to the subtree (they don't count to the amount of
symbols of the reduction)
:type element: Reduce
:param element: reduction to apply
:except Exception rule not applicable
"""
#Fill a children array with nodes that are on the stack
children = []
i = 0
while i < element.amount():
c = self.stack.pop()
# apply folding information from grammar to tree nodes
fold = element.action.right[element.amount()-i-1].folding
c.symbol.folding = fold
children.insert(0, c)
i += 1
logging.debug(" Element on stack: %s(%s)", self.stack[-1].symbol, self.stack[-1].state)
self.current_state = self.stack[-1].state #XXX don't store on nodes, but on stack
logging.debug(" Reduce: set state to %s (%s)", self.current_state, self.stack[-1].symbol)
goto = self.syntaxtable.lookup(self.current_state, element.action.left)
if goto is None:
raise Exception("Reduction error on %s in state %s: goto is None" % (element, self.current_state))
assert goto != None
# save childrens parents state
for c in children:
self.undo.append((c, 'parent', c.parent))
self.undo.append((c, 'left', c.left))
self.undo.append((c, 'right', c.right))
self.undo.append((c, 'log', c.log.copy()))
c.mark_version() # XXX with node reuse we only have to do this if the parent changes
new_node = Node(element.action.left.copy(), goto.action, children)
self.pm.do_incparse_reduce(new_node)
logging.debug(" Add %s to stack and goto state %s", new_node.symbol, new_node.state)
self.stack.append(new_node)
self.current_state = new_node.state # = goto.action
logging.debug("Reduce: set state to %s (%s)", self.current_state, new_node.symbol)
if getattr(element.action.annotation, "interpret", None):
            # eco grammar annotations
self.interpret_annotation(new_node, element.action)
else:
# johnstone annotations
self.add_alternate_version(new_node, element.action)
def interpret_annotation(self, node, production):
annotation = production.annotation
if annotation:
astnode = annotation.interpret(node)
node.alternate = astnode
def add_alternate_version(self, node, production):
# add alternate (folded) versions for nodes to the tree
alternate = TextNode(node.symbol.__class__(node.symbol.name), node.state, [])
alternate.children = []
teared = []
for i in range(len(node.children)):
            if i in production.inserts:
                # insert teared-out nodes at the right position
value = production.inserts[i]
for t in teared:
if t.symbol.name == value.name:
alternate.children.append(t)
c = node.children[i]
if c.symbol.folding == "^^^":
c.symbol.folding = None
teared.append(c)
continue
elif c.symbol.folding == "^^":
while c.alternate is not None:
c = c.alternate
alternate.symbol = c.symbol
for child in c.children:
alternate.children.append(child)
elif c.symbol.folding == "^":
while c.alternate is not None:
c = c.alternate
for child in c.children:
alternate.children.append(child)
else:
alternate.children.append(c)
node.alternate = alternate
def left_breakdown(self, la):
if len(la.children) > 0:
return la.children[0]
else:
return self.pop_lookahead(la)
def right_breakdown(self):
node = self.stack.pop() # optimistically shifted Nonterminal
        # after the breakdown, we need to properly shift the left-over terminal
        # using the (correct) current state from before the optimistic shift of
        # its parent tree
self.current_state = self.stack[-1].state
logging.debug("right breakdown(%s): set state to %s", node.symbol.name, self.current_state)
while(isinstance(node.symbol, Nonterminal)):
for c in node.children:
self.shift(c, rb=True)
c = c.right
node = self.stack.pop()
            # after undoing an optimistic shift (through pop) we need to revert
            # back to the state before the shift (which can be found on the top
            # of the stack after the pop)
        if isinstance(node.symbol, FinishSymbol):
            # if we reached the end of the stack, reset to state 0 and push the
            # FinishSymbol back onto the stack
self.current_state = 0
self.stack.append(node)
return
else:
logging.debug("right breakdown else: set state to %s", self.stack[-1].state)
self.current_state = self.stack[-1].state
self.shift(node, rb=True) # pushes previously popped terminal back on stack
def shift(self, la, element=None, rb=False):
if not element:
lookup_symbol = self.get_lookup(la)
element = self.syntaxtable.lookup(self.current_state, lookup_symbol)
logging.debug("\x1b[32m" + "%sShift(%s)" + "\x1b[0m" + ": %s -> %s", "rb" if rb else "", self.current_state, la, element)
la.state = element.action
self.stack.append(la)
self.current_state = la.state
if not la.lookup == "<ws>":
# last_shift_state is used to predict next symbol
# whitespace destroys correct behaviour
self.last_shift_state = element.action
self.pm.do_incparse_shift(la, rb)
def pop_lookahead(self, la):
"""
        Get the next node to the right: climb up the tree until an ancestor
        with a right sibling exists.
        :rtype: Node
        :param la: node whose right neighbour is requested
        :return: the right sibling of the nearest ancestor that has one
"""
org = la
while(la.right_sibling() is None):
la = la.parent
logging.debug("pop_lookahead(%s): %s", org.symbol, la.right_sibling().symbol)
return la.right_sibling()
def shiftable(self, la):
if self.syntaxtable.lookup(self.current_state, la.symbol):
return True
return False
def has_changed(self, node):
return node in self.all_changes
def prepare_input(self, _input):
l = []
# XXX need an additional lexer to do this right
if _input != "":
for i in _input.split(" "):
l.append(Terminal(i))
l.append(FinishSymbol())
return l
def get_ast(self):
bos = Node(Terminal("bos"), 0, [])
eos = Node(FinishSymbol(), 0, [])
root = Node(Nonterminal("Root"), 0, [bos, self.ast_stack[0], eos])
return AST(root)
def get_next_possible_symbols(self, state_id):
l = set()
for (state, symbol) in self.syntaxtable.table.keys():
if state == state_id:
l.add(symbol)
return l
def get_next_symbols_list(self, state = -1):
if state == -1:
state = self.last_shift_state
lookahead = self.get_next_possible_symbols(state)
s = []
for symbol in lookahead:
s.append(symbol.name)
return s
def get_next_symbols_string(self, state = -1):
l = self.get_next_symbols_list(state)
return ", ".join(l)
def get_expected_symbols(self, state_id):
#XXX if state of a symbol is nullable, return next symbol as well
#XXX if at end of state, find state we came from (reduce, stack) and get next symbols from there
if state_id != -1:
stateset = self.graph.state_sets[state_id]
symbols = stateset.get_next_symbols_no_ws()
return symbols
return []
def reset(self):
self.stack = []
self.ast_stack = []
self.all_changes = []
self.undo = []
self.last_shift_state = 0
self.validating = False
self.last_status = False
self.error_node = None
self.previous_version = None
self.init_ast()
def load_status(self, version):
try:
self.last_status = self.status_by_version[version]
except KeyError:
logging.warning("Could not find status for version %s", version)
try:
self.error_node = self.errornode_by_version[version]
except KeyError:
logging.warning("Could not find errornode for version %s", version)
def save_status(self, version):
self.status_by_version[version] = self.last_status
self.errornode_by_version[version] = self.error_node
|
import socket

sock = socket.socket()
print("Creating a connection to the server")
sock.connect(('127.0.0.1', 5050))
print("Preparing to send data to the server")
content = 'hello smallqiang'
print("Content to send:")
print(content)
sock.send(content.encode())
|
# !/usr/bin/python
# coding=utf-8
#
# @Author: LiXiaoYu
# @Time: 2013-10-17
# @Info: Redis Library.
import redis, Log, sys, json
from Config import Config
from Cache import Cache
class Redis(Cache):
config = None
def __init__(self, options={}):
self.config = Config()
if "redis" not in sys.modules:
Log.error(L('_NO_MODULE_')+":redis")
if len(options) == 0:
            options = {
                "host": self.config.get("redis.host") if self.config.get("redis.host") else "127.0.0.1",
                "port": self.config.get("redis.port") if self.config.get("redis.port") else 6379  # Redis default port
            }
self.options = options
self.handler = redis.Redis(**options)
#set prefix
if 'prefix' in options:
self.options['prefix'] = options['prefix']
else:
self.options['prefix'] = self.config.get("redis.prefix")
    ##
    # Read a value from the cache
    # @access public
    # @param string $name  cache key name
    # @return mixed  None if the key is missing
    ##
    def get(self, name=""):
        value = self.handler.get(self.options['prefix']+name)
        if value is None:  # missing key: avoid decoding None
            return None
        value = value.decode('UTF-8')
        try:
            jsonData = json.loads(value)
        except ValueError:
            jsonData = value
        return jsonData
    ##
    # Write a value to the cache
    # @access public
    # @param string $name    cache key name
    # @param mixed $value    data to store
    # @param integer $expire expiry time in seconds
    # @return boolean
    ##
def set(self, name="", value="", expire=0):
name = self.options['prefix']+name
if isinstance(value, (dict,tuple,list)):
value = json.dumps(value)
if isinstance(expire, int) and expire>0:
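            # NOTE: this argument order matches redis-py < 3.0, where the call
            # is setex(name, value, time); from redis-py 3.0 on the signature
            # is setex(name, time, value)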
result = self.handler.setex(name, value, expire)
else:
result = self.handler.set(name, value)
return result
    ##
    # Delete a cache entry
    # @access public
    # @param string $name  cache key name
    # @return boolean
    ##
def rm(self, name=""):
self.handler.delete(self.options['prefix']+name)
    ##
    # Clear the cache (flushes the current database)
    # @access public
    # @return boolean
    ##
def clear(self):
return self.handler.flushdb()
if __name__ == "__main__":
r = Redis()
r.set("list", {"a":1,"b":2,"c":(3,4)})
#r.set("list", "ddd")
data = r.get("list")
print(data)
for i in data:
print(i)
|
#!/usr/bin/python
import os
import sys
import re
from ConfigParser import ConfigParser, NoSectionError, NoOptionError
import requests
CONFIG_FILENAME = 'pac.ini'
SECTION_PAC = 'PAC'
OPTION_PAC_SAVE_PATH = 'SavePath'
OPTION_PAC_TEMPLATE_PATH = 'TemplatePath'
OPTION_DOMAIN_LIST_PATH = 'DomainListPath'
OPTION_PROXY_BLACK_LIST_PATH = 'ProxyBlackListPath'
DEFAULT_PAC_SAVE_PATH = 'proxy.pac'
DEFAULT_DOMAIN_LIST_PATH = 'domain.list'
DEFAULT_PAC_TEMPLATE_PATH = 'pac.template'
DEFAULT_PROXY_BLACK_LIST_PATH = 'black.list'
PROXY_TXT_LIST_ADDRESS = 'http://txt.proxyspy.net/proxy.txt'
PROXY_TXT_LIST_SPLITTER = ' '
PROXY_PATTERN = '(.+?) US-[AH]'
PROXY_TEST_URL = 'http://example.org'
PROXY_TEST_TIMEOUT = 5
STATUS_OK = 0
STATUS_NO_PROXIES = 1
STATUS_NO_DOMAIN_LIST = 2
STATUS_NO_PAC_TEMPLATE = 3
STATUS_KEYBOARD_INTERRUPT = 4
class ConfigWrapper:
def __init__(self, filename):
self.filename = filename
self.config = ConfigParser()
self.config.optionxform = str
self.config.read(filename)
def get(self, section, option, default=None):
try:
return self.config.get(section, option)
except (NoSectionError, NoOptionError):
if not self.config.has_section(section):
self.config.add_section(section)
self.config.set(section, option, default)
self.save()
return default
def save(self):
with open(self.filename, 'wb') as f:
self.config.write(f)
def test_address(address):
proxies = {
"http": "http://%s" % address,
}
try:
response = requests.get(PROXY_TEST_URL, proxies=proxies, timeout=PROXY_TEST_TIMEOUT)
return 'Example Domain' in response.text
except IOError:
return False
def print_status(status):
status_desc_table = {
STATUS_OK: "Ok",
STATUS_NO_PROXIES: "No proxies",
STATUS_NO_DOMAIN_LIST: "Domain list is missing",
STATUS_NO_PAC_TEMPLATE: "PAC template is missing",
STATUS_KEYBOARD_INTERRUPT: "Cancelled",
}
print(status_desc_table.get(status, "Unknown status (%d)" % status))
def get_config():
config = ConfigWrapper(CONFIG_FILENAME)
return config
def get_list(path):
with open(path, 'r') as f:
        items = [x.strip() for x in f if x.strip()]  # skip blank lines; plain iteration replaces the Py2-only xreadlines()
return items
def get_proxy_list():
result = []
response = requests.get(PROXY_TXT_LIST_ADDRESS)
if response:
proxies = re.findall(PROXY_PATTERN, response.text)
for address in proxies:
if test_address(address):
result.append(address)
if result:
config = get_config()
blacklist_path = config.get(SECTION_PAC, OPTION_PROXY_BLACK_LIST_PATH, DEFAULT_PROXY_BLACK_LIST_PATH)
if os.path.exists(blacklist_path):
proxy_black_list = get_list(blacklist_path)
else:
proxy_black_list = []
proxy_list = ['PROXY %s' % x for x in result if x not in proxy_black_list]
return proxy_list
return None
def generate_pac():
config = get_config()
pac_save_path = config.get(SECTION_PAC, OPTION_PAC_SAVE_PATH, DEFAULT_PAC_SAVE_PATH)
domain_list_path = config.get(SECTION_PAC, OPTION_DOMAIN_LIST_PATH, DEFAULT_DOMAIN_LIST_PATH)
pac_template_path = config.get(SECTION_PAC, OPTION_PAC_TEMPLATE_PATH, DEFAULT_PAC_TEMPLATE_PATH)
if not os.path.exists(domain_list_path):
return STATUS_NO_DOMAIN_LIST
if not os.path.exists(pac_template_path):
return STATUS_NO_PAC_TEMPLATE
proxy_list = get_proxy_list()
if not proxy_list:
return STATUS_NO_PROXIES
else:
with open(pac_template_path, 'r') as f:
pac_template = f.read()
domain_list = get_list(domain_list_path)
domain_list_str = str(domain_list)
proxy_list_str = '; '.join(proxy_list)
with open(pac_save_path, 'w') as f:
f.write(pac_template % (domain_list_str, proxy_list_str))
return STATUS_OK
def main():
try:
status = generate_pac()
except KeyboardInterrupt:
status = STATUS_KEYBOARD_INTERRUPT
print_status(status)
return status
if '__main__' == __name__:
sys.exit(main())
|
#!/usr/bin/env python3
############################################
# exercise_7_2.py
# Author: Paul Yang
# Date: June, 2016
# Brief:  Print each line of a Chinese dialogue file as "<role>說: <line>"
############################################
data = open("dialogue_chinese.txt", encoding="utf-8")
for line in data:
    try:
        (role, line_spoken) = line.split(":", maxsplit=1)
        print(role, end="")
        print("說: ", end="")
        print(line_spoken, end="")
    except ValueError:
        # skip lines without a ':' separator
        pass
data.close()
|
class Solution(object):
def maxNumber(self, nums1, nums2, k):
def get_max_sub_array(nums, k):
res , n = [] ,len(nums)
for i in xrange(n):
while res and len(res) + n - i > k and nums[i] > res[-1]:
res.pop()
if len(res) < k:
res.append(nums[i])
return res
ans = [0] * k
for i in xrange(max(0, k - len(nums2)), min(k, len(nums1)) + 1):
res1 = get_max_sub_array(nums1, i)
res2 = get_max_sub_array(nums2, k - i)
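            # greedy merge: max() compares the two remaining lists
            # lexicographically, so popping from the larger one at each step
            # keeps the merged number maximal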
ans = max(ans, [max(res1, res2).pop(0) for _ in xrange(k)])
return ans
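
# Quick sanity check (Python 2, since the solution uses xrange); the expected
# output below is the classic LeetCode 321 example:
if __name__ == '__main__':
    print(Solution().maxNumber([3, 4, 6, 5], [9, 1, 2, 5, 8, 3], 5))  # [9, 8, 6, 5, 3]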
|
from django.http import JsonResponse, HttpResponse
from wrapper import *
#####################################
####     HELPER FUNCTIONS      ####
#####################################
def complain_about_get():
data = {
'message': "La peticion tiene que ser GET",
}
return JsonResponse(data,safe=False)
def complain_about_program():
data = {
'message': "No puedo entender el nombre el programa",
}
return JsonResponse(data,safe=False)
def no_result_found():
data = {
'message': "No se han encontrado resultados",
}
return JsonResponse(data,safe=False)
def return_program(resultado):
print(resultado)
data = {
'registro': resultado[3],
'nombre': resultado[0],
'tipo': resultado[1],
'cinta': resultado[2]
}
return JsonResponse(data,safe=False)
def descompose_program(resultado):
data = {
'registro': resultado[3],
'nombre': resultado[0],
'tipo': resultado[1],
'cinta': resultado[2]
}
return data
def clean_result(lista):
jsonlist = []
for i in lista:
jsonlist.append(descompose_program(i))
return jsonlist
def form_list_names(lista):
list_json = []
for program in lista:
data={
'nombre':program
}
list_json.append(data)
return list_json
#####################################
####     MAIN FUNCTIONS        ####
#####################################
""" Find a program by its name"""
def find_by_name(request):
if request.method == 'GET' :
        if request.GET.get('program'):
singleton = WindowMgr()
programa = request.GET.get('program')
resultado = singleton.find_program_by_name(programa)
if resultado != []:
return return_program(resultado)
else:
return no_result_found()
else:
return complain_about_program()
else:
return complain_about_get()
"""Return a list with all the names of the programs"""
def get_them_all(request):
singleton = WindowMgr()
    # all program names are fetched from here
programa =singleton.get_name_programs()
data = form_list_names(programa)
return JsonResponse(data,safe=False)
"""Return a list with all the programs on a tape"""
def get_tape_all(request):
if request.method == 'GET':
        if request.GET.get('cinta'):
singleton = WindowMgr()
cinta = request.GET.get('cinta')
            #the tapes are fetched from here
resultado = singleton.get_all_programs_tape(cinta)
if resultado != []:
data = clean_result(resultado)
return JsonResponse(data,safe=False)
else:
return no_result_found()
else:
return complain_about_program()
else:
return complain_about_get()
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from django import template
from extra_settings.models import Setting
register = template.Library()
@register.simple_tag(takes_context=True)
def get_setting(context, name, default=''):
return Setting.get(name, default)
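
# Illustrative template usage (assumes the templatetags module is loaded under
# the name "extra_settings"; the setting name below is hypothetical):
#   {% load extra_settings %}
#   {% get_setting 'SITE_TITLE' default='My Site' %}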
|
# import sys
# input = sys.stdin.readline
def main():
S, T = input().split()
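    # e.g. input "oder atc" prints "atcoder"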
print(T+S)
if __name__ == '__main__':
main()
|
#HOUSE PRICE PREDICTIONS
# PART 1 - Getting the Data
#Import the libraries
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
#Import the Dataset
train=pd.read_csv('train.csv')
test=pd.read_csv('test.csv')
#PART 2 - Exploratory Data Analysis
#sales price info
info_train=train.SalePrice.describe()
#to plot the skewness of sales price
print ("Skew is:", train.SalePrice.skew())
plt.hist(train.SalePrice, color='blue')
plt.show()
#log transform the target variable since it's skewed
target_y = np.log(train.SalePrice)
print ("Skew is:", target_y.skew())
plt.hist(target_y, color='blue')
plt.show()
#get the numeric features from the dataset
numeric_features_train = train.select_dtypes(include=[np.number])
numeric_features_test = test.select_dtypes(include=[np.number])
#getting the categorical features and its description
categorical_features = train.select_dtypes(exclude=[np.number]) #exclude all numeric features
categorical_feature_description=categorical_features.describe()
#correlation matrix
corr = numeric_features_train.corr()
print(corr['SalePrice'].sort_values(ascending=False)[:38], '\n') #38 most +vely correlated features with SalesPrice
#print(corr['SalePrice'].sort_values(ascending=False)[-10:]) #10 most -vely correlated
#Getting the heatmap
import seaborn as sns
sns.heatmap(corr)
#remove one of two features that have a correlation higher than 0.9
columns = np.full((corr.shape[0],), True, dtype=bool)
for i in range(corr.shape[0]):
for j in range(i+1, corr.shape[0]):
if corr.iloc[i,j] >= 0.9:
if columns[j]:
columns[j] = False
selected_columns = numeric_features_train.columns[columns]
train_corr = numeric_features_train[selected_columns]
#dataset(train_corr) has only those columns with correlation less than 0.9
#Getting the numeric features with null values
nulls_train = pd.DataFrame(train.isnull().sum().sort_values(ascending=False)[:80]) #features with null values
nulls_train.columns = ['Null Count']
nulls_train.index.name = 'Feature'
nulls_test = pd.DataFrame(test.isnull().sum().sort_values(ascending=False)[:80]) #features with null values
nulls_test.columns = ['Null Count']
nulls_test.index.name = 'Feature'
# PART 3 - Data Preprocessing
#Taking care of missing data
num_null_train = pd.DataFrame(numeric_features_train.isnull().sum().sort_values(ascending=False)[:80]) #features with null values
num_null_test = pd.DataFrame(numeric_features_test.isnull().sum().sort_values(ascending=False)[:80]) #features with null values
#Taking care of categorical data
#1 -MSZoning
print ("Original: \n")
print (train.MSZoning.value_counts(), "\n") #Counts
def encode(x): #Encoding RL as 1 and others as 0.
return 1 if x == 'RL' else 0 #to encode
train['enc_MSZoning'] = train.MSZoning.apply(encode)
test['enc_MSZoning'] = test.MSZoning.apply(encode)
print (train.enc_MSZoning.value_counts()) #Check encoded value
#to check barplot
condition_pivot = train.pivot_table(index='enc_MSZoning', values='SalePrice', aggfunc=np.median)
condition_pivot.plot(kind='bar', color='blue')
plt.xlabel('Encoded MSZoning')
plt.ylabel('Median Sale Price')
plt.xticks(rotation=0)
plt.show()
# 2- Street
print ("Original: \n")
print (train.Street.value_counts(), "\n")
train['enc_street'] = pd.get_dummies(train.Street, drop_first=True)
test['enc_street'] = pd.get_dummies(test.Street, drop_first=True)
print ('Encoded: \n')
print (train.enc_street.value_counts())
#3 -GarageCond
print (train.GarageCond.value_counts())
def encode(x):
return 1 if x == 'TA' else 0 #to encode
train['enc_GarageCond'] = train.GarageCond.apply(encode)
test['enc_GarageCond'] = test.GarageCond.apply(encode)
print (train.enc_GarageCond.value_counts())
# 4- Central Air #when only 2 categories are present
print ("Original: \n")
print (train.CentralAir.value_counts(), "\n")
train['enc_CentralAir'] = pd.get_dummies(train.CentralAir, drop_first=True)
test['enc_CentralAir'] = pd.get_dummies(test.CentralAir, drop_first=True)
print ('Encoded: \n')
print (train.enc_CentralAir.value_counts())
train_corr = train.corr() #correlation between numerical features and target.
print(train_corr['SalePrice'].sort_values(ascending=False)[:15], '\n') #10 most +vely correlated features with SalesPrice
print(train_corr['SalePrice'].sort_values(ascending=False)[-10:])
# PART 4 - Building a linear model
#DV and IDV features
"""X=train.iloc[:,[17,46,61,62,38,43,49,54,19,20,59,26,56,34]].values
#Both are very correlated (38,43)
19,20,59 very correlated
59-Nan
26-Nan
"""
#Not the best method to take care of missing values
data = train.select_dtypes(include=[np.number]).interpolate().dropna()
print(sum(data.isnull().sum() != 0)) #Check that all of the columns have 0 null values.
"""
X=train.iloc[:,[17,46,61,62,38,49,54,19,26,56,34]].values
y=train.iloc[:,-5].values
#Missing values
from sklearn.impute import SimpleImputer
imputer=SimpleImputer(missing_values=np.nan,strategy='mean')
X[:,8:9]=imputer.fit_transform(X[:,8:9])
"""
X = data.drop(['SalePrice', 'Id'], axis=1) #From train.csv
y = np.log(train.SalePrice)
from sklearn.model_selection import train_test_split
X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=42, test_size=.25)
# Fitting Simple Linear Regression to the Training set
from sklearn.linear_model import LinearRegression
regressor = LinearRegression()
model=regressor.fit(X_train, y_train)
#Predicting the test set results
y_pred=regressor.predict(X_test) #using train set
# Applying k-Fold Cross Validation (model evaluation)
from sklearn.model_selection import cross_val_score
accuracies = cross_val_score(estimator = regressor, X = X_train, y = y_train, cv = 10)
print("CV accuracy mean:", accuracies.mean())
print("CV accuracy std:", accuracies.std())
# PART 5 - Evaluate the model
print ("R^2 is: \n", model.score(X_test, y_test))
from sklearn.metrics import mean_squared_error
print ('RMSE is: \n', np.sqrt(mean_squared_error(y_test, y_pred)))
#RMSE (the square root of MSE) measures the distance between our predicted values and actual values.
actual_values = y_test
plt.scatter(y_pred, actual_values, alpha=.7,color='b') #alpha helps to show overlapping data
plt.xlabel('Predicted Price')
plt.ylabel('Actual Price')
plt.title('Linear Regression Model')
plt.show()
#Predicting the test.csv results
"""features = test.select_dtypes(include=[np.number]).drop(['Id'], axis=1).interpolate()
features_X=features.iloc[:,[3,15,25,26,11,18,22,5,7,23,8]]
predictions = model.predict(features_X)
final_predictions = np.exp(predictions)"""
#Getting the results on test.csv file
test_features = test.select_dtypes(include=[np.number]).drop(['Id'], axis=1).interpolate()
predictions = model.predict(test_features) #using test
final_predictions = np.exp(predictions)
#compare the result
print ("Original predictions are: \n", predictions[:5], "\n")
print ("Final predictions are: \n", final_predictions[:5])
#Getting a csv file
output=pd.DataFrame({'Id':test.Id, 'SalePrice':final_predictions})
output.to_csv('my_submission_SLR.csv', index=False)
|
from collective.rooter.navroot import setNavigationRoot
from plone.app.layout.navigation.interfaces import INavigationRoot
from zope.component import adapter
from zope.publisher.interfaces import IEndRequestEvent
from zope.traversing.interfaces import IBeforeTraverseEvent
@adapter(INavigationRoot, IBeforeTraverseEvent)
def record_navigation_root(obj, event):
"""When traversing over a site manager that is a navigation root,
record the navigation root in a thread-local.
"""
setNavigationRoot(obj)
@adapter(IEndRequestEvent)
def clean_navigation_root(event):
"""When traversal is over, clear the navigation root thread-local
"""
setNavigationRoot(None)
|
#!/usr/bin/env python
# coding: utf-8
# ## Run LRP on all test and validation results
import sys
import os
import json
import time
import pickle
import shutil
import tarfile
from collections import Counter
from urllib.parse import urlparse

import torch
import torch.nn as nn
from torch.utils.data import DataLoader
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
import seaborn as sns
import shap

import deep_id_pytorch
from lstm_models import *
from lstm_lrp_models import *
from lstm_att_models import *
from lstm_self_att_models import *
from lstm_utils import *
from imp_utils import *
IS_SYNTHETIC = True  # True if the dataset is synthetic, False if real
MODEL_NAME = "lstm"
USE_SELF_ATTENTION = True
NROWS = 1e9
TRAIN_MODEL = True
SEQ_LEN = 30
DATA_TYPE = "event" # event/sequence
TRAIN_DATA_PATH = f"../data/synthetic/sample_dataset/{DATA_TYPE}/{SEQ_LEN}/train.csv"
VALID_DATA_PATH = f"../data/synthetic/sample_dataset/{DATA_TYPE}/{SEQ_LEN}/val.csv"
TEST_DATA_PATH = f"../data/synthetic/sample_dataset/{DATA_TYPE}/{SEQ_LEN}/test.csv"
VOCAB_PATH = f"../data/synthetic/sample_dataset/{DATA_TYPE}/{SEQ_LEN}/vocab.pkl"
MODEL_SAVE_PATH_PATTERN = f"./output/synthetic/{DATA_TYPE}/{SEQ_LEN}/{MODEL_NAME}/model_weights/model_{'{}'}.pkl"
IMP_SAVE_DIR_PATTERN = f"./output/synthetic/{DATA_TYPE}/{SEQ_LEN}/{MODEL_NAME}/importances/{'{}'}_imp_{'{}'}.pkl" # Feature importance values path for a given dataset split
OUTPUT_RESULTS_PATH = (
f"./output/synthetic/{DATA_TYPE}/{SEQ_LEN}/{MODEL_NAME}/train_results/results.csv"
)
PARAMS_PATH = f"./output/synthetic/{DATA_TYPE}/{SEQ_LEN}/{MODEL_NAME}/train_results/model_params.json"
BEST_EPOCH = 2
TOTAL_EXAMPLES = 7000 # Total patients in val/test data
TARGET_COLNAME = "label"
UID_COLNAME = "patient_id"
TARGET_VALUE = "1"
# Results path for val & test data
output_dir = os.path.dirname(IMP_SAVE_DIR_PATTERN)
VAL_RESULTS_PATH = os.path.join(output_dir, f"val_all_lrp_{BEST_EPOCH}.pkl")
TEST_RESULTS_PATH = os.path.join(output_dir, f"test_all_lrp_{BEST_EPOCH}.pkl")
# ### Load Vocab and Dataset
# Load model params
MODEL_PARAMS = None
with open(PARAMS_PATH, "r") as fp:
MODEL_PARAMS = json.load(fp)
print(MODEL_PARAMS)  # was a bare expression (notebook cell output)
if os.path.exists(VOCAB_PATH):
with open(VOCAB_PATH, "rb") as fp:
vocab = pickle.load(fp)
print(f"vocab len: {len(vocab)}") # vocab + padding + unknown
else:
raise ValueError(
"Vocab path does not exist! Please create vocab from training data and save it first."
)
valid_dataset, vocab = build_lstm_dataset(
VALID_DATA_PATH,
min_freq=MODEL_PARAMS["min_freq"],
uid_colname=UID_COLNAME,
target_colname=TARGET_COLNAME,
max_len=SEQ_LEN,
target_value=TARGET_VALUE,
vocab=vocab,
nrows=NROWS,
rev=MODEL_PARAMS["rev"],
)
test_dataset, _ = build_lstm_dataset(
TEST_DATA_PATH,
min_freq=MODEL_PARAMS["min_freq"],
uid_colname=UID_COLNAME,
target_colname=TARGET_COLNAME,
max_len=SEQ_LEN,
target_value=TARGET_VALUE,
vocab=vocab,
nrows=NROWS,
rev=MODEL_PARAMS["rev"],
)
valid_dataloader = DataLoader(
valid_dataset, batch_size=MODEL_PARAMS["batch_size"], shuffle=False, num_workers=2
)
test_dataloader = DataLoader(
test_dataset, batch_size=MODEL_PARAMS["batch_size"], shuffle=False, num_workers=2
)
# ### Load Best Model
model_path = MODEL_SAVE_PATH_PATTERN.format(f"{BEST_EPOCH:02}")
# Check if cuda is available
print(f"Cuda available: {torch.cuda.is_available()}")
model_device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
lstm_model_best = AttNoHtLSTM(
MODEL_PARAMS["embedding_dim"],
MODEL_PARAMS["hidden_dim"],
vocab,
model_device,
bidi=MODEL_PARAMS["bidirectional"],
nlayers=MODEL_PARAMS["nlayers"],
dropout=MODEL_PARAMS["dropout"],
init_type=MODEL_PARAMS["init_type"],
linear_bias=MODEL_PARAMS["linear_bias"],
)
lstm_model_best.load_state_dict(torch.load(model_path))
valid_results_best = {}
valid_results_best[BEST_EPOCH] = {}
test_results_best = {}
test_results_best[BEST_EPOCH] = {}
# calculate relevancy and SHAP
lstm_model_best.eval()
lrp_model = LSTM_LRP_MultiLayer(lstm_model_best.cpu())
# Get test/val data
val_patient_ids, val_labels, val_idxed_text = get_eval_data(
valid_dataloader, TOTAL_EXAMPLES
)
test_patient_ids, test_labels, test_idxed_text = get_eval_data(
test_dataloader, TOTAL_EXAMPLES
)
start = time.time()
print("Processing validation data...")
for sel_idx in range(len(val_labels)):
one_text = [
int(token.numpy())
for token in val_idxed_text[sel_idx]
if int(token.numpy()) != 0
]
lrp_model.set_input(one_text)
lrp_model.forward_lrp()
Rx, Rx_rev, _ = lrp_model.lrp(one_text, 0, eps=1e-6, bias_factor=0)
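    # total per-token relevance: sum the contributions of the two LSTM
    # directions over the embedding dimension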
R_words = np.sum(Rx + Rx_rev, axis=1)
df = pd.DataFrame()
df["lrp_scores"] = R_words
df["idx"] = one_text
df["seq_idx"] = [x for x in range(len(one_text))]
df["token"] = [lrp_model.vocab.itos(x) for x in one_text]
df["att_weights"] = lrp_model.get_attn_values()
    if val_patient_ids[sel_idx] not in valid_results_best[BEST_EPOCH]:
        valid_results_best[BEST_EPOCH][val_patient_ids[sel_idx]] = {}
valid_results_best[BEST_EPOCH][val_patient_ids[sel_idx]]["label"] = val_labels[
sel_idx
]
valid_results_best[BEST_EPOCH][val_patient_ids[sel_idx]]["pred"] = lrp_model.s[0]
valid_results_best[BEST_EPOCH][val_patient_ids[sel_idx]]["imp"] = df.copy()
if sel_idx % 500 == 0:
print(f"{sel_idx} of {TOTAL_EXAMPLES}")
end = time.time()
mins, secs = epoch_time(start, end)
print(f"Total Time: {mins}min: {secs}sec")
with open(VAL_RESULTS_PATH, "wb") as fp:
pickle.dump(valid_results_best, fp)
print("Processing test data...")
start = time.time()
for sel_idx in range(len(test_labels)):
one_text = [
int(token.numpy())
for token in test_idxed_text[sel_idx]
if int(token.numpy()) != 0
]
lrp_model.set_input(one_text)
lrp_model.forward_lrp()
Rx, Rx_rev, _ = lrp_model.lrp(one_text, 0, eps=1e-6, bias_factor=0)
R_words = np.sum(Rx + Rx_rev, axis=1)
df = pd.DataFrame()
df["lrp_scores"] = R_words
df["idx"] = one_text
df["seq_idx"] = [x for x in range(len(one_text))]
df["token"] = [lstm_model_best.vocab.itos(x) for x in one_text]
df["att_weights"] = lrp_model.get_attn_values()
    if test_patient_ids[sel_idx] not in test_results_best[BEST_EPOCH]:
        test_results_best[BEST_EPOCH][test_patient_ids[sel_idx]] = {}
test_results_best[BEST_EPOCH][test_patient_ids[sel_idx]]["label"] = test_labels[
sel_idx
]
test_results_best[BEST_EPOCH][test_patient_ids[sel_idx]]["pred"] = lrp_model.s[0]
test_results_best[BEST_EPOCH][test_patient_ids[sel_idx]]["imp"] = df.copy()
if sel_idx % 500 == 0:
print(f"{sel_idx} of {TOTAL_EXAMPLES}")
end = time.time()
mins, secs = epoch_time(start, end)
print(f"Total Time: {mins}min: {secs}sec")
with open(TEST_RESULTS_PATH, "wb") as fp:
pickle.dump(test_results_best, fp)
print("Success!")
|
# --------------
# Importing header files
import numpy as np
#New record
new_record=[[50, 9, 4, 1, 0, 0, 40, 0]]
#Code starts here
#Loading data file and saving it into a new numpy array
#(the variable 'path' is expected to be supplied by the execution environment)
data = np.genfromtxt(path, delimiter=",", skip_header=1)
print(data.shape)
#Concatenating the new record to the existing numpy array
census=np.concatenate((data, new_record),axis = 0)
print(census.shape)
#Code ends here
# --------------
#Code starts here
age = census[:,0]
max_age =age.max()
print(max_age)
min_age = age.min()
print(min_age)
age_mean = age.mean()
print(age_mean)
age_std = np.std(age)
print(age_std)
# --------------
#Code starts here
race_0 = census[census[:,2]==0]
race_1 = census[census[:,2]==1]
race_2 = census[census[:,2]==2]
race_3 = census[census[:,2]==3]
race_4 = census[census[:,2]==4]
len_0 = len(race_0)
print(len_0)
len_1 = len(race_1)
print(len_1)
len_2 = len(race_2)
print(len_2)
len_3 = len(race_3)
print(len_3)
len_4 = len(race_4)
print(len_4)
#Storing the different race lengths with appropriate indexes
race_list=[len_0, len_1,len_2, len_3, len_4]
#Storing the race with minimum length into a variable
minority_race=race_list.index(min(race_list))
# --------------
#Code starts here
senior_citizens = census[census[:,0]>60]
working_hours_sum=senior_citizens.sum(axis=0)[6]
senior_citizens_len = len(senior_citizens)
avg_working_hours = working_hours_sum/senior_citizens_len
print(avg_working_hours)
# --------------
#Code starts here
high=census[census[:,1]>10]
low = census[census[:,1]<=10]
avg_pay_high=high[:,7].mean()
avg_pay_low = low[:,7].mean()
print(avg_pay_high)
print(avg_pay_low)
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.4 on 2016-12-28 06:51
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('player', '0002_auto_20161228_1215'),
]
operations = [
migrations.RemoveField(
model_name='player',
name='isAdmin',
),
]
|
import report
#import axteriz
|
#!/usr/bin/env python
"""
This script downloads article information of one URL. The results are stored in JSON-files in a sub-folder.
You need to adapt the variables url and basepath in order to use the script.
"""
import json
from newsplease import NewsPlease
url = 'https://www.rt.com/news/203203-ukraine-russia-troops-border/'
basepath = '/Users/felix/Downloads/'
article = NewsPlease.from_url(url)
with open(basepath + article['filename'] + '.json', 'w') as outfile:
json.dump(article, outfile, indent=4, sort_keys=True)
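
# Note: depending on the installed news-please version, from_url() may return
# an article object instead of a dict; in that case use article.get_dict() or
# attribute access (e.g. article.title). A hedged sketch for fetching several
# URLs at once with NewsPlease.from_urls (available in recent versions;
# other_url is a placeholder):
#   articles = NewsPlease.from_urls([url, other_url])
#   for u, a in articles.items():
#       print(u, a.title)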
|
"""
Contains the class to color code and value code the cards.
"""
class Card():
"""
    Represents a single playing card with a suit and a value.
"""
def __init__(self, suit, value):
"""
Initializes the suit and value for each card.
"""
self.suit = suit
self.value = value
def __repr__(self):
"""
Returns a string with the value and suit.
"""
return str(self.value) + " of " + self.suit
def value_score(self):
"""
Assigns an integer value to each card to be used in the __lt__ and
__gt__ methods.
"""
points = {
"2": 2,
"3": 3,
"4": 4,
"5": 5,
"6": 6,
"7": 7,
"8": 8,
"9": 9,
"10": 10,
"J": 11,
"Q": 12,
"K": 13,
"A": 14
}
return points[self.value]
def __lt__(self, other):
"""
Compares if player's current card value is less than the other player's.
"""
return self.value_score() < other.value_score()
def __gt__(self, other):
"""
Compares if player's current card value is greater than the other
player's.
"""
return self.value_score() > other.value_score()
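
# Minimal usage sketch (illustrative, not part of the original module):
if __name__ == "__main__":
    ace = Card("Spades", "A")
    ten = Card("Hearts", "10")
    print(ace)        # A of Spades
    print(ace > ten)  # True: value_score() maps "A" to 14 and "10" to 10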
|
from typing import List  # needed for the List[int] annotations below

class Solution:
def permuteUnique(self, nums: List[int]) -> List[List[int]]:
# import itertools
# return list(set(itertools.permutations(nums)))
if not nums:
return []
result = set()
visited = [0] * len(nums)
def do_permute(nums, track):
if len(nums) == len(track):
result.add(tuple(track))
return
for i, num in enumerate(nums):
if visited[i] != 0:
continue
visited[i] = 1
do_permute(nums, track+[num])
visited[i] = 0
do_permute(nums, [])
        return [list(t) for t in result]  # convert tuples back to lists
class Solution:
def permuteUnique(self, nums: List[int]) -> List[List[int]]:
# import itertools
# return list(set(itertools.permutations(nums)))
if not nums:
return []
result = set()
def do_permute(nums, track):
if len(nums) == 0:
result.add(tuple(track))
return
for i, num in enumerate(nums):
do_permute(nums[:i] + nums[i+1:], track+[num])
do_permute(nums, [])
        return [list(t) for t in result]  # convert tuples back to lists
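
# Quick check of either implementation (permutation order may vary, since
# results are collected in a set):
# Solution().permuteUnique([1, 1, 2]) -> [[1, 1, 2], [1, 2, 1], [2, 1, 1]]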
|
import json
from tools import playlists as p
from tools import tracks_data as t
if __name__ == '__main__':
playlists = p.Playlists()
playlists.formatter()
playlists.save_to_local()
# open playlist's info
with open('data/playlists_info.json') as f:
playlists_info = json.load(f)
#info = playlists_info[0]
# Extract track's info for all playlists
for info in playlists_info:
tracks = t.TracksData(info)
tracks.formatter()
tracks.save_to_local()
|
import scipy.io as sio
import os
import numpy as np
import matplotlib.pyplot as plt
path_loss = os.path.join('result','0','loss_list.mat')
data_loss = sio.loadmat(path_loss)
total_loss = data_loss['loss_t'][0]
mse_loss = data_loss['mse'][0]
ce_loss = data_loss['ce'][0]
for id in range(1):
print('--------------',id)
plt.plot(total_loss)
plt.savefig(os.path.join('result',str(id),'total_loss.png'))
plt.close()
plt.plot(mse_loss)
plt.savefig(os.path.join('result',str(id),'mse_loss.png'))
plt.close()
plt.plot(ce_loss)
plt.savefig(os.path.join('result',str(id),'ce_loss.png'))
plt.close()
path_loss = os.path.join('result',str(id),'result_list.mat')
data_loss = sio.loadmat(path_loss)
oa = data_loss['oa'][0]
aa = data_loss['aa'][0]
kappa = data_loss['kappa'][0]
plt.plot(oa)
plt.savefig(os.path.join('result',str(id),'oa.png'))
plt.close()
plt.plot(aa)
plt.savefig(os.path.join('result',str(id),'aa.png'))
plt.close()
plt.plot(kappa)
plt.savefig(os.path.join('result',str(id),'kappa.png'))
plt.close()
print('max oa',np.max(oa))
print('max aa',np.max(aa))
print('max kappa',np.max(kappa))
path_loss = os.path.join('result',str(id),'psnr_list.mat')
data_loss = sio.loadmat(path_loss)
psnr = data_loss['psnr'][0]
plt.plot(psnr)
plt.savefig(os.path.join('result',str(id),'psnr.png'))
plt.close()
print('max psnr',np.max(psnr))
|
import os
import unittest
import mox
import WMCore_t.Storage_t.Plugins_t.PluginTestBase_t
from WMCore.Storage.Plugins.FNALImpl import FNALImpl as ourPlugin
from WMCore.Storage.Plugins.CPImpl import CPImpl as ourFallbackPlugin
import subprocess
from WMCore.WMBase import getWMBASE
from WMCore.Storage.StageOutError import StageOutError, StageOutFailure
from nose.plugins.attrib import attr
class RunCommandThing:
def __init__(self, target):
self.target = target
def runCommand(self,things):
return ("dummy1", "dummy2")
class DCCPFNALImplTest(unittest.TestCase):
def setUp(self):
self.commandPrepend = os.path.join(getWMBASE(),'src','python','WMCore','Storage','Plugins','DCCPFNAL','wrapenv.sh')
self.runMocker = mox.MockObject(RunCommandThing)
self.copyMocker = mox.MockObject(ourFallbackPlugin)
def runCommandStub(command):
(num1, num2) = self.runMocker.runCommand(command)
return (num1, num2)
def getImplStub(command, useNewVersion = None):
return self.copyMocker
pass
def tearDown(self):
pass
@attr("integration")
def testFail(self):
        #first try to make a nonexistent file (regular)
self.runMocker.runCommand(
[self.commandPrepend,'dccp', '-o', '86400', '-d', '0', '-X', '-role=cmsprod', '/store/NONEXISTANTSOURCE', '/store/NONEXISTANTTARGET' ]\
).AndReturn(("1", "This was a test of the fail system"))
        #then try to make a nonexistent file on lustre
# -- fake making a directory
self.runMocker.runCommand(
[self.commandPrepend, 'mkdir', '-m', '755', '-p', '/store/unmerged']\
).AndReturn(("0", "we made a directory, yay"))
# -- fake the actual copy
self.copyMocker.doTransfer( \
'/store/unmerged/lustre/NONEXISTANTSOURCE', '/store/unmerged/lustre/NONEXISTANTTARGET', True, None, None, None, None\
).AndRaise(StageOutFailure("testFailure"))
# do one with a real pfn
self.runMocker.runCommand(\
[self.commandPrepend, 'mkdir', '-m', '755', '-p',\
'/pnfs/cms/WAX/11/store/temp/WMAgent/unmerged/RECO/WMAgentCommissioning10-v7newstageout']).AndReturn(("0",""))
self.runMocker.runCommand([self.commandPrepend, 'dccp', '-o', '86400', '-d', '0', '-X', '-role=cmsprod', 'file:///etc/hosts', 'dcap://cmsdca.fnal.gov:24037/pnfs/fnal.gov/usr/cms/WAX/11/store/temp/WMAgent/unmerged/RECO/WMAgentCommissioning10-v7newstageout/0000/0661D749-DD95-DF11-8A0F-00261894387C.root ']).AndReturn(("0",""))
# now try to delete it (pnfs)
self.runMocker.runCommand(
['rm', '-fv', '/pnfs/cms/WAX/11/store/tmp/testfile' ]\
).AndReturn(("1", "This was a test of the fail system"))
# try to delete it (lustre)
self.runMocker.runCommand(
['/bin/rm', '/lustre/unmerged/NOTAFILE']\
).AndReturn(("1", "This was a test of the fail system"))
mox.Replay(self.runMocker)
mox.Replay(self.copyMocker)
#ourPlugin.runCommand = runMocker.runCommand()
testObject = ourPlugin()
self.assertRaises(StageOutFailure,
testObject.doTransfer,'/store/NONEXISTANTSOURCE',
'/store/NONEXISTANTTARGET',
True,
None,
None,
None,
None)
self.assertRaises(StageOutFailure,
testObject.doTransfer,'/store/unmerged/lustre/NONEXISTANTSOURCE',
'/store/unmerged/lustre/NONEXISTANTTARGET',
True,
None,
None,
None,
None)
self.assertRaises(StageOutFailure,
testObject.doTransfer,'file:///etc/hosts',
'dcap://cmsdca.fnal.gov:24037/pnfs/fnal.gov/usr/cms/WAX/11/store/temp/WMAgent/unmerged/RECO/WMAgentCommissioning10-v7newstageout/0000/0661D749-DD95-DF11-8A0F-00261894387C.root ',
True,
None,
None,
None,
None)
testObject.doDelete('/store/tmp/testfile', None, None, None, None )
testObject.doDelete('/store/unmerged/lustre/NOTAFILE',None, None, None, None )
mox.Verify(self.runMocker)
mox.Verify(self.copyMocker)
@attr("integration")
def testWin(self):
#first try to make a file (regular). this one works
self.runMocker.runCommand(
[self.commandPrepend,'dccp', '-o', '86400', '-d', '0', '-X', '-role=cmsprod', '/store/NONEXISTANTSOURCE', '/store/NONEXISTANTTARGET' ]\
).AndReturn((0, "This transfer works"))
self.runMocker.runCommand(
[self.commandPrepend,'/opt/d-cache/dcap/bin/check_dCachefilecksum.sh', '/store/NONEXISTANTTARGET', '/store/NONEXISTANTSOURCE']\
).AndReturn((0, "Oh, the checksum was checked"))
# now make a file and have the checksum fail
self.runMocker.runCommand(
[self.commandPrepend,'dccp', '-o', '86400', '-d', '0', '-X', '-role=cmsprod', '/store/NONEXISTANTSOURCE', '/store/NONEXISTANTTARGET' ]\
).AndReturn((0, "This transfer works"))
self.runMocker.runCommand(
[self.commandPrepend,'/opt/d-cache/dcap/bin/check_dCachefilecksum.sh', '/store/NONEXISTANTTARGET', '/store/NONEXISTANTSOURCE']\
).AndReturn((1, "Oh, the checksum was checked. Things look bad"))
self.runMocker.runCommand(
[self.commandPrepend, 'mkdir', '-m', '755', '-p', '/store/unmerged']\
).AndReturn((0, ""))
        #then try to make a nonexistent file on lustre
# -- fake making a directory
self.runMocker.runCommand(
[self.commandPrepend, 'mkdir', '-m', '755', '-p', '/store/unmerged']\
).AndReturn((0, "we made a directory, yay"))
# -- fake the actual copy
self.copyMocker.doTransfer( \
'/store/unmerged/lustre/NONEXISTANTSOURCE', '/store/unmerged/lustre/NONEXISTANTTARGET', True, None, None, None, None\
).AndReturn("balls")
mox.Replay(self.runMocker)
mox.Replay(self.copyMocker)
#ourPlugin.runCommand = runMocker.runCommand()
testObject = ourPlugin()
# copy normally and have it work
newPfn = testObject.doTransfer('/store/NONEXISTANTSOURCE',
'/store/NONEXISTANTTARGET',
True,
None,
None,
None,
None)
self.assertEqual(newPfn, '/store/NONEXISTANTTARGET')
# second time fails the checksum
self.assertRaises(StageOutFailure,
testObject.doTransfer,'/store/NONEXISTANTSOURCE',
'/store/NONEXISTANTTARGET',
True,
None,
None,
None,
None)
# copy to lustre normally and have it work
newPfn = testObject.doTransfer('/store/unmerged/lustre/NONEXISTANTSOURCE',
'/store/unmerged/lustre/NONEXISTANTTARGET',
True,
None,
None,
None,
None)
self.assertEqual(newPfn, "balls")
mox.Verify(self.runMocker)
mox.Verify(self.copyMocker)
if __name__ == "__main__":
unittest.main()
|
from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
from django.shortcuts import render,redirect
from django.contrib.auth.forms import UserCreationForm
from django.views.generic import View
from django.shortcuts import get_object_or_404
from .forms import PostForm
from .models import Posts
from django.contrib.auth import authenticate,login,logout
from django.http import HttpResponse,JsonResponse
from django.contrib.auth.decorators import login_required
from django.contrib.auth.mixins import LoginRequiredMixin
# Create your views here.
class Register(View):
template = "register.html"
def get(self,request):
if request.user.is_authenticated():
return redirect("index")
userform = UserCreationForm()
context = {
"userform":userform
}
return render(request, self.template, context)
def post(self,request):
userform = UserCreationForm(data=request.POST)
if userform.is_valid():
user = userform.save()
return redirect("home:login")
else:
context = {
"userform":userform
}
return render(request,self.template,context)
class Login(View):
template = "login.html"
def get(self,request):
return render(request,self.template,{})
def post(self,request):
if request.user.is_authenticated():
#messages.warning(request,"You're already logged in.")
return redirect("index")
username = request.POST['username']
password = request.POST['password']
user = authenticate(username=username,password=password)
if user:
if user.is_active:
login(request,user)
return redirect("index")
else:
return HttpResponse("your account is disabled")
else:
return HttpResponse("Invalid details")
class Index(View):
template = "index.html"
def get(self,request):
posts_list = Posts.objects.all().order_by('-created_at')
paginator = Paginator(posts_list, 10)
page = request.GET.get('page')
try:
posts_list = paginator.page(page)
except PageNotAnInteger:
# If page is not an integer, deliver first page.
posts_list = paginator.page(1)
except EmptyPage:
# If page is out of range (e.g. 9999), deliver last page of results.
posts_list = paginator.page(paginator.num_pages)
context = {
"posts":posts_list
}
return render(request, self.template, context)
class Create(LoginRequiredMixin,View):
template = "create.html"
def get(self,request):
postform = PostForm(initial={'user':request.user})
context = {
"form":postform
}
return render(request, self.template, context)
def post(self,request):
postform = PostForm(request.POST)
# post = Posts.objects.get()
# post = [p.to_jason() for p in post]
if postform.is_valid():
new_post = postform.save()
            return redirect("index")
            # return JsonResponse(new_post.to_json())
            # return JsonResponse({'post':post})
        # invalid form: re-render with errors instead of implicitly returning None
        return render(request, self.template, {"form": postform})
class Edit(View):
template = "edit.html"
    def get(self,request,slug):
        news = get_object_or_404(Posts,slug=slug)
form = PostForm(instance=news)
context = {
"form":form,
"post":news
}
return render(request, self.template, context)
def post(self,request,slug):
news = get_object_or_404(Posts,slug=slug)
form = PostForm(request.POST,instance=news)
if form.is_valid():
form.save()
return redirect("index")
else:
context = {
"form":form,
"post":news
}
return render(request, self.template, context)
class Delete(View):
def post(self,request,slug):
news = get_object_or_404(Posts,slug=slug)
news.delete()
return redirect("index")
@login_required
def user_logout(request):
# Since we know the user is logged in, we can now just log them out.
logout(request)
# Take the user back to the homepage.
return redirect('index')
|
from PyQt5.QtWidgets import QMainWindow, QWidget
from PyQt5.QtWidgets import QApplication, QDialog, QTreeWidget, QTreeWidgetItem
from PyQt5.QtGui import QIcon, QFont
import PyQt5
from PyQt5.QtWidgets import QMessageBox
from PyQt5.QtCore import QDir
from PyQt5.QtCore import Qt, pyqtSlot, pyqtSignal
from libopenimu.qt.Charts import IMUChartView
from libopenimu.models.ProcessedData import ProcessedData
from libopenimu.models.Base import Base
import gc
# UI
from resources.ui.python.MainWindow_ui import Ui_MainWindow
from libopenimu.qt.ImportWindow import ImportWindow
from libopenimu.qt.GroupWindow import GroupWindow
from libopenimu.qt.ParticipantWindow import ParticipantWindow
from libopenimu.qt.RecordsetWindow import RecordsetWindow
from libopenimu.qt.ResultWindow import ResultWindow
from libopenimu.qt.StartWindow import StartWindow
from libopenimu.qt.ImportBrowser import ImportBrowser
from libopenimu.qt.ImportManager import ImportManager
from libopenimu.qt.ExportWindow import ExportWindow
from libopenimu.qt.StreamWindow import StreamWindow
from libopenimu.qt.BackgroundProcess import BackgroundProcess, SimpleTask, ProgressDialog
from libopenimu.qt.ProcessSelectWindow import ProcessSelectWindow
# Models
from libopenimu.models.Participant import Participant
from libopenimu.models.DataSet import DataSet
from libopenimu.models.LogTypes import LogTypes
from libopenimu.streamers.streamer_types import StreamerTypes
# Database
from libopenimu.db.DBManager import DBManager
# Python
import sys
from datetime import datetime
class MainWindow(QMainWindow):
currentFileName = ''
dbMan = []
currentDataSet = DataSet()
currentRecordsets = []
def __init__(self, parent=None):
super(MainWindow, self).__init__(parent=parent)
self.UI = Ui_MainWindow()
self.UI.setupUi(self)
self.UI.dockToolBar.setTitleBarWidget(QWidget())
self.UI.dockLog.hide()
self.add_to_log("OpenIMU - Prêt à travailler.", LogTypes.LOGTYPE_INFO)
# Setup signals and slots
self.setup_signals()
self.show_start_window()
def __del__(self):
# Restore sys.stdout
sys.stdout = sys.__stdout__
sys.stderr = sys.__stderr__
def show_start_window(self):
self.clear_main_widgets()
self.showMinimized()
start_window = StartWindow(self)
if start_window.exec() == QDialog.Rejected:
# User closed the dialog - exits!
sys.exit(0)
# Init database manager
self.currentFileName = start_window.fileName
self.dbMan = DBManager(self.currentFileName)
# Maximize window
self.showMaximized()
# Load data
self.add_to_log("Chargement des données...", LogTypes.LOGTYPE_INFO)
self.currentDataSet = self.dbMan.get_dataset()
self.load_data_from_dataset()
self.UI.treeDataSet.setCurrentItem(None)
self.UI.treeDataSet.owner = self
# self.loadDemoData()
self.add_to_log("Données chargées!", LogTypes.LOGTYPE_DONE)
# If we need to import data, show the import dialog
if start_window.importing:
self.importRequested()
gc.collect()
def setup_signals(self):
self.UI.treeDataSet.itemClicked.connect(self.tree_item_clicked)
self.UI.btnDataSetInfos.clicked.connect(self.infos_requested)
self.UI.btnAddGroup.clicked.connect(self.new_group_requested)
self.UI.btnAddParticipant.clicked.connect(self.new_participant_requested)
self.UI.treeDataSet.participantDragged.connect(self.participant_was_dragged)
self.UI.btnDelete.clicked.connect(self.delete_requested)
self.UI.btnImport.clicked.connect(self.import_requested)
self.UI.btnExportCSV.clicked.connect(self.export_csv_requested)
self.UI.dockDataset.visibilityChanged.connect(self.UI.btnShowDataset.setChecked)
self.UI.dockLog.visibilityChanged.connect(self.toggle_log)
self.UI.btnShowDataset.clicked.connect(self.toggle_dataset)
self.UI.btnShowLog.clicked.connect(self.toggle_log)
self.UI.btnTransfer.clicked.connect(self.transfer_requested)
self.UI.btnClose.clicked.connect(self.db_close_requested)
self.UI.btnCompact.clicked.connect(self.db_compact_requested)
self.UI.btnProcess.clicked.connect(self.process_data_requested)
def console_log_normal(self, text):
self.add_to_log(text, LogTypes.LOGTYPE_DEBUG)
def console_log_error(self, text):
self.add_to_log(text, LogTypes.LOGTYPE_ERROR)
@pyqtSlot()
def load_data_from_dataset(self):
self.UI.treeDataSet.clear()
self.clear_main_widgets()
# Groups
groups = self.dbMan.get_all_groups()
for group in groups:
self.UI.treeDataSet.update_group(group)
# Participants
participants = self.dbMan.get_all_participants()
for participant in participants:
self.UI.treeDataSet.update_participant(participant)
# Recordsets
recordsets = self.dbMan.get_all_recordsets()
for recordset in recordsets:
self.UI.treeDataSet.update_recordset(recordset)
# Results
results = self.dbMan.get_all_processed_data()
for result in results:
self.UI.treeDataSet.update_result(result)
def update_group(self, group):
item = self.UI.treeDataSet.update_group(group)
self.UI.treeDataSet.setCurrentItem(item)
def update_participant(self, participant):
item = self.UI.treeDataSet.update_participant(participant)
self.UI.treeDataSet.setCurrentItem(item)
def clear_main_widgets(self):
for i in reversed(range(self.UI.frmMain.layout().count())):
self.UI.frmMain.layout().itemAt(i).widget().setParent(None)
def show_group(self, group=None):
self.clear_main_widgets()
group_widget = GroupWindow(dbManager=self.dbMan, group=group)
self.UI.frmMain.layout().addWidget(group_widget)
group_widget.dataSaved.connect(self.data_was_saved)
group_widget.dataCancelled.connect(self.data_was_cancelled)
def show_participant(self, participant=None, base_group=None):
self.clear_main_widgets()
part_widget = ParticipantWindow(dbManager=self.dbMan, participant=participant, default_group=base_group)
self.UI.frmMain.layout().addWidget(part_widget)
part_widget.dataSaved.connect(self.data_was_saved)
part_widget.dataCancelled.connect(self.data_was_cancelled)
@pyqtSlot('QString', int)
def add_to_log(self, text, log_type):
if text == ' ' or text == '\n':
return
log_format = ""
if log_type == LogTypes.LOGTYPE_INFO:
log_format = "<span style='color:black'>"
if log_type == LogTypes.LOGTYPE_WARNING:
log_format = "<span style='color:orange;font-style:italic'>"
if log_type == LogTypes.LOGTYPE_ERROR:
log_format = "<span style='color:red;font-weight:bold'>"
if log_type == LogTypes.LOGTYPE_DEBUG:
log_format = "<span style='color:grey;font-style:italic'>"
if log_type == LogTypes.LOGTYPE_DONE:
log_format = "<span style='color:green;font-weight:bold'>"
self.UI.txtLog.append("<span style='color:grey'>" + datetime.now().strftime(
"%H:%M:%S.%f") + " </span>" + log_format + text + "</span>")
self.UI.txtLog.ensureCursorVisible()
QApplication.processEvents()
def get_current_widget_data_type(self):
# TODO: checks!
return self.UI.frmMain.layout().itemAt(0).widget().data_type
######################
@pyqtSlot(bool)
def toggle_dataset(self, visibility):
self.UI.dockDataset.setVisible(visibility)
@pyqtSlot(bool)
def toggle_log(self, visibility):
self.UI.dockLog.setVisible(visibility)
self.UI.btnShowLog.setChecked(visibility)
if visibility:
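            # EmittingStream is assumed to be defined elsewhere in this module;
            # it forwards stdout/stderr writes to the connected log slot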
sys.stdout = EmittingStream(textWritten=self.console_log_normal)
sys.stderr = EmittingStream(textWritten=self.console_log_error)
else:
sys.stdout = sys.__stdout__
sys.stderr = sys.__stderr__
@pyqtSlot()
def import_requested(self):
importer = ImportBrowser(data_manager=self.dbMan)
importer.participant_added.connect(self.load_data_from_dataset)
importer.log_request.connect(self.add_to_log)
importer.setStyleSheet(self.styleSheet())
if importer.exec() == QDialog.Accepted:
self.load_data_from_dataset()
gc.collect()
@pyqtSlot()
def export_csv_requested(self):
exporter = ExportWindow(self.dbMan, self)
exporter.setStyleSheet(self.styleSheet())
if exporter.exec() == QDialog.Accepted:
print("Accepted")
@pyqtSlot()
def infos_requested(self):
infos_window = ImportWindow(dataset=self.currentDataSet, filename=self.currentFileName)
infos_window.setStyleSheet(self.styleSheet())
infos_window.noImportUI = True
infos_window.infosOnly = True
if infos_window.exec() != QDialog.Rejected:
# TODO: Save data
self.currentDataSet.name = infos_window.dataSet.name
@pyqtSlot()
def process_data_requested(self):
if self.currentRecordsets:
# Display Process Window
proc_window = ProcessSelectWindow(data_manager=self.dbMan, recordsets=self.currentRecordsets, parent=self)
if proc_window.exec() == QDialog.Accepted:
self.UI.treeDataSet.update_item("result", proc_window.processed_data)
self.UI.treeDataSet.select_item("result", proc_window.processed_data.id_processed_data)
@pyqtSlot()
def db_close_requested(self):
msg = QMessageBox(self)
msg.setIcon(QMessageBox.Question)
msg.setStyleSheet("QPushButton{min-width: 100px; min-height: 40px;}")
msg.setText("Cet ensemble de données sera fermé. Désirez-vous poursuivre?")
msg.setWindowTitle("Fermeture?")
msg.setStandardButtons(QMessageBox.Yes | QMessageBox.No)
rval = msg.exec()
if rval == QMessageBox.Yes:
self.dbMan.close()
self.add_to_log("Fichier " + self.currentFileName + " fermé.", LogTypes.LOGTYPE_INFO)
self.hide()
self.show_start_window()
@pyqtSlot()
def db_compact_requested(self):
msg = QMessageBox(self)
msg.setIcon(QMessageBox.Question)
msg.setStyleSheet("QPushButton{min-width: 100px; min-height: 40px;}")
msg.setText("Le fichier de données sera nettoyé. Ceci peut prendre un certain temps. \nDésirez-vous poursuivre?")
msg.setWindowTitle("Compactage des données")
msg.setStandardButtons(QMessageBox.Yes | QMessageBox.No)
rval = msg.exec()
if rval == QMessageBox.Yes:
task = SimpleTask("Compactage des données", self.dbMan.compact)
process = BackgroundProcess([task])
dialog = ProgressDialog(process, 'Nettoyage', self)
process.start()
dialog.exec()
@pyqtSlot()
def new_group_requested(self):
self.show_group()
@pyqtSlot()
def new_participant_requested(self):
# Check if we can get a root item (group) for the current selected item or not
item = self.UI.treeDataSet.currentItem()
if item is not None:
while item.parent() is not None:
item = item.parent()
default_group = None
if self.UI.treeDataSet.get_item_type(item) == "group":
default_group = self.UI.treeDataSet.groups[self.UI.treeDataSet.get_item_id(item)]
self.show_participant(base_group=default_group)
@pyqtSlot(Participant)
def participant_was_dragged(self,participant):
self.dbMan.update_participant(participant)
self.update_participant(participant)
@pyqtSlot(QTreeWidgetItem, int)
def tree_item_clicked(self, item, column):
# print(item.text(column))
item_id = self.UI.treeDataSet.get_item_id(item)
item_type = self.UI.treeDataSet.get_item_type(item)
# Clear all widgets
self.clear_main_widgets()
self.UI.btnProcess.setEnabled(False)
self.currentRecordsets = []
if item_type == "group":
self.show_group(self.UI.treeDataSet.groups[item_id])
# groupWidget = GroupWindow(dbManager=self.dbMan, group = self.UI.treeDataSet.groups[item_id])
# self.UI.frmMain.layout().addWidget(groupWidget)
if item_type == "participant":
self.show_participant(self.UI.treeDataSet.participants[item_id])
if item_type == "recordsets" or item_type == "recordset" or item_type == "subrecord":
if item_type == "recordsets":
part = self.UI.treeDataSet.participants[self.UI.treeDataSet.get_item_id(item.parent())]
self.currentRecordsets = self.dbMan.get_all_recordsets(part)
else:
self.currentRecordsets = [self.UI.treeDataSet.recordsets[item_id]]
records_widget = RecordsetWindow(manager=self.dbMan, recordset=self.currentRecordsets, parent=self)
# records_widget.setStyleSheet(self.styleSheet() + records_widget.styleSheet())
self.UI.frmMain.layout().addWidget(records_widget)
records_widget.dataDisplayRequest.connect(self.UI.treeDataSet.select_item)
records_widget.dataUpdateRequest.connect(self.UI.treeDataSet.update_item)
self.UI.btnProcess.setEnabled(True)
if item_type == "result":
result_widget = ResultWindow(manager=self.dbMan, results=self.UI.treeDataSet.results[item_id], parent=self)
self.UI.frmMain.layout().addWidget(result_widget)
self.UI.frmMain.update()
@pyqtSlot()
def data_was_saved(self):
item_type = self.get_current_widget_data_type()
if item_type == "group":
group = self.UI.frmMain.layout().itemAt(0).widget().group
self.update_group(group)
self.add_to_log("Groupe " + group.name + " mis à jour.", LogTypes.LOGTYPE_DONE)
if item_type == "participant":
part = self.UI.frmMain.layout().itemAt(0).widget().participant
self.update_participant(part)
self.add_to_log("Participant " + part.name + " mis à jour.", LogTypes.LOGTYPE_DONE)
@pyqtSlot()
def data_was_cancelled(self):
item_type = self.get_current_widget_data_type()
if item_type == "group":
if self.UI.frmMain.layout().itemAt(0).widget().group is None:
self.clear_main_widgets()
if item_type == "participant":
if self.UI.frmMain.layout().itemAt(0).widget().participant is None:
self.clear_main_widgets()
@pyqtSlot()
def delete_requested(self):
item_id = self.UI.treeDataSet.get_item_id(self.UI.treeDataSet.currentItem())
item_type = self.UI.treeDataSet.get_item_type(self.UI.treeDataSet.currentItem())
if item_type == "recordsets" or item_type == "results":
return
msg = QMessageBox(self)
msg.setIcon(QMessageBox.Question)
msg.setStyleSheet("QPushButton{min-width: 100px; min-height: 40px;}")
msg.setText("Désirez-vous vraiment supprimer \"" + self.UI.treeDataSet.currentItem().text(0) +
"\" et tous les éléments associés?")
msg.setWindowTitle("Confirmation de suppression")
msg.setStandardButtons(QMessageBox.Yes | QMessageBox.No)
rval = msg.exec()
if rval == QMessageBox.Yes:
item_name = self.UI.treeDataSet.currentItem().text(0)
tasks = []
if item_type == "group":
group = self.UI.treeDataSet.groups[item_id]
self.UI.treeDataSet.remove_group(group)
task = SimpleTask("Suppression de '" + group.name + "'", self.dbMan.delete_group, group)
tasks.append(task)
if item_type == "participant":
part = self.UI.treeDataSet.participants[item_id]
self.UI.treeDataSet.remove_participant(part)
task = SimpleTask("Suppression de '" + part.name + "'", self.dbMan.delete_participant, part)
tasks.append(task)
if item_type == "recordset":
# Find and remove all related results
for result in self.UI.treeDataSet.results.values():
if result is not None:
for ref in result.processed_data_ref:
if ref.recordset.id_recordset == item_id:
self.UI.treeDataSet.remove_result(result)
task = SimpleTask("Suppression de '" + result.name + "'",
self.dbMan.delete_processed_data, result)
tasks.append(task)
# self.dbMan.delete_processed_data(result)
break
recordset = self.UI.treeDataSet.recordsets[item_id]
task = SimpleTask("Suppression de '" + recordset.name + "'", self.dbMan.delete_recordset, recordset)
tasks.append(task)
# self.dbMan.delete_recordset(recordset)
self.UI.treeDataSet.remove_recordset(recordset)
if item_type == "result":
result = self.UI.treeDataSet.results[item_id]
task = SimpleTask("Suppression de '" + result.name + "'", self.dbMan.delete_processed_data, result)
tasks.append(task)
self.UI.treeDataSet.remove_result(result)
# self.dbMan.delete_processed_data(result)
if tasks:
process = BackgroundProcess(tasks)
# Create progress dialog
dialog = ProgressDialog(process, 'Suppression', self)
# Start tasks
process.start()
dialog.exec()
self.add_to_log(item_name + " a été supprimé.", LogTypes.LOGTYPE_DONE)
self.clear_main_widgets()
# def closeEvent(self, event):
# return
def create_chart_view(self, test_data=False):
chart_view = IMUChartView(self)
if test_data is True:
chart_view.add_test_data()
return chart_view
@pyqtSlot()
def transfer_requested(self):
import_man = ImportManager(dbmanager=self.dbMan, dirs=True, stream=True, parent=self)
# TODO: More intelligent refresh!
import_man.participant_added.connect(self.load_data_from_dataset)
if import_man.exec() == QDialog.Accepted:
stream_diag = StreamWindow(stream_type=import_man.filetype_id, path=import_man.filename, parent=self)
stream_diag.exec()
# Do the actual import
msg = QMessageBox(self)
msg.setIcon(QMessageBox.Question)
msg.setStyleSheet("QPushButton{min-width: 100px; min-height: 40px;}")
msg.setText("Procéder à l'importation des données?")
msg.setWindowTitle("Importer?")
msg.setStandardButtons(QMessageBox.Yes | QMessageBox.No)
rval = msg.exec()
if rval == QMessageBox.Yes:
# Start import process
import_browser = ImportBrowser(data_manager=self.dbMan, parent=self)
import_browser.log_request.connect(self.add_to_log)
# Build import list
files = import_man.get_file_list()
importer_id = StreamerTypes.value_importer_types[import_man.filetype_id]
for file_name, file_part in files.items():
import_browser.add_file_to_list(file_name, import_man.filetype, importer_id, file_part)
import_browser.ok_clicked()
self.load_data_from_dataset()
########################################################################################################################
class Treedatawidget(QTreeWidget):
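    # NOTE: the dicts below are class attributes, shared by every instance until
    # clear() rebinds fresh per-instance dicts.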
groups = {}
participants = {}
recordsets = {}
results = {}
items_groups = {}
items_participants = {}
items_recordsets = {}
items_results = {}
participantDragged = pyqtSignal(Participant)
owner = None
def __init__(self, parent=None):
super(Treedatawidget, self).__init__(parent=parent)
def remove_group(self,group):
item = self.items_groups.get(group.id_group, None)
# Remove all participants items in that group
        # iterate in reverse: remove_participant() detaches the child and shifts indices
        for i in reversed(range(item.childCount())):
            child = item.child(i)
            child_id = self.get_item_id(child)
            self.remove_participant(self.participants[child_id])
for i in range(0, self.topLevelItemCount()):
if self.topLevelItem(i) == item:
self.takeTopLevelItem(i)
self.groups[group.id_group] = None
self.items_groups[group.id_group] = None
break
def remove_participant(self,participant):
item = self.items_participants.get(participant.id_participant, None)
# Remove all recordsets and results items from participant
        for i in range(0, item.childCount()):
            child_type = self.get_item_type(item.child(i))
            # reverse order: remove_recordset()/remove_result() detach the child item
            for j in reversed(range(item.child(i).childCount())):
                child = item.child(i).child(j)
                child_id = self.get_item_id(child)
if child_type == "recordsets":
try:
self.remove_recordset(self.recordsets[child_id])
except KeyError:
continue
if child_type == "results":
try:
self.remove_result(self.results[child_id])
except KeyError:
continue
if participant.id_group is None: # Participant without a group
for i in range(0, self.topLevelItemCount()):
if self.topLevelItem(i) == item:
self.takeTopLevelItem(i)
break
else:
for i in range(0, item.parent().childCount()):
if item.parent().child(i) == item:
item.parent().takeChild(i)
break
self.participants[participant.id_participant] = None
self.items_participants[participant.id_participant] = None
def remove_recordset(self, recordset):
item = self.items_recordsets.get(recordset.id_recordset, None)
for i in range(0, item.parent().childCount()):
if item.parent().child(i) == item:
item.parent().takeChild(i)
break
self.recordsets[recordset.id_recordset] = None
self.items_recordsets[recordset.id_recordset] = None
def remove_result(self, result):
item = self.items_results.get(result.id_processed_data, None)
for i in range(0, item.parent().childCount()):
if item.parent().child(i) == item:
item.parent().takeChild(i)
break
self.results[result.id_processed_data] = None
self.items_results[result.id_processed_data] = None
def update_group(self, group):
item = self.items_groups.get(group.id_group, None)
if item is None:
item = QTreeWidgetItem()
item.setText(0, group.name)
item.setIcon(0, QIcon(':/OpenIMU/icons/group.png'))
item.setData(0, Qt.UserRole, group.id_group)
item.setData(1, Qt.UserRole, 'group')
item.setFont(0, QFont('Helvetica', 12, QFont.Bold))
self.addTopLevelItem(item)
self.groups[group.id_group] = group
self.items_groups[group.id_group] = item
else:
item.setText(0, group.name)
return item
def update_participant(self, part):
item = self.items_participants.get(part.id_participant, None)
group_item = self.items_groups.get(part.id_group, None)
if item is None:
item = QTreeWidgetItem()
item.setText(0, part.name)
item.setIcon(0, QIcon(':/OpenIMU/icons/participant.png'))
item.setData(0, Qt.UserRole, part.id_participant)
item.setData(1, Qt.UserRole, 'participant')
item.setFont(0, QFont('Helvetica', 12, QFont.Bold))
if group_item is None: # Participant without a group
self.addTopLevelItem(item)
else:
group_item.addChild(item)
parent = item
# Recordings
item = QTreeWidgetItem()
item.setText(0, 'Enregistrements')
item.setIcon(0, QIcon(':/OpenIMU/icons/records.png'))
item.setData(1, Qt.UserRole, 'recordsets')
item.setFont(0, QFont('Helvetica', 11, QFont.Bold))
parent.addChild(item)
# Results
item = QTreeWidgetItem()
item.setText(0, 'Résultats')
item.setIcon(0, QIcon(':/OpenIMU/icons/results.png'))
item.setData(1, Qt.UserRole, 'results')
item.setFont(0, QFont('Helvetica', 11, QFont.Bold))
parent.addChild(item)
item = parent
else:
item.setText(0, part.name)
# Check if we must move it or not, if the group changed
if item.parent() != group_item:
# Old group - find and remove current item
if item.parent() is None: # No parent...
for i in range(0, self.topLevelItemCount()):
if self.topLevelItem(i) == item:
item = self.takeTopLevelItem(i)
break
else:
# Had a group...
for i in range(0, item.parent().childCount()):
if item.parent().child(i) == item:
item = item.parent().takeChild(i)
break
# New group
if group_item is None: # Participant without a group
self.addTopLevelItem(item)
else:
group_item.addChild(item)
self.participants[part.id_participant] = part
self.items_participants[part.id_participant] = item
return item
def update_recordset(self, recordset):
item = self.items_recordsets.get(recordset.id_recordset, None)
if item is None:
item = QTreeWidgetItem()
item.setText(0, recordset.name)
item.setIcon(0, QIcon(':/OpenIMU/icons/recordset.png'))
item.setData(0, Qt.UserRole, recordset.id_recordset)
item.setData(1, Qt.UserRole, 'recordset')
item.setFont(0, QFont('Helvetica', 11, QFont.Bold))
part_item = self.items_participants.get(recordset.id_participant,None)
if part_item is not None:
for i in range(0, part_item.childCount()):
if self.get_item_type(part_item.child(i)) == "recordsets":
part_item.child(i).addChild(item)
else:
item.setText(0, recordset.name)
self.recordsets[recordset.id_recordset] = recordset
self.items_recordsets[recordset.id_recordset] = item
return item
def update_result(self, result: ProcessedData):
item = self.items_results.get(result.id_processed_data, None)
if item is None:
item = QTreeWidgetItem()
item.setText(0, result.name)
item.setIcon(0, QIcon(':/OpenIMU/icons/result.png'))
item.setData(0, Qt.UserRole, result.id_processed_data)
item.setData(1, Qt.UserRole, 'result')
item.setFont(0, QFont('Helvetica', 11, QFont.Bold))
part_item = None
if len(result.processed_data_ref)>0:
part_item = self.items_participants.get(result.processed_data_ref[0].recordset.id_participant,None)
if part_item is not None:
# TODO: subrecords...
for i in range(0, part_item.childCount()):
if self.get_item_type(part_item.child(i)) == "results":
part_item.child(i).addChild(item)
else:
item.setText(0, result.name)
self.results[result.id_processed_data] = result
self.items_results[result.id_processed_data] = item
return item
@classmethod
def get_item_type(cls, item):
if item is not None:
return item.data(1, Qt.UserRole)
else:
return ""
@classmethod
def get_item_id(cls, item):
if item is not None:
return item.data(0, Qt.UserRole)
else:
return ""
@pyqtSlot(str, int)
def select_item(self, item_type, item_id):
# print ("Selecting " + item_type + ", ID " + str(item_id))
item = None
if item_type == "group":
item = self.items_groups.get(item_id, None)
if item_type == "participant":
item = self.items_participants.get(item_id, None)
if item_type == "recordset":
item = self.items_recordsets.get(item_id, None)
if item_type == "result":
item = self.items_results.get(item_id, None)
if item is not None:
self.setCurrentItem(item)
self.owner.tree_item_clicked(item, 0)
@pyqtSlot(str, Base)
def update_item(self, item_type, data):
# print ("Selecting " + item_type + ", ID " + str(item_id))
# item = None
if item_type == "group":
self.update_group(data)
if item_type == "participant":
self.update_participant(data)
if item_type == "recordset":
self.update_recordset(data)
if item_type == "result":
self.update_result(data)
def clear(self):
self.groups = {}
self.participants = {}
self.recordsets = {}
self.results = {}
self.items_groups = {}
self.items_participants = {}
self.items_recordsets = {}
self.items_results = {}
super().clear()
def dropEvent(self, event):
index = self.indexAt(event.pos())
source_item = self.currentItem()
source_type = source_item.data(1, Qt.UserRole)
source_id = source_item.data(0, Qt.UserRole)
target_item = self.itemFromIndex(index)
if target_item is not None:
target_type = target_item.data(1, Qt.UserRole)
target_id = target_item.data(0, Qt.UserRole)
if source_type == "participant":
# Participant can only be dragged over groups or no group at all
if not index.isValid():
# Clear source and set to no group
self.participants[source_id].group = None
self.participants[source_id].id_group = None
# new_item = source_item.clone()
# self.addTopLevelItem(new_item)
self.participantDragged.emit(self.participants[source_id])
event.accept()
return
else:
if target_type == "group":
self.participants[source_id].group = self.groups[target_id]
self.participants[source_id].id_group = self.groups[target_id].id_group
# new_item = source_item.clone()
# target_item.addChild(new_item)
self.participantDragged.emit(self.participants[source_id])
event.accept()
return
event.ignore()
class EmittingStream(PyQt5.QtCore.QObject):
textWritten = PyQt5.QtCore.pyqtSignal(str)
flushRequest = PyQt5.QtCore.pyqtSignal()
def write(self, text):
self.textWritten.emit(str(text))
def flush(self):
pass
# Main
if __name__ == '__main__':
# Must be done before starting the app
QApplication.setAttribute(Qt.AA_EnableHighDpiScaling)
app = QApplication(sys.argv)
# qInstallMessageHandler(qt_message_handler)
# Set current directory to home path
QDir.setCurrent(QDir.homePath())
print(PyQt5.__file__)
# paths = [x for x in dir(QLibraryInfo) if x.endswith('Path')]
# pprint({x: QLibraryInfo.location(getattr(QLibraryInfo, x)) for x in paths})
# WebEngine settings
# QWebEngineSettings.globalSettings().setAttribute(QWebEngineSettings.PluginsEnabled, True)
# QWebEngineSettings.globalSettings().setAttribute(QWebEngineSettings.JavascriptCanOpenWindows, True)
# QWebEngineSettings.globalSettings().setAttribute(QWebEngineSettings.JavascriptEnabled, True)
# QWebEngineSettings.globalSettings().setAttribute(QWebEngineSettings.LocalContentCanAccessRemoteUrls,True)
# QWebEngineSettings.globalSettings().setAttribute(QWebEngineSettings.AllowRunningInsecureContent, True)
window = MainWindow()
# Never executed (exec already in main)...
sys.exit(app.exec_())
|
from .person import Person
class MITPerson(Person):
nextIdNum = 0
def __init__(self, name):
Person.__init__(self, name)
self.idNum = MITPerson.nextIdNum
MITPerson.nextIdNum += 1
def getIdNum(self):
return self.idNum
def __lt__(self, other):
return self.idNum < other.idNum
def speak(self, utterance):
return (self.getLastName() + " says: " + utterance)
class Student(MITPerson):
pass
class UG(Student):
def __init__(self, name, classYear):
MITPerson.__init__(self, name)
self.year = classYear
def getClass(self):
return self.year
def speak(self, utterance):
return MITPerson.speak(self, " Dude, " + utterance)
class Grad(Student):
pass
class TransferStudent(Student):
pass
def isStudent(obj):
return isinstance(obj, Student)
class Professor(MITPerson):
def __init__(self, name, department):
        MITPerson.__init__(self, name)
self.department = department
def speak(self, utterance):
new = 'In course ' + self.department + ' we say '
return MITPerson.speak(self, new + utterance)
def lecture(self, topic):
return self.speak("It is obbious that " + topic)
|
# -----------------------------------------------------------------------------
# Copyright (c) 2014--, The Qiita Development Team.
#
# Distributed under the terms of the BSD 3-clause License.
#
# The full license is in the file LICENSE, distributed with this software.
# -----------------------------------------------------------------------------
from json import loads
from glob import glob
from os.path import join
from tornado.web import HTTPError
from .oauth2 import OauthBaseHandler, authenticate_oauth
from qiita_core.qiita_settings import qiita_config
import qiita_db as qdb
def _get_plugin(name, version):
"""Returns the plugin with the given name and version
Parameters
----------
name : str
The name of the plugin
version : str
The version of the plugin
Returns
-------
qiita_db.software.Software
The requested plugin
Raises
------
HTTPError
If the plugin does not exist, with error code 404
If there is a problem instantiating the plugin, with error code 500
"""
try:
plugin = qdb.software.Software.from_name_and_version(name, version)
except qdb.exceptions.QiitaDBUnknownIDError:
raise HTTPError(404)
except Exception as e:
raise HTTPError(500, reason='Error instantiating plugin %s %s: %s'
% (name, version, str(e)))
return plugin
class PluginHandler(OauthBaseHandler):
@authenticate_oauth
def get(self, name, version):
"""Retrieve the plugin information
Parameters
----------
name : str
The plugin name
version : str
The plugin version
Returns
-------
dict
The plugin information:
'name': the plugin name
'version': the plugin version
'description': the plugin description
'commands': list of the plugin commands
'publications': list of publications
'default_workflows': list of the plugin default workflows
'type': the plugin type
'active': whether the plugin is active or not
"""
with qdb.sql_connection.TRN:
plugin = _get_plugin(name, version)
response = {
'name': plugin.name,
'version': plugin.version,
'description': plugin.description,
'commands': [c.name for c in plugin.commands],
'publications': [{'DOI': doi, 'PubMed': pubmed}
for doi, pubmed in plugin.publications],
'default_workflows': [w.name
for w in plugin.default_workflows],
'type': plugin.type,
'active': plugin.active}
self.write(response)
class CommandListHandler(OauthBaseHandler):
@authenticate_oauth
def post(self, name, version):
"""Create new command for a plugin
Parameters
----------
name : str
The name of the plugin
version : str
The version of the plugin
"""
with qdb.sql_connection.TRN:
plugin = _get_plugin(name, version)
cmd_name = self.get_argument('name')
cmd_desc = self.get_argument('description')
req_params = loads(self.get_argument('required_parameters'))
opt_params = loads(self.get_argument('optional_parameters'))
for p_name, vals in opt_params.items():
                # the default value arrives JSON-encoded; the previous 'mchoice'
                # special case duplicated the len == 2 branch, so it is folded in
                if len(vals) == 2:
                    opt_params[p_name] = [vals[0], loads(vals[1])]
elif len(vals) == 4:
opt_params[p_name] = [vals[0], loads(vals[1]), vals[2],
vals[3]]
else:
raise qdb.exceptions.QiitaDBError(
"Malformed parameters dictionary, the format "
"should be either {param_name: [parameter_type, "
"default]} or {parameter_name: (parameter_type, "
"default, name_order, check_biom_merge)}. Found: "
"%s for parameter name %s"
% (vals, p_name))
# adding an extra element to make sure the parser knows this is
# an optional parameter
opt_params[p_name].extend(['qiita_optional_parameter'])
outputs = self.get_argument('outputs', None)
if outputs:
outputs = loads(outputs)
dflt_param_set = loads(self.get_argument('default_parameter_sets'))
analysis_only = self.get_argument('analysis_only', False)
parameters = req_params
parameters.update(opt_params)
cmd = qdb.software.Command.create(
plugin, cmd_name, cmd_desc, parameters, outputs,
analysis_only=analysis_only)
if dflt_param_set is not None:
for name, vals in dflt_param_set.items():
qdb.software.DefaultParameters.create(name, cmd, **vals)
self.finish()
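
    # Illustrative 'optional_parameters' payload, following the format described in
    # the error message above (parameter names here are hypothetical):
    #   {"sortmerna_e_value": ["float", "1"],
    #    "sortmerna_coverage": ["float", "0.97", "2", "False"]}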
def _get_command(plugin_name, plugin_version, cmd_name):
"""Returns the command with the given name within the given plugin
Parameters
----------
plugin_name : str
The name of the plugin
plugin_version : str
The version of the plugin
cmd_name : str
The name of the command in the plugin
Returns
-------
qiita_db.software.Command
The requested command
Raises
------
HTTPError
If the command does not exist, with error code 404
If there is a problem instantiating the command, with error code 500
"""
plugin = _get_plugin(plugin_name, plugin_version)
try:
cmd = plugin.get_command(cmd_name)
except qdb.exceptions.QiitaDBUnknownIDError:
raise HTTPError(404)
except Exception as e:
raise HTTPError(500, reason='Error instantiating cmd %s of plugin '
'%s %s: %s' % (cmd_name, plugin_name,
plugin_version, str(e)))
return cmd
class CommandHandler(OauthBaseHandler):
@authenticate_oauth
def get(self, plugin_name, plugin_version, cmd_name):
"""Retrieve the command information
Parameters
----------
plugin_name : str
The plugin name
plugin_version : str
The plugin version
cmd_name : str
The command name
Returns
-------
dict
The command information
'name': the command name
'description': the command description
'required_parameters': dict with the required parameters, in the
format {parameter_name: [type, [subtypes]]}
'optional_parameters': dict with the optional parameters, in the
format {parameter_name: [type, default value]}
'default_parameter_sets': dict with the default parameter sets, in
the format {parameter set name: {parameter_name: value}}
"""
with qdb.sql_connection.TRN:
cmd = _get_command(plugin_name, plugin_version, cmd_name)
response = {
'name': cmd.name,
'description': cmd.description,
'required_parameters': cmd.required_parameters,
'optional_parameters': cmd.optional_parameters,
'default_parameter_sets': {
p.name: p.values for p in cmd.default_parameter_sets}}
self.write(response)
class CommandActivateHandler(OauthBaseHandler):
@authenticate_oauth
def post(self, plugin_name, plugin_version, cmd_name):
"""Activates the command
Parameters
----------
plugin_name : str
The plugin name
plugin_version : str
The plugin version
cmd_name : str
The command name
"""
with qdb.sql_connection.TRN:
cmd = _get_command(plugin_name, plugin_version, cmd_name)
cmd.activate()
self.finish()
class ReloadPluginAPItestHandler(OauthBaseHandler):
@authenticate_oauth
def post(self):
"""Reloads the plugins"""
conf_files = sorted(glob(join(qiita_config.plugin_dir, "*.conf")))
for fp in conf_files:
software = qdb.software.Software.from_file(fp, update=True)
software.activate()
software.register_commands()
self.finish()
|
from django.db import models
# Create your models here.
class ClientModel(models.Model):
lastname = models.CharField(max_length=120,blank=True, null=True, default=None,verbose_name="Фамилия")
email = models.EmailField(blank=True, null=True, default=None)
firstname = models.CharField(max_length=120,blank=True, null=True, default=None,verbose_name="Имя")
phone = models.CharField(max_length=50, blank=True, null=True, default=None,verbose_name="Тел.")
address = models.CharField(max_length=128, blank=True, null=True, default=None,verbose_name="Адрес")
created = models.DateTimeField(auto_now_add=True,auto_now=False,verbose_name="Созд.")
updated = models.DateTimeField(auto_now_add=False,auto_now=True,verbose_name="Обнов.")
token_key = models.CharField(max_length=128,blank=True, null=True, default=None)
    # string representation: show a single field
    def __str__(self):
        return "Клиент %s" % self.id
class Meta:
verbose_name = 'Клиент'
verbose_name_plural = 'Клиенты'
|
# -*- coding: utf-8 -*-
class GrapheNO:
"""graphe code par liste d'adjacence. Les sommets devront etre numerotes 0,1,...,n-1"""
def __init__(self, n, l_adj):
"""initialise un graphe d'apres la liste d'adjacence l_adj et son ordre n"""
self.ordre = n # attribut ordre = nb de sommets
self.adj = l_adj # attribut liste adjacence
def affiche(self):
"""affiche dans le terminal la liste d'adjacence"""
print "ordre : ", self.ordre
for v in range(self.ordre):
print "voisins de", v," : ",
for voisin in self.adj[v]:
print voisin,
print
def degre(self,v):
"""renvoie le degre du sommet v"""
return len(self.adj[v])
def taille(self):
"""renvoie le nombre d'arêtes du graphe"""
sommedegres = 0
for v in range(self.ordre):
sommedegres += len(self.adj[v])
return sommedegres/2
# petits malins : return sum([len(self.adj[v]) for v in range(self.ordre)])
def nbTriangles(self):
"""renvoie le nb de triangles du graphe. Une version très naïve consisterait à faire une triple
boucle et tester les trois adjacences
Mieux chercher les voisins des voisins d'un sommet v donné et regarder si v est voisin à nouveau.
Chaque triangle aura été compté six fois."""
nbT = 0
for v in range(self.ordre):
for v1 in self.adj[v]:
for v2 in self.adj[v1]:
if v in self.adj[v2]:
nbT += 1
return nbT/6
####################################################################################################################
def aretes_vers_liste_adjacence(n,aretes):
"""n est l'ordre du graphe non oriente, sommets 0,1, ..., n-1
aretes la liste de ses aretes
renvoie la liste d'adjacence du graphe"""
adjacence = [ [] for i in range(n) ] #liste contenant n listes vides
for arete in aretes:
adjacence[ arete[0] ].append( arete[1] )
adjacence[ arete[1] ].append( arete[0] )
return adjacence
def grapheComplet(n):
"""renvoie un objet GrapheNO correspondant au graphe complet d'ordre n"""
l_adj = []
for v in range(n):
voisinage = []
for voisin in range(n):
if voisin != v:
voisinage.append(voisin)
l_adj.append(voisinage)
return GrapheNO(n, l_adj)
"""remarque : on peut également si on est en forme écrire une seule ligne :
return GrapheNO([ [j for j in range(n) if j!=i] for i in range(n) ])"""
def cycle(n) :
"""renvoie un objet GrapheNO correspondant au cycle d'ordre n"""
l_adj = [[1,n-1]]
for v in range(1,n-1):
l_adj.append([v-1,v+1])
l_adj.append([n-2,0])
return GrapheNO(n, l_adj)
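
# Minimal sanity check for the constructors above (easy to verify by hand):
# K4 has 6 edges and 4 triangles, the 5-cycle has 5 edges and no triangle.
if __name__ == '__main__':
    k4 = grapheComplet(4)
    assert k4.taille() == 6 and k4.nbTriangles() == 4
    c5 = cycle(5)
    assert c5.taille() == 5 and c5.nbTriangles() == 0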
def lireAretesEtOrdre(nomdufichier):
    """Read the file and return the list of edges it contains, plus the order."""
    with open(nomdufichier, 'r') as f:
        lignes = f.readlines()
    # extract the lines that start with 'E'; each valid one yields a new edge
    aretes = []
    ordre = 0
    for l in lignes:
        mots = l.split()
        if len(mots) >= 3 and mots[0] == 'E':
            aretes.append([int(mots[1]), int(mots[2])])  # everyone forgets the int conversion
        if len(mots) > 0 and mots[0] == "ordre":
            ordre = int(mots[1])
    return aretes, ordre
def lireGrapheNO(nomdufichier):
"""renvoie l'objet GrapheNO contenu par liste d'arêtes dans le fichier"""
aretes, n = lireAretesEtOrdre(nomdufichier)
return GrapheNO(n, aretes_vers_liste_adjacence(n,aretes))
def test1():
for nom in ["petitgraphe.txt","copperfield.txt","erdos.txt","levures.txt","metro.txt"]:
g = lireGrapheNO(nom)
print "*"*20
print nom
print "ordre :", g.ordre
print "taille :", g.taille()
print "triangles :", g.nbTriangles()
def parcours_largeur(g, v):
    """Breadth-first traversal of the GrapheNO g from vertex v;
    returns the array d of distances and the array pred of predecessors;
    uses a basic FIFO queue built on a list (insert at the front, pop at the back);
    version without the colors."""
    # initialization
    pred = [None] * g.ordre
    d = [-1] * g.ordre
    d[v] = 0
    queue = [v]
    # main loop
    while queue:
        courant = queue.pop()
        for voisin in g.adj[courant]:
            if d[voisin] == -1:
                pred[voisin] = courant
                d[voisin] = d[courant] + 1
                queue.insert(0, voisin)
    # done
    return d, pred
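
# Quick check of the traversal (not part of the original exercise): on the
# 6-cycle, distances from vertex 0 should be [0, 1, 2, 3, 2, 1].
if __name__ == '__main__':
    d_demo, pred_demo = parcours_largeur(cycle(6), 0)
    assert d_demo == [0, 1, 2, 3, 2, 1]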
def test3():
    """Distance from vertex 0 to the 'last' vertex."""
    for nom in ["petitgraphe.txt", "copperfield.txt", "erdos.txt", "levures.txt", "metro.txt"]:
        g = lireGrapheNO(nom)
        d, pred = parcours_largeur(g, 0)
        print(nom, d[g.ordre - 1])
def nb_composantes_connexes(g):
inconnu = [True] * g.ordre
nb_composantes = 0
for depart in range(g.ordre):
if inconnu[depart]:
nb_composantes +=1
queue = [depart]
inconnu[depart] = False
            while queue:
                courant = queue.pop()
                for voisin in g.adj[courant]:
                    if inconnu[voisin]:
                        inconnu[voisin] = False
                        queue.insert(0, voisin)
return nb_composantes
def test2():
for n in range(10):
nomfichier = "composantes" + str(n) + ".txt"
        print(nomfichier, nb_composantes_connexes(lireGrapheNO(nomfichier)))
def mem_comp(g, a, b):
    """Return True if vertices a and b are in the same connected component."""
    d, p = parcours_largeur(g, a)
    return d[b] != -1
g = lireGrapheNO("levures.txt")
for i in range(g.ordre):
if not mem_comp(g,0,i):
        print(i)
# behind-the-scenes helper used to prepare the data files
def retrouveordre(nomdufichier):
    """Recover the graph order: 1 + the largest vertex index appearing on an edge."""
    with open(nomdufichier, 'r') as f:
        lignes = f.readlines()
    # scan the 'E' lines and keep the largest endpoint (avoid shadowing builtin max)
    maxi = -1
    for l in lignes:
        mots = l.split()
        if len(mots) >= 3 and mots[0] == 'E':
            maxi = max(maxi, int(mots[1]), int(mots[2]))
    return maxi + 1
|
import pika
from src.settings import RABBITMQ_HOST, RABBITMQ_PORT, RABBITMQ_USERNAME, RABBITMQ_PASSWORD, RABBITMQ_QUEUE, \
RABBITMQ_PREFETCH_COUNT
from src.contracts.event_consumer import EventConsumer
class RabbitConsumer(EventConsumer):
def start_consumption(self, callback):
credentials = pika.PlainCredentials(RABBITMQ_USERNAME, RABBITMQ_PASSWORD)
connection = pika.BlockingConnection(pika.ConnectionParameters(
host=RABBITMQ_HOST,
port=RABBITMQ_PORT,
credentials=credentials
))
channel = connection.channel()
channel.basic_qos(prefetch_count=RABBITMQ_PREFETCH_COUNT)
channel.basic_consume(on_message_callback=callback,
queue=RABBITMQ_QUEUE)
print(' [*] Waiting for messages. To exit press CTRL+C')
channel.start_consuming()
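
# Hedged usage sketch: the callback must follow pika's standard
# on_message_callback signature; queue and credentials come from src.settings.
#
#     def handle(ch, method, properties, body):
#         print(body)
#         ch.basic_ack(delivery_tag=method.delivery_tag)
#
#     RabbitConsumer().start_consumption(handle)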
|
from timeit import default_timer as timer
import tkinter
from PIL import Image
from PIL import ImageTk
import cv2
import numpy as np
from math import sqrt
MAX_FEATURES = 5000
GOOD_MATCH_PERCENT = 0.25
MIN_MATCHES = 10
camindex = 2
ransacReprojThreshold = 25.0
im1 = cv2.imread('template.jpg')
im1Gray = cv2.cvtColor(im1, cv2.COLOR_BGR2GRAY)
orb = cv2.ORB_create(MAX_FEATURES)
keypoints1, descriptors1 = orb.detectAndCompute(im1Gray, None)
matcher = cv2.DescriptorMatcher_create(cv2.DESCRIPTOR_MATCHER_BRUTEFORCE_HAMMING)
# NOTE: this FLANN matcher is configured but never used below; a KD-tree index
# suits float descriptors, not ORB's binary descriptors (LSH would fit better).
FLANN_INDEX_KDTREE = 0
index_params = dict(algorithm=FLANN_INDEX_KDTREE, trees=5)
search_params = dict(checks=100)
flann = cv2.FlannBasedMatcher(index_params, search_params)
stdpix = 500
stdval = 0.1
cap = cv2.VideoCapture(camindex)
if not cap.isOpened():
print("Camera not found at index ", camindex)
exit()
cap.set(cv2.CAP_PROP_FRAME_WIDTH, 1280)
cap.set(cv2.CAP_PROP_FRAME_HEIGHT, 960)
font = cv2.FONT_HERSHEY_SIMPLEX
devthresh = 0.5
accurate = 0
detected = 1
undetected = 0
#lgdev = 100
lgdst = []
lgcen = ()
lgsz = 1
drops = 0
def eucliddist(p1, p2):
dx = p1[0]-p2[0]
dy = p1[1]-p2[1]
dist = sqrt(dx*dx + dy*dy)
return dist
def sign(x):
if x >= 0:
return 1
else:
return -1
def check_convexity(points):
val = [0,0,0,0]
for i in range(4):
x0, y0 = points[i][0], points[i][1]
x1, y1 = points[(i+1)%4][0], points[(i+1)%4][1]
x2, y2 = points[(i+2)%4][0], points[(i+2)%4][1]
val[i] = sign((x1-x0)*(y2-y0)-(y1-y0)*(x2-x0))
if 0 in val:
return False
if(val == [1,1,1,1] or val == [-1,-1,-1,-1]):
return True
return False
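# e.g. the unit square [(0,0), (1,0), (1,1), (0,1)] gives val == [1, 1, 1, 1]
# (convex), while the self-intersecting "bowtie" [(0,0), (1,1), (1,0), (0,1)]
# mixes signs and is rejected.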
def validate(corners):
points = [np.array(corners[i][0]) for i in range(4)]
cvx = check_convexity(points)
sides = [eucliddist(points[i], points[(i+1)%4]) for i in range(4)]
#print(sides)
dev = np.std(sides)/np.mean(sides)
#print(dev)
if(dev<devthresh and cvx):
return 3
elif(dev>=devthresh and cvx):
return 2
elif(dev<devthresh and not cvx):
return 1
else:
return 0
def quadcentroid(corners): #returns centre of quadrilateral
sumx=0.0
sumy=0.0
for i in range(4):
sumx+=corners[i][0][0]
sumy+=corners[i][0][1]
return (int(sumx//4) , int(sumy//4))
def quadsize(corners): #returns longest diagonal
diag1 = eucliddist(corners[0][0], corners[2][0])
diag2 = eucliddist(corners[1][0], corners[3][0])
return max(diag1, diag2)
def triangulate(quantity, currpix):
    # NOTE: currpix (current target size in pixels) is accepted but unused here;
    # the pixel-to-metre conversion uses the fixed calibration stdval/stdpix.
    qx = quantity[0] * stdval / stdpix
    qy = quantity[1] * stdval / stdpix
    return (qx, qy)
def reqacc(rvel, cdist, taracc):
    # Required acceleration to cancel the relative velocity over the remaining
    # centre offset: a = a_target + v^2 / (2 d) per axis (from v^2 = 2 a d).
    if cdist[0] != 0:
        a0 = taracc[0] + 0.5 * np.dot(rvel[0], rvel[0]) / cdist[0]
    else:
        a0 = taracc[0]
    if cdist[1] != 0:
        a1 = taracc[1] + 0.5 * np.dot(rvel[1], rvel[1]) / cdist[1]
    else:
        a1 = taracc[1]
    acc = [a0, a1]
    return acc
def cendist(mcen, frame0):
h,w,c = frame0.shape
cen = (w//2, h//2)
off=np.subtract(np.array(cen), np.array(mcen))
return off
def displacement(pos0, pos1):
pixshift=(pos1[0]-pos0[0], pos1[1]-pos0[1])
return pixshift
def pixvel (pos0, pos1, framerate):
svect = displacement(pos0, pos1)
vvect = (svect[0]*framerate , svect[1]*framerate)
return vvect
def pixacc(vel0, vel1, framerate):
    # NOTE: an earlier three-position overload of pixacc was silently shadowed by
    # this two-velocity definition; only this form is used, so it is kept alone.
    avect = ((vel1[0] - vel0[0]) * framerate, (vel1[1] - vel0[1]) * framerate)
    return avect
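# Worked example for the finite-difference chain above (illustrative numbers):
# at 30 fps, pos0=(0,0) and pos1=(3,4) give pixvel = (90, 120) px/s; a following
# velocity sample of (120, 150) then gives pixacc = (900, 900) px/s^2.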
def pformat(pair):
x = str(pair[0])[0:8]
y = str(pair[1])[0:8]
return '('+x+','+y+')'
def findmatch(im2):
global accurate
global detected
global undetected
#global lgdev
global lgdst
global lgcen
global lgsz
global drops
try:
im2Gray = cv2.cvtColor(im2, cv2.COLOR_BGR2GRAY)
keypoints2, descriptors2 = orb.detectAndCompute(im2Gray, None)
matches = matcher.match(descriptors1, descriptors2, None)
matches.sort(key=lambda x: x.distance, reverse=False)
numGoodMatches = int(len(matches) * GOOD_MATCH_PERCENT)
matches = matches[:numGoodMatches]
if(len(matches)<MIN_MATCHES):
raise Exception('Not enough matches')
points1 = np.zeros((len(matches), 2), dtype=np.float32)
points2 = np.zeros((len(matches), 2), dtype=np.float32)
for i, match in enumerate(matches):
points1[i, :] = keypoints1[match.queryIdx].pt
points2[i, :] = keypoints2[match.trainIdx].pt
# Find homography
h, mask = cv2.findHomography(points1, points2, cv2.RANSAC, ransacReprojThreshold = ransacReprojThreshold)
height, width, channels = im1.shape
pts = np.float32([ [0,0],[0,height-1],[width-1,height-1],[width-1,0] ]).reshape(-1,1,2)
dst = cv2.perspectiveTransform(pts,h)
cen = quadcentroid(dst)
qsz = quadsize(dst)
if(fcount%10==0):
#lgdev = 0.6
iMatches = cv2.drawMatches(im1, keypoints1, im2, keypoints2, matches, None)
cv2.imwrite('frames/frame'+str(fcount)+'.jpg', iMatches)
v = validate(dst)
if v==3:
accurate +=1
detected +=1
undetected = 0
#if v==3:
lgdst,lgcen,lgsz = dst,cen,qsz
#cv2.polylines(im2,[np.int32(lgdst)],True,(0, 64, 0),3, cv2.LINE_AA)
cv2.polylines(im2,[np.int32(dst)],True,(0, 255, 0),3, cv2.LINE_AA)
cv2.putText(im2,'Success',(10,100), font, 1,(0,255,0),2,cv2.LINE_AA)
        elif v == 1 or v == 2:  # was 'v==1 or v==0', which made the v==0 case below unreachable
            detected += 1
            if undetected <= 10 and accurate > 0:
                undetected += 1
                drops += 1
                accurate += 1
                cv2.polylines(im2, [np.int32(lgdst)], True, (0, 255, 255), 3, cv2.LINE_AA)
            cv2.putText(im2, 'Failure', (10, 100), font, 1, (0, 0, 255), 2, cv2.LINE_AA)
            if accurate > 0:
                cen = lgcen
                qsz = lgsz
        elif v == 0:
            raise Exception('Non convex overskewed bounding box')
display = ImageTk.PhotoImage(image = Image.fromarray(cv2.cvtColor(im2, cv2.COLOR_BGR2RGB)))
return cen, qsz, display
except:
undetected+=1
if(undetected<=10 and accurate>0):
drops+=1
detected+=1
accurate+=1
cv2.polylines(im2,[np.int32(lgdst)],True,(128, 128, 128),3, cv2.LINE_AA)
cv2.putText(im2,'Success',(10,100), font, 1,(0,255,0),2,cv2.LINE_AA)
else:
cv2.putText(im2,'Not detected',(10,100), font, 1,(255,100,0),2,cv2.LINE_AA)
display = ImageTk.PhotoImage(image = Image.fromarray(cv2.cvtColor(im2, cv2.COLOR_BGR2RGB)))
if(accurate>0):
return lgcen, lgsz, display
return (0,0), 1, display
try:
print('Starting system, Capturing camera ', camindex)
fcount = 0
prev = timer()
window = tkinter.Tk()
start = timer()
window.title('Live Stream')
ret, frame0 = cap.read()
pos0, sz0, mat = findmatch(frame0)
now = timer()
framerate = 1/(now-prev)
prev = now
ret, frame1 = cap.read()
pos1, sz1, mat = findmatch(frame1)
h1, w1, c1 = im1.shape
h2, w2, c2 = frame0.shape
width = w1+w2
height = max(h1, h2)
canvas = tkinter.Canvas(window, width = width, height = height)
canvas.pack()
vel0 = pixvel(pos0, pos1, framerate)
fcount = 2
while(True):
ret, frame2 = cap.read()
fcount+=1
pos2, sz2, mat = findmatch(frame2)
now = timer()
framerate = 1/(now-prev)
prev = now
canvas.create_image(0, 0, image = mat, anchor = tkinter.NW)
window.update()
vel1 = pixvel(pos1, pos2, framerate)
acc0 = pixacc(vel0, vel1, framerate) #acceleration between frames
cdist = cendist(pos0, frame0) #distance of target from centre of field of view
avgsize = (sz0+sz1+sz2)/3 #average size of target in 3 frames
racc = reqacc(vel0, cdist, acc0) #required pixel acceleration according to current data
realacc = triangulate(racc, avgsize) #required actual acceleration
print('\033[K Frame: ',fcount, 'Framerate: ',int(framerate), 'Average FPS: ', (fcount//(now-start)), 'Accuracy: ', (accurate*100//detected), ' %')
print('\033[K Position: ',pformat(pos2),' pixel')
print('\033[K Shift: ',pformat(displacement(pos0, pos1)),' pixel')
print('\033[K Velocity: ',pformat(vel1),' pixel/s')
print('\033[K Acceleration: ',pformat(acc0),' pixel/s^2')
        print('\033[K Required Acceleration: ', pformat(realacc), ' metre/s^2')
print('\033[A'*7)
pos0, sz0 = pos1, sz1
pos1, sz1 = pos2, sz2
vel0 = vel1
except KeyboardInterrupt:
print("\n"*7, "Releasing camera...")
cap.release()
print("Writing to log file")
with open('stats.txt', 'a') as logfile:
#total frames, Accuracy %, Buffered, Total detected, Total accurate, Actual accurate, Undetected
logfile.write(str(fcount)+'\t\t'+str((accurate*100//detected))+'\t\t'+str(drops)+'\t'+str(detected)+'\t'+str(accurate)+'\t'+str(accurate-drops)+'\t'+str(fcount-detected)+'\n')
print("Exiting...")
exit()
|
#!/usr/bin/env python
import logging
from typing import (
Any,
Dict,
List,
Optional,
)
from hummingbot.logger import HummingbotLogger
from hummingbot.core.event.events import TradeType
from hummingbot.connector.exchange.bittrex.bittrex_order_book_message import BittrexOrderBookMessage
from hummingbot.core.data_type.order_book cimport OrderBook
from hummingbot.core.data_type.order_book_message import (
OrderBookMessage,
OrderBookMessageType,
)
_btob_logger = None
cdef class BittrexOrderBook(OrderBook):
@classmethod
def logger(cls) -> HummingbotLogger:
global _btob_logger
if _btob_logger is None:
_btob_logger = logging.getLogger(__name__)
return _btob_logger
@classmethod
def snapshot_message_from_exchange(cls,
                                       msg: Dict[str, Any],
timestamp: float,
metadata: Optional[Dict] = None) -> OrderBookMessage:
if metadata:
msg.update(metadata)
return BittrexOrderBookMessage(
OrderBookMessageType.SNAPSHOT, {
"trading_pair": msg["marketSymbol"],
"update_id": int(msg["sequence"]),
"bids": msg["bid"],
"asks": msg["ask"]
}, timestamp=timestamp)
@classmethod
def diff_message_from_exchange(cls,
                                   msg: Dict[str, Any],
timestamp: Optional[float] = None,
metadata: Optional[Dict] = None):
if metadata:
msg.update(metadata)
return BittrexOrderBookMessage(
OrderBookMessageType.DIFF, {
"trading_pair": msg["marketSymbol"],
"update_id": int(msg["sequence"]),
"bids": msg["bidDeltas"],
"asks": msg["askDeltas"]
}, timestamp=timestamp)
@classmethod
def trade_message_from_exchange(cls,
msg: Dict[str, Any],
timestamp: Optional[float] = None,
metadata: Optional[Dict] = None) -> OrderBookMessage:
if metadata:
msg.update(metadata)
return BittrexOrderBookMessage(
OrderBookMessageType.TRADE, {
"trading_pair": msg["trading_pair"],
"trade_type": float(TradeType.BUY.value) if msg["takerSide"] == "BUY"
else float(TradeType.SELL.value),
"trade_id": msg["id"],
"update_id": msg["sequence"],
"price": msg["rate"],
"amount": msg["quantity"]
}, timestamp=timestamp)
@classmethod
def from_snapshot(cls, snapshot: OrderBookMessage):
raise NotImplementedError("Bittrex order book needs to retain individual order data.")
@classmethod
    def restore_from_snapshot_and_diffs(cls, snapshot: OrderBookMessage, diffs: List[OrderBookMessage]):
raise NotImplementedError("Bittrex order book needs to retain individual order data.")
|
#!/usr/bin/env python
#-*- coding: utf-8 -*-
import smtplib
import sys
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
# message: the body is read from stdin
lines = sys.stdin.readlines()
message = "".join(lines)
# email setting
msg = MIMEMultipart()
msg['From'] = 'sender@email.com'
msg['To'] = 'your@email.com'
msg['Subject'] = '[ISIMA][ENT] Nouvelle note dispo !'
msg.attach(MIMEText(message))
mailserver = smtplib.SMTP('mail.gmx.com', 587)
mailserver.ehlo()
mailserver.starttls()
mailserver.ehlo()
mailserver.login('sender_login', 'sender_password')
mailserver.sendmail('sender@email.com', 'your@email.com', msg.as_string())
mailserver.quit()
|
"""module containing url patterns for the comments app"""
from django.urls import path
from authors.apps.comments.views import (
ListCreateCommentView, UpdateDestroyCommentView,
LikeComment, LikeCommentStatus, DislikeComment, CommentHistoryViewSet
)
urlpatterns = [
path(
"comments/",
ListCreateCommentView.as_view(),
name="list_create_comment",
),
path(
"comments/<pk>/",
UpdateDestroyCommentView.as_view(),
name="retrieve_update_destroy",
),
path('comments/<int:pk>/like',
LikeComment.as_view()),
path('comments/<int:pk>/likestatus',
LikeCommentStatus.as_view()),
path('comments/<int:pk>/dislike',
DislikeComment.as_view()),
path('comments/<int:pk>/history',
CommentHistoryViewSet.as_view({"get": "list"}))
]
|
from math import cos, inf
from random import uniform, random
from src.glTypes import V3, newColor
from src.glMath import angle, cross, divide, dot, matrixMult, matrixMult_4_1, mult, negative, norm, substract
def flat(render, **kwargs):
    # Flat shading: a single light intensity per triangle, from its face normal.
u, v, w = kwargs['baryCoords']
tA, tB, tC = kwargs['textureCoords']
A, B, C = kwargs['vertices']
b, g, r = kwargs['color']
triangleNormal = kwargs['triangleNormal']
b/=255
g/=255
r/=255
if render.active_texture:
tx = tA[0] * u + tB[0] * v + tC[0] * w
ty = tA[1] * u + tB[1] * v + tC[1] * w
textureColor = render.active_texture.getColor(tx, ty)
b *= textureColor[0] / 255
g *= textureColor[1] / 255
r *= textureColor[2] / 255
intensity = dot(triangleNormal, negative(render.directional_light))
b *= intensity
g *= intensity
r *= intensity
if intensity > 0: return r, g, b
else: return 0, 0, 0
def gourad(render, **kwargs):
    # Gouraud shading: intensity computed at each vertex normal, then interpolated
    # with the barycentric weights (the function name keeps the original spelling).
u, v, w = kwargs['baryCoords']
b, g, r = kwargs['color']
nA, nB, nC = kwargs['normals']
b /= 255
g /= 255
r /= 255
intensityA = dot(nA, negative(render.directional_light))
intensityB = dot(nB, negative(render.directional_light))
intensityC = dot(nC, negative(render.directional_light))
intensity = intensityA*u + intensityB*v + intensityC*w
b *= intensity
g *= intensity
r *= intensity
if intensity > 0:
return r, g, b
else:
return 0, 0, 0
def phong(render, **kwargs):
    # Phong shading: the normal itself is interpolated per pixel, then lit.
u, v, w = kwargs['baryCoords']
b, g, r = kwargs['color']
tA, tB, tC = kwargs['textureCoords']
nA, nB, nC = kwargs['normals']
b /= 255
g /= 255
r /= 255
if render.active_texture:
tx = tA[0] * u + tB[0] * v + tC[0] * w
ty = tA[1] * u + tB[1] * v + tC[1] * w
textureColor = render.active_texture.getColor(tx, ty)
b *= textureColor[0] / 255
g *= textureColor[1] / 255
r *= textureColor[2] / 255
nX = nA[0] * u + nB[0] * v + nC[0] * w
nY = nA[1] * u + nB[1] * v + nC[1] * w
nZ = nA[2] * u + nB[2] * v + nC[2] * w
normal = V3(nX, nY, nZ)
intensity = dot(normal, negative(render.directional_light))
b = b*intensity if b*intensity <=1 else 1
g = g*intensity if g*intensity <=1 else 1
r = r*intensity if r*intensity <=1 else 1
if intensity > 0:
return r, g, b
else:
return 0, 0, 0
def unlit(render, **kwargs):
u, v, w = kwargs['baryCoords']
b, g, r = kwargs['color']
tA, tB, tC = kwargs['textureCoords']
b /= 255
g /= 255
r /= 255
if render.active_texture:
tx = tA[0] * u + tB[0] * v + tC[0] * w
ty = tA[1] * u + tB[1] * v + tC[1] * w
textureColor = render.active_texture.getColor(tx, ty)
b *= textureColor[0] / 255
g *= textureColor[1] / 255
r *= textureColor[2] / 255
return r, g, b
def toon(render, **kwargs):
u, v, w = kwargs['baryCoords']
b, g, r = kwargs['color']
tA, tB, tC = kwargs['textureCoords']
nA, nB, nC = kwargs['normals']
b /= 255
g /= 255
r /= 255
if render.active_texture:
tx = tA[0] * u + tB[0] * v + tC[0] * w
ty = tA[1] * u + tB[1] * v + tC[1] * w
textureColor = render.active_texture.getColor(tx, ty)
b *= textureColor[0] / 255
g *= textureColor[1] / 255
r *= textureColor[2] / 255
nX = nA[0] * u + nB[0] * v + nC[0] * w
nY = nA[1] * u + nB[1] * v + nC[1] * w
nZ = nA[2] * u + nB[2] * v + nC[2] * w
normal = V3(nX, nY, nZ)
intensity = dot(normal, negative(render.directional_light))
if intensity > 0.9:
intensity = 1
elif intensity > 0.6:
intensity = 0.6
elif intensity > 0.2:
intensity = 0.4
else:
intensity = 0.2
b *= intensity
g *= intensity
r *= intensity
if intensity > 0:
return r, g, b
else:
return 0, 0, 0
def coolShader(render, **kwargs):
u, v, w = kwargs['baryCoords']
nA, nB, nC = kwargs['normals']
nX = nA[0] * u + nB[0] * v + nC[0] * w
nY = nA[1] * u + nB[1] * v + nC[1] * w
nZ = nA[2] * u + nB[2] * v + nC[2] * w
normal = V3(nX, nY, nZ)
intensity = dot(normal, negative(render.directional_light))
r, g, b = (0,0,0)
if intensity > 0.7:
r = 1
elif intensity > 0.3:
r = 0.5
b = 0.5
else:
b = 1
return r, g, b
def textureBlend(render, **kwargs):
u, v, w = kwargs['baryCoords']
b, g, r = kwargs['color']
tA, tB, tC = kwargs['textureCoords']
nA, nB, nC = kwargs['normals']
b /= 255
g /= 255
r /= 255
if render.active_texture:
tx = tA[0] * u + tB[0] * v + tC[0] * w
ty = tA[1] * u + tB[1] * v + tC[1] * w
textureColor = render.active_texture.getColor(tx, ty)
b *= textureColor[0] / 255
g *= textureColor[1] / 255
r *= textureColor[2] / 255
nX = nA[0] * u + nB[0] * v + nC[0] * w
nY = nA[1] * u + nB[1] * v + nC[1] * w
nZ = nA[2] * u + nB[2] * v + nC[2] * w
normal = V3(nX, nY, nZ)
intensity = dot(normal, negative(render.directional_light))
if intensity < 0:
intensity = 0
b *= intensity
g *= intensity
r *= intensity
if render.active_texture_2:
textureColor = render.active_texture_2.getColor(tx, ty)
b += (textureColor[0] / 255) * (1 - intensity)
g += (textureColor[1] / 255) * (1 - intensity)
r += (textureColor[2] / 255) * (1 - intensity)
b = 1 if b > 1 else (0 if b < 0 else b)
g = 1 if g > 1 else (0 if g < 0 else g)
r = 1 if r > 1 else (0 if r < 0 else r)
return r, g, b
return r, g, b
def gradient(render, **kwargs):
u, v, w = kwargs['baryCoords']
b, g, r = kwargs['color']
nA, nB, nC = kwargs['normals']
A, B, C = kwargs['vertices']
upColor = kwargs['upColor']
downColor = kwargs['downColor']
y = A[1] * u + B[1] * v + C[1] * w
height = render.maxY - render.minY
b = (((y+abs(render.minY)) / height) * (upColor[0] - downColor[0]) + downColor[0]) / 255
g = (((y+abs(render.minY)) / height) * (upColor[1] - downColor[1]) + downColor[1]) / 255
r = (((y+abs(render.minY)) / height) * (upColor[2] - downColor[2]) + downColor[2]) / 255
nX = nA[0] * u + nB[0] * v + nC[0] * w
nY = nA[1] * u + nB[1] * v + nC[1] * w
nZ = nA[2] * u + nB[2] * v + nC[2] * w
normal = V3(nX, nY, nZ)
intensity = dot(normal, negative(render.directional_light))
b = b*intensity if 0 <= b*intensity <=1 else (0 if b*intensity < 0 else 1)
g = g*intensity if 0 <= g*intensity <=1 else (0 if g*intensity < 0 else 1)
r = r*intensity if 0 <= r*intensity <=1 else (0 if r*intensity < 0 else 1)
if intensity > 0:
return r, g, b
else:
return 0, 0, 0
def highlighter(render, **kwargs):
u, v, w = kwargs['baryCoords']
b, g, r = kwargs['color']
tA, tB, tC = kwargs['textureCoords']
nA, nB, nC = kwargs['normals']
color = kwargs['highColor']
b /= 255
g /= 255
r /= 255
if render.active_texture:
tx = tA[0] * u + tB[0] * v + tC[0] * w
ty = tA[1] * u + tB[1] * v + tC[1] * w
textureColor = render.active_texture.getColor(tx, ty)
b *= textureColor[0] / 255
g *= textureColor[1] / 255
r *= textureColor[2] / 255
nX = nA[0] * u + nB[0] * v + nC[0] * w
nY = nA[1] * u + nB[1] * v + nC[1] * w
nZ = nA[2] * u + nB[2] * v + nC[2] * w
normal = V3(nX, nY, nZ)
intensity = dot(normal, negative(render.directional_light))
b = b*intensity
g = g*intensity
r = r*intensity
forwardVector = V3(render.camMatrix[0][2], render.camMatrix[1][2], render.camMatrix[2][2])
parallel = dot(normal, forwardVector)
b = color[0]/255 * (1 - parallel) if color[0]/255 * (1 - parallel)>b else b
g = color[1]/255 * (1 - parallel) if color[1]/255 * (1 - parallel)>g else g
r = color[2]/255 * (1 - parallel) if color[2]/255 * (1 - parallel)>r else r
b = float(abs(b*(1-pow((intensity+1),-10))))
g = float(abs(g*(1-pow((intensity+1),-10))))
r = float(abs(r*(1-pow((intensity+1),-10))))
b = b if b <=1 else 1
g = g if g <=1 else 1
r = r if r <=1 else 1
if intensity > 0:
return r, g, b
else:
return 0, 0, 0
def cut(render, **kwargs):
u, v, w = kwargs['baryCoords']
b, g, r = kwargs['color']
tA, tB, tC = kwargs['textureCoords']
nA, nB, nC = kwargs['normals']
v1, v2, v3 = kwargs['originalVertices']
axis = kwargs['axis']
interval = kwargs['interval']
if axis == 'x':
direction = v1[0] * u + v2[0] * v + v3[0] * w
elif axis == 'y':
direction = v1[1] * u + v2[1] * v + v3[1] * w
elif axis == 'z':
direction = v1[2] * u + v2[2] * v + v3[2] * w
else:
direction = v1[1] * u + v2[1] * v + v3[1] * w
if cos(direction*interval) >=0:
b /= 255
g /= 255
r /= 255
if render.active_texture:
tx = tA[0] * u + tB[0] * v + tC[0] * w
ty = tA[1] * u + tB[1] * v + tC[1] * w
textureColor = render.active_texture.getColor(tx, ty)
b *= textureColor[0] / 255
g *= textureColor[1] / 255
r *= textureColor[2] / 255
nX = nA[0] * u + nB[0] * v + nC[0] * w
nY = nA[1] * u + nB[1] * v + nC[1] * w
nZ = nA[2] * u + nB[2] * v + nC[2] * w
normal = V3(nX, nY, nZ)
intensity = dot(normal, negative(render.directional_light))
b = b*intensity if b*intensity <=1 else 1
g = g*intensity if g*intensity <=1 else 1
r = r*intensity if r*intensity <=1 else 1
else :
b = 0
g = 0
r = 0
b = b if b <=1 else 1
g = g if g <=1 else 1
r = r if r <=1 else 1
b = b if b >=0 else 0
g = g if g >=0 else 0
r = r if r >=0 else 0
return r, g, b
def noise(render, **kwargs):
u, v, w = kwargs['baryCoords']
b, g, r = kwargs['color']
tA, tB, tC = kwargs['textureCoords']
nA, nB, nC = kwargs['normals']
b /= 255
g /= 255
r /= 255
if render.active_texture:
tx = tA[0] * u + tB[0] * v + tC[0] * w
ty = tA[1] * u + tB[1] * v + tC[1] * w
textureColor = render.active_texture.getColor(tx, ty)
b *= textureColor[0] / 255
g *= textureColor[1] / 255
r *= textureColor[2] / 255
nX = nA[0] * u + nB[0] * v + nC[0] * w
nY = nA[1] * u + nB[1] * v + nC[1] * w
nZ = nA[2] * u + nB[2] * v + nC[2] * w
normal = V3(nX, nY, nZ)
intensity = dot(normal, negative(render.directional_light))
b = random() * b
g = random() * g
r = random() * r
b = b*intensity if b*intensity <=1 else 1
g = g*intensity if g*intensity <=1 else 1
r = r*intensity if r*intensity <=1 else 1
if intensity > 0:
return r, g, b
else:
return 0, 0, 0
def outline(render, **kwargs):
u, v, w = kwargs['baryCoords']
b, g, r = kwargs['color']
tA, tB, tC = kwargs['textureCoords']
nA, nB, nC = kwargs['normals']
color = kwargs['highColor']
nX = nA[0] * u + nB[0] * v + nC[0] * w
nY = nA[1] * u + nB[1] * v + nC[1] * w
nZ = nA[2] * u + nB[2] * v + nC[2] * w
normal = V3(nX, nY, nZ)
forwardVector = V3(render.camMatrix[0][2], render.camMatrix[1][2], render.camMatrix[2][2])
parallel = dot(normal, forwardVector)
if parallel < 0.3:
b = color[0]/255 * (1 - parallel)
g = color[1]/255 * (1 - parallel)
r = color[2]/255 * (1 - parallel)
else:
b = 0
g = 0
r = 0
b = b if b <=1 else 1
g = g if g <=1 else 1
r = r if r <=1 else 1
return r, g, b
def snow(render, **kwargs):
u, v, w = kwargs['baryCoords']
b, g, r = kwargs['color']
tA, tB, tC = kwargs['textureCoords']
nA, nB, nC = kwargs['normals']
A, B, C = kwargs['vertices']
color = newColor(1,1,1)
b /= 255
g /= 255
r /= 255
if render.active_texture:
tx = tA[0] * u + tB[0] * v + tC[0] * w
ty = tA[1] * u + tB[1] * v + tC[1] * w
textureColor = render.active_texture.getColor(tx, ty)
b *= textureColor[0] / 255
g *= textureColor[1] / 255
r *= textureColor[2] / 255
nX = nA[0] * u + nB[0] * v + nC[0] * w
nY = nA[1] * u + nB[1] * v + nC[1] * w
nZ = nA[2] * u + nB[2] * v + nC[2] * w
normal = V3(nX, nY, nZ)
intensity = dot(normal, negative(render.directional_light))
b = b*intensity
g = g*intensity
r = r*intensity
forwardVector = V3(0,-1,1)
parallel = dot(normal, forwardVector)
b = color[0]/255 * (1 - parallel) if color[0]/255 * (1 - parallel)>b else b
g = color[1]/255 * (1 - parallel) if color[1]/255 * (1 - parallel)>g else g
r = color[2]/255 * (1 - parallel) if color[2]/255 * (1 - parallel)>r else r
try:
b = float(abs(b*(1-pow((intensity+1),-10))))
g = float(abs(g*(1-pow((intensity+1),-10))))
r = float(abs(r*(1-pow((intensity+1),-10))))
except:
pass
b = b if b <=1 else 1
g = g if g <=1 else 1
r = r if r <=1 else 1
if intensity > 0:
return r, g, b
else:
return 0, 0, 0
def accentuate(render, **kwargs):
u, v, w = kwargs['baryCoords']
b, g, r = kwargs['color']
tA, tB, tC = kwargs['textureCoords']
nA, nB, nC = kwargs['normals']
color = kwargs['highColor']
b /= 255
g /= 255
r /= 255
if render.active_texture:
tx = tA[0] * u + tB[0] * v + tC[0] * w
ty = tA[1] * u + tB[1] * v + tC[1] * w
textureColor = render.active_texture.getColor(tx, ty)
b *= textureColor[0] / 255
g *= textureColor[1] / 255
r *= textureColor[2] / 255
nX = nA[0] * u + nB[0] * v + nC[0] * w
nY = nA[1] * u + nB[1] * v + nC[1] * w
nZ = nA[2] * u + nB[2] * v + nC[2] * w
normal = V3(nX, nY, nZ)
intensity = dot(normal, negative(render.directional_light))
b = color[0]/255 if color[0]/255 > b else b
g = color[1]/255 if color[1]/255 > g else g
r = color[2]/255 if color[2]/255 > r else r
b = b*intensity if b*intensity <=1 else 1
g = g*intensity if g*intensity <=1 else 1
r = r*intensity if r*intensity <=1 else 1
if intensity > 0:
return r, g, b
else:
return 0, 0, 0
def normalMap(render, **kwargs):
u, v, w = kwargs['baryCoords']
b, g, r = kwargs['color']
A, B, C = kwargs['vertices']
tA, tB, tC = kwargs['textureCoords']
nA, nB, nC = kwargs['normals']
b /= 255
g /= 255
r /= 255
if render.active_texture:
tx = tA[0] * u + tB[0] * v + tC[0] * w
ty = tA[1] * u + tB[1] * v + tC[1] * w
textureColor = render.active_texture.getColor(tx, ty)
b *= textureColor[0] / 255
g *= textureColor[1] / 255
r *= textureColor[2] / 255
nX = nA[0] * u + nB[0] * v + nC[0] * w
nY = nA[1] * u + nB[1] * v + nC[1] * w
nZ = nA[2] * u + nB[2] * v + nC[2] * w
normal = V3(nX, nY, nZ)
if render.normal_map:
textureNormal = render.normal_map.getColor(tx, ty)
textureNormal = V3((textureNormal[2] / 255) * 2 - 1,
(textureNormal[1] / 255) * 2 - 1,
(textureNormal[0] / 255) * 2 - 1)
textureNormal = divide(textureNormal, norm(textureNormal))
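        # Build a tangent-space (TBN) basis from the triangle edges and their UV
        # deltas: solving E1 = dU1*T + dV1*B and E2 = dU2*T + dV2*B for the tangent
        # T yields the expression below, with f = 1/det of the UV delta matrix; the
        # sampled normal is then rotated from tangent space into mesh space.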
edge1 = substract(B,A)
edge2 = substract(C,A)
deltaUV1 = substract(V3(tB[0], tB[1], 0), V3(tA[0], tA[1], 0))
deltaUV2 = substract(V3(tC[0], tC[1], 0), V3(tA[0], tA[1], 0))
f = 1 / (deltaUV1[0] * deltaUV2[1] - deltaUV2[0] * deltaUV1[1])
tangent = [f * (deltaUV2[1] * edge1[0] - deltaUV1[1] * edge2[0]),
f * (deltaUV2[1] * edge1[1] - deltaUV1[1] * edge2[1]),
f * (deltaUV2[1] * edge1[2] - deltaUV1[1] * edge2[2])]
tangent = divide(V3(tangent[0], tangent[1], tangent[2]), norm(V3(tangent[0], tangent[1], tangent[2])))
tangent = substract(tangent, mult(normal, dot(tangent, normal)))
tangent = divide(tangent, norm(tangent))
bitangent = cross(normal, tangent)
bitangent = divide(bitangent, norm(bitangent))
tangentMatrix = [[tangent[0], bitangent[0], normal[0]],
[tangent[1], bitangent[1], normal[1]],
[tangent[2], bitangent[2], normal[2]]]
textureNormal = matrixMult_4_1(tangentMatrix, [textureNormal.x, textureNormal.y, textureNormal.z])
textureNormal = divide(V3(textureNormal[0], textureNormal[1], textureNormal[2]), norm(V3(textureNormal[0], textureNormal[1], textureNormal[2])))
intensity = dot(textureNormal, negative(render.directional_light))
else:
intensity = dot(normal, negative(render.directional_light))
b = b*intensity if b*intensity <=1 else 1
g = g*intensity if g*intensity <=1 else 1
r = r*intensity if r*intensity <=1 else 1
if intensity > 0:
return r, g, b
else:
return 0, 0, 0
|
"""
================================
Spectral analysis of the trials
================================
This example demonstrates how to perform spectral
analysis on epochs extracted from a specific subject
within the :class:`moabb.datasets.Cattan2019_PHMD` dataset.
"""
# Authors: Pedro Rodrigues <pedro.rodrigues01@gmail.com>
# Modified by: Gregoire Cattan <gcattan@hotmail.fr>
# License: BSD (3-clause)
import warnings
import matplotlib.pyplot as plt
import numpy as np
from moabb.datasets import Cattan2019_PHMD
from moabb.paradigms import RestingStateToP300Adapter
warnings.filterwarnings("ignore")
###############################################################################
# Initialization
# ---------------
#
# 1) Specify the channel and subject to compute the power spectrum.
# 2) Create an instance of the :class:`moabb.datasets.Cattan2019_PHMD` dataset.
# 3) Create an instance of the :class:`moabb.paradigms.RestingStateToP300Adapter` paradigm.
# By default, the data is filtered between 1-35 Hz,
# and epochs are extracted from 10 to 50 seconds after event tagging.
# Select the channel and subject for the remainder of the example.
channel = "Cz"
subject = 1
dataset = Cattan2019_PHMD()
events = ["on", "off"]
paradigm = RestingStateToP300Adapter(events=events, channels=[channel])
###############################################################################
# Estimate Power Spectral Density
# -------------------------------
# 1) Obtain the epochs for the specified subject.
# 2) Use Welch's method to estimate the power spectral density.
f, S, _, y = paradigm.psd(subject, dataset)
###############################################################################
# Display of the data
# -------------------
#
# Plot the averaged Power Spectral Density (PSD) for each label condition,
# using the selected channel specified at the beginning of the script.
fig, ax = plt.subplots(facecolor="white", figsize=(8.2, 5.1))
for condition in events:
mean_power = np.mean(S[y == condition], axis=0).flatten()
ax.plot(f, 10 * np.log10(mean_power), label=condition)
ax.set_xlim(paradigm.fmin, paradigm.fmax)
ax.set_ylim(100, 135)
ax.set_ylabel("Spectrum Magnitude (dB)", fontsize=14)
ax.set_xlabel("Frequency (Hz)", fontsize=14)
ax.set_title("PSD for Channel " + channel, fontsize=16)
ax.legend()
fig.show()
|
from django.apps import AppConfig
class AppCrontabConfig(AppConfig):
name = 'app_crontab'
|
"""
Enough is enough!
Alice and Bob were on a holiday. Both of them took many pictures of the places they've been,
and now they want to show Charlie their entire collection. However, Charlie doesn't like these sessions,
since the motive usually repeats. He isn't fond of seeing the Eiffel tower 40 times. He tells them that
he will only sit during the session if they show the same motive at most N times. Luckily,
Alice and Bob are able to encode the motive as a number. Can you help them to remove numbers such
that their list contains each number only up to N times, without changing the order?
Task
Given a list lst and a number N, create a new list that contains each number of lst at
most N times without reordering. For example if N = 2, and the input is [1,2,3,1,2,1,2,3],
you take [1,2,3,1,2], drop the next [1,2] since this would lead to 1 and 2 being in the result 3 times,
and then take 3, which leads to [1,2,3,1,2,3].
"""
def delete_nth(array, n):
    new = []
    for item in array:
        if new.count(item) < n:  # keep at most n occurrences, preserving order
            new.append(item)
    return new
print(delete_nth([1,1,1,1],2) )
print(delete_nth([20,37,20,21],1))
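
# An equivalent O(n) sketch using collections.Counter; list.count() makes the
# version above quadratic. Same behavior, assuming hashable elements.
from collections import Counter

def delete_nth_fast(array, n):
    seen = Counter()
    result = []
    for item in array:
        if seen[item] < n:  # keep at most n occurrences, in original order
            seen[item] += 1
            result.append(item)
    return result

print(delete_nth_fast([1, 2, 3, 1, 2, 1, 2, 3], 2))  # [1, 2, 3, 1, 2, 3]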
|
from setuptools import setup  # distutils is deprecated; setuptools is the drop-in replacement
import pathlib
HERE = pathlib.Path(__file__).parent
README = (HERE / "README.md").read_text()
setup(
name = 'hitomi_py',
packages = ['hitomi_py'],
version = '1.0',
license='MIT',
description = 'hitomi api',
long_description=README,
long_description_content_type="text/markdown",
author = 'VORZAM',
author_email = 'dayomosiu2@gmail.com',
url = 'https://github.com/VORZAW/hitomi_py',
download_url = 'https://github.com/VORZAW/hitomi_py/archive/refs/heads/main.zip',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
"Programming Language :: Python :: 3"
]
)
|
# Generated by Django 3.1.6 on 2021-07-03 15:36
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('cabroozadmin', '0012_auto_20210703_1856'),
]
operations = [
migrations.RemoveField(
model_name='driverdetails',
name='approved',
),
]
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Mon Oct 11 18:22:50 2021
@author: chulke
"""
class Punto:
def __init__(self, x, y):
self.x = x
self.y = y
def __str__(self):
return f'({self.x}, {self.y})'
def __repr__(self):
return f'Punto({self.x}, {self.y})'
def __add__(self, b):
return Punto(self.x + b.x, self.y + b.y)
class Rectangulo:
def __init__(self, p1, p2):
self.a = p1
self.b = p2
def base(self):
self.base_rect = abs(self.a.x - self.b.x)
return self.base_rect
def altura(self):
self.altura_rect = abs(self.a.y - self.b.y)
return self.altura_rect
    def area(self):
        self.area_rect = self.base() * self.altura()
        return self.area_rect
def rotar(self):
v1 = Punto(max(self.a.x, self.b.x), min(self.a.y, self.b.y))
        v2 = v1 + Punto(self.altura(), self.base())
self.a = v1
self.b = v2
def __str__(self):
return f'({self.a}, {self.b})'
def __repr__(self):
return f'Rectangulo({self.a}, {self.b})'
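# A minimal usage sketch (hypothetical values) showing the operator overloading
# and that rotar() swaps base and height while preserving the area:
if __name__ == '__main__':
    r = Rectangulo(Punto(0, 0), Punto(4, 2))
    print(r.base(), r.altura(), r.area())  # 4 2 8
    r.rotar()
    print(r.base(), r.altura(), r.area())  # 2 4 8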
|
import multiprocessing
workers = multiprocessing.cpu_count() * 2 + 1
bind = 'unix:rabbitholeapi.sock'
umask = 0o007
reload = True
# logging
accesslog = '-'
errorlog = '-'
|
from cgshop2021_pyutils.solution import Solution
from cgshop2021_pyutils.solution import TargetNotReachedError
def __last(iterable):
    last = None
    for item in iterable:
        last = item
    return last
def validate(solution: Solution):
"""
    Attempts to validate a solution, raising an exception
derived from InvalidSolutionError with further information about the error
for invalid solutions.
"""
last_config = __last(solution.configuration_sequence())
if last_config.positions != solution.instance.target:
raise TargetNotReachedError(solution.instance, solution,
last_config.positions)
|
# Generated by Django 2.2.3 on 2020-02-04 08:45
from django.conf import settings
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('tracks', '0023_track_track_designer'),
]
operations = [
migrations.AlterField(
model_name='course',
name='succeeded_users',
field=models.ManyToManyField(blank=True, null=True, to=settings.AUTH_USER_MODEL),
),
]
|
class Initials:
def getInitials(self,name):
words = name.split()
out = []
for word in words:
out.append(word[0])
return "".join(out)
|
#import sys
#input = sys.stdin.readline
def main():
X = int( input())
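    # Presumably an AtCoder-style exchange problem (assumption): every 500-yen
    # coin is worth 1000 happiness points and every remaining 5-yen coin 5 points.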
print(X//500*1000 + X%500//5*5)
if __name__ == '__main__':
main()
|
# The Airport Kiosk - V 1.0.0
# Author: Dena, Rene
# Last Modified: 5/1/17
# _Misc.
# _Functions
# _Strictly Script
print('\n\t\t\tWelcome to "THE AIRPORT KIOSK" script!')
while True:
name = input('\nBefore we begin, what is your name?:')
if name.isalpha():
print('\nAwesome! We can now begin {}.'.format(name.title()))
break
else:
print('\nCANNOT continue if space was left empty.')
        print('Also, please be sure to ENTER your name using a-z characters only.')
print('\n\t\t\tWelcome to the MAIN CONSOLE!')
print('\nHere, you will be given the option to enter the price of your plane ticket, and how many bags you are checking in.')
print('\nLet\'s begin.')
print('\n{}, what was the price of your ticket?'.format(name.title()))
while True:
try:
ticket_price = int(input("Enter a number: "))
print('\nAwesome! You\'ve stated your ticket price was ${}.'.format(ticket_price))
break
except ValueError:
print("\nNot an integer value...")
print('\nNow we\'ll be calculating your total number of bags.')
print('\nHow many bags will you be checking in?')
while True:
try:
bags = int(input("Enter a number: "))
print('\nAwesome! You\'ve stated you\'ll be checking in {} bags.'.format(bags))
break
except ValueError:
print("\nNot an integer value...")
bags_cost = max(bags - 1, 0) * 25  # assumed: first checked bag is free; avoids a negative cost when no bags are checked
total_cost = ticket_price + bags_cost
print('\nGiven the information provided above, your total cost will accumulate to ${:.2f}'.format(total_cost))
print('Thank you for using "THE AIRPORT KIOSK" script! Hope to see you again soon :)')
|
"""
While numbering the pages of a book, the digit 1 was used 689 times.
Find the number of pages in the book.
"""
toplam = 0  # running count of the digit 1 across page numbers
for i in range(1, 2000):
    for digit in str(i):
        if digit == "1":
            toplam += 1
    if toplam == 689:
        print("number of pages:", i)
        break
|
#!/usr/bin/python3
"""This module creates a class State that inherits from BaseModel"""
from models.base_model import BaseModel
class State(BaseModel):
"""
    This is a State class with the public class attribute:
    - name: string - empty string
"""
name = ''
|
from django.contrib import admin
# Register your models here.
# since models.py is in the same folder we say from .models import class profile
from .models import profile
# to make profile model manageable through Django admin
class profileAdmin(admin.ModelAdmin):
    # ModelAdmin takes no inner Meta class; the model is bound when calling
    # admin.site.register(profile, profileAdmin) below
    pass
admin.site.register(profile, profileAdmin)
|
import os
import subprocess
from libqtile import layout, hook
from libqtile.config import Group
from keys import keys
from groups import groups
from screens import screens
layout_theme = {
"border_width": 2,
"margin": 6,
"border_focus": "e1acff",
"border_normal": "1D2330"
}
layouts = [
layout.Max(**layout_theme),
layout.Columns(**layout_theme),
layout.MonadTall(**layout_theme),
layout.Stack(num_stacks=2),
layout.VerticalTile(),
]
widget_defaults = {
'font': 'Sans',
'fontsize': 16,
'padding': 3
}
dgroups_key_binder = None
dgroups_app_rules = []
follow_mouse_focus = True
bring_front_click = False
cursor_warp = False
floating_layout = layout.Floating(float_rules=[
{'wmclass': 'confirm'},
{'wmclass': 'dialog'},
{'wmclass': 'download'},
{'wmclass': 'error'},
{'wmclass': 'file_progress'},
{'wmclass': 'notification'},
{'wmclass': 'splash'},
{'wmclass': 'toolbar'},
{'wmclass': 'confirmreset'}, # gitk
{'wmclass': 'makebranch'}, # gitk
{'wmclass': 'maketag'}, # gitk
{'wname': 'branchdialog'}, # gitk
{'wname': 'pinentry'}, # GPG key password entry
{'wmclass': 'ssh-askpass'}, # ssh-askpass
])
auto_fullscreen = True
focus_on_window_activation = "smart"
wmname = 'LG3D'
@hook.subscribe.startup_once
def startup_once():
home = os.path.expanduser('~')
subprocess.Popen([home + '/.config/qtile/autostart.sh'])
|
import networkx as nx
import pandas as pd
import matplotlib as mpl
import matplotlib.pyplot as plt
import scipy
import numpy as np
import math
import pickle
from tqdm import tqdm_notebook
from multiprocessing import Pool
np.set_printoptions(suppress=True)
pd.set_option('display.float_format', lambda x: '%.3f' % x)
import warnings
warnings.filterwarnings('ignore')
from sklearn.model_selection import train_test_split
from sklearn.metrics import precision_score, recall_score, f1_score, roc_auc_score
import xgboost as xgb
from xgboost.sklearn import XGBClassifier
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch import optim
from torch.utils.data import DataLoader
from torch.autograd import Variable
from torch.optim.lr_scheduler import ExponentialLR
from torchvision import datasets, transforms
torch.cuda.set_device(2)
## Model Declaration
class ConvNet(nn.Module):
def __init__(self, in_dim=256, out_dim=1):
super(ConvNet, self).__init__()
self.in_dim = in_dim
self.outdim_en1 = in_dim
self.outdim_en2 = math.ceil(self.outdim_en1 / 2)
self.out_dim = out_dim
self.model_conv = nn.Sequential(
nn.Conv1d(in_channels=in_dim, out_channels=in_dim*2, kernel_size=2),
nn.BatchNorm1d(in_dim*2),
nn.ReLU(),
nn.Conv1d(in_channels=in_dim*2, out_channels=in_dim*4, kernel_size=2),
nn.BatchNorm1d(in_dim*4),
nn.ReLU(),
)
self.model_fc = nn.Sequential(
nn.Linear(in_features=self.in_dim*4, out_features=self.outdim_en1),
nn.BatchNorm1d(self.outdim_en1),
nn.ReLU(),
nn.Dropout(0.4),
nn.Linear(in_features=self.outdim_en1, out_features=self.outdim_en2),
nn.BatchNorm1d(self.outdim_en2),
nn.ReLU(),
nn.Dropout(0.2),
nn.Linear(in_features=self.outdim_en2, out_features=self.out_dim),
nn.Sigmoid()
)
def forward(self, x):
x = self.model_conv(x)
return self.model_fc(x.view(-1, self.in_dim*4))
class FocalLoss(nn.Module):
def __init__(self, alpha=0.01, gamma=2, logits=False, reduce=True):
super(FocalLoss, self).__init__()
self.alpha = alpha
self.gamma = gamma
self.logits = logits
self.reduce = reduce
def forward(self, inputs, targets):
if self.logits:
BCE_loss = F.binary_cross_entropy_with_logits(inputs, targets, reduce=False)
else:
BCE_loss = F.binary_cross_entropy(inputs, targets, reduce=False)
pt = torch.exp(-BCE_loss)
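        # Focal loss (Lin et al., 2017): FL(p_t) = -alpha * (1 - p_t)**gamma * log(p_t),
        # where pt above recovers the true-class probability from BCE_loss = -log(p_t).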
F_loss = self.alpha * (1-pt)**self.gamma * BCE_loss
if self.reduce:
return torch.mean(F_loss)
        else:
            return F_loss
class FocalLoss2(nn.Module):
def __init__(self, alpha=0.01, gamma_pos=3, gamma_neg=2, logits=False, reduce=True):
super(FocalLoss2, self).__init__()
self.alpha = alpha
self.gamma_pos = gamma_pos
self.gamma_neg = gamma_neg
self.logits = logits
self.reduce = reduce
def forward(self, inputs, targets):
if self.logits:
BCE_loss = F.binary_cross_entropy_with_logits(inputs, targets, reduce=False)
else:
BCE_loss = F.binary_cross_entropy(inputs, targets, reduce=False)
pt = torch.exp(-BCE_loss)
gamma_diff = self.gamma_pos - self.gamma_neg
F_loss_pos = self.alpha * targets * (1-pt)**self.gamma_pos * BCE_loss
F_loss_pos = torch.mean(pt)**(-gamma_diff) * F_loss_pos
F_loss_neg = self.alpha * (1 - targets) * (1-pt)**self.gamma_neg * BCE_loss
F_loss = F_loss_pos + F_loss_neg
avg_F_loss_pos = torch.sum(F_loss_pos) / torch.sum(targets)
avg_F_loss_neg = torch.sum(F_loss_neg) / torch.sum(1-targets)
if self.reduce:
return torch.mean(F_loss), avg_F_loss_pos, avg_F_loss_neg
else:
return F_loss, F_loss_pos, F_loss_neg
## Parameter Settings
#
# GRU
# ---------------------
## focal loss
alpha = 1e-4
gamma = 2
gamma_pos = 4
gamma_neg = 2
learn_rate = 1e-4
train_batch_size = 128
test_batch_size = 256
max_epochs = 100
## Data Preparation
data = np.load('GRUArray_and_label_for_NewEmbedding_heter_superv_recur_focal_logisticMF.npz', allow_pickle=True)
GPUArray = data['arr_0']
label = data['arr_1']
GPUArray = GPUArray[-1033905:,:,:]
label = label[-1033905:]
X_train, X_test, y_train, y_test = train_test_split(GPUArray, label, random_state=42)
X_train = torch.FloatTensor(X_train)
X_test = torch.FloatTensor(X_test)
y_train = torch.FloatTensor(y_train)
y_test = torch.FloatTensor(y_test)
train_data = []
for i in range(len(X_train)):
train_data.append((X_train[i], y_train[i]))
test_data = []
for i in range(len(X_test)):
test_data.append((X_test[i], y_test[i]))
train_dataloader = DataLoader(train_data, shuffle=True, batch_size=train_batch_size)
test_dataloader = DataLoader(test_data, shuffle=False, batch_size=test_batch_size)
classifier = ConvNet(in_dim=X_train.shape[2], out_dim=1).cuda()
focal_loss = FocalLoss2(alpha, gamma_pos, gamma_neg)
optim_clsfr = optim.Adam(filter(lambda p: p.requires_grad, classifier.parameters()),
lr=learn_rate)
def train(epoch, dataloader):
label_list = []
pred_y_list = []
clsf_loss_batch = []
clsf_loss_pos_batch = []
clsf_loss_neg_batch = []
for batch_idx, (data, target) in enumerate(dataloader):
if data.size()[0] != dataloader.batch_size:
continue
data, target = Variable(data.cuda()), Variable(target.cuda())
# Update classifier
optim_clsfr.zero_grad()
pred_y = classifier(data.permute(0, 2, 1)).squeeze(-1)
clsf_loss, clsf_loss_pos, clsf_loss_neg = focal_loss(pred_y, target)
clsf_loss.backward()
optim_clsfr.step()
clsf_loss_batch.append(clsf_loss)
if torch.sum(target) > 0:
clsf_loss_pos_batch.append(clsf_loss_pos)
clsf_loss_neg_batch.append(clsf_loss_neg)
label_list += list(target.cpu().detach().numpy())
pred_y_list += list(pred_y.cpu().detach().numpy())
if batch_idx % 2000 == 0:
print(' Idx {} => clsf: {}'.format(batch_idx, clsf_loss))
clsf_loss_avg = sum(clsf_loss_batch) / len(clsf_loss_batch)
clsf_loss_pos_avg = sum(clsf_loss_pos_batch) / len(clsf_loss_pos_batch)
clsf_loss_neg_avg = sum(clsf_loss_neg_batch) / len(clsf_loss_neg_batch)
return np.array(label_list), np.array(pred_y_list), clsf_loss_avg, clsf_loss_pos_avg, clsf_loss_neg_avg
def infer(dataloader):
label_list = []
pred_y_list = []
clsf_loss_batch = []
clsf_loss_pos_batch = []
clsf_loss_neg_batch = []
for batch_idx, (data, target) in enumerate(dataloader):
if data.size()[0] != dataloader.batch_size:
continue
data, target = Variable(data.cuda()), Variable(target.cuda())
# Update classifier
pred_y = classifier(data.permute(0, 2, 1)).squeeze(-1)
clsf_loss, clsf_loss_pos, clsf_loss_neg = focal_loss(pred_y, target)
clsf_loss_batch.append(clsf_loss)
if torch.sum(target) > 0:
clsf_loss_pos_batch.append(clsf_loss_pos)
clsf_loss_neg_batch.append(clsf_loss_neg)
label_list += list(target.cpu().detach().numpy())
pred_y_list += list(pred_y.cpu().detach().numpy())
clsf_loss_avg = sum(clsf_loss_batch) / len(clsf_loss_batch)
clsf_loss_pos_avg = sum(clsf_loss_pos_batch) / len(clsf_loss_pos_batch)
clsf_loss_neg_avg = sum(clsf_loss_neg_batch) / len(clsf_loss_neg_batch)
return np.array(label_list), np.array(pred_y_list), clsf_loss_avg, clsf_loss_pos_avg, clsf_loss_neg_avg
def evaluate(y_true, y_pred):
prec = precision_score(y_true, y_pred)
recall = recall_score(y_true, y_pred)
f1 = f1_score(y_true, y_pred)
return prec, recall, f1
train_history_loss = []
train_history_auc = []
max_thres = 0.
max_train_auc = 0.
for epoch in range(max_epochs):
print('Epoch {} -------------------------------------------------------------------------'.format(epoch))
classifier.train()
label_train, pred_y_train, clsf_loss_train, clsf_loss_pos_train, clsf_loss_neg_train = train(epoch, train_dataloader)
auc_train = roc_auc_score(label_train, pred_y_train)
train_history_loss.append(clsf_loss_train)
train_history_auc.append(auc_train)
print('Training => auc: {:.6f}, clsf_pos: {}, clsf_neg: {}'.
format(auc_train, clsf_loss_pos_train, clsf_loss_neg_train))
if epoch % 1 == 0:
#
# Testing
# ------------------------------------------------------------------------------------
thres = np.min(pred_y_train[label_train==1])
print("Threshold is set to {}".format(thres))
with torch.no_grad():
classifier.eval()
label_test, pred_y_test, clsf_loss_test, clsf_loss_pos_test, clsf_loss_neg_test = infer(test_dataloader)
auc = roc_auc_score(label_test, pred_y_test)
print("Min. Probailities on test set with label 1: {}".format(np.min(pred_y_test[label_test==1])))
y_predict_bin = np.array(pred_y_test > thres, dtype=int)
prec, recall, f1 = evaluate(label_test, y_predict_bin)
print('Testing ==> auc: {:.6f}, prec: {:.4f}, rec: {:.4f}, F1score: {:.4f}, clsf_loss: {}'.
format(auc, prec, recall, f1, clsf_loss_test))
if auc_train > max_train_auc or thres > max_thres:
max_train_auc = auc_train if auc_train > max_train_auc else max_train_auc
max_thres = thres if thres > max_thres else max_thres
torch.save({'epoch': epoch,
'model_state_dict': classifier.state_dict(),
'optimizer_state_dict': optim_clsfr.state_dict(),
'loss': focal_loss,
},
'saved_models/conv1d2_heter_clsfr-auc{:.6f}-thres{:.4f}'.format(auc_train, thres))
'''
Epoch 0 -------------------------------------------------------------------------
Idx 0 => clsf: 3.180097337462939e-05
Idx 2000 => clsf: 4.1567297159872396e-08
Idx 4000 => clsf: 9.181596816176807e-09
Idx 6000 => clsf: 3.0312852228320253e-09
Training => auc: 0.984012, clsf_pos: 1.4177763659972697e-05, clsf_neg: 3.927018497051904e-07
Threshold is set to 0.042069803923368454
Min. Probailities on test set with label 1: 0.017674291506409645
Testing ==> auc: 0.974674, prec: 0.0035, rec: 0.9767, F1score: 0.0070, clsf_loss: 6.776513572503973e-09
Epoch 1 -------------------------------------------------------------------------
Idx 0 => clsf: 2.473788951462552e-09
Idx 2000 => clsf: 1.0929490645850137e-09
Idx 4000 => clsf: 5.3109711073418e-10
Idx 6000 => clsf: 7.051710881889051e-10
Training => auc: 0.986544, clsf_pos: 9.21737046155613e-06, clsf_neg: 1.228757096072286e-09
Threshold is set to 0.004770103842020035
Min. Probailities on test set with label 1: 0.057617560029029846
Testing ==> auc: 0.999823, prec: 0.0002, rec: 1.0000, F1score: 0.0003, clsf_loss: 2.8976829824500783e-09
Epoch 2 -------------------------------------------------------------------------
Idx 0 => clsf: 7.819139224984895e-10
Idx 2000 => clsf: 2.2328056070719526e-10
Idx 4000 => clsf: 2.0501557396190861e-10
Idx 6000 => clsf: 1.5500294103798495e-10
Training => auc: 0.987420, clsf_pos: 1.0768956599349622e-05, clsf_neg: 5.49017553641562e-10
Threshold is set to 0.00422847643494606
Min. Probailities on test set with label 1: 0.0124081801623106
Testing ==> auc: 0.999467, prec: 0.0002, rec: 1.0000, F1score: 0.0003, clsf_loss: 3.640037604668578e-09
Epoch 3 -------------------------------------------------------------------------
Idx 0 => clsf: 1.2664903847880993e-10
Idx 2000 => clsf: 1.9871397582971184e-10
Idx 4000 => clsf: 7.369310023319642e-11
Idx 6000 => clsf: 8.815074364898479e-11
Training => auc: 0.995489, clsf_pos: 8.526847523171455e-06, clsf_neg: 2.6755878068662753e-10
Threshold is set to 0.007625599857419729
Min. Probailities on test set with label 1: 0.01630779355764389
Testing ==> auc: 0.999787, prec: 0.0011, rec: 1.0000, F1score: 0.0022, clsf_loss: 3.6711322870530694e-09
Epoch 4 -------------------------------------------------------------------------
Idx 0 => clsf: 1.2046630359918709e-10
Idx 2000 => clsf: 1.047521722141731e-10
Idx 4000 => clsf: 6.175711331213307e-11
Idx 6000 => clsf: 6.46883102639606e-11
Training => auc: 0.998356, clsf_pos: 6.4510040829190984e-06, clsf_neg: 3.974020557073743e-10
Threshold is set to 0.009090877138078213
Min. Probailities on test set with label 1: 0.0392480306327343
Testing ==> auc: 0.999934, prec: 0.0095, rec: 1.0000, F1score: 0.0189, clsf_loss: 1.82703496776071e-09
Epoch 5 -------------------------------------------------------------------------
Idx 0 => clsf: 4.252895258183287e-11
Idx 2000 => clsf: 4.198571698643683e-11
Idx 4000 => clsf: 6.35648755853424e-11
Idx 6000 => clsf: 3.4695524231409536e-11
Training => auc: 0.999946, clsf_pos: 5.7035872487176675e-06, clsf_neg: 1.7216429670785516e-10
Threshold is set to 0.02147645317018032
Min. Probailities on test set with label 1: 0.042940910905599594
Testing ==> auc: 0.999966, prec: 0.0453, rec: 1.0000, F1score: 0.0866, clsf_loss: 3.120794955790984e-09
Epoch 6 -------------------------------------------------------------------------
Idx 0 => clsf: 5.530596813851929e-11
Idx 2000 => clsf: 4.3120944315244714e-11
Idx 4000 => clsf: 2.674554501480575e-11
Idx 6000 => clsf: 2.2346622330360333e-09
Training => auc: 0.999617, clsf_pos: 7.104088581399992e-06, clsf_neg: 1.7187742895607983e-10
Threshold is set to 0.011965308338403702
Min. Probailities on test set with label 1: 0.07618094235658646
Testing ==> auc: 0.999970, prec: 0.0187, rec: 1.0000, F1score: 0.0367, clsf_loss: 2.3029553819498005e-09
Epoch 7 -------------------------------------------------------------------------
Idx 0 => clsf: 2.7237417876690984e-10
Idx 2000 => clsf: 7.428187925873075e-11
Idx 4000 => clsf: 3.724244690417322e-09
Idx 6000 => clsf: 2.4350493843527943e-10
Training => auc: 0.997927, clsf_pos: 5.63932644581655e-06, clsf_neg: 1.7748567893161038e-10
Threshold is set to 0.006347938906401396
Min. Probailities on test set with label 1: 0.055862341076135635
Testing ==> auc: 0.999936, prec: 0.0053, rec: 1.0000, F1score: 0.0105, clsf_loss: 2.139596722017245e-09
Epoch 8 -------------------------------------------------------------------------
Idx 0 => clsf: 2.6747265513549223e-10
Idx 2000 => clsf: 3.6152091098529127e-08
Idx 4000 => clsf: 1.7938869140143865e-11
Idx 6000 => clsf: 5.644702760765341e-11
Training => auc: 0.999985, clsf_pos: 4.964941126672784e-06, clsf_neg: 3.1524913235436713e-10
Threshold is set to 0.036402639001607895
Min. Probailities on test set with label 1: 0.03797255456447601
Testing ==> auc: 0.999935, prec: 0.0498, rec: 1.0000, F1score: 0.0949, clsf_loss: 2.439758173267137e-09
Epoch 9 -------------------------------------------------------------------------
Idx 0 => clsf: 9.991685256949268e-11
Idx 2000 => clsf: 8.736086853922131e-11
Idx 4000 => clsf: 2.557553058224471e-10
Idx 6000 => clsf: 2.573189057664127e-11
Training => auc: 0.999975, clsf_pos: 5.078523827251047e-06, clsf_neg: 1.651299930127692e-10
Threshold is set to 0.026118585839867592
Min. Probailities on test set with label 1: 0.011123161762952805
Testing ==> auc: 0.999814, prec: 0.4516, rec: 0.9767, F1score: 0.6176, clsf_loss: 3.880967103242483e-09
Epoch 10 -------------------------------------------------------------------------
Idx 0 => clsf: 1.752840060598171e-11
Idx 2000 => clsf: 2.796759698830975e-11
Idx 4000 => clsf: 2.315705960320713e-11
Idx 6000 => clsf: 1.8534666856862003e-11
Training => auc: 0.999944, clsf_pos: 5.029150997870602e-06, clsf_neg: 1.586374087647613e-10
Threshold is set to 0.016078852117061615
Min. Probailities on test set with label 1: 0.07293397188186646
Testing ==> auc: 0.999937, prec: 0.0372, rec: 1.0000, F1score: 0.0717, clsf_loss: 2.0397372679781256e-09
Epoch 11 -------------------------------------------------------------------------
Idx 0 => clsf: 1.9159071898422475e-11
Idx 2000 => clsf: 7.770750015678729e-11
Idx 4000 => clsf: 1.428219088134286e-11
Idx 6000 => clsf: 2.0972025505106018e-10
Training => auc: 0.999985, clsf_pos: 3.6924909636582015e-06, clsf_neg: 1.3725894032479147e-10
Threshold is set to 0.028964009135961533
Min. Probailities on test set with label 1: 0.04810567572712898
Testing ==> auc: 0.999932, prec: 0.0426, rec: 1.0000, F1score: 0.0817, clsf_loss: 2.621581618456048e-09
Epoch 12 -------------------------------------------------------------------------
Idx 0 => clsf: 1.6186394238837387e-11
Idx 2000 => clsf: 2.3966533618802188e-11
Idx 4000 => clsf: 1.9677308393806214e-11
Idx 6000 => clsf: 3.2605609184832574e-11
Training => auc: 0.999998, clsf_pos: 2.456007905493607e-06, clsf_neg: 1.4827362948555134e-10
Threshold is set to 0.08942635357379913
Min. Probailities on test set with label 1: 0.010869729332625866
Testing ==> auc: 0.999902, prec: 0.8889, rec: 0.9302, F1score: 0.9091, clsf_loss: 3.758317657087673e-09
Epoch 13 -------------------------------------------------------------------------
Idx 0 => clsf: 1.9662832126399188e-11
Idx 2000 => clsf: 2.6728515234442085e-11
Idx 4000 => clsf: 1.0870407872454191e-11
Idx 6000 => clsf: 1.0176811650330908e-11
Training => auc: 0.999869, clsf_pos: 4.76427794637857e-06, clsf_neg: 1.0161227415039775e-10
Threshold is set to 0.012571227736771107
Min. Probailities on test set with label 1: 0.06783907860517502
Testing ==> auc: 0.999939, prec: 0.0325, rec: 1.0000, F1score: 0.0630, clsf_loss: 2.882978078488918e-09
Epoch 14 -------------------------------------------------------------------------
Idx 0 => clsf: 1.4055852835814786e-11
Idx 2000 => clsf: 4.490695593162286e-10
Idx 4000 => clsf: 3.6349090404286244e-10
Idx 6000 => clsf: 1.1976087656295764e-11
Training => auc: 0.999992, clsf_pos: 2.9908405849710107e-06, clsf_neg: 1.927581844141102e-10
Threshold is set to 0.05567270889878273
Min. Probailities on test set with label 1: 0.019926929846405983
Testing ==> auc: 0.999919, prec: 0.1405, rec: 0.9767, F1score: 0.2456, clsf_loss: 3.2658402648877427e-09
Epoch 15 -------------------------------------------------------------------------
Idx 0 => clsf: 1.2783750620581902e-11
Idx 2000 => clsf: 1.615479972016942e-11
Idx 4000 => clsf: 4.387665925031925e-11
Idx 6000 => clsf: 1.1835641841595468e-11
Training => auc: 0.999991, clsf_pos: 2.8810395633627195e-06, clsf_neg: 1.0705120123688516e-10
Threshold is set to 0.03907858580350876
Min. Probailities on test set with label 1: 0.019675686955451965
Testing ==> auc: 0.999922, prec: 0.0733, rec: 0.9767, F1score: 0.1364, clsf_loss: 3.1810547529431688e-09
Epoch 16 -------------------------------------------------------------------------
Idx 0 => clsf: 2.623142675295398e-11
Idx 2000 => clsf: 5.304361117008938e-11
Idx 4000 => clsf: 8.207490242995163e-12
Idx 6000 => clsf: 9.826322047712388e-12
Training => auc: 0.999998, clsf_pos: 2.1857701995031675e-06, clsf_neg: 7.998341572390544e-11
Threshold is set to 0.06598863750696182
Min. Probailities on test set with label 1: 0.003597858129069209
Testing ==> auc: 0.999525, prec: 0.9091, rec: 0.9302, F1score: 0.9195, clsf_loss: 5.606712871752961e-09
Epoch 17 -------------------------------------------------------------------------
Idx 0 => clsf: 1.1762273446902505e-11
Idx 2000 => clsf: 9.192134033109145e-12
Idx 4000 => clsf: 9.19030389984199e-12
Idx 6000 => clsf: 1.9899106667997657e-11
Training => auc: 0.999975, clsf_pos: 3.4574179608171107e-06, clsf_neg: 1.8256371414615558e-10
Threshold is set to 0.01627841591835022
Min. Probailities on test set with label 1: 0.005854449234902859
Testing ==> auc: 0.999678, prec: 0.0840, rec: 0.9767, F1score: 0.1547, clsf_loss: 4.9219099906849806e-09
Epoch 18 -------------------------------------------------------------------------
Idx 0 => clsf: 2.2994200293835476e-11
Idx 2000 => clsf: 3.798718839487236e-10
Idx 4000 => clsf: 7.848527383558235e-12
Idx 6000 => clsf: 1.2480525293789846e-11
Training => auc: 0.999994, clsf_pos: 3.0306803182611475e-06, clsf_neg: 1.2851915365263977e-10
Threshold is set to 0.057559721171855927
Min. Probailities on test set with label 1: 0.0342058427631855
Testing ==> auc: 0.999931, prec: 0.3590, rec: 0.9767, F1score: 0.5250, clsf_loss: 3.5859999414356025e-09
Epoch 19 -------------------------------------------------------------------------
Idx 0 => clsf: 2.1808053626837243e-11
Idx 2000 => clsf: 1.1375086636511433e-11
Idx 4000 => clsf: 1.806585783747927e-11
Idx 6000 => clsf: 1.7897584803083788e-10
Training => auc: 0.999990, clsf_pos: 3.134448661512579e-06, clsf_neg: 1.2103909541316682e-10
Threshold is set to 0.034912291914224625
Min. Probailities on test set with label 1: 0.005457509309053421
Testing ==> auc: 0.999842, prec: 0.1843, rec: 0.9302, F1score: 0.3077, clsf_loss: 5.072182673870884e-09
Epoch 20 -------------------------------------------------------------------------
Idx 0 => clsf: 1.3696956763231682e-11
Idx 2000 => clsf: 4.746208981387667e-10
Idx 4000 => clsf: 1.243938459183358e-11
Idx 6000 => clsf: 5.273234973679486e-12
Training => auc: 0.999997, clsf_pos: 2.2446158709499286e-06, clsf_neg: 5.727815791112256e-11
Threshold is set to 0.05452180653810501
Min. Probailities on test set with label 1: 0.003080059075728059
Testing ==> auc: 0.996904, prec: 0.8163, rec: 0.9302, F1score: 0.8696, clsf_loss: 5.4293605167288206e-09
Epoch 21 -------------------------------------------------------------------------
Idx 0 => clsf: 7.842456718754054e-12
Idx 2000 => clsf: 1.1155242528315679e-11
Idx 4000 => clsf: 4.9478988614626296e-12
Idx 6000 => clsf: 8.654909254557364e-12
Training => auc: 0.999985, clsf_pos: 3.848815595119959e-06, clsf_neg: 1.5159962174493558e-10
Threshold is set to 0.020875653252005577
Min. Probailities on test set with label 1: 0.023123309016227722
Testing ==> auc: 0.999934, prec: 0.0570, rec: 1.0000, F1score: 0.1078, clsf_loss: 3.3928269083105533e-09
Epoch 22 -------------------------------------------------------------------------
Idx 0 => clsf: 1.5345659465371142e-10
Idx 2000 => clsf: 5.501275650299231e-12
Idx 4000 => clsf: 9.348277360543555e-12
Idx 6000 => clsf: 9.292738453736682e-12
Training => auc: 0.999989, clsf_pos: 2.7361597858543973e-06, clsf_neg: 7.292741410758197e-11
Threshold is set to 0.029038527980446815
Min. Probailities on test set with label 1: 0.00556844100356102
Testing ==> auc: 0.999881, prec: 0.1522, rec: 0.9767, F1score: 0.2633, clsf_loss: 4.3210821587535975e-09
Epoch 23 -------------------------------------------------------------------------
Idx 0 => clsf: 6.914073046732083e-12
Idx 2000 => clsf: 9.990107699420214e-12
Idx 4000 => clsf: 1.1983554773498106e-11
Idx 6000 => clsf: 1.1166637614579145e-09
Training => auc: 0.999968, clsf_pos: 3.7955039715598105e-06, clsf_neg: 1.330833082624494e-10
Threshold is set to 0.014160431921482086
Min. Probailities on test set with label 1: 0.06285425275564194
Testing ==> auc: 0.999897, prec: 0.0393, rec: 1.0000, F1score: 0.0757, clsf_loss: 2.552049460646799e-09
Epoch 24 -------------------------------------------------------------------------
Idx 0 => clsf: 9.16762759456402e-12
Idx 2000 => clsf: 2.9319695976637306e-11
Idx 4000 => clsf: 5.73566151171323e-12
Idx 6000 => clsf: 7.502823015648197e-12
Training => auc: 0.999987, clsf_pos: 3.128912567262887e-06, clsf_neg: 1.6404451408380538e-10
Threshold is set to 0.039402686059474945
Min. Probailities on test set with label 1: 0.03392123058438301
Testing ==> auc: 0.999922, prec: 0.0649, rec: 0.9767, F1score: 0.1217, clsf_loss: 3.1273605927140125e-09
Epoch 25 -------------------------------------------------------------------------
Idx 0 => clsf: 5.050874649081827e-12
Idx 2000 => clsf: 5.812217027112432e-12
Idx 4000 => clsf: 4.785491447556467e-12
Idx 6000 => clsf: 4.921901861770772e-12
Training => auc: 0.999991, clsf_pos: 2.1945190837868722e-06, clsf_neg: 9.22651954837761e-11
Threshold is set to 0.04655322805047035
Min. Probailities on test set with label 1: 0.006852686870843172
Testing ==> auc: 0.999900, prec: 0.1695, rec: 0.9302, F1score: 0.2867, clsf_loss: 4.127334918280212e-09
Epoch 26 -------------------------------------------------------------------------
Idx 0 => clsf: 2.953482458600831e-10
Idx 2000 => clsf: 5.5948119401239005e-12
Idx 4000 => clsf: 5.517459319287488e-12
Idx 6000 => clsf: 4.7847706699521986e-12
Training => auc: 0.999992, clsf_pos: 3.426252987992484e-06, clsf_neg: 1.2722446707247315e-10
Threshold is set to 0.05697764456272125
Min. Probailities on test set with label 1: 0.04139380529522896
Testing ==> auc: 0.999872, prec: 0.1449, rec: 0.9302, F1score: 0.2508, clsf_loss: 3.327045527967698e-09
Epoch 27 -------------------------------------------------------------------------
Idx 0 => clsf: 4.2121419199792065e-12
Idx 2000 => clsf: 5.48666754390803e-12
Idx 4000 => clsf: 6.069302178890457e-12
Idx 6000 => clsf: 2.6801724901936996e-12
Training => auc: 0.999995, clsf_pos: 2.593475983303506e-06, clsf_neg: 1.0941256234353602e-10
Threshold is set to 0.05497302860021591
Min. Probailities on test set with label 1: 0.009865447878837585
Testing ==> auc: 0.999829, prec: 0.1717, rec: 0.9302, F1score: 0.2899, clsf_loss: 4.911965056919598e-09
Epoch 28 -------------------------------------------------------------------------
Idx 0 => clsf: 5.702323162271039e-12
Idx 2000 => clsf: 6.707787103543694e-12
Idx 4000 => clsf: 2.6709374295608157e-11
Idx 6000 => clsf: 4.207073578399445e-11
Training => auc: 0.999978, clsf_pos: 3.416762183405808e-06, clsf_neg: 1.0317076359900312e-10
Threshold is set to 0.018782800063490868
Min. Probailities on test set with label 1: 0.02162165194749832
Testing ==> auc: 0.999902, prec: 0.0590, rec: 1.0000, F1score: 0.1114, clsf_loss: 3.4409597393647573e-09
Epoch 29 -------------------------------------------------------------------------
Idx 0 => clsf: 8.777535642767731e-11
Idx 2000 => clsf: 3.4373552615374336e-11
Idx 4000 => clsf: 6.3479985157322e-11
Idx 6000 => clsf: 5.350741551224392e-12
Training => auc: 0.999994, clsf_pos: 2.5309675493190298e-06, clsf_neg: 7.236103383156944e-11
Threshold is set to 0.04678452014923096
Min. Probailities on test set with label 1: 0.008769948035478592
Testing ==> auc: 0.999897, prec: 0.1591, rec: 0.9767, F1score: 0.2736, clsf_loss: 3.988388730391534e-09
Epoch 30 -------------------------------------------------------------------------
Idx 0 => clsf: 2.7339825282163277e-12
Idx 2000 => clsf: 4.078090296011361e-12
Idx 4000 => clsf: 5.958975934222677e-12
Idx 6000 => clsf: 2.0843914701890176e-12
Training => auc: 0.999997, clsf_pos: 2.2534729851031443e-06, clsf_neg: 1.3964437939328889e-10
Threshold is set to 0.07935924082994461
Min. Probailities on test set with label 1: 0.006430990528315306
Testing ==> auc: 0.999862, prec: 0.7273, rec: 0.9302, F1score: 0.8163, clsf_loss: 4.7155950255728385e-09
Epoch 31 -------------------------------------------------------------------------
Idx 0 => clsf: 1.7688365962914565e-12
Idx 2000 => clsf: 2.9690527380416e-12
Idx 4000 => clsf: 4.252486904277042e-11
Idx 6000 => clsf: 2.8031249196813768e-12
Training => auc: 0.999993, clsf_pos: 2.3058580609358614e-06, clsf_neg: 4.7196076907729534e-11
Threshold is set to 0.040091872215270996
Min. Probailities on test set with label 1: 0.010271180421113968
Testing ==> auc: 0.999853, prec: 0.0820, rec: 0.9302, F1score: 0.1507, clsf_loss: 4.219815608053068e-09
Epoch 32 -------------------------------------------------------------------------
Idx 0 => clsf: 2.6215791690265e-12
Idx 2000 => clsf: 3.462985757526904e-12
Idx 4000 => clsf: 8.699985870608273e-11
Idx 6000 => clsf: 1.5569846844101787e-12
Training => auc: 0.999996, clsf_pos: 2.2348228867485886e-06, clsf_neg: 1.07841194307845e-10
Threshold is set to 0.06484325975179672
Min. Probailities on test set with label 1: 0.0033310684375464916
Testing ==> auc: 0.999437, prec: 0.1961, rec: 0.9302, F1score: 0.3239, clsf_loss: 5.115790902010531e-09
Epoch 33 -------------------------------------------------------------------------
Idx 0 => clsf: 7.833456973360686e-12
Idx 2000 => clsf: 2.7143396419404553e-09
Idx 4000 => clsf: 2.8771134780170016e-12
Idx 6000 => clsf: 2.0916504205742426e-12
Training => auc: 0.999992, clsf_pos: 2.9837074180250056e-06, clsf_neg: 8.617891122941757e-11
Threshold is set to 0.041820891201496124
Min. Probailities on test set with label 1: 0.007536693941801786
Testing ==> auc: 0.999825, prec: 0.1036, rec: 0.9302, F1score: 0.1865, clsf_loss: 5.3403890198922e-09
Epoch 34 -------------------------------------------------------------------------
Idx 0 => clsf: 7.480131097858944e-11
Idx 2000 => clsf: 3.3165540626323153e-12
Idx 4000 => clsf: 2.702665782144953e-12
Idx 6000 => clsf: 9.562286726327862e-12
Training => auc: 0.999999, clsf_pos: 1.7759648471837863e-06, clsf_neg: 9.133163669794442e-11
Threshold is set to 0.09692412614822388
Min. Probailities on test set with label 1: 0.0043848794884979725
Testing ==> auc: 0.999795, prec: 0.7018, rec: 0.9302, F1score: 0.8000, clsf_loss: 5.972694339106965e-09
Epoch 35 -------------------------------------------------------------------------
Idx 0 => clsf: 4.086367529076984e-12
Idx 2000 => clsf: 2.499957706125766e-11
Idx 4000 => clsf: 5.429280358626443e-10
Idx 6000 => clsf: 5.2497672875517765e-11
Training => auc: 0.999993, clsf_pos: 2.2145218281366397e-06, clsf_neg: 5.818910978061531e-11
Threshold is set to 0.0451289638876915
Min. Probailities on test set with label 1: 0.008911632001399994
Testing ==> auc: 0.999830, prec: 0.1533, rec: 0.9302, F1score: 0.2632, clsf_loss: 5.310671902236663e-09
Epoch 36 -------------------------------------------------------------------------
Idx 0 => clsf: 1.2235316926290096e-10
Idx 2000 => clsf: 4.6513400769887525e-12
Idx 4000 => clsf: 2.4154343889609686e-12
Idx 6000 => clsf: 1.5250466904939697e-12
Training => auc: 0.999998, clsf_pos: 1.7965813867704128e-06, clsf_neg: 4.913838086428868e-11
Threshold is set to 0.06879152357578278
Min. Probailities on test set with label 1: 0.001360047492198646
Testing ==> auc: 0.978942, prec: 0.9091, rec: 0.9302, F1score: 0.9195, clsf_loss: 6.827893805905205e-09
Epoch 37 -------------------------------------------------------------------------
Idx 0 => clsf: 2.9519321015358813e-12
Idx 2000 => clsf: 8.933629010166033e-12
Idx 4000 => clsf: 2.0031190248182007e-12
Idx 6000 => clsf: 1.4920726329817335e-12
Training => auc: 0.999999, clsf_pos: 1.651143179515202e-06, clsf_neg: 5.3793913767918866e-11
Threshold is set to 0.08773916959762573
Min. Probailities on test set with label 1: 0.0021049317438155413
Testing ==> auc: 0.998583, prec: 0.9524, rec: 0.9302, F1score: 0.9412, clsf_loss: 6.077982117602687e-09
'''
|
# -*- coding: utf-8 -*-
import irc3
import base64
__doc__ = '''
===================================================
:mod:`irc3.plugins.sasl` SASL authentication
===================================================
Allows the use of SASL authentication
..
>>> from irc3.testing import IrcBot
>>> from irc3.testing import ini2config
Usage::
>>> config = ini2config("""
... [bot]
... sasl_username = irc3
... sasl_password = passwd
... """)
>>> bot = IrcBot(**config)
'''
@irc3.plugin
class Sasl:
def __init__(self, bot):
self.bot = bot
self.events = [
irc3.event(r'^:\S+ CAP \S+ LS :(?P<data>.*)', self.cap_ls),
irc3.event(r'^:\S+ CAP \S+ ACK :.*sasl.*', self.cap_ack),
irc3.event(r'AUTHENTICATE +', self.authenticate),
irc3.event(r'^:\S+ 903 \S+ :SASL authentication successful',
self.cap_end),
]
def connection_ready(self, *args, **kwargs):
self.bot.send('CAP LS\r\n')
self.bot.attach_events(*self.events)
def cap_ls(self, data=None, **kwargs):
if 'sasl' in data.lower():
self.bot.send_line('CAP REQ :sasl')
else:
self.cap_end()
def cap_ack(self, **kwargs):
self.bot.send_line('AUTHENTICATE PLAIN')
def authenticate(self, **kwargs):
auth = ('{sasl_username}\0'
'{sasl_username}\0'
'{sasl_password}').format(**self.bot.config)
auth = base64.encodebytes(auth.encode('utf8'))
auth = auth.decode('utf8').rstrip('\n')
self.bot.send_line('AUTHENTICATE ' + auth)
def cap_end(self, **kwargs):
self.bot.send_line('CAP END')
self.bot.detach_events(*self.events)
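# A minimal sketch (not part of the plugin) of the PLAIN payload built in
# authenticate(): authzid, authcid and password joined by NUL bytes, then
# base64-encoded. The credentials are the dummy ones from the docstring.
if __name__ == '__main__':
    payload = base64.encodebytes(b'irc3\x00irc3\x00passwd')
    print('AUTHENTICATE ' + payload.decode('utf8').rstrip('\n'))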
|
'''
Author: Vinicius de Figueiredo Marques
++++++++++++++++++++++++++++++++++++++
The objective of this script is to load an XML file representing a database structure with some data.
Then it processes this structure and generates SQL instructions to be executed on a chosen DBMS.
'''
import sys
from XMLParser import *
from DataBaseFacade import *
from ConfigParser import *
# Load input and database file information
parameters = sys.argv[1:]
if(len(parameters)<2):
print "Error! You must pass two parameters: the first one should be the input file and second one should be a database info(host, user, password, port) json file"
sys.exit(1)
fileInput = parameters[0]  # input XML file
config = ConfigParser(parameters[1])  # database info JSON file
db = DataBaseFacade(name = DataBaseFacade.MYSQL,host = config.host(), user = config.user(), password = config.password(), port = config.port())
x = XMLParser(fileInput)
x.parse()
x.generate()
tablesInfo = x.getTablesInfo()
tablesData = x.getTablesData()
if db.createDatabase(x.getDatabaseName()):
    print "++++ Database Created Successfully ++++"
    if db.createTables(tablesInfo):
        print "++++ Tables Created Successfully ++++"
        if db.insertData(tablesData):
            print "++++ Data Inserted Successfully ++++"
db.closeConnection()
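# Usage sketch (assumed invocation; the file names below are hypothetical):
#   python main.py structure.xml dbconfig.json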
|
"""
File: linkedbst.py
Author: Ken Lambert
"""
from abstractcollection import AbstractCollection
from bstnode import BSTNode
from linkedstack import LinkedStack
# from linkedqueue import LinkedQueue
from math import log
class LinkedBST(AbstractCollection):
"""An link-based binary search tree implementation."""
def __init__(self, sourceCollection=None):
"""Sets the initial state of self, which includes the
contents of sourceCollection, if it's present."""
self._root = None
AbstractCollection.__init__(self, sourceCollection)
# Accessor methods
def __str__(self):
"""Returns a string representation with the tree rotated
90 degrees counterclockwise."""
def recurse(node, level):
str_repr = ""
if node != None:
str_repr += recurse(node.right, level + 1)
str_repr += "| " * level
str_repr += str(node.data) + "\n"
str_repr += recurse(node.left, level + 1)
return str_repr
return recurse(self._root, 0)
def __iter__(self):
"""Supports a preorder traversal on a view of self."""
if not self.isEmpty():
stack = LinkedStack()
stack.push(self._root)
while not stack.isEmpty():
node = stack.pop()
yield node.data
if node.right != None:
stack.push(node.right)
if node.left != None:
stack.push(node.left)
def preorder(self):
"""Supports a preorder traversal on a view of self."""
return None
def inorder(self):
"""Supports an inorder traversal on a view of self."""
lyst = list()
def recurse(node):
if node != None:
recurse(node.left)
lyst.append(node.data)
recurse(node.right)
recurse(self._root)
return iter(lyst)
def postorder(self):
"""Supports a postorder traversal on a view of self."""
return None
def levelorder(self):
"""Supports a levelorder traversal on a view of self."""
return None
def __contains__(self, item):
"""Returns True if target is found or False otherwise."""
return self.find(item) != None
def find(self, item):
"""If item matches an item in self, returns the
matched item, or None otherwise."""
def recurse(node):
if node is None:
return None
elif item == node.data:
return node.data
elif item < node.data:
return recurse(node.left)
else:
return recurse(node.right)
return recurse(self._root)
# Mutator methods
def clear(self):
"""Makes self become empty."""
self._root = None
self._size = 0
def add(self, item):
"""Adds item to the tree."""
# Helper function to search for item's position
def recurse(node):
# New item is less, go left until spot is found
if item < node.data:
if node.left == None:
node.left = BSTNode(item)
else:
recurse(node.left)
# New item is greater or equal,
# go right until spot is found
elif node.right == None:
node.right = BSTNode(item)
else:
recurse(node.right)
# End of recurse
# Tree is empty, so new item goes at the root
if self.isEmpty():
self._root = BSTNode(item)
# Otherwise, search for the item's spot
else:
recurse(self._root)
self._size += 1
def remove(self, item):
"""Precondition: item is in self.
Raises: KeyError if item is not in self.
postcondition: item is removed from self."""
        if item not in self:
            raise KeyError("Item not in tree.")
# Helper function to adjust placement of an item
def lift_max_in_left_subtree_to_top(top):
# Replace top's datum with the maximum datum in the left subtree
# Pre: top has a left child
# Post: the maximum node in top's left subtree
# has been removed
# Post: top.data = maximum value in top's left subtree
parent = top
current_node = top.left
while not current_node.right == None:
parent = current_node
current_node = current_node.right
top.data = current_node.data
if parent == top:
top.left = current_node.left
# End of recurse
else:
parent.right = current_node.left
# Begin main part of the method
if self.isEmpty(): return None
# Attempt to locate the node containing the item
item_removed = None
pre_root = BSTNode(None)
pre_root.left = self._root
parent = pre_root
direction = 'L'
current_node = self._root
while not current_node == None:
if current_node.data == item:
item_removed = current_node.data
break
parent = current_node
if current_node.data > item:
direction = 'L'
current_node = current_node.left
else:
direction = 'R'
current_node = current_node.right
# Return None if the item is absent
if item_removed == None: return None
# The item is present, so remove its node
# Case 1: The node has a left and a right child
# Replace the node's value with the maximum value in the
# left subtree
        # Delete the maximum node in the left subtree
if not current_node.left == None \
and not current_node.right == None:
lift_max_in_left_subtree_to_top(current_node)
else:
# Case 2: The node has no left child
if current_node.left == None:
new_child = current_node.right
# Case 3: The node has no right child
else:
new_child = current_node.left
# Case 2 & 3: Tie the parent to the new child
if direction == 'L':
parent.left = new_child
else:
parent.right = new_child
# All cases: Reset the root (if it hasn't changed no harm done)
# Decrement the collection's size counter
# Return the item
self._size -= 1
if self.isEmpty():
self._root = None
else:
self._root = pre_root.left
return item_removed
def replace(self, item, new_item):
"""
If item is in self, replaces it with new_item and
returns the old item, or returns None otherwise."""
probe = self._root
while probe != None:
if probe.data == item:
old_data = probe.data
probe.data = new_item
return old_data
elif probe.data > item:
probe = probe.left
else:
probe = probe.right
return None
def height(self):
'''
Return the height of tree
:return: int
>>> bst = LinkedBST()
>>> bst.height()
0
>>> bst.add(1)
>>> bst.height()
0
>>> bst.add(2)
>>> bst.height()
1
>>> bst.add(0)
>>> bst.height()
1
>>> bst.add(0)
>>> bst.height()
2
'''
def height1(top):
'''
Helper function
:param top: BSTNode
:return: int
'''
# base case
if top.left is None and top.right is None:
return 0
# recursive cases
maximum = 0
for child in top.left, top.right:
if child is not None:
child_height = height1(child)
if child_height > maximum:
maximum = child_height
return maximum + 1
if self._root is None:
return 0
return height1(self._root)
def is_balanced(self):
'''
Return True if tree is balanced
:return: bool
'''
        # A BST is balanced when height < 2 * log2(n + 1) - 1; math.log defaults
        # to the natural logarithm, so the base must be given explicitly.
        return self.height() < (2 * log(self._size + 1, 2) - 1)
def range_find(self, low, high):
'''
        Returns a list of the items in the tree, where low <= item <= high.
:param low: int
:param high: int
:return: list of items
>>> bst = LinkedBST()
>>> bst.add(1)
>>> bst.add(2)
>>> bst.add(3)
>>> bst.range_find(1, 3)
[1, 2, 3]
>>> bst.rebalance()
>>> bst.range_find(1, 3)
[1, 2, 3]
>>> bst.add(4)
>>> bst.add(5)
>>> bst.range_find(3, 4)
[3, 4]
>>> bst.rebalance()
>>> bst.add(1200)
>>> bst.add(120)
>>> bst.range_find(4, 200)
[4, 5, 120]
'''
def recurse(node, items_range):
if node is not None:
if node.data > low:
recurse(node.left, items_range)
if low <= node.data <= high:
items_range.append(node.data)
if node.data <= high:
recurse(node.right, items_range)
needed_items = []
recurse(self._root, needed_items)
return needed_items
def rebalance(self):
'''
Rebalances the tree.
:return:
>>> bst = LinkedBST()
>>> bst.add(1)
>>> bst.add(2)
>>> bst.add(3)
>>> bst.height()
2
>>> bst.rebalance()
>>> bst.height()
1
>>> bst.add(4)
>>> bst.add(5)
>>> bst.height()
3
>>> bst.rebalance()
>>> bst.height()
2
'''
# helper method for adding elements from sorted list to BST
def recurse(bst, sorted_list, start, end):
middle = (start + end + 1) // 2
bst.add(sorted_list[middle])
if (middle - 1) >= start:
recurse(bst, sorted_list, start, middle - 1)
if end >= (middle + 1):
recurse(bst, sorted_list, middle + 1, end)
lyst = list(self.inorder())
self.clear()
recurse(self, lyst, 0, len(lyst)-1)
def successor(self, item):
"""
Returns the smallest item that is larger than
item, or None if there is no such item.
:param item: the item of which to find the successor
:type item: anything comparable with items of tree
:return: the item from the tree coming after the passed in one
:rtype: anything comparable
>>> bst = LinkedBST()
>>> bst.add(1)
>>> bst.add(2)
>>> bst.add(3)
>>> bst.successor(2)
3
>>> bst.rebalance()
>>> bst.successor(1)
2
>>> bst.add(4)
>>> bst.add(5)
>>> bst.successor(4)
5
>>> bst.rebalance()
>>> bst.successor(3)
4
>>> bst.successor(5) is None
True
>>> bst.successor(-1000)
1
"""
last_left = None
walk = self._root
# finding the needed item
while walk is not None and walk.data != item:
if item < walk.data:
last_left = walk
walk = walk.left
else:
walk = walk.right
if walk is None or walk.right is None:
# the successor is above or doesn't exist in the tree.
if last_left is None:
return None
return last_left.data
# the successor is to the right of the found item
walk = walk.right
while walk.left is not None:
walk = walk.left
return walk.data
def predecessor(self, item):
"""
Returns the largest item that is smaller than
item, or None if there is no such item.
:param item: the item of which to find the predecessor
:type item: anything comparable with items of tree
:return: the item from the tree coming before the passed in one
:rtype: anything comparable
>>> bst = LinkedBST()
>>> bst.add(1)
>>> bst.add(2)
>>> bst.add(3)
>>> bst.predecessor(2)
1
>>> bst.rebalance()
>>> bst.predecessor(3)
2
>>> bst.add(4)
>>> bst.add(5)
>>> bst.predecessor(4)
3
>>> bst.rebalance()
>>> bst.predecessor(3)
2
>>> bst.predecessor(1) is None
True
>>> bst.predecessor(100000)
5
"""
last_right = None
walk = self._root
# finding the needed item
while walk is not None and walk.data != item:
if item < walk.data:
walk = walk.left
else:
last_right = walk
walk = walk.right
if walk is None or walk.left is None:
            # the predecessor is above or doesn't exist in the tree.
if last_right is None:
return None
return last_right.data
        # the predecessor is to the left of the found item
walk = walk.left
while walk.right is not None:
walk = walk.right
return walk.data
def demo_bst(self, path):
"""
        Demonstrates the efficiency of the binary search tree for search tasks.
:param path:
:type path:
:return:
:rtype:
"""
if __name__ == "__main__":
import doctest
doctest.testmod()
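# A minimal usage sketch of LinkedBST: sorted insertion degenerates into a
# linked list, and rebalance() restores logarithmic height.
#   bst = LinkedBST()
#   for item in range(15):
#       bst.add(item)
#   print(bst.height())  # 14 -- one long right spine
#   bst.rebalance()
#   print(bst.height())  # 3 -- a perfectly balanced tree of 15 nodes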
|
from django.views.decorators.csrf import csrf_exempt
from django.http import Http404, JsonResponse, HttpResponse
from django.contrib.auth import authenticate, login
from rest_framework.views import APIView
from rest_framework.response import Response
from rest_framework import status
from api.serializers import ExperimentSerializer
from experiments.models import Experiment, Measurement
from sensors.models import Sensor
from rocks.models import Rock
import json, os, time
from django.db import connection
import pandas as pd
import numpy as np
from django.db import transaction
from django.conf import settings
import base64  # time is already imported above
from .tasks import add_experiment_to_db
from .models import APIClient
@csrf_exempt
def upload_chunk(request, checksum):
root_dir = 'media/datasets/'
dir_path = 'media/datasets/'+checksum+"/"
dump_file = dir_path+"dataset.csv"
meta_file = dir_path+"metadata.json"
if request.method=="POST":
auth = request.META['HTTP_AUTHORIZATION'].split()
username, password = base64.b64decode( auth[1]).decode("utf-8").split(':')
user = authenticate(username=username, password=password)
if user is None or user.is_active==False:
return HttpResponse('AUTHENTICATION_FAILED')
if 'metadata' in request.POST:
experiment_meta = json.loads(request.POST["metadata"])
with open(meta_file, "a") as f:
json.dump(experiment_meta, f)
return HttpResponse('METADATA_RECEIVED')
if Experiment.objects.filter(checksum=checksum).exists():
return HttpResponse('DATASET_ALREADY_IN_DB')
if os.path.isdir(root_dir)==False:
os.mkdir(root_dir)
if os.path.isdir(dir_path)==False:
os.mkdir(dir_path)
if 'chunk' in request.POST:
with open(dump_file, "a") as f:
f.write(request.POST['chunk'])
return HttpResponse('CHUNK_RECEIVED')
@csrf_exempt
def addexperiment(request, checksum):
dir_path = 'media/datasets/'+checksum+"/"
dump_file = dir_path+"dataset.csv"
meta_file = dir_path+"metadata.json"
if request.method=="POST":
auth = request.META['HTTP_AUTHORIZATION'].split()
username, password = base64.b64decode( auth[1]).decode("utf-8").split(':')
user = authenticate(username=username, password=password)
if user is None or user.is_active==False:
return HttpResponse('AUTHENTICATION_FAILED')
if Experiment.objects.filter(checksum=checksum).exists():
return HttpResponse('DATASET_ALREADY_IN_DB')
with open(meta_file, "r") as f:
experiment_meta = json.load(f)
sensors_abbrs = list(pd.read_csv(dump_file, nrows=1).columns)
sensors_abbrs.remove('time')
sensors = { sensor['abbreviation']:sensor['id'] for sensor in list(Sensor.objects.filter(abbreviation__in=sensors_abbrs).values('abbreviation', 'id')) }
with open(dump_file, "r") as f:
num_lines = sum(1 for line in f) - 1
experiment_start_unix = int(time.mktime(time.strptime(experiment_meta['start_time'], '%Y-%m-%d %H:%M:%S')))*(10**6)
rock = Rock.objects.get(id=experiment_meta['rock_id'])
experiment = Experiment(start_time = experiment_meta['start_time'], description = experiment_meta['description'], rock_id=rock, checksum=checksum, nr_data_points=num_lines*len(sensors_abbrs) )
        experiment.sensors = list(sensors.values())
experiment.save()
add_experiment_to_db.delay(checksum)
return HttpResponse('EXPERIMENT_BEING_ADDED_TO_THE_DB')
@csrf_exempt
def getsensors(request):
if request.method=="GET":
sensors = [ sensor['abbreviation'] for sensor in list(Sensor.objects.values('abbreviation')) ]
return HttpResponse(json.dumps(sensors))
@csrf_exempt
def getrocks(request):
if request.method=="GET":
rocks = [ rock['name'] for rock in list(Rock.objects.values('name')) ]
return HttpResponse(json.dumps(rocks))
@csrf_exempt
def get_initial_data(request):
if request.method=="GET":
auth = request.META['HTTP_AUTHORIZATION'].split()
username, password = base64.b64decode( auth[1]).decode("utf-8").split(':')
user = authenticate(username=username, password=password)
if user is None or user.is_active==False:
return HttpResponse('AUTHENTICATION_FAILED')
sensors = [ sensor for sensor in list(Sensor.objects.values('abbreviation', 'id')) ]
rocks = [ rock for rock in list(Rock.objects.values('name', 'id')) ]
api_latest = APIClient.objects.all().order_by('-date')[0]
return HttpResponse(json.dumps({'rocks':rocks, 'sensors':sensors, 'api_version':api_latest.version}))
def download_api_client(request):
api_latest = APIClient.objects.all().order_by('-date')[0]
file_path = os.path.join(settings.MEDIA_ROOT, str(api_latest.file))
if os.path.exists(file_path):
with open(file_path, 'rb') as f:
response = HttpResponse(f.read(), content_type="application/vnd.ms-excel")
            response['Content-Disposition'] = 'inline; filename=' + os.path.basename(file_path)
return response
else:
return HttpResponse("ERROR! File no longer exists in the server")
|
from rest_framework.routers import DefaultRouter
from .views import SnippetViewSet,TestViewSet
router = DefaultRouter()
router.register(r'snippets', SnippetViewSet)
router.register(r'tests', TestViewSet)
urlpatterns = router.urls
# from django.conf.urls import url
# from . import views
#
# urlpatterns = [
# url(r'^snippets/$', views.snippet_list),
# url(r'^snippets/(?P<pk>[0-9]+)/$', views.snippet_detail),
# ]
|
from utils.time_watch import time_watch
class Rotate:
    ''' Circularly rotate a list to the left by k positions.
    '''
@classmethod
    def fun_1(cls, lst, k):
        # slicing copies, so the caller's list is left unmodified
        tmp = lst[:]
        for i in range(k):
            tmp.append(tmp.pop(0))
        return tmp
@classmethod
def fun_2(cls, lst, k):
m = k % len(lst)
return lst[m:] + lst[:m]
    # Brute-force: shift left one position at a time
@classmethod
def fun_3(cls, lst, k):
m = k % len(lst)
for i in range(m):
tmp = lst[0]
for j in range(1, len(lst)):
lst[j-1] = lst[j]
lst[-1] = tmp
return lst
@time_watch
def test(s):
res = Rotate.fun_1(s, 17)
return res
if __name__ == '__main__':
s = [1, 4, 53, 32, 2, 13, 22]
res = test(s)
print(res)
|
import pandas as pd
from autumn.core.inputs.database import get_input_db
def get_mmr_testing_numbers():
"""
Returns daily PCR test numbers for Myanmar
"""
input_db = get_input_db()
df = input_db.query(
"covid_mmr",
columns=["date_index", "tests"],
)
df.dropna(how="any", inplace=True)
return pd.Series(df["tests"].to_numpy(), index=df.date_index)
def base_mmr_adult_vacc_doses():
    """Will move this to the inputs db."""
    # Slide 5 of Mya Yee Mon's PowerPoint sent on 12th November - applied to the 15+ population only
times = [
366, # 1st Jan 2021
393, # 27th Jan
499, # 13th May
522, # 5th June
599, # 21st Aug
606, # 28th Aug
613, # 4th Sept
620, # 11th Sept
627, # 18th Sept
634, # 25th Sept
641, # 2nd Oct
648, # 9th Oct
655, # 16th Oct
662, # 23rd Oct
665, # 26th Oct
670, # 31st Oct
678, # 8th Nov
]
values = [
0,
104865,
1772177,
1840758,
4456857,
4683410,
4860264,
4944654,
5530365,
7205913,
8390746,
9900823,
11223285,
12387573,
12798322,
13244996,
13905795,
]
return times, values
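# A minimal sketch (not part of the module): combining the parallel lists into
# a time-indexed series. Judging by the comments above, the integer index
# appears to count days from 31 Dec 2019.
#   times, values = base_mmr_adult_vacc_doses()
#   doses = pd.Series(values, index=times)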
|
#!/usr/bin/env python
############################################
# test_vehicle.py
# Author: Paul Yang
# Date: June, 2016
# Brief: this is to show HOWTO of python class, __init__method, accessing instance/class attribute
############################################
from vehicle import Truck, Sportscar, Compactcar, drive  # drive() is assumed to be a module-level helper in vehicle
cars = [Truck('Mitush'),
Truck('Orangetruck'),
Sportscar('Z3'),Compactcar('Polo')]
print("--- test abstract methods ---")
for car in cars:
print(car.name + ': ' + car.move())
print("--- test common methods with different object ---")
for car in cars:
print(car.name + ': ' + drive(car))
print("--- test in inherited methods turbo ---")
yaris = Compactcar('Yaris')
yaris.turbo_move()
|
import sys
N = int(input())
a = [None] * N  # rows of the matrix, filled in below
total = 0
for i in range(N):
    b = input()
    x = b.split()
    if len(x) == N:
        a[i] = x
    else:
        print("Input does not meet the requirements, please re-enter!")
        sys.exit()
# sum both diagonals; for odd N the central element is counted only once
if N % 2 == 0:
    for j in range(N):
        total = total + int(a[j][j]) + int(a[j][N - j - 1])
    print(total)
else:
    for j in range(N):
        if j == (N - 1) // 2:
            total = total + int(a[j][j])
        else:
            total = total + int(a[j][j]) + int(a[j][N - j - 1])
    print(total)
|
#encoding:utf-8
from celery import Celery
from WenShuCourtDB_Mongo import WenshucoutMongoDb
from config import master_ip
mydb = WenshucoutMongoDb('WenShuCourt',host=master_ip)
app = Celery('db_tasks', broker='redis://127.0.0.1:6379/0',backend='redis://127.0.0.1:6379/1')
def getCourts():
ret = mydb.getCourts()
return ret
def getParams():
ret = mydb.getParams()
return ret
def getDocids():
ret = mydb.getDocids()
return ret
@app.task
def insertParams(params):
mydb.insertParams(params)
@app.task
def insertParam(param):
mydb.insertParam(param)
@app.task
def updateParamStatus(param):
mydb.updateParamStatus(param)
@app.task
def updateCourtStatus(court_name):
mydb.updateCourtStatus(court_name)
@app.task
def insertCourts(courts):
mydb.insertCourts(courts)
@app.task
def insertCourt(court):
mydb.insertCourt(court)
@app.task
def insertDocids(docids):
mydb.insertDocids(docids)
@app.task
def insertDocid(docid):
mydb.insertDocid(docid)
|
'''
Client module that controls database, telegram client, and face recognition model
'''
import copy
import datetime
import threading
import logging
from pickle import PicklingError, UnpicklingError
from bson.errors import BSONError
from PIL import Image
import numpy as np
from model.recognition_model import FaceRecognition
from utils import file_to_image_array, pack_model, unpack_model, extract_encodings
from utils import add_label_to_image, image_to_file, predict_caption
from utils import predict_reference_note
LOGGER = logging.getLogger(__name__)
class ClientError(Exception):
''' Base client error '''
class ImageFileError(ClientError):
''' Image file related error '''
class PackModelError(ClientError):
''' Pack model error '''
class UnpackModelError(ClientError):
''' Unpack model error '''
class NoModelError(ClientError):
''' No model error '''
class NoBinDataError(ClientError):
''' No bin-data attribute error '''
class CreateCaptionError(ClientError):
''' Error when creating caption '''
class ExtractFaceError(ClientError):
''' Error when extracting face data '''
class LabelNoteFoundError(ClientError):
''' Error when label does not exist '''
class Client:
''' Client object '''
def __init__(self, database=None, telegram=None):
self.database = database
self.telegram = telegram
if telegram:
self.telegram.set_train_handler(self.train_image)
self.telegram.set_predict_handler(self.predict_image)
self.telegram.set_mention_handler(self.mention_label)
self.telegram.set_retrain_handler(self.retrain)
self.model = None
self.model_lock = threading.Lock()
LOGGER.info("Client is initialized")
def start(self):
''' To start client before interactions '''
LOGGER.info("Client starting")
self.telegram.start()
def idle(self):
''' To make client idle '''
LOGGER.info("Client idle")
self.telegram.idle()
def stop(self):
''' To stop client '''
LOGGER.info("Client stopping")
self.telegram.stop()
def retrain(self):
''' Retrain and re-extract after DNN update '''
faces = self.database.get_faces()
if not faces:
return
for face in faces:
image_f = unpack_model(face['image'])
image = file_to_image_array(image_f)
face['face'] = FaceRecognition.get_face_encoding(image).tolist()
self.database.update_face(face)
# Retrieve training set
faces = self.database.get_faces()
x_train = [np.array(x['face']) for x in faces]
y_train = [x['label'] for x in faces]
model = FaceRecognition.train(x_train, y_train)
self.update_model(model, len(faces))
self.database.update_command_counter('retrain')
def mention_label(self, label, note=None):
''' Update label with note '''
        # Check whether the label exists
result = self.database.find_label(label)
if not result:
raise LabelNoteFoundError
if note:
self.database.add_note(label, note)
self.database.update_command_counter('label')
def get_train(self, image_f, label):
        ''' Retrieve the training set, including the new image '''
# Read image from file
try:
image = file_to_image_array(image_f)
except IOError:
LOGGER.error("cannot read image for train")
raise ImageFileError
# Extract target face feature
face_encoding = FaceRecognition.get_face_encoding(image)
# Retrieve training set
faces = self.database.get_faces()
x_train = [np.array(x['face']) for x in faces]
y_train = [x['label'] for x in faces]
# Append new face to training set
x_train.append(face_encoding)
y_train.append(label)
return {'x_train': x_train, 'y_train': y_train,
'face': face_encoding, 'label': label}
def update_model(self, model, faces_count):
''' Update model in memory and database '''
# Pack model
try:
model_bin = pack_model(model)
except (PicklingError, BSONError) as exp:
LOGGER.error("cannot pack model, %s", exp)
raise PackModelError
# Update model in memory
self.model_lock.acquire()
self.model = model
self.model_lock.release()
# Update model in database
self.database.add_model({'bin-data': model_bin,
'face_count': faces_count,
'createdAt': datetime.datetime.utcnow()})
self.database.delete_outdated_models()
LOGGER.debug("update and deleted old model")
def train_image(self, image_f, label):
'''
Train the model with the given image
:param image_f: file descriptor of the image
:param label: string label for the image
:return dict
'''
LOGGER.debug("train_image called")
training_set = self.get_train(image_f, label)
# Train the model
model = FaceRecognition.train(training_set['x_train'],
training_set['y_train'])
# Update model
self.update_model(model, len(training_set['x_train']))
# Pack image
try:
image_bin = pack_model(image_f)
except (PicklingError, BSONError) as exp:
LOGGER.error("cannot pack image, %s", exp)
raise PackModelError
        self.database.add_faces([{'face': training_set['face'].tolist(),
                                  'label': training_set['label'],
                                  'image': image_bin}])
self.database.update_command_counter('train')
def get_model(self):
''' Get latest model '''
# Check existing model
self.model_lock.acquire()
if self.model:
model_copy = copy.deepcopy(self.model)
self.model_lock.release()
return model_copy
self.model_lock.release()
# Retrieve model from database
LOGGER.debug("Fetching model")
model_coll = self.database.get_model()
# Check model properties
if model_coll is None:
LOGGER.debug("No model found")
raise NoModelError
if 'bin-data' not in model_coll.keys():
LOGGER.error("No bin-data in model")
raise NoBinDataError
model_bin = model_coll['bin-data']
# Unpack model
self.model_lock.acquire()
try:
self.model = unpack_model(model_bin)
except UnpicklingError:
LOGGER.error("Cannot unpack model")
self.model_lock.release()
raise UnpackModelError
# Release model lock
model_copy = copy.deepcopy(self.model)
self.model_lock.release()
return model_copy
@staticmethod
def handle_predict_result(image, x_locations, predictions):
''' Handle prediction result '''
# Add label to image
LOGGER.debug("Adding label to predicted image")
try:
image = add_label_to_image(image, x_locations, predictions)
except (IOError, TypeError, ValueError) as exp:
LOGGER.error("Cannot add label to image, %s", exp)
raise ImageFileError
# Convert image to file
LOGGER.debug("Converting labelled image to file")
try:
file = image_to_file(image)
except IOError:
LOGGER.error("Cannot convert image to file")
raise ImageFileError
# Create prediction caption
LOGGER.debug("Creating prediction caption")
try:
caption = predict_caption(predictions)
except (TypeError, ValueError):
LOGGER.error("Cannot create caption")
raise CreateCaptionError
return {'file': file, 'caption': caption}
@staticmethod
def extract_encoding_from_image(image_array):
''' Extract face encodings and locations '''
LOGGER.debug("Extracting encodings")
try:
faces_encodings, x_locations = extract_encodings(image_array)
if (not faces_encodings or not x_locations):
raise ExtractFaceError("No faces found")
return {'encodings': faces_encodings, 'locations': x_locations}
except (TypeError, ValueError) as exp:
raise ExtractFaceError(exp)
def predict_image(self, image_f):
'''
Predict label of the image with model
:param image_f: file descriptor of the image
:return dict: image with label, and caption
'''
LOGGER.debug("predict_image called")
# Convert / Open
try:
image = Image.open(image_f)
image_array = file_to_image_array(image_f)
except IOError:
LOGGER.error("cannot read image file")
raise ImageFileError
# Extract encoding
result = Client.extract_encoding_from_image(image_array)
faces_encodings, x_locations = result['encodings'], result['locations']
# Get model
model_copy = self.get_model()
# Predict
LOGGER.debug("Starting prediction")
predictions = FaceRecognition.predict(faces_encodings,
model_copy)
# Process predicted result
result = Client.handle_predict_result(image, x_locations, predictions)
references, notes = predict_reference_note(self.database, predictions)
self.database.update_command_counter('predict')
return {'image': result['file'], 'caption': result['caption'],
'notes': notes, 'references': references}
@staticmethod
def add_images_mock(images, labels, weights=None):
'''
Add image and train model, development mock
:param images: images to train with
:param labels: labels to train with
:param weights: weights of different models
:return model: the trained model
'''
LOGGER.debug("add_images_mock called")
return FaceRecognition.train(images, labels, weights)
@staticmethod
def predict_image_mock(image, model):
'''
Predict label of image with model
:param image: image to predict
:param model: model for prediction
:return list: predicted labels, probability, distance
'''
LOGGER.debug("predict_image_mock called")
return FaceRecognition.predict(image, model)
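# Minimal wiring sketch (added; assumes concrete `database` and `telegram`
# adapters exposing the methods used above, e.g. get_faces/add_model and the
# set_*_handler hooks):
# client = Client(database=my_db, telegram=my_bot)
# client.start()
# client.idle()
# client.stop()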
|
import os
import re
import json
import sys
os.chdir(os.path.dirname(__file__))
sys.path.append("..")
from tool.append_to_json import AppendToJson
def main_run():
os.chdir(os.path.dirname(__file__))
lexicon_name = "lexicon"
input_path = '../../data/knowledge_triple.json'
output_path = '../../data/' + lexicon_name + ".json"
lexicon_path = "../../resource/" + lexicon_name + ".txt"
if os.path.isfile(output_path):
os.remove(output_path)
# os.mkdir(output_path)
print('Start filtering...')
lexicon_list = []
lexicon_lines = open(lexicon_path, 'r', encoding='utf-8').readlines()
for i, line in enumerate(lexicon_lines):
line = line.strip()
lexicon_list.append(line)
print(len(lexicon_list))
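    # Note (added): membership tests against a list are O(n); for a large
    # lexicon a one-time set conversion gives O(1) average lookups:
    # lexicon_set = set(lexicon_list)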
triple_list = []
lines = open(input_path, 'r', encoding='utf-8').readlines()
print(len(lines))
for i, line in enumerate(lines):
line = json.loads(line)
#print(line)
        arrays = line['关系']  # the '关系' ("relation") field holds the triple [head, relation, tail]
name1 = arrays[0]
relation = arrays[1].replace(':','').replace(':','').replace(' ','').replace(' ','').replace('【','').replace('】','')
name2 = arrays[2]
if relation.strip() == "" or name1.strip() == "" or name2.strip() == "":
continue
triple = name1 + "->" + relation + "->" + name2
if triple not in triple_list:
triple_list.append(triple)
if name1 in lexicon_list and name2 in lexicon_list:
AppendToJson().append(output_path, line)
# print(triple)
else:
# print("[不匹配] - " + triple)
pass
print("filter Ending...")
if __name__ == '__main__':
main_run()
|
# This file, test_campigns, was created by lincan for the uuloong-strategy project
# on 8/17/16 at 3:01 PM
import unittest
from flask import json
from mongoengine import connect
from manage import app
__author__ = "lincan"
__copyright__ = "Copyright 2016, The uuloong-strategy Project"
__version__ = "0.1"
__maintainer__ = "lincan"
__status__ = "Production"
UniversalHeader = {
"Content-Type": "application/json"
}
class CampaignsTestCase(unittest.TestCase):
def setUp(self):
self.app = app.test_client()
def tearDown(self):
db = connect('uuloong-strategy')
db.drop_database('uuloong-strategy')
def test_post_campaigns(self):
sample_campaigns = {
"name": "ChartBoost_1",
"supplier": "ChartBoost",
"access_info": {
"key": "13213123213123"
},
"enum": "kCampaignsChartBoost"
}
rv = self.app.post('/api/v1.0/campaigns', data=json.dumps(sample_campaigns), headers=UniversalHeader)
self.assertEqual(rv.status_code, 200, "create game should return 200. but got " + str(rv.status_code))
data = json.loads(rv.data)
self.assertEqual(data["name"], "ChartBoost_1", "campaigns name should be 'ChartBoost_1'. but got " + str(data["name"]))
self.assertEqual(data["supplier"], "ChartBoost", "campaigns supplier should be 'ChartBoost'. but got " + str(data["name"]))
self.assertEqual(data["enum"], "kCampaignsChartBoost", "campaigns enum should be 'kCampaignsChartBoost'. but got " + str(data["name"]))
self.assertNotEqual(data["id"], "", "id should not be empty")
def test_get_campaigns(self):
sample_campaigns = {
"name": "ChartBoost_1",
"supplier": "ChartBoost",
"access_info": {
"key": "13213123213123"
},
"enum": "kCampaignsChartBoost"
}
rv = self.app.post('/api/v1.0/campaigns', data=json.dumps(sample_campaigns), headers=UniversalHeader)
self.assertEqual(rv.status_code, 200, "create campaigns should return 200. but got " + str(rv.status_code))
data = json.loads(rv.data)
game_id = data["id"]
        rv = self.app.get('/api/v1.0/campaigns', data=json.dumps(sample_campaigns), headers=UniversalHeader)
        self.assertEqual(rv.status_code, 200, "get campaigns should return 200. but got " + str(rv.status_code))
        data = json.loads(rv.data)
        self.assertEqual(game_id, data[0]["id"], "since the database only has one entry, the returned campaign should match the saved one")
sample_campaigns = {
"name": "ChartBoost_2",
"supplier": "ChartBoost",
"access_info": {
"key": "13213123213123"
},
"enum": "kCampaignsChartBoost"
}
rv = self.app.post('/api/v1.0/campaigns', data=json.dumps(sample_campaigns), headers=UniversalHeader)
self.assertEqual(rv.status_code, 200, "create campaigns should return 200. but got " + str(rv.status_code))
        # get both campaigns
        rv = self.app.get('/api/v1.0/campaigns', data=json.dumps(sample_campaigns), headers=UniversalHeader)
        self.assertEqual(rv.status_code, 200, "get campaigns should return 200. but got " + str(rv.status_code))
        data = json.loads(rv.data)
        self.assertEqual(len(data), 2, "database should have two entries")
        # get a specific campaign
        rv = self.app.get('/api/v1.0/campaigns/' + game_id, data=json.dumps(sample_campaigns), headers=UniversalHeader)
        self.assertEqual(rv.status_code, 200, "get a campaign should return 200. but got " + str(rv.status_code))
        data = json.loads(rv.data)
        self.assertEqual(game_id, data["id"], "the id should match the campaign created first")
def test_put_campaigns(self):
sample_campaigns = {
"name": "ChartBoost_1",
"supplier": "ChartBoost",
"access_info": {
"key": "13213123213123"
},
"enum": "kCampaignsChartBoost"
}
rv = self.app.post('/api/v1.0/campaigns', data=json.dumps(sample_campaigns), headers=UniversalHeader)
self.assertEqual(rv.status_code, 200, "create campaigns should return 200. but got " + str(rv.status_code))
data = json.loads(rv.data)
game_id = data["id"]
sample_campaigns = {
"name": "ChartBoost_2",
"supplier": "Admob",
"access_info": {
"key": "fadsfadfafdasf"
},
"enum": "kCampaignsAdmob"
}
        rv = self.app.put('/api/v1.0/campaigns/' + game_id, data=json.dumps(sample_campaigns), headers=UniversalHeader)
        self.assertEqual(rv.status_code, 200, "update campaigns should return 200. but got " + str(rv.status_code))
        data = json.loads(rv.data)
        self.assertEqual(data["name"], "ChartBoost_2", "campaigns name should have changed to 'ChartBoost_2'")
        self.assertEqual(data["supplier"], "Admob", "campaigns supplier should have changed to 'Admob'")
        self.assertEqual(data["enum"], "kCampaignsAdmob", "campaigns enum should have changed to 'kCampaignsAdmob'")
        self.assertEqual(data["access_info"]["key"], "fadsfadfafdasf", "campaigns access_info -> key should have changed to 'fadsfadfafdasf'")
def test_delete_campaigns(self):
sample_campaigns = {
"name": "ChartBoost_1",
"supplier": "ChartBoost",
"access_info": {
"key": "13213123213123"
},
"enum": "kCampaignsChartBoost"
}
rv = self.app.post('/api/v1.0/campaigns', data=json.dumps(sample_campaigns), headers=UniversalHeader)
self.assertEqual(rv.status_code, 200, "create campaigns should return 200. but got " + str(rv.status_code))
data = json.loads(rv.data)
game_id = data["id"]
        rv = self.app.delete('/api/v1.0/campaigns/' + game_id, data=json.dumps(sample_campaigns), headers=UniversalHeader)
        self.assertEqual(rv.status_code, 200, "delete campaigns should return 200. but got " + str(rv.status_code))
        # try to get the deleted campaign
        rv = self.app.get('/api/v1.0/campaigns/' + game_id, data=json.dumps(sample_campaigns), headers=UniversalHeader)
        self.assertEqual(rv.status_code, 200, "get deleted campaign should return 200. but got " + str(rv.status_code))
        data = json.loads(rv.data)
        self.assertEqual(data.get("id"), None, "deleted campaign should not be returned")
|
import urllib2
import datetime
import time
import random
import os
import csv
import re
from bs4 import BeautifulSoup
seanad_yr_base_address = 'http://oireachtasdebates.oireachtas.ie/debates%20authoring/debateswebpack.nsf/datelist?readform&chamber=seanad&year='
seanad_yr_addresses = {}
for yr in range(1922,2017):
seanad_yr_addresses[yr] = seanad_yr_base_address + str(yr)
## from main year page: all 'opendocument' strings are in a link to a new date's minutes
## also, looks like every month name appears exactly once (unless there are no minutes from that month, eg august sometimes)
## pattern, on the main year page:
## Month
## href link to individual day address, with path after 'oireachtasdebates.oireachtas.ie',
## followed by >DD<
## align='"center"
## next month
## (four center tags before first month)
months = ['January', 'February', 'March', 'April', 'May', 'June', 'July', 'August', 'September','October', 'November', 'December']
## function, takes as args:
## month (string, capitalized)
## text for a single year's main page html,
## base url, for the individual dates' paths to be appended to
## returns:
## dict whose keys are the dates (strings of format 'dd') for which there are legislative minutes,
## and whose values are the URLs for a specific date's minutes
def find_one_months_addresses(month, yr_text, base_address):
date_addresses = {}
m_index = yr_text.find(month)
if m_index==-1:
return None
end_index = yr_text[m_index:].find('align="center"') ## this is found at the end of each month's row on the table
href_indices = [h.start() for h in re.finditer('href',yr_text[m_index:m_index+end_index])]
for i in href_indices:
path_begin_index = m_index + i + 6 ## 6 = length of string 'href="'
path_end_index = yr_text[path_begin_index:].find('">')
path = yr_text[path_begin_index:path_begin_index+path_end_index]
dd = yr_text[path_begin_index+path_end_index+2:path_begin_index+path_end_index+4] ## each link ends with >DD<
date_addresses[dd] = [base_address+path]
return date_addresses
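## Usage sketch (added; assumes `yr_txt` already holds one year's page HTML):
## jan = find_one_months_addresses('January', yr_txt, 'http://oireachtasdebates.oireachtas.ie')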
## takes a year's main page address, calls on function above, returns a dict of the following form:
## keys: months
## values: keys, representing dates for which there are minutes
## values: individual date URLs
def find_one_years_addresses(yr_address):
yr_page = urllib2.urlopen(yr_address)
yr_soup = BeautifulSoup(yr_page.read(), "html.parser")
yr_txt = str(yr_soup)
addresses_by_month = {}
for m in months:
this_months_addresses = find_one_months_addresses(m, yr_txt, 'http://oireachtasdebates.oireachtas.ie')
addresses_by_month[m] = this_months_addresses
return addresses_by_month
## creating a master dict with all individual date URLs for every year
all_date_addresses = {}
for yr in seanad_yr_addresses.keys():
try:
all_date_addresses[yr] = find_one_years_addresses(seanad_yr_addresses[yr]) ## function defined above
except:
print "Error with find_one_years_addresses() for yr: %s" %(yr)
time.sleep(random.uniform(0,5))
pass
### FINDING NUMBER OF PAGES FOR EACH DATE, appending to that date's list
### (which contains the date's page-1 URL and is stored as a value in the all_date_addresses dict)
for yr in all_date_addresses.keys():
for mo in all_date_addresses[yr].keys():
if all_date_addresses[yr][mo] is None:
continue
for d in all_date_addresses[yr][mo].keys():
try:
date_url = all_date_addresses[yr][mo][d][0]
date_page = urllib2.urlopen(date_url)
date_soup = BeautifulSoup(date_page.read(), "html.parser")
date_txt = str(date_soup)
## number of pages is always found after a </select> tag
select_index = date_txt.find('</select>')
num_index = select_index + len('</select> of ')
endex = date_txt[num_index:].find('\n')
n_pages = date_txt[num_index:num_index+endex]
## for each date, appending the num_pages to the end of a list which previously contained only the URL for the
## first page of that date
all_date_addresses[yr][mo][d].append(n_pages)
except:
print "Error getting page numbers for: %s, %s, %s" %(yr, mo, d)
time.sleep(random.uniform(0,5))
pass
## writing csv
c = open('seanad_single_date_urls.csv', 'wb')
c_writer = csv.writer(c)
c_writer.writerow(["Year", "Month", "Day", "URL", "NumPages"])
for yr in all_date_addresses.keys():
for m in all_date_addresses[yr].keys():
if all_date_addresses[yr][m] is None:
continue
for d in all_date_addresses[yr][m].keys():
try:
c_writer.writerow([yr, m, d, all_date_addresses[yr][m][d][0], all_date_addresses[yr][m][d][1] ])
except:
print "ERROR: couldn't write: %s%s%s" %(yr,m,d)
pass
c.flush()
c.close()
|
# coding:utf-8
import requests, re
from mooc_login import get_cookie
from pyquery import PyQuery as pq
username = "username"
password = "password"
cookie = get_cookie(username, password)  # fetch the login cookies via mooc_login
s = requests.Session()
def get_course_id():
course_ids = []
postdata= {
"tabIndex": 1,
"searchType": 0,
"schoolcourseType": 0,
"pageIndex": 1
}
course_page = s.post("http://sgu.chinamoocs.com/portal/ajaxMyCourseIndex.mooc", data=postdata, cookies=cookie).text
    # The AJAX call above returns the course list; the next line parses it
    course_page_pqed = pq(course_page)
    links = course_page_pqed(".view-shadow")
    # Look for .view-shadow elements in the parsed page (the "go study" buttons)
    for link in links:
        course_ids.append(re.split("index/|\.mooc", link.get("href"))[1])
        # Extract the course ID from each link
print 'course_ids', course_ids
return course_ids
def get_course_info(course_id):
info_page = pq(s.get("http://sgu.chinamoocs.com/portal/session/unitNavigation/"+course_id+".mooc", cookies=cookie).text)
cells = info_page(".lecture-title")
    # Parse the course content page and find each lesson's div
    for cell in cells:  # keep only lessons not yet watched (skip completed ones and the chapter exercises)
        cell_ele = pq(cell)
        # unitid_ele = cell_ele(".unitItem")
        if cell_ele(".icon-play-done") != [] or cell_ele(".unitItem").text() == u"练一练 章节练习":  # "chapter practice exercises"
            continue
        # unitid = unitid_ele.attr("unitid")
        itemid = cell_ele(".linkPlay").attr("itemid")
        response = s.get("http://sgu.chinamoocs.com/study/updateDurationVideo.mooc?itemId=" + itemid + "&isOver=2&duration=700000&currentPosition=700000", cookies=cookie)
        # Fire the request that reports the video as fully watched
        if response.status_code != 200:
            print "GGGG"  # request failed
if __name__ == '__main__':
nums = get_course_id()
for num in nums:
get_course_info(num)
|
#!/u/shared/programs/x86_64/python/2.5.5/bin/python
# 2013-05-09, tc
#
# Use:
#  ./pp.py data.d [lbin=1] [binstart=0] outfile U g
#  (note: outfile, U and g are read below when appending the averaged
#   result, so the full six-argument form is required in practice)
#
from numpy import loadtxt
from math import sqrt
from sys import argv
if len(argv)<2:
print '*****************************************'
print 'usage: ./pp.py filename [lbin=1] [binstart=0]'
print '*****************************************'
quit()
elif len(argv)<3:
print '[default]: lbin=1, binstart=0'
filename = argv[1]
lbin=1
binstart=0
elif len(argv)<4:
print '[default]: binstart=0'
filename = argv[1]
lbin=int(argv[2])
binstart=0
else:
filename = argv[1]
lbin=int(argv[2])
binstart=int(argv[3])
fileoutput=argv[4]
out = open(fileoutput,'a')
UU=argv[5]
gg=argv[6]
#import data
y=loadtxt(filename)
rows=len(y)
if len(y.shape)>1:
    print 'ERROR: this script only works on single-column (1-D) data files'
quit()
#binning
j=0
bin=0
ybins=[]
if(rows%lbin==0): maxj=rows
else: maxj=rows-lbin
while j<maxj:
sum=0
while j<(bin+1)*lbin:
sum+=y[j]
j+=1
ybins.append(sum/lbin*1.)
bin+=1
nbinstot=bin
nbins=nbinstot-binstart
#analysis of the binned data
sum=0
sum2=0
count=0
for i in range(binstart,nbinstot,1):
sum+=ybins[i]
sum2+=ybins[i]**2
count+=1
#check
if(count!=nbins):
print 'ERROR: there are',count,'bins, not',nbins,"\n"
quit()
av=sum/nbins
av2=sum2/nbins
var=av2-av**2
std=sqrt(var/nbins)
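# (added note) std is the standard error of the binned mean:
# sqrt((<y^2> - <y>^2)/nbins); this assumes the bins are long enough to be
# effectively independent samples.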
#output
#print 'nrows: ',nbinstot*lbin,'+',rows-nbinstot*lbin
#print 'binning: ',nbinstot,'bins, L =',lbin
#print 'starting bin: ',binstart
#print 'n bins: ',nbins
#print 'average: ',av
#print 'std (av): ',std
out.write("%s " % UU)
out.write("%s " % gg)
out.write("%s " % av)
out.write("%s\n" % std)
print av
quit()
#plot
#pyplot.figure()
#pyplot.plot(ybins)
#pyplot.axhline(av,linewidth=1.5,color='r')
#pyplot.axhline(av+std,linewidth=1,color='r')
#pyplot.axhline(av-std,linewidth=1,color='r')
#pyplot.draw()
#pyplot.show()
#
|
f = open('lab3-2.txt', 'r')
def push(val):
global top
top += 1
stack[top] = val
def pop():
global top
fin = top
top -= 1
return stack[fin]
def isEmpty():
global top
return top == -1
def isFull():
    global top
    return top == len(stack) - 1  # top indexes the last filled slot, so full means len-1
if __name__ == "__main__":
a = f.read()
top = -1
stack = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
print("check stack full: " + str(isFull()))
print("check stack empty: " + str(isEmpty()))
print()
    for data in a.split():
j = 0
if (len(data) % 2 == 0):
while j < (len(data) // 2):
push(data[j])
j += 1
else:
while j < (len(data) // 2):
push(data[j])
j += 1
j += 1
while j < len(data):
if (pop() == data[j]):
j += 1
else:
print(data + " Error")
break
if j == len(data):
print(data + " PALINDROME")
|
import pygame
pygame.init()
screen = pygame.display.set_mode((320, 470))
color = (88, 89, 90)
color_light = (208, 0, 147)
color_text = (255, 255, 255)
color_dark = (174, 0, 255)
width = 500
height = 500
color_blue = (85, 182, 217)
color_orange = (255, 110, 63)
color_red = (255, 0, 0)
screen.fill((50, 50, 50))
smallfont = pygame.font.Font('Moderat-Black.ttf', 35)
text1 = smallfont.render('0', True, color_text)
text2 = smallfont.render('.', True, color_text)
text3 = smallfont.render('=', True, color_text)
text4 = smallfont.render('/', True, color_text)
text5 = smallfont.render('1', True, color_text)
text6 = smallfont.render('2', True, color_text)
text7 = smallfont.render('3', True, color_text)
text8 = smallfont.render('x', True, color_text)
text9 = smallfont.render('4', True, color_text)
text10 = smallfont.render('5', True, color_text)
text11 = smallfont.render('6', True, color_text)
text12 = smallfont.render('-', True, color_text)
text13 = smallfont.render('7', True, color_text)
text14 = smallfont.render('8', True, color_text)
text15 = smallfont.render('9', True, color_text)
text16 = smallfont.render('+', True, color_text)
textclear = smallfont.render('Clear', True, color_text)
calcScreen = pygame.draw.rect(screen, color, [20, 20, 280, 75])
# First Row Buttons
button7 = pygame.draw.rect(screen, color, [20, 120, 60, 50])
button8 = pygame.draw.rect(screen, color, [94, 120, 60, 50])
button9 = pygame.draw.rect(screen, color, [167.5, 120, 60, 50])
buttonAddition = pygame.draw.rect(screen, color_orange, [240, 120, 60, 50])
# Second Row Buttons
button4 = pygame.draw.rect(screen, color, [20, 190, 60, 50])
button5 = pygame.draw.rect(screen, color, [94, 190, 60, 50])
button6 = pygame.draw.rect(screen, color, [167.5, 190, 60, 50])
buttonSubtraction = pygame.draw.rect(screen, color_orange, [240, 190, 60, 50])
# Third Row Buttons
button1 = pygame.draw.rect(screen, color, [20, 260, 60, 50])
button2 = pygame.draw.rect(screen, color, [94, 260, 60, 50])
button3 = pygame.draw.rect(screen, color, [167.5, 260, 60, 50])
buttonMultiply = pygame.draw.rect(screen, color_orange, [240, 260, 60, 50])
# Fourth Row Buttons
button0 = pygame.draw.rect(screen, color, [20, 330, 60, 50])
buttonDecimal = pygame.draw.rect(screen, color_blue, [94, 330, 60, 50])
buttonEqual = pygame.draw.rect(screen, color_blue, [167.5, 330, 60, 50])
buttonDivide = pygame.draw.rect(screen, color_orange, [240, 330, 60, 50])
# Clear Button
buttonClear = pygame.draw.rect(screen, color_red, [20, 400, 282, 50])
#Fourth Row Text (0 . = /)
screen.blit(text1, (20 + 20, 330 + 3))
screen.blit(text2, (100 + 20, 330 + 3))
screen.blit(text3, (167.5 + 20, 330 + 3))
screen.blit(text4, (240 + 20, 330 + 3))
#Third Row Text (1 2 3 x)
screen.blit(text5, (20 + 20, 260 + 3))
screen.blit(text6, (94 + 20, 260 + 3))
screen.blit(text7, (167.5 + 20, 260 + 3))
screen.blit(text8, (240 + 20, 260 + 3))
#Second Row Text (4 5 6 -)
screen.blit(text9, (20 + 20, 190 + 3))
screen.blit(text10, (94 + 20, 190 + 3))
screen.blit(text11, (167.5 + 20, 190 + 3))
screen.blit(text12, (240 + 20, 190 + 3))
#First Row Text (7 8 9 +)
screen.blit(text13, (20 + 20, 120 + 3))
screen.blit(text14, (94 + 20, 120 + 3))
screen.blit(text15, (167.5 + 20, 120 + 3))
screen.blit(text16, (240 + 20, 120 + 3))
#Clear Text
screen.blit(textclear, (20 + 95, 282 + 120))
# GAME LOOP
answer = ""
blank = ""
while True:
textanswer = smallfont.render(answer, True, color_text)
screen.blit(textanswer, (20 + 15, 33 + 3))
    mouse = pygame.mouse.get_pos()  # read the cursor before handling clicks (it was previously read after the event loop, so the first click raised a NameError)
    for ev in pygame.event.get():
        if ev.type == pygame.QUIT:
            pygame.quit()
            raise SystemExit  # stop cleanly once pygame has shut down
        if ev.type == pygame.MOUSEBUTTONDOWN:
if 20 <= mouse[0] <= 80 and 120 <= mouse[1] <= 170:
answer+="7"
elif 94 <= mouse[0] <= 154 and 120 <= mouse[1] <= 170:
answer += "8"
elif 167.5 <= mouse[0] <= 227.5 and 120 <= mouse[1] <= 170:
answer += "9"
elif 240 <= mouse[0] <= 300 and 120 <= mouse[1] <= 170:
answer += "+"
elif 20 <= mouse[0] <= 80 and 190 <= mouse[1] <= 240:
answer += "4"
elif 94 <= mouse[0] <= 154 and 190 <= mouse[1] <= 240:
answer += "5"
elif 167.5 <= mouse[0] <= 227.5 and 190 <= mouse[1] <= 240:
answer += "6"
elif 240 <= mouse[0] <= 300 and 190 <= mouse[1] <= 240:
answer += "-"
elif 20 <= mouse[0] <= 80 and 260 <= mouse[1] <= 310:
answer += "1"
elif 94 <= mouse[0] <= 154 and 260 <= mouse[1] <= 310:
answer += "2"
elif 167.5 <= mouse[0] <= 227.5 and 260 <= mouse[1] <= 310:
answer += "3"
elif 240 <= mouse[0] <= 300 and 260 <= mouse[1] <= 310:
answer += "*"
elif 20 <= mouse[0] <= 80 and 330 <= mouse[1] <= 380:
answer += "0"
elif 94 <= mouse[0] <= 154 and 330 <= mouse[1] <= 380:
answer += "."
elif 167.5 <= mouse[0] <= 227.5 and 330 <= mouse[1] <= 380:
print ("Equal")
calcScreen1 = pygame.draw.rect(screen, color, [20, 20, 280, 75])
answer = str(eval(answer))
elif 240 <= mouse[0] <= 300 and 330 <= mouse[1] <= 380:
answer += "/"
elif 20 <= mouse[0] <= 302 and 400 <= mouse[1] <= 450:
calcScreen2 = pygame.draw.rect(screen, color, [20, 20, 280, 75])
answer = ""
pygame.display.update()
|
import base64
import json
import Queue
import ssl
import time
import urllib
import urllib2
import uuid
import random
import string
from threading import Thread
from time import sleep
from concurrent.futures import ThreadPoolExecutor
from hawkeye_test_runner import (HawkeyeTestCase, HawkeyeTestSuite,
DeprecatedHawkeyeTestCase)
from hawkeye_utils import HawkeyeConstants
__author__ = 'hiranya'
ALL_PROJECTS = {}
SYNAPSE_MODULES = {}
def clear_kind(requests, kind):
entities = requests.get(
'/{{lang}}/datastore/kind_query?kind={}'.format(kind)).json()
paths = [entity['path'] for entity in entities]
for path in paths:
encoded_path = base64.urlsafe_b64encode(json.dumps(path))
requests.delete('/{{lang}}/datastore/manage_entity'
'?pathBase64={}'.format(encoded_path))
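# Usage sketch (added; `requests` above is assumed to be a session-like HTTP
# client bound to the app under test, as its .get/.delete calls suggest):
# clear_kind(http_session, 'Project')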
class DataStoreCleanupTest(DeprecatedHawkeyeTestCase):
def run_hawkeye_test(self):
response = self.http_delete('/datastore/module')
self.assertEquals(response.status, 200)
response = self.http_delete('/datastore/project')
self.assertEquals(response.status, 200)
response = self.http_delete('/datastore/transactions')
self.assertEquals(response.status, 200)
class SimpleKindAwareInsertTest(DeprecatedHawkeyeTestCase):
def run_hawkeye_test(self):
response = self.http_post('/datastore/project',
'name={0}&description=Mediation Engine&rating=8&license=L1'.format(
HawkeyeConstants.PROJECT_SYNAPSE))
project_info = json.loads(response.payload)
self.assertEquals(response.status, 201)
self.assertTrue(project_info['success'])
project_id = project_info['project_id']
self.assertTrue(project_id is not None)
ALL_PROJECTS[HawkeyeConstants.PROJECT_SYNAPSE] = project_id
response = self.http_post('/datastore/project',
'name={0}&description=XML Parser&rating=6&license=L1'.format(
HawkeyeConstants.PROJECT_XERCES))
project_info = json.loads(response.payload)
self.assertEquals(response.status, 201)
self.assertTrue(project_info['success'])
project_id = project_info['project_id']
self.assertTrue(project_id is not None)
ALL_PROJECTS[HawkeyeConstants.PROJECT_XERCES] = project_id
response = self.http_post('/datastore/project',
'name={0}&description=MapReduce Framework&rating=10&license=L2'.format(
HawkeyeConstants.PROJECT_HADOOP))
project_info = json.loads(response.payload)
self.assertEquals(response.status, 201)
self.assertTrue(project_info['success'])
project_id = project_info['project_id']
self.assertTrue(project_id is not None)
ALL_PROJECTS[HawkeyeConstants.PROJECT_HADOOP] = project_id
    # Allow some time for eventual consistency to run its course
sleep(5)
class KindAwareInsertWithParentTest(DeprecatedHawkeyeTestCase):
def run_hawkeye_test(self):
response = self.http_post('/datastore/module',
'name={0}&description=A Mediation Core&project_id={1}'.format(
HawkeyeConstants.MOD_CORE,
ALL_PROJECTS[HawkeyeConstants.PROJECT_SYNAPSE]))
mod_info = json.loads(response.payload)
self.assertEquals(response.status, 201)
self.assertTrue(mod_info['success'])
module_id = mod_info['module_id']
self.assertTrue(module_id is not None)
SYNAPSE_MODULES[HawkeyeConstants.MOD_CORE] = module_id
response = self.http_post('/datastore/module',
'name={0}&description=Z NIO HTTP transport&project_id={1}'.format(
HawkeyeConstants.MOD_NHTTP,
ALL_PROJECTS[HawkeyeConstants.PROJECT_SYNAPSE]))
mod_info = json.loads(response.payload)
self.assertEquals(response.status, 201)
self.assertTrue(mod_info['success'])
module_id = mod_info['module_id']
self.assertTrue(module_id is not None)
SYNAPSE_MODULES[HawkeyeConstants.MOD_NHTTP] = module_id
class SimpleKindAwareQueryTest(DeprecatedHawkeyeTestCase):
def run_hawkeye_test(self):
project_list = self.assert_and_get_list('/datastore/project')
for entry in project_list:
response = self.http_get('/datastore/project?id={0}'.
format(entry['project_id']))
entity_list = json.loads(response.payload)
project_info = entity_list[0]
self.assertEquals(len(entity_list), 1)
self.assertEquals(project_info['name'], entry['name'])
module_list = self.assert_and_get_list('/datastore/module')
for entry in module_list:
response = self.http_get('/datastore/module?id={0}'.
format(entry['module_id']))
entity_list = json.loads(response.payload)
mod_info = entity_list[0]
self.assertEquals(len(entity_list), 1)
self.assertEquals(mod_info['name'], entry['name'])
class ZigZagQueryTest(DeprecatedHawkeyeTestCase):
def run_hawkeye_test(self):
response = self.http_get('/datastore/zigzag')
self.assertEquals(response.status, 200)
class AncestorQueryTest(DeprecatedHawkeyeTestCase):
def run_hawkeye_test(self):
entity_list = self.assert_and_get_list('/datastore/project_modules?' \
'project_id={0}'.format(ALL_PROJECTS[HawkeyeConstants.PROJECT_SYNAPSE]))
modules = []
for entity in entity_list:
if entity['type'] == 'module':
modules.append(entity['name'])
self.assertEquals(len(modules), 2)
self.assertTrue(modules.index(HawkeyeConstants.MOD_CORE) != -1)
self.assertTrue(modules.index(HawkeyeConstants.MOD_NHTTP) != -1)
class OrderedKindAncestorQueryTest(DeprecatedHawkeyeTestCase):
def run_hawkeye_test(self):
entity_list = self.assert_and_get_list('/datastore/project_modules?' \
'project_id={0}&order=module_id'.format(\
ALL_PROJECTS[HawkeyeConstants.PROJECT_SYNAPSE]))
modules = []
for entity in entity_list:
if entity['type'] == 'module':
modules.append(entity['name'])
entity_list = self.assert_and_get_list('/datastore/project_modules?' \
'project_id={0}&order=description'.format(\
ALL_PROJECTS[HawkeyeConstants.PROJECT_SYNAPSE]))
modules = []
for entity in entity_list:
if entity['type'] == 'module':
modules.append(entity['name'])
entity_list = self.assert_and_get_list('/datastore/project_modules?' \
'project_id={0}&order=name'.format(\
ALL_PROJECTS[HawkeyeConstants.PROJECT_SYNAPSE]))
modules = []
for entity in entity_list:
if entity['type'] == 'module':
modules.append(entity['name'])
self.assertEquals(len(modules), 2)
self.assertTrue(modules.index(HawkeyeConstants.MOD_CORE) != -1)
self.assertTrue(modules.index(HawkeyeConstants.MOD_NHTTP) != -1)
class KindlessQueryTest(DeprecatedHawkeyeTestCase):
def run_hawkeye_test(self):
entity_list = self.assert_and_get_list(
'/datastore/project_keys?comparator=gt&project_id={0}'.format(
ALL_PROJECTS[HawkeyeConstants.PROJECT_SYNAPSE]))
self.assertTrue(len(entity_list) == 3 or len(entity_list) == 4)
for entity in entity_list:
self.assertNotEquals(entity['name'], HawkeyeConstants.PROJECT_SYNAPSE)
entity_list = self.assert_and_get_list(
'/datastore/project_keys?comparator=ge&project_id={0}'.format(
ALL_PROJECTS[HawkeyeConstants.PROJECT_SYNAPSE]))
self.assertTrue(len(entity_list) == 4 or len(entity_list) == 5)
project_seen = False
for entity in entity_list:
if entity['name'] == HawkeyeConstants.PROJECT_SYNAPSE:
project_seen = True
break
self.assertTrue(project_seen)
class KindlessAncestorQueryTest(DeprecatedHawkeyeTestCase):
def run_hawkeye_test(self):
entity_list = self.assert_and_get_list(
'/datastore/project_keys?ancestor=true&comparator=gt&project_id={0}'.
format(ALL_PROJECTS[HawkeyeConstants.PROJECT_SYNAPSE]))
self.assertTrue(len(entity_list) == 1 or len(entity_list) == 2)
for entity in entity_list:
self.assertNotEquals(entity['name'], HawkeyeConstants.PROJECT_SYNAPSE)
self.assertNotEquals(entity['name'], HawkeyeConstants.PROJECT_XERCES)
self.assertNotEquals(entity['name'], HawkeyeConstants.PROJECT_HADOOP)
entity_list = self.assert_and_get_list(
'/datastore/project_keys?ancestor=true&comparator=ge&project_id={0}'.
format(ALL_PROJECTS[HawkeyeConstants.PROJECT_SYNAPSE]))
self.assertTrue(len(entity_list) == 2 or len(entity_list) == 3)
project_seen = False
for entity in entity_list:
self.assertNotEquals(entity['name'], HawkeyeConstants.PROJECT_XERCES)
self.assertNotEquals(entity['name'], HawkeyeConstants.PROJECT_HADOOP)
if entity['name'] == 'Synapse':
project_seen = True
break
self.assertTrue(project_seen)
class QueryByKeyNameTest(DeprecatedHawkeyeTestCase):
def run_hawkeye_test(self):
response = self.http_get('/datastore/entity_names?project_name={0}'.
format(HawkeyeConstants.PROJECT_SYNAPSE))
entity = json.loads(response.payload)
self.assertEquals(entity['project_id'],
ALL_PROJECTS[HawkeyeConstants.PROJECT_SYNAPSE])
response = self.http_get('/datastore/entity_names?project_name={0}&' \
'module_name={1}'.
format(HawkeyeConstants.PROJECT_SYNAPSE, HawkeyeConstants.MOD_CORE))
entity = json.loads(response.payload)
self.assertEquals(entity['module_id'],
SYNAPSE_MODULES[HawkeyeConstants.MOD_CORE])
class SinglePropertyBasedQueryTest(DeprecatedHawkeyeTestCase):
def run_hawkeye_test(self):
entity_list = self.assert_and_get_list('/datastore/project_ratings?'
'rating=10&comparator=eq')
self.assertEquals(len(entity_list), 1)
self.assertEquals(entity_list[0]['name'], HawkeyeConstants.PROJECT_HADOOP)
entity_list = self.assert_and_get_list('/datastore/project_ratings?'
'rating=6&comparator=gt')
self.assertEquals(len(entity_list), 2)
for entity in entity_list:
self.assertNotEquals(entity['name'], HawkeyeConstants.PROJECT_XERCES)
entity_list = self.assert_and_get_list('/datastore/project_ratings?'
'rating=6&comparator=ge')
self.assertEquals(len(entity_list), 3)
entity_list = self.assert_and_get_list('/datastore/project_ratings?'
'rating=8&comparator=lt')
self.assertEquals(len(entity_list), 1)
self.assertEquals(entity_list[0]['name'], HawkeyeConstants.PROJECT_XERCES)
entity_list = self.assert_and_get_list('/datastore/project_ratings?'
'rating=8&comparator=le')
self.assertEquals(len(entity_list), 2)
for entity in entity_list:
self.assertNotEquals(entity['name'], HawkeyeConstants.PROJECT_HADOOP)
entity_list = self.assert_and_get_list('/datastore/project_ratings?'
'rating=8&comparator=ne')
self.assertEquals(len(entity_list), 2)
for entity in entity_list:
self.assertNotEquals(entity['name'], HawkeyeConstants.PROJECT_SYNAPSE)
    # assert_and_get_list is expected to raise AssertionError for an empty
    # result. The original also placed self.fail() inside the try block, so
    # the failure it raised was swallowed by the except clause.
    raised = False
    try:
      self.assert_and_get_list('/datastore/project_ratings?'
                               'rating=5&comparator=le')
    except AssertionError:
      raised = True
    if not raised:
      self.fail('Returned an unexpected result')
class OrderedResultQueryTest(DeprecatedHawkeyeTestCase):
def run_hawkeye_test(self):
entity_list = self.assert_and_get_list('/datastore/project_ratings?'
'rating=6&comparator=ge&desc=true')
self.assertEquals(len(entity_list), 3)
last_rating = 100
for entity in entity_list:
      self.assertTrue(entity['rating'] <= last_rating)  # ratings must be in descending order
last_rating = entity['rating']
class LimitedResultQueryTest(DeprecatedHawkeyeTestCase):
def run_hawkeye_test(self):
entity_list = self.assert_and_get_list('/datastore/project_ratings?'
'rating=6&comparator=ge&limit=2')
self.assertEquals(len(entity_list), 2)
entity_list = self.assert_and_get_list('/datastore/project_ratings?rating=6'
'&comparator=ge&limit=2&desc=true')
self.assertEquals(len(entity_list), 2)
last_rating = 100
for entity in entity_list:
      self.assertTrue(entity['rating'] <= last_rating)  # ratings must be in descending order
last_rating = entity['rating']
class ProjectionQueryTest(DeprecatedHawkeyeTestCase):
def run_hawkeye_test(self):
entity_list = self.assert_and_get_list('/datastore/project_fields?'
'fields=project_id,name')
self.assertEquals(len(entity_list), 3)
for entity in entity_list:
self.assertTrue(entity.get('rating') is None)
self.assertTrue(entity.get('description') is None)
self.assertTrue(entity['project_id'] is not None)
self.assertTrue(entity['name'] is not None)
entity_list = self.assert_and_get_list('/datastore/project_fields?'
'fields=name,rating&rate_limit=8')
self.assertEquals(len(entity_list), 2)
for entity in entity_list:
self.assertTrue(entity['rating'] is not None)
self.assertTrue(entity.get('description') is None)
self.assertTrue(entity.get('project_id') is None)
self.assertTrue(entity['name'] is not None)
self.assertNotEquals(entity['name'], HawkeyeConstants.PROJECT_XERCES)
class GQLProjectionQueryTest(DeprecatedHawkeyeTestCase):
def run_hawkeye_test(self):
entity_list = self.assert_and_get_list('/datastore/project_fields?'
'fields=name,rating&gql=true')
self.assertEquals(len(entity_list), 3)
for entity in entity_list:
self.assertTrue(entity['rating'] is not None)
self.assertTrue(entity.get('description') is None)
self.assertTrue(entity.get('project_id') is None)
self.assertTrue(entity['name'] is not None)
class CompositeQueryTest(DeprecatedHawkeyeTestCase):
def run_hawkeye_test(self):
entity_list = self.assert_and_get_list('/datastore/project_filter?'
'license=L1&rate_limit=5')
self.assertEquals(len(entity_list), 2)
for entity in entity_list:
self.assertNotEquals(entity['name'], HawkeyeConstants.PROJECT_HADOOP)
entity_list = self.assert_and_get_list('/datastore/project_filter?'
'license=L1&rate_limit=8')
self.assertEquals(len(entity_list), 1)
self.assertEquals(entity_list[0]['name'], HawkeyeConstants.PROJECT_SYNAPSE)
entity_list = self.assert_and_get_list('/datastore/project_filter?'
'license=L2&rate_limit=5')
self.assertEquals(len(entity_list), 1)
self.assertEquals(entity_list[0]['name'], HawkeyeConstants.PROJECT_HADOOP)
class SimpleTransactionTest(DeprecatedHawkeyeTestCase):
def run_hawkeye_test(self):
key = str(uuid.uuid1())
response = self.http_get('/datastore/transactions?' \
'key={0}&amount=1'.format(key))
entity = json.loads(response.payload)
self.assertTrue(entity['success'])
self.assertEquals(entity['counter'], 1)
response = self.http_get('/datastore/transactions?' \
'key={0}&amount=1'.format(key))
entity = json.loads(response.payload)
self.assertTrue(entity['success'])
self.assertEquals(entity['counter'], 2)
response = self.http_get('/datastore/transactions?' \
'key={0}&amount=3'.format(key))
entity = json.loads(response.payload)
self.assertFalse(entity['success'])
self.assertEquals(entity['counter'], 2)
class CrossGroupTransactionTest(DeprecatedHawkeyeTestCase):
def run_hawkeye_test(self):
key = str(uuid.uuid1())
response = self.http_get('/datastore/transactions?' \
'key={0}&amount=1&xg=true'.format(key))
entity = json.loads(response.payload)
self.assertTrue(entity['success'])
self.assertEquals(entity['counter'], 1)
self.assertEquals(entity['backup'], 1)
response = self.http_get('/datastore/transactions?' \
'key={0}&amount=1&xg=true'.format(key))
entity = json.loads(response.payload)
self.assertTrue(entity['success'])
self.assertEquals(entity['counter'], 2)
self.assertEquals(entity['backup'], 2)
response = self.http_get('/datastore/transactions?' \
'key={0}&amount=3&xg=true'.format(key))
entity = json.loads(response.payload)
self.assertFalse(entity['success'])
self.assertEquals(entity['counter'], 2)
self.assertEquals(entity['backup'], 2)
class QueryCursorTest(DeprecatedHawkeyeTestCase):
def run_hawkeye_test(self):
project1 = self.assert_and_get_list('/datastore/project_cursor')
project2 = self.assert_and_get_list('/datastore/project_cursor?' \
'cursor={0}'.format(project1['next']))
project3 = self.assert_and_get_list('/datastore/project_cursor?' \
'cursor={0}'.format(project2['next']))
projects = [ project1['project'], project2['project'], project3['project'] ]
self.assertTrue(HawkeyeConstants.PROJECT_SYNAPSE in projects)
self.assertTrue(HawkeyeConstants.PROJECT_XERCES in projects)
self.assertTrue(HawkeyeConstants.PROJECT_HADOOP in projects)
project4 = self.assert_and_get_list('/datastore/project_cursor?' \
'cursor={0}'.format(project3['next']))
self.assertTrue(project4['project'] is None)
self.assertTrue(project4['next'] is None)
class JDOIntegrationTest(DeprecatedHawkeyeTestCase):
def run_hawkeye_test(self):
response = self.http_put('/datastore/jdo_project',
'name=Cassandra&rating=10')
self.assertEquals(response.status, 201)
project_info = json.loads(response.payload)
self.assertTrue(project_info['success'])
project_id = project_info['project_id']
response = self.http_get('/datastore/jdo_project?project_id=' + project_id)
self.assertEquals(response.status, 200)
project_info = json.loads(response.payload)
self.assertEquals(project_info['name'], 'Cassandra')
self.assertEquals(project_info['rating'], 10)
response = self.http_post('/datastore/jdo_project',
'proj
|