hexsha: stringlengths (40-40)
size: int64 (5-2.06M)
ext: stringclasses (11 values)
lang: stringclasses (1 value)
max_stars_repo_path: stringlengths (3-251)
max_stars_repo_name: stringlengths (4-130)
max_stars_repo_head_hexsha: stringlengths (40-78)
max_stars_repo_licenses: listlengths (1-10)
max_stars_count: int64 (1-191k)
max_stars_repo_stars_event_min_datetime: stringlengths (24-24)
max_stars_repo_stars_event_max_datetime: stringlengths (24-24)
max_issues_repo_path: stringlengths (3-251)
max_issues_repo_name: stringlengths (4-130)
max_issues_repo_head_hexsha: stringlengths (40-78)
max_issues_repo_licenses: listlengths (1-10)
max_issues_count: int64 (1-116k)
max_issues_repo_issues_event_min_datetime: stringlengths (24-24)
max_issues_repo_issues_event_max_datetime: stringlengths (24-24)
max_forks_repo_path: stringlengths (3-251)
max_forks_repo_name: stringlengths (4-130)
max_forks_repo_head_hexsha: stringlengths (40-78)
max_forks_repo_licenses: listlengths (1-10)
max_forks_count: int64 (1-105k)
max_forks_repo_forks_event_min_datetime: stringlengths (24-24)
max_forks_repo_forks_event_max_datetime: stringlengths (24-24)
content: stringlengths (1-1.05M)
avg_line_length: float64 (1-1.02M)
max_line_length: int64 (3-1.04M)
alphanum_fraction: float64 (0-1)
7252008c26b1662083a1400694c806c34e33ed67
910
py
Python
graviteeio_cli/lint/functions/length.py
gravitee-io/gravitee-cli
8e3bf9f2c0c2873e0f6e67f8fcaf0d3b6c44b3ca
[ "Apache-2.0" ]
12
2019-05-29T20:06:01.000Z
2020-10-07T07:40:27.000Z
graviteeio_cli/lint/functions/length.py
gravitee-io/graviteeio-cli
0e0069b00ce40813efc7d40142a6dc4b4ec7a261
[ "Apache-2.0" ]
41
2019-11-04T18:18:18.000Z
2021-04-22T16:12:51.000Z
graviteeio_cli/lint/functions/length.py
gravitee-io/gravitee-cli
8e3bf9f2c0c2873e0f6e67f8fcaf0d3b6c44b3ca
[ "Apache-2.0" ]
6
2019-06-18T04:27:49.000Z
2021-06-02T17:52:24.000Z
from graviteeio_cli.lint.types.function_result import FunctionResult


def length(value, **kwargs):
    """Count the length of a string or an array, the number of properties in an
    object, or a numeric value, and check it against minimum and/or maximum values."""
    min = None
    max = None

    if "min" in kwargs and type(kwargs["min"]) is int:
        min = kwargs["min"]
    if "max" in kwargs and type(kwargs["max"]) is int:
        max = kwargs["max"]

    value_length = 0
    if value:
        # Numeric values are compared directly; everything else is measured with len().
        if isinstance(value, (int, float)):
            value_length = value
        else:
            value_length = len(value)

    results = []
    if min and value_length < min:
        results.append(
            FunctionResult("min length is {}".format(min))
        )
    if max and value_length > max:
        results.append(
            FunctionResult("max length is {}".format(max))
        )
    return results
26
152
0.597802
a0c60f619b683347cb7cc9f4f6e9936af96f0dbd
27,874
py
Python
smartrecruiters_python_client/apis/analytics_api.py
roksela/smartrecruiters-python-client
6d0849d173a3d6718b5f0769098f4c76857f637d
[ "MIT" ]
5
2018-03-27T08:20:13.000Z
2022-03-30T06:23:38.000Z
smartrecruiters_python_client/apis/analytics_api.py
roksela/smartrecruiters-python-client
6d0849d173a3d6718b5f0769098f4c76857f637d
[ "MIT" ]
null
null
null
smartrecruiters_python_client/apis/analytics_api.py
roksela/smartrecruiters-python-client
6d0849d173a3d6718b5f0769098f4c76857f637d
[ "MIT" ]
2
2018-12-05T04:48:37.000Z
2020-12-17T12:12:12.000Z
# coding: utf-8

"""
    Unofficial python library for the SmartRecruiters API

    The SmartRecruiters API provides a platform to integrate services or applications, build apps and create fully customizable career sites. It exposes SmartRecruiters functionality and allows to connect and build software enhancing it.

    OpenAPI spec version: 1

    Generated by: https://github.com/swagger-api/swagger-codegen.git
"""

from __future__ import absolute_import

import sys
import os
import re

# python 2 and python 3 compatibility library
from six import iteritems

from ..configuration import Configuration
from ..api_client import ApiClient
44.10443
280
0.583052
a0c68d4449b586355649b08e113c775fd8d862f6
398
py
Python
Timofei-Khirianov-2019/lesson_001/003_anketa.py
anklav24/Python-Education
49ebcfabda1376390ee71e1fe321a51e33831f9e
[ "Apache-2.0" ]
null
null
null
Timofei-Khirianov-2019/lesson_001/003_anketa.py
anklav24/Python-Education
49ebcfabda1376390ee71e1fe321a51e33831f9e
[ "Apache-2.0" ]
null
null
null
Timofei-Khirianov-2019/lesson_001/003_anketa.py
anklav24/Python-Education
49ebcfabda1376390ee71e1fe321a51e33831f9e
[ "Apache-2.0" ]
null
null
null
name = input('Hello! What is your name? : ')
print('Nice to meet you,', name + '!')
print()
age = int(input('How old are you ' + name + '? : '))
print()

x = age + 1
print(' ', x, end=' ')
if x >= 11 and x <= 19:
    print('', end='')
elif x % 10 == 1:
    print('', end='')
elif x % 10 >= 2 and x % 10 <= 4:
    print('', end='')
else:
    print('', end='')
print('!')
19.9
52
0.502513
a0c69fd6e11617fc5f9eb586f7c2029856d0877b
2,399
py
Python
Technical_Indicators/rainbow_charts.py
vhn0912/Finance
39cf49d4d778d322537531cee4ce3981cc9951f9
[ "MIT" ]
441
2020-04-22T02:21:19.000Z
2022-03-29T15:00:24.000Z
Technical_Indicators/rainbow_charts.py
happydasch/Finance
4f6c5ea8f60fb0dc3b965ffb9628df83c2ecef35
[ "MIT" ]
5
2020-07-06T15:19:58.000Z
2021-07-23T18:32:29.000Z
Technical_Indicators/rainbow_charts.py
happydasch/Finance
4f6c5ea8f60fb0dc3b965ffb9628df83c2ecef35
[ "MIT" ]
111
2020-04-21T11:40:39.000Z
2022-03-20T07:26:17.000Z
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import warnings
warnings.filterwarnings("ignore")
import yfinance as yf
yf.pdr_override()
import datetime as dt

# input
symbol = 'AAPL'
start = dt.date.today() - dt.timedelta(days = 365*2)
end = dt.date.today()

# Read data
df = yf.download(symbol,start,end)

# R=red, O=orange, Y=yellow, G=green, B=blue, I = indigo, and V=violet
df['Red'] = df['Adj Close'].rolling(2).mean()
df['Orange'] = df['Red'].rolling(2).mean()
df['Yellow'] = df['Orange'].rolling(2).mean()
df['Green'] = df['Yellow'].rolling(2).mean()
df['Blue'] = df['Green'].rolling(2).mean()
df['Indigo'] = df['Blue'].rolling(2).mean()
df['Violet'] = df['Indigo'].rolling(2).mean()
df = df.dropna()

colors = ['k','r', 'orange', 'yellow', 'g', 'b', 'indigo', 'violet']
df[['Adj Close','Red','Orange','Yellow','Green','Blue','Indigo','Violet']].plot(colors=colors, figsize=(18,12))
plt.fill_between(df.index, df['Low'], df['High'], color='grey', alpha=0.4)
plt.plot(df['Low'], c='darkred', linestyle='--', drawstyle="steps")
plt.plot(df['High'], c='forestgreen', linestyle='--', drawstyle="steps")
plt.title('Rainbow Charts')
plt.legend(loc='best')
plt.xlabel('Date')
plt.ylabel('Price')
plt.show()

# ## Candlestick with Rainbow
from matplotlib import dates as mdates

dfc = df.copy()
dfc['VolumePositive'] = dfc['Open'] < dfc['Adj Close']
#dfc = dfc.dropna()
dfc = dfc.reset_index()
dfc['Date'] = mdates.date2num(dfc['Date'].tolist())

from mplfinance.original_flavor import candlestick_ohlc

fig, ax1 = plt.subplots(figsize=(20,12))
candlestick_ohlc(ax1,dfc.values, width=0.5, colorup='g', colordown='r', alpha=1.0)
#colors = ['red', 'orange', 'yellow', 'green', 'blue', 'indigo', 'violet']
#labels = ['Red', 'Orange', 'Yellow', 'Green', 'Blue', 'Indigo', 'Violet']
for i in dfc[['Red', 'Orange', 'Yellow', 'Green', 'Blue', 'Indigo', 'Violet']]:
    ax1.plot(dfc['Date'], dfc[i], color=i, label=i)
ax1.xaxis_date()
ax1.xaxis.set_major_formatter(mdates.DateFormatter('%d-%m-%Y'))
ax1.grid(True, which='both')
ax1.minorticks_on()
ax1v = ax1.twinx()
colors = dfc.VolumePositive.map({True: 'g', False: 'r'})
ax1v.bar(dfc.Date, dfc['Volume'], color=colors, alpha=0.4)
ax1v.axes.yaxis.set_ticklabels([])
ax1v.set_ylim(0, 3*df.Volume.max())
ax1.set_title('Stock '+ symbol +' Closing Price')
ax1.set_ylabel('Price')
ax1.set_xlabel('Date')
ax1.legend(loc='best')
plt.show()
36.348485
111
0.667361
a0c8d55fb37c691da19d42d22717e7769ad0fbbf
1,670
py
Python
UpWork_Projects/pdf_downloader.py
SurendraTamang/Web-Scrapping
2bb60cce9010b4b68f5c11bf295940832bb5df50
[ "MIT" ]
null
null
null
UpWork_Projects/pdf_downloader.py
SurendraTamang/Web-Scrapping
2bb60cce9010b4b68f5c11bf295940832bb5df50
[ "MIT" ]
null
null
null
UpWork_Projects/pdf_downloader.py
SurendraTamang/Web-Scrapping
2bb60cce9010b4b68f5c11bf295940832bb5df50
[ "MIT" ]
1
2022-01-18T17:15:51.000Z
2022-01-18T17:15:51.000Z
import requests
from urllib.request import urlopen
from urllib.request import urlretrieve
import cgi
import os.path

pdf_downloader()
33.4
108
0.552096
a0cab7a3ae269edaac7fa1a7d902a54bd96a752d
13,282
py
Python
backend/app/vta/texdf/tex_df.py
megagonlabs/leam
f19830d4d6935bece7d163abbc533cfb4bc2e729
[ "Apache-2.0" ]
7
2020-09-14T07:03:51.000Z
2022-01-13T10:11:53.000Z
backend/app/vta/texdf/tex_df.py
megagonlabs/leam
f19830d4d6935bece7d163abbc533cfb4bc2e729
[ "Apache-2.0" ]
null
null
null
backend/app/vta/texdf/tex_df.py
megagonlabs/leam
f19830d4d6935bece7d163abbc533cfb4bc2e729
[ "Apache-2.0" ]
1
2020-09-07T22:26:27.000Z
2020-09-07T22:26:27.000Z
import spacy
import json, os
import dill as pickle
import numpy as np
import pandas as pd
from sklearn.feature_extraction.text import TfidfVectorizer
from sqlalchemy import create_engine, select, MetaData, Table, Column, Integer, String
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker
from typing import List, Dict, Any

from flask import current_app
from app.models import Dataset

# from vta.operators import featurize
# from vta.operators import clean
# from vta.operators import select
# from vta import spacy_nlp

from .tex_column import TexColumn
from .tex_metadata import MetadataItem
from .tex_vis import TexVis
from ..types import VTAColumnType, VisType
37.840456
100
0.595844
a0cc5ea31e6d19f7b084b456d80ccf0e5baf6865
1,604
py
Python
orders-api/orders_api/models.py
kelvinducray/fastapi-orders-api
37176329f717adf8ad8749be4ed50f7c875b0cf5
[ "MIT" ]
null
null
null
orders-api/orders_api/models.py
kelvinducray/fastapi-orders-api
37176329f717adf8ad8749be4ed50f7c875b0cf5
[ "MIT" ]
null
null
null
orders-api/orders_api/models.py
kelvinducray/fastapi-orders-api
37176329f717adf8ad8749be4ed50f7c875b0cf5
[ "MIT" ]
null
null
null
from uuid import uuid4

from sqlalchemy import Boolean, Column, DateTime, Integer, String
from sqlalchemy.dialects.postgresql import UUID
from sqlalchemy.orm import relationship

from .database import Base


# class User(Base):
#     __tablename__ = "users"

#     id = Column(Integer, primary_key=True, index=True)
#     email = Column(String, unique=True, index=True)
#     hashed_password = Column(String)
#     is_active = Column(Boolean, default=True)

#     items = relationship("Item", back_populates="owner")


# class Item(Base):
#     __tablename__ = "items"

#     id = Column(Integer, primary_key=True, index=True)
#     title = Column(String, index=True)
#     description = Column(String, index=True)
#     owner_id = Column(Integer, ForeignKey("users.id"))

#     owner = relationship("User", back_populates="items")
27.655172
68
0.704489
a0cc745e3a8e279006b132f30ea4111764df2ce1
32,293
py
Python
src/ID_meshes.py
faycalki/tainted-paths
81cecf6c1fba903ec3b8043e22652d222892609d
[ "MIT" ]
4
2019-09-26T21:34:32.000Z
2021-11-18T19:31:15.000Z
src/ID_meshes.py
faycalki/tainted-paths
81cecf6c1fba903ec3b8043e22652d222892609d
[ "MIT" ]
null
null
null
src/ID_meshes.py
faycalki/tainted-paths
81cecf6c1fba903ec3b8043e22652d222892609d
[ "MIT" ]
null
null
null
mesh_pic_bandits = 0 mesh_pic_mb_warrior_1 = 1 mesh_pic_messenger = 2 mesh_pic_prisoner_man = 3 mesh_pic_prisoner_fem = 4 mesh_pic_prisoner_wilderness = 5 mesh_pic_siege_sighted = 6 mesh_pic_siege_sighted_fem = 7 mesh_pic_camp = 8 mesh_pic_payment = 9 mesh_pic_escape_1 = 10 mesh_pic_escape_1_fem = 11 mesh_pic_victory = 12 mesh_pic_defeat = 13 mesh_pic_wounded = 14 mesh_pic_wounded_fem = 15 mesh_pic_steppe_bandits = 16 mesh_pic_mountain_bandits = 17 mesh_pic_sea_raiders = 18 mesh_pic_deserters = 19 mesh_pic_forest_bandits = 20 mesh_pic_cattle = 21 mesh_pic_looted_village = 22 mesh_pic_village_p = 23 mesh_pic_village_s = 24 mesh_pic_village_w = 25 mesh_pic_recruits = 26 mesh_pic_arms_swadian = 27 mesh_pic_castle1 = 28 mesh_pic_castledes = 29 mesh_pic_castlesnow = 30 mesh_pic_charge = 31 mesh_pic_khergit = 32 mesh_pic_nord = 33 mesh_pic_rhodock = 34 mesh_pic_sally_out = 35 mesh_pic_siege_attack = 36 mesh_pic_swad = 37 mesh_pic_town1 = 38 mesh_pic_towndes = 39 mesh_pic_townriot = 40 mesh_pic_townsnow = 41 mesh_pic_vaegir = 42 mesh_pic_villageriot = 43 mesh_pic_sarranid_encounter = 44 mesh_pic_mort = 45 mesh_mp_score_a = 46 mesh_mp_score_b = 47 mesh_portrait_blend_out = 48 mesh_load_window = 49 mesh_checkbox_off = 50 mesh_checkbox_on = 51 mesh_white_plane = 52 mesh_white_dot = 53 mesh_player_dot = 54 mesh_flag_infantry = 55 mesh_flag_archers = 56 mesh_flag_cavalry = 57 mesh_inv_slot = 58 mesh_mp_ingame_menu = 59 mesh_mp_inventory_left = 60 mesh_mp_inventory_right = 61 mesh_mp_inventory_choose = 62 mesh_mp_inventory_slot_glove = 63 mesh_mp_inventory_slot_horse = 64 mesh_mp_inventory_slot_armor = 65 mesh_mp_inventory_slot_helmet = 66 mesh_mp_inventory_slot_boot = 67 mesh_mp_inventory_slot_empty = 68 mesh_mp_inventory_slot_equip = 69 mesh_mp_inventory_left_arrow = 70 mesh_mp_inventory_right_arrow = 71 mesh_mp_ui_host_main = 72 mesh_mp_ui_host_maps_1 = 73 mesh_mp_ui_host_maps_2 = 74 mesh_mp_ui_host_maps_3 = 75 mesh_mp_ui_host_maps_4 = 76 mesh_mp_ui_host_maps_5 = 77 mesh_mp_ui_host_maps_6 = 78 mesh_mp_ui_host_maps_7 = 79 mesh_mp_ui_host_maps_8 = 80 mesh_mp_ui_host_maps_9 = 81 mesh_mp_ui_host_maps_10 = 82 mesh_mp_ui_host_maps_11 = 83 mesh_mp_ui_host_maps_12 = 84 mesh_mp_ui_host_maps_13 = 85 mesh_mp_ui_host_maps_randomp = 86 mesh_mp_ui_host_maps_randoms = 87 mesh_mp_ui_command_panel = 88 mesh_mp_ui_command_border_l = 89 mesh_mp_ui_command_border_r = 90 mesh_mp_ui_welcome_panel = 91 mesh_flag_project_sw = 92 mesh_flag_project_vg = 93 mesh_flag_project_kh = 94 mesh_flag_project_nd = 95 mesh_flag_project_rh = 96 mesh_flag_project_sr = 97 mesh_flag_projects_end = 98 mesh_flag_project_sw_miss = 99 mesh_flag_project_vg_miss = 100 mesh_flag_project_kh_miss = 101 mesh_flag_project_nd_miss = 102 mesh_flag_project_rh_miss = 103 mesh_flag_project_sr_miss = 104 mesh_flag_project_misses_end = 105 mesh_color_picker = 106 mesh_custom_map_banner_01 = 107 mesh_custom_map_banner_02 = 108 mesh_custom_map_banner_03 = 109 mesh_custom_banner_01 = 110 mesh_custom_banner_02 = 111 mesh_custom_banner_bg = 112 mesh_custom_banner_fg01 = 113 mesh_custom_banner_fg02 = 114 mesh_custom_banner_fg03 = 115 mesh_custom_banner_fg04 = 116 mesh_custom_banner_fg05 = 117 mesh_custom_banner_fg06 = 118 mesh_custom_banner_fg07 = 119 mesh_custom_banner_fg08 = 120 mesh_custom_banner_fg09 = 121 mesh_custom_banner_fg10 = 122 mesh_custom_banner_fg11 = 123 mesh_custom_banner_fg12 = 124 mesh_custom_banner_fg13 = 125 mesh_custom_banner_fg14 = 126 mesh_custom_banner_fg15 = 127 mesh_custom_banner_fg16 = 128 mesh_custom_banner_fg17 = 129 
mesh_custom_banner_fg18 = 130 mesh_custom_banner_fg19 = 131 mesh_custom_banner_fg20 = 132 mesh_custom_banner_fg21 = 133 mesh_custom_banner_fg22 = 134 mesh_custom_banner_fg23 = 135 mesh_custom_banner_charge_01 = 136 mesh_custom_banner_charge_02 = 137 mesh_custom_banner_charge_03 = 138 mesh_custom_banner_charge_04 = 139 mesh_custom_banner_charge_05 = 140 mesh_custom_banner_charge_06 = 141 mesh_custom_banner_charge_07 = 142 mesh_custom_banner_charge_08 = 143 mesh_custom_banner_charge_09 = 144 mesh_custom_banner_charge_10 = 145 mesh_custom_banner_charge_11 = 146 mesh_custom_banner_charge_12 = 147 mesh_custom_banner_charge_13 = 148 mesh_custom_banner_charge_14 = 149 mesh_custom_banner_charge_15 = 150 mesh_custom_banner_charge_16 = 151 mesh_custom_banner_charge_17 = 152 mesh_custom_banner_charge_18 = 153 mesh_custom_banner_charge_19 = 154 mesh_custom_banner_charge_20 = 155 mesh_custom_banner_charge_21 = 156 mesh_custom_banner_charge_22 = 157 mesh_custom_banner_charge_23 = 158 mesh_custom_banner_charge_24 = 159 mesh_custom_banner_charge_25 = 160 mesh_custom_banner_charge_26 = 161 mesh_custom_banner_charge_27 = 162 mesh_custom_banner_charge_28 = 163 mesh_custom_banner_charge_29 = 164 mesh_custom_banner_charge_30 = 165 mesh_custom_banner_charge_31 = 166 mesh_custom_banner_charge_32 = 167 mesh_custom_banner_charge_33 = 168 mesh_custom_banner_charge_34 = 169 mesh_custom_banner_charge_35 = 170 mesh_custom_banner_charge_36 = 171 mesh_custom_banner_charge_37 = 172 mesh_custom_banner_charge_38 = 173 mesh_custom_banner_charge_39 = 174 mesh_custom_banner_charge_40 = 175 mesh_custom_banner_charge_41 = 176 mesh_custom_banner_charge_42 = 177 mesh_custom_banner_charge_43 = 178 mesh_custom_banner_charge_44 = 179 mesh_custom_banner_charge_45 = 180 mesh_custom_banner_charge_46 = 181 mesh_tableau_mesh_custom_banner = 182 mesh_tableau_mesh_custom_banner_square = 183 mesh_tableau_mesh_custom_banner_tall = 184 mesh_tableau_mesh_custom_banner_short = 185 mesh_tableau_mesh_shield_round_1 = 186 mesh_tableau_mesh_shield_round_2 = 187 mesh_tableau_mesh_shield_round_3 = 188 mesh_tableau_mesh_shield_round_4 = 189 mesh_tableau_mesh_shield_round_5 = 190 mesh_tableau_mesh_shield_small_round_1 = 191 mesh_tableau_mesh_shield_small_round_2 = 192 mesh_tableau_mesh_shield_small_round_3 = 193 mesh_tableau_mesh_shield_kite_1 = 194 mesh_tableau_mesh_shield_kite_2 = 195 mesh_tableau_mesh_shield_kite_3 = 196 mesh_tableau_mesh_shield_kite_4 = 197 mesh_tableau_mesh_shield_heater_1 = 198 mesh_tableau_mesh_shield_heater_2 = 199 mesh_tableau_mesh_shield_pavise_1 = 200 mesh_tableau_mesh_shield_pavise_2 = 201 mesh_heraldic_armor_bg = 202 mesh_tableau_mesh_heraldic_armor_a = 203 mesh_tableau_mesh_heraldic_armor_b = 204 mesh_tableau_mesh_heraldic_armor_c = 205 mesh_tableau_mesh_heraldic_armor_d = 206 mesh_outer_terrain_plain_1 = 207 mesh_banner_a01 = 208 mesh_banner_a02 = 209 mesh_banner_a03 = 210 mesh_banner_a04 = 211 mesh_banner_a05 = 212 mesh_banner_a06 = 213 mesh_banner_a07 = 214 mesh_banner_a08 = 215 mesh_banner_a09 = 216 mesh_banner_a10 = 217 mesh_banner_a11 = 218 mesh_banner_a12 = 219 mesh_banner_a13 = 220 mesh_banner_a14 = 221 mesh_banner_a15 = 222 mesh_banner_a16 = 223 mesh_banner_a17 = 224 mesh_banner_a18 = 225 mesh_banner_a19 = 226 mesh_banner_a20 = 227 mesh_banner_a21 = 228 mesh_banner_b01 = 229 mesh_banner_b02 = 230 mesh_banner_b03 = 231 mesh_banner_b04 = 232 mesh_banner_b05 = 233 mesh_banner_b06 = 234 mesh_banner_b07 = 235 mesh_banner_b08 = 236 mesh_banner_b09 = 237 mesh_banner_b10 = 238 mesh_banner_b11 = 239 mesh_banner_b12 = 240 
mesh_banner_b13 = 241 mesh_banner_b14 = 242 mesh_banner_b15 = 243 mesh_banner_b16 = 244 mesh_banner_b17 = 245 mesh_banner_b18 = 246 mesh_banner_b19 = 247 mesh_banner_b20 = 248 mesh_banner_b21 = 249 mesh_banner_c01 = 250 mesh_banner_c02 = 251 mesh_banner_c03 = 252 mesh_banner_c04 = 253 mesh_banner_c05 = 254 mesh_banner_c06 = 255 mesh_banner_c07 = 256 mesh_banner_c08 = 257 mesh_banner_c09 = 258 mesh_banner_c10 = 259 mesh_banner_c11 = 260 mesh_banner_c12 = 261 mesh_banner_c13 = 262 mesh_banner_c14 = 263 mesh_banner_c15 = 264 mesh_banner_c16 = 265 mesh_banner_c17 = 266 mesh_banner_c18 = 267 mesh_banner_c19 = 268 mesh_banner_c20 = 269 mesh_banner_c21 = 270 mesh_banner_d01 = 271 mesh_banner_d02 = 272 mesh_banner_d03 = 273 mesh_banner_d04 = 274 mesh_banner_d05 = 275 mesh_banner_d06 = 276 mesh_banner_d07 = 277 mesh_banner_d08 = 278 mesh_banner_d09 = 279 mesh_banner_d10 = 280 mesh_banner_d11 = 281 mesh_banner_d12 = 282 mesh_banner_d13 = 283 mesh_banner_d14 = 284 mesh_banner_d15 = 285 mesh_banner_d16 = 286 mesh_banner_d17 = 287 mesh_banner_d18 = 288 mesh_banner_d19 = 289 mesh_banner_d20 = 290 mesh_banner_d21 = 291 mesh_banner_e01 = 292 mesh_banner_e02 = 293 mesh_banner_e03 = 294 mesh_banner_e04 = 295 mesh_banner_e05 = 296 mesh_banner_e06 = 297 mesh_banner_e07 = 298 mesh_banner_e08 = 299 mesh_banner_e09 = 300 mesh_banner_e10 = 301 mesh_banner_e11 = 302 mesh_banner_e12 = 303 mesh_banner_e13 = 304 mesh_banner_e14 = 305 mesh_banner_e15 = 306 mesh_banner_e16 = 307 mesh_banner_e17 = 308 mesh_banner_e18 = 309 mesh_banner_e19 = 310 mesh_banner_e20 = 311 mesh_banner_e21 = 312 mesh_banner_f01 = 313 mesh_banner_f02 = 314 mesh_banner_f03 = 315 mesh_banner_f04 = 316 mesh_banner_f05 = 317 mesh_banner_f06 = 318 mesh_banner_f07 = 319 mesh_banner_f08 = 320 mesh_banner_f09 = 321 mesh_banner_f10 = 322 mesh_banner_f11 = 323 mesh_banner_f12 = 324 mesh_banner_f13 = 325 mesh_banner_f14 = 326 mesh_banner_f15 = 327 mesh_banner_f16 = 328 mesh_banner_f17 = 329 mesh_banner_f18 = 330 mesh_banner_f19 = 331 mesh_banner_f20 = 332 mesh_banner_h01 = 333 mesh_banner_h02 = 334 mesh_banner_h03 = 335 mesh_banner_h04 = 336 mesh_banner_h05 = 337 mesh_banner_h06 = 338 mesh_banner_h07 = 339 mesh_banner_h08 = 340 mesh_banner_h09 = 341 mesh_banner_h10 = 342 mesh_banner_h11 = 343 mesh_banner_h12 = 344 mesh_banner_h13 = 345 mesh_banner_h14 = 346 mesh_banner_h15 = 347 mesh_banner_h16 = 348 mesh_banner_h17 = 349 mesh_banner_h18 = 350 mesh_banner_h19 = 351 mesh_banner_h20 = 352 mesh_banner_h21 = 353 mesh_banner_i01 = 354 mesh_banner_i02 = 355 mesh_banner_i03 = 356 mesh_banner_i04 = 357 mesh_banner_i05 = 358 mesh_banner_i06 = 359 mesh_banner_i07 = 360 mesh_banner_i08 = 361 mesh_banner_i09 = 362 mesh_banner_i10 = 363 mesh_banner_i11 = 364 mesh_banner_i12 = 365 mesh_banner_i13 = 366 mesh_banner_i14 = 367 mesh_banner_i15 = 368 mesh_banner_i16 = 369 mesh_banner_i17 = 370 mesh_banner_i18 = 371 mesh_banner_i19 = 372 mesh_banner_i20 = 373 mesh_banner_i21 = 374 mesh_banner_k01 = 375 mesh_banner_k02 = 376 mesh_banner_k03 = 377 mesh_banner_k04 = 378 mesh_banner_k05 = 379 mesh_banner_k06 = 380 mesh_banner_k07 = 381 mesh_banner_k08 = 382 mesh_banner_k09 = 383 mesh_banner_k10 = 384 mesh_banner_k11 = 385 mesh_banner_k12 = 386 mesh_banner_k13 = 387 mesh_banner_k14 = 388 mesh_banner_k15 = 389 mesh_banner_k16 = 390 mesh_banner_k17 = 391 mesh_banner_k18 = 392 mesh_banner_k19 = 393 mesh_banner_k20 = 394 mesh_banner_g01 = 395 mesh_banner_g02 = 396 mesh_banner_g03 = 397 mesh_banner_g04 = 398 mesh_banner_g05 = 399 mesh_banner_g06 = 400 mesh_banner_g07 = 401 
mesh_banner_g08 = 402 mesh_banner_g09 = 403 mesh_banner_g10 = 404 mesh_banner_kingdom_a = 405 mesh_banner_kingdom_b = 406 mesh_banner_kingdom_c = 407 mesh_banner_kingdom_d = 408 mesh_banner_kingdom_e = 409 mesh_banner_kingdom_f = 410 mesh_banner_kingdom_g = 411 mesh_banner_kingdom_h = 412 mesh_banner_kingdom_i = 413 mesh_banner_kingdom_j = 414 mesh_banner_kingdom_k = 415 mesh_banner_kingdom_l = 416 mesh_banner_kingdom_ll = 417 mesh_banner_kingdom_m = 418 mesh_banner_kingdom_n = 419 mesh_banner_kingdom_o = 420 mesh_banner_kingdom_p = 421 mesh_banner_kingdom_q = 422 mesh_banner_kingdom_r = 423 mesh_banner_kingdom_s = 424 mesh_banner_kingdom_t = 425 mesh_banner_kingdom_u = 426 mesh_banner_kingdom_v = 427 mesh_banner_kingdom_w = 428 mesh_banner_kingdom_x = 429 mesh_banner_kingdom_y = 430 mesh_banner_kingdom_z = 431 mesh_banner_kingdom_2a = 432 mesh_banner_kingdom_2b = 433 mesh_banner_kingdom_2c = 434 mesh_banner_kingdom_2d = 435 mesh_banner_k21 = 436 mesh_arms_a01 = 437 mesh_arms_a02 = 438 mesh_arms_a03 = 439 mesh_arms_a04 = 440 mesh_arms_a05 = 441 mesh_arms_a06 = 442 mesh_arms_a07 = 443 mesh_arms_a08 = 444 mesh_arms_a09 = 445 mesh_arms_a10 = 446 mesh_arms_a11 = 447 mesh_arms_a12 = 448 mesh_arms_a13 = 449 mesh_arms_a14 = 450 mesh_arms_a15 = 451 mesh_arms_a16 = 452 mesh_arms_a17 = 453 mesh_arms_a18 = 454 mesh_arms_a19 = 455 mesh_arms_a20 = 456 mesh_arms_a21 = 457 mesh_arms_b01 = 458 mesh_arms_b02 = 459 mesh_arms_b03 = 460 mesh_arms_b04 = 461 mesh_arms_b05 = 462 mesh_arms_b06 = 463 mesh_arms_b07 = 464 mesh_arms_b08 = 465 mesh_arms_b09 = 466 mesh_arms_b10 = 467 mesh_arms_b11 = 468 mesh_arms_b12 = 469 mesh_arms_b13 = 470 mesh_arms_b14 = 471 mesh_arms_b15 = 472 mesh_arms_b16 = 473 mesh_arms_b17 = 474 mesh_arms_b18 = 475 mesh_arms_b19 = 476 mesh_arms_b20 = 477 mesh_arms_b21 = 478 mesh_arms_c01 = 479 mesh_arms_c02 = 480 mesh_arms_c03 = 481 mesh_arms_c04 = 482 mesh_arms_c05 = 483 mesh_arms_c06 = 484 mesh_arms_c07 = 485 mesh_arms_c08 = 486 mesh_arms_c09 = 487 mesh_arms_c10 = 488 mesh_arms_c11 = 489 mesh_arms_c12 = 490 mesh_arms_c13 = 491 mesh_arms_c14 = 492 mesh_arms_c15 = 493 mesh_arms_c16 = 494 mesh_arms_c17 = 495 mesh_arms_c18 = 496 mesh_arms_c19 = 497 mesh_arms_c20 = 498 mesh_arms_c21 = 499 mesh_arms_d01 = 500 mesh_arms_d02 = 501 mesh_arms_d03 = 502 mesh_arms_d04 = 503 mesh_arms_d05 = 504 mesh_arms_d06 = 505 mesh_arms_d07 = 506 mesh_arms_d08 = 507 mesh_arms_d09 = 508 mesh_arms_d10 = 509 mesh_arms_d11 = 510 mesh_arms_d12 = 511 mesh_arms_d13 = 512 mesh_arms_d14 = 513 mesh_arms_d15 = 514 mesh_arms_d16 = 515 mesh_arms_d17 = 516 mesh_arms_d18 = 517 mesh_arms_d19 = 518 mesh_arms_d20 = 519 mesh_arms_d21 = 520 mesh_arms_e01 = 521 mesh_arms_e02 = 522 mesh_arms_e03 = 523 mesh_arms_e04 = 524 mesh_arms_e05 = 525 mesh_arms_e06 = 526 mesh_arms_e07 = 527 mesh_arms_e08 = 528 mesh_arms_e09 = 529 mesh_arms_e10 = 530 mesh_arms_e11 = 531 mesh_arms_e12 = 532 mesh_arms_e13 = 533 mesh_arms_e14 = 534 mesh_arms_e15 = 535 mesh_arms_e16 = 536 mesh_arms_e17 = 537 mesh_arms_e18 = 538 mesh_arms_e19 = 539 mesh_arms_e20 = 540 mesh_arms_e21 = 541 mesh_arms_f01 = 542 mesh_arms_f02 = 543 mesh_arms_f03 = 544 mesh_arms_f04 = 545 mesh_arms_f05 = 546 mesh_arms_f06 = 547 mesh_arms_f07 = 548 mesh_arms_f08 = 549 mesh_arms_f09 = 550 mesh_arms_f10 = 551 mesh_arms_f11 = 552 mesh_arms_f12 = 553 mesh_arms_f13 = 554 mesh_arms_f14 = 555 mesh_arms_f15 = 556 mesh_arms_f16 = 557 mesh_arms_f17 = 558 mesh_arms_f18 = 559 mesh_arms_f19 = 560 mesh_arms_f20 = 561 mesh_arms_h01 = 562 mesh_arms_h02 = 563 mesh_arms_h03 = 564 mesh_arms_h04 = 565 mesh_arms_h05 
= 566 mesh_arms_h06 = 567 mesh_arms_h07 = 568 mesh_arms_h08 = 569 mesh_arms_h09 = 570 mesh_arms_h10 = 571 mesh_arms_h11 = 572 mesh_arms_h12 = 573 mesh_arms_h13 = 574 mesh_arms_h14 = 575 mesh_arms_h15 = 576 mesh_arms_h16 = 577 mesh_arms_h17 = 578 mesh_arms_h18 = 579 mesh_arms_h19 = 580 mesh_arms_h20 = 581 mesh_arms_h21 = 582 mesh_arms_i01 = 583 mesh_arms_i02 = 584 mesh_arms_i03 = 585 mesh_arms_i04 = 586 mesh_arms_i05 = 587 mesh_arms_i06 = 588 mesh_arms_i07 = 589 mesh_arms_i08 = 590 mesh_arms_i09 = 591 mesh_arms_i10 = 592 mesh_arms_i11 = 593 mesh_arms_i12 = 594 mesh_arms_i13 = 595 mesh_arms_i14 = 596 mesh_arms_i15 = 597 mesh_arms_i16 = 598 mesh_arms_i17 = 599 mesh_arms_i18 = 600 mesh_arms_i19 = 601 mesh_arms_i20 = 602 mesh_arms_i21 = 603 mesh_arms_k01 = 604 mesh_arms_k02 = 605 mesh_arms_k03 = 606 mesh_arms_k04 = 607 mesh_arms_k05 = 608 mesh_arms_k06 = 609 mesh_arms_k07 = 610 mesh_arms_k08 = 611 mesh_arms_k09 = 612 mesh_arms_k10 = 613 mesh_arms_k11 = 614 mesh_arms_k12 = 615 mesh_arms_k13 = 616 mesh_arms_k14 = 617 mesh_arms_k15 = 618 mesh_arms_k16 = 619 mesh_arms_k17 = 620 mesh_arms_k18 = 621 mesh_arms_k19 = 622 mesh_arms_k20 = 623 mesh_arms_g01 = 624 mesh_arms_g02 = 625 mesh_arms_g03 = 626 mesh_arms_g04 = 627 mesh_arms_g05 = 628 mesh_arms_g06 = 629 mesh_arms_g07 = 630 mesh_arms_g08 = 631 mesh_arms_g09 = 632 mesh_arms_g10 = 633 mesh_arms_kingdom_a = 634 mesh_arms_kingdom_b = 635 mesh_arms_kingdom_c = 636 mesh_arms_kingdom_d = 637 mesh_arms_kingdom_e = 638 mesh_arms_kingdom_f = 639 mesh_arms_kingdom_g = 640 mesh_arms_kingdom_h = 641 mesh_arms_kingdom_i = 642 mesh_arms_kingdom_j = 643 mesh_arms_kingdom_k = 644 mesh_arms_kingdom_l = 645 mesh_arms_kingdom_ll = 646 mesh_arms_kingdom_m = 647 mesh_arms_kingdom_n = 648 mesh_arms_kingdom_o = 649 mesh_arms_kingdom_p = 650 mesh_arms_kingdom_q = 651 mesh_arms_kingdom_r = 652 mesh_arms_kingdom_s = 653 mesh_arms_kingdom_t = 654 mesh_arms_kingdom_u = 655 mesh_arms_kingdom_v = 656 mesh_arms_kingdom_w = 657 mesh_arms_kingdom_x = 658 mesh_arms_kingdom_y = 659 mesh_arms_kingdom_z = 660 mesh_arms_kingdom_2a = 661 mesh_arms_kingdom_2b = 662 mesh_arms_kingdom_2c = 663 mesh_arms_kingdom_2d = 664 mesh_arms_k21 = 665 mesh_banners_default_a = 666 mesh_banners_default_b = 667 mesh_banners_default_c = 668 mesh_banners_default_d = 669 mesh_banners_default_e = 670 mesh_troop_label_banner = 671 mesh_ui_kingdom_shield_1 = 672 mesh_ui_kingdom_shield_2 = 673 mesh_ui_kingdom_shield_3 = 674 mesh_ui_kingdom_shield_4 = 675 mesh_ui_kingdom_shield_5 = 676 mesh_ui_kingdom_shield_6 = 677 mesh_ui_kingdom_shield_7 = 678 mesh_ui_kingdom_shield_8 = 679 mesh_ui_kingdom_shield_9 = 680 mesh_ui_kingdom_shield_10 = 681 mesh_ui_kingdom_shield_11 = 682 mesh_ui_kingdom_shield_12 = 683 mesh_ui_kingdom_shield_13 = 684 mesh_ui_kingdom_shield_14 = 685 mesh_ui_kingdom_shield_15 = 686 mesh_ui_kingdom_shield_16 = 687 mesh_ui_kingdom_shield_17 = 688 mesh_ui_kingdom_shield_18 = 689 mesh_ui_kingdom_shield_19 = 690 mesh_ui_kingdom_shield_20 = 691 mesh_ui_kingdom_shield_21 = 692 mesh_ui_kingdom_shield_22 = 693 mesh_ui_kingdom_shield_23 = 694 mesh_ui_kingdom_shield_24 = 695 mesh_ui_kingdom_shield_25 = 696 mesh_ui_kingdom_shield_26 = 697 mesh_ui_kingdom_shield_27 = 698 mesh_ui_kingdom_shield_28 = 699 mesh_ui_kingdom_shield_29 = 700 mesh_ui_kingdom_shield_30 = 701 mesh_ui_kingdom_shield_31 = 702 mesh_mouse_arrow_down = 703 mesh_mouse_arrow_right = 704 mesh_mouse_arrow_left = 705 mesh_mouse_arrow_up = 706 mesh_mouse_arrow_plus = 707 mesh_mouse_left_click = 708 mesh_mouse_right_click = 709 mesh_status_ammo_ready 
= 710 mesh_main_menu_background = 711 mesh_loading_background = 712 mesh_ui_quick_battle_a = 713 mesh_white_bg_plane_a = 714 mesh_cb_ui_icon_infantry = 715 mesh_cb_ui_icon_archer = 716 mesh_cb_ui_icon_horseman = 717 mesh_cb_ui_main = 718 mesh_cb_ui_maps_scene_01 = 719 mesh_cb_ui_maps_scene_02 = 720 mesh_cb_ui_maps_scene_03 = 721 mesh_cb_ui_maps_scene_04 = 722 mesh_cb_ui_maps_scene_05 = 723 mesh_cb_ui_maps_scene_06 = 724 mesh_cb_ui_maps_scene_07 = 725 mesh_cb_ui_maps_scene_08 = 726 mesh_cb_ui_maps_scene_09 = 727 mesh_mp_ui_host_maps_14 = 728 mesh_mp_ui_host_maps_15 = 729 mesh_ui_kingdom_shield_7 = 730 mesh_flag_project_rb = 731 mesh_flag_project_rb_miss = 732 mesh_mp_ui_host_maps_16 = 733 mesh_mp_ui_host_maps_17 = 734 mesh_mp_ui_host_maps_18 = 735 mesh_mp_ui_host_maps_19 = 736 mesh_mp_ui_host_maps_20 = 737 mesh_pic_mb_warrior_2 = 738 mesh_pic_mb_warrior_3 = 739 mesh_pic_mb_warrior_4 = 740 mesh_pic_mercenary = 741 mesh_facegen_board = 742 mesh_status_background = 743 mesh_status_health_bar = 744 mesh_game_log_window = 745 mesh_restore_game_panel = 746 mesh_message_window = 747 mesh_party_window_b = 748 mesh_party_member_button = 749 mesh_party_member_button_pressed = 750 mesh_longer_button = 751 mesh_longer_button_down = 752 mesh_button_1 = 753 mesh_button_1_down = 754 mesh_used_button = 755 mesh_used_button_down = 756 mesh_longer_button = 757 mesh_longer_button_down = 758 mesh_options_window = 759 mesh_message_window = 760 mesh_note_window = 761 mesh_left_button = 762 mesh_left_button_down = 763 mesh_left_button_hl = 764 mesh_right_button = 765 mesh_right_button_down = 766 mesh_right_button_hl = 767 mesh_center_button = 768 mesh_drop_button = 769 mesh_drop_button_down = 770 mesh_drop_button_hl = 771 mesh_drop_button_child = 772 mesh_drop_button_child_down = 773 mesh_drop_button_child_hl = 774 mesh_num_1 = 775 mesh_num_2 = 776 mesh_num_3 = 777 mesh_num_4 = 778 mesh_num_5 = 779 mesh_num_6 = 780 mesh_num_7 = 781 mesh_num_8 = 782 mesh_num_9 = 783 mesh_num_10 = 784 mesh_num_11 = 785 mesh_num_12 = 786 mesh_num_13 = 787 mesh_num_14 = 788 mesh_num_15 = 789 mesh_num_16 = 790 mesh_num_17 = 791 mesh_num_18 = 792 mesh_num_19 = 793 mesh_num_20 = 794 mesh_num_21 = 795 mesh_num_22 = 796 mesh_num_23 = 797 mesh_num_24 = 798 mesh_num_25 = 799 mesh_num_26 = 800 mesh_num_27 = 801 mesh_num_28 = 802 mesh_num_29 = 803 mesh_num_30 = 804 mesh_num_31 = 805 mesh_num_32 = 806 mesh_num_33 = 807 mesh_num_34 = 808 mesh_num_35 = 809 mesh_num_36 = 810 mesh_num_37 = 811 mesh_num_38 = 812 mesh_num_39 = 813 mesh_num_40 = 814 mesh_num_41 = 815 mesh_num_42 = 816 mesh_num_43 = 817 mesh_num_44 = 818 mesh_num_45 = 819 mesh_num_46 = 820 mesh_num_47 = 821 mesh_num_48 = 822 mesh_message_window = 823 mesh_face_gen_window = 824 mesh_order_frame = 825 mesh_tableau_mesh_early_transitional_heraldic_banner = 826 mesh_tableau_mesh_early_transitional_heraldic = 827 mesh_tableau_mesh_samurai_heraldic_flag = 828 mesh_tableau_mesh_banner_spear = 829 mesh_invisi_st_plane_fullsc = 830 mesh_bt_flag_1 = 831 mesh_bt_flag_2 = 832 mesh_bt_flag_3 = 833 mesh_pic_bt_crossbow = 834 mesh_pic_bt_shield = 835 mesh_pic_bt_horse_archer = 836 mesh_pic_bt_twohand = 837 mesh_pic_bt_bow = 838 mesh_pic_bt_horse = 839 mesh_pic_bt_musket = 840 mesh_pic_bt_leader = 841 mesh_bt_cion_tier1 = 842 mesh_bt_cion_tier2 = 843 mesh_bt_cion_tier3 = 844 mesh_bt_cion_tier4 = 845 mesh_bt_cion_tier5 = 846 mesh_bt_cion_tier6 = 847 mesh_pic_bt_charge_auto = 848 mesh_pic_bt_hold = 849 mesh_pic_bt_followme = 850 mesh_pic_bt_unite = 851 mesh_pic_bt_divide = 852 mesh_pic_bt_advan = 853 
mesh_pic_bt_fall = 854 mesh_pic_bt_holdfire = 855 mesh_pic_bt_anyw = 856 mesh_pic_bt_clicked = 857 mesh_pic_bt_return = 858 mesh_pic_camp_meet = 859 mesh_pic_meetlady = 860 mesh_pic_meetlady2 = 861 mesh_pic_meetlady3 = 862 mesh_1pic_ruin_0 = 863 mesh_1pic_ruin_1 = 864 mesh_1pic_ruin_2 = 865 mesh_1pic_ruin_3 = 866 mesh_1pic_ruin_4 = 867 mesh_1pic_ruin_5 = 868 mesh_1pic_ruin_6 = 869 mesh_1pic_ruin_7 = 870 mesh_1pic_ruin_8 = 871 mesh_1pic_ruin_9 = 872 mesh_1pic_ruin_10 = 873 mesh_1pic_ruin_11 = 874 mesh_1pic_ruin_12 = 875 mesh_1pic_ruin_13 = 876 mesh_1pic_ruin_14 = 877 mesh_1pic_ruin_15 = 878 mesh_1pic_ruin_16 = 879 mesh_1pic_ruin_17 = 880 mesh_1pic_ruin_18 = 881 mesh_1pic_ruin_19 = 882 mesh_1pic_ruin_20 = 883 mesh_1pic_ruin_21 = 884 mesh_1pic_ruin_22 = 885 mesh_1pic_ruin_23 = 886 mesh_1pic_ruin_24 = 887 mesh_1pic_ruin_25 = 888 mesh_1pic_ruin_26 = 889 mesh_1pic_ruin_27 = 890 mesh_1pic_ruin_28 = 891 mesh_1pic_ruin_29 = 892 mesh_1pic_ruin_30 = 893 mesh_1pic_ruin_31 = 894 mesh_1pic_ruin_32 = 895 mesh_1pic_ruin_33 = 896 mesh_1pic_ruin_34 = 897 mesh_1pic_ruin_35 = 898 mesh_1pic_ruin_36 = 899 mesh_1pic_ruin_37 = 900 mesh_1pic_ruin_38 = 901 mesh_1pic_ruin_39 = 902 mesh_1pic_ruin_40 = 903 mesh_1pic_ruin_41 = 904 mesh_1pic_ruin_42 = 905 mesh_1pic_ruin_43 = 906 mesh_1pic_ruin_44 = 907 mesh_1pic_ruin_45 = 908 mesh_1pic_ruin_46 = 909 mesh_1pic_ruin_47 = 910 mesh_1pic_ruin_48 = 911 mesh_1pic_ruin_49 = 912 mesh_1pic_ruin_50 = 913 mesh_1pic_ruin_51 = 914 mesh_1pic_ruin_52 = 915 mesh_1pic_ruin_53 = 916 mesh_1pic_ruin_54 = 917 mesh_1pic_ruin_55 = 918 mesh_1pic_ruin_56 = 919 mesh_1pic_ruin_57 = 920 mesh_1pic_ruin_58 = 921 mesh_1pic_ruin_59 = 922 mesh_1pic_ruin_60 = 923 mesh_1pic_ruin_61 = 924 mesh_1pic_ruin_62 = 925 mesh_1pic_ruin_63 = 926 mesh_1pic_ruin_64 = 927 mesh_1pic_ruin_65 = 928 mesh_1pic_ruin_66 = 929 mesh_1pic_ruin_67 = 930 mesh_1pic_ruin_68 = 931 mesh_1pic_ruin_69 = 932 mesh_1pic_ruin_70 = 933 mesh_1pic_ruin_71 = 934 mesh_1pic_ruin_72 = 935 mesh_1pic_ruin_73 = 936 mesh_1pic_ruin_74 = 937 mesh_1pic_ruin_75 = 938 mesh_1pic_ruin_76 = 939 mesh_1pic_ruin_77 = 940 mesh_1pic_ruin_78 = 941 mesh_1pic_ruin_79 = 942 mesh_1pic_ruin_80 = 943 mesh_1pic_ruin_81 = 944 mesh_1pic_ruin_82 = 945 mesh_1pic_ruin_83 = 946 mesh_1pic_ruin_84 = 947 mesh_1pic_ruin_85 = 948 mesh_1pic_ruin_86 = 949 mesh_1pic_ruin_87 = 950 mesh_1pic_ruin_88 = 951 mesh_1pic_ruin_89 = 952 mesh_1pic_ruin_90 = 953 mesh_1pic_ruin_91 = 954 mesh_1pic_ruin_92 = 955 mesh_1pic_ruin_93 = 956 mesh_1pic_ruin_94 = 957 mesh_1pic_ruin_95 = 958 mesh_1pic_ruin_96 = 959 mesh_1pic_ruin_97 = 960 mesh_1pic_ruin_98 = 961 mesh_1pic_ruin_99 = 962 mesh_1pic_ruin_100 = 963 mesh_1pic_ruin_101 = 964 mesh_1pic_ruin_102 = 965 mesh_1pic_ruin_103 = 966 mesh_1pic_ruin_104 = 967 mesh_1pic_ruin_105 = 968 mesh_1pic_ruin_106 = 969 mesh_1pic_ruin_107 = 970 mesh_1pic_ruin_108 = 971 mesh_1pic_ruin_109 = 972 mesh_1pic_ruin_110 = 973 mesh_1pic_ruin_111 = 974 mesh_1pic_ruin_112 = 975 mesh_1pic_ruin_113 = 976 mesh_1pic_ruin_114 = 977 mesh_1pic_ruin_115 = 978 mesh_1pic_ruin_116 = 979 mesh_1pic_ruin_117 = 980 mesh_1pic_ruin_118 = 981 mesh_1pic_ruin_119 = 982 mesh_1pic_ruin_120 = 983 mesh_1pic_ruin_121 = 984 mesh_1pic_ruin_122 = 985 mesh_1pic_ruin_123 = 986 mesh_1pic_ruin_124 = 987 mesh_1pic_ruin_125 = 988 mesh_1pic_ruin_126 = 989 mesh_1pic_ruin_127 = 990 mesh_1pic_ruin_128 = 991 mesh_1pic_ruin_129 = 992 mesh_1pic_ruin_130 = 993 mesh_1pic_ruin_131 = 994 mesh_1pic_ruin_132 = 995 mesh_1pic_ruin_133 = 996 mesh_1pic_ruin_134 = 997 mesh_1pic_ruin_135 = 998 mesh_1pic_ruin_136 = 999 
mesh_1pic_ruin_137 = 1000 mesh_1pic_ruin_138 = 1001 mesh_1pic_ruin_139 = 1002 mesh_1pic_ruin_140 = 1003 mesh_1pic_ruin_141 = 1004 mesh_1pic_ruin_142 = 1005 mesh_1pic_ruin_143 = 1006 mesh_1pic_ruin_144 = 1007 mesh_1pic_ruin_145 = 1008 mesh_1pic_ruin_146 = 1009 mesh_1pic_ruin_ex1 = 1010 mesh_1pic_ruin_ex2 = 1011 mesh_1pic_ruin_ex3 = 1012 mesh_1pic_ruin_ex4 = 1013 mesh_1pic_ruin_ex5 = 1014 mesh_1pic_ruin_ex6 = 1015 mesh_1pic_ruin_ex7 = 1016 mesh_1pic_ruin_ex8 = 1017 mesh_1pic_ruin_ex9 = 1018 mesh_1pic_ruin_ex10 = 1019 mesh_1pic_ruin_ex11 = 1020 mesh_1pic_ruin_ex12 = 1021 mesh_1pic_ruin_ex13 = 1022 mesh_1pic_ruin_ex14 = 1023 mesh_1pic_ruin_ex15 = 1024 mesh_1pic_ruin_ex16 = 1025 mesh_1pic_ruin_ex17 = 1026 mesh_1pic_ruin_ex18 = 1027 mesh_1pic_ruin_ex19 = 1028 mesh_1pic_ruin_ex20 = 1029 mesh_1pic_ruin_ex21 = 1030 mesh_1pic_ruin_ex22 = 1031 mesh_1pic_ruin_ex23 = 1032 mesh_1pic_ruin_ex24 = 1033 mesh_1pic_ruin_ex25 = 1034 mesh_pic_encounter1 = 1035 mesh_pic_encounter2 = 1036 mesh_pic_encounter3 = 1037 mesh_pic_xex8 = 1038 mesh_pic_xex9 = 1039 mesh_pic_xex10 = 1040 mesh_pic_xex11 = 1041 mesh_pic_xex12 = 1042 mesh_pic_xex13 = 1043 mesh_pic_xex14 = 1044 mesh_st_tercio = 1045 mesh_st_pincer_movement = 1046 mesh_encounter4vik = 1047 mesh_encounter5pirate = 1048 mesh_pic_ship_shipyard = 1049 mesh_st_pic_plain = 1050 mesh_st_pic_desert = 1051 mesh_st_pic_mount = 1052 mesh_st_pic_snow = 1053 mesh_st_pic_sea = 1054 mesh_st_lancecharge = 1055 mesh_st_ccccharge = 1056 mesh_st_viking = 1057 mesh_black_st_plane = 1058 mesh_invisi_st_plane = 1059 mesh_pic_invisi_backgrounds = 1060 mesh_pic_policy_choose_prt = 1061 mesh_pic_policy_choose_prt_bk = 1062 mesh_pic_religion_screenn = 1063 mesh_pic_gbt_punch = 1064 mesh_pic_gbt_lick = 1065 mesh_pic_gbt_finger = 1066 mesh_pic_gbt_love = 1067 mesh_pic_gbt_place = 1068 mesh_pic_gbt_bed_sheet = 1069 mesh_pic_money_bag = 1070 mesh_pic_sea_backg = 1071 mesh_tableau_mesh_flag = 1072 mesh_pic_backg_inv = 1073 mesh_pic_library = 1074 mesh_pic_fuck_back = 1075 mesh_pic_ghost_ship_encount = 1076 mesh_pic_visit_train = 1077 mesh_pic_weknow = 1078 mesh_pic_bank_back = 1079 mesh_pic_wm_blank = 1080 mesh_pic_wm_horse = 1081 mesh_pic_wm_finewood = 1082 mesh_pic_wm_iron = 1083 mesh_pic_wm_elephant = 1084 mesh_pic_wm_whale = 1085 mesh_pic_wm_fish = 1086 mesh_pic_wm_maize = 1087 mesh_pic_wm_copper = 1088 mesh_pic_wm_marble = 1089 mesh_pic_wm_pearl = 1090 mesh_pic_wm_gem = 1091 mesh_pic_wm_ceramic = 1092 mesh_pic_wm_gold = 1093 mesh_pic_wm_silver = 1094 mesh_pic_wm_ivory = 1095 mesh_pic_wm_coffee = 1096 mesh_pic_wm_cacao = 1097 mesh_pic_wm_silk = 1098 mesh_pic_wm_nutmeg = 1099 mesh_pic_wm_allspice = 1100 mesh_pic_wm_cinnamon = 1101 mesh_pic_wm_clove = 1102 mesh_pic_wm_pepper = 1103 mesh_pic_wm_tabaco = 1104 mesh_pic_wm_tea = 1105 mesh_pic_marry = 1106 mesh_pic_religion_symbol_0 = 1107 mesh_pic_religion_symbol_1 = 1108 mesh_pic_religion_symbol_2 = 1109 mesh_pic_religion_symbol_3 = 1110 mesh_pic_religion_symbol_4 = 1111 mesh_pic_religion_symbol_5 = 1112 mesh_pic_religion_symbol_6 = 1113 mesh_pic_religion_symbol_7 = 1114 mesh_pic_religion_symbol_8 = 1115 mesh_pic_religion_symbol_9 = 1116 mesh_pic_religion_symbol_10 = 1117 mesh_pic_religion_symbol_11 = 1118 mesh_pic_religion_symbol_12 = 1119 mesh_pic_religion_symbol_13 = 1120 mesh_pic_religion_symbol_14 = 1121 mesh_pic_religion_symbol_15 = 1122 mesh_pic_religion_symbol_16 = 1123 mesh_pic_disaster_volcano = 1124 mesh_pic_disaster_earthquake = 1125 mesh_pic_disaster_storm = 1126 mesh_pic_disaster_typhoon = 1127 mesh_pic_disaster_fire = 1128 
mesh_pic_disaster_sand = 1129 mesh_pic_disaster_tides = 1130 mesh_pic_disaster_ice = 1131 mesh_pic_disaster_flood = 1132 mesh_flag_div_1 = 1133 mesh_flag_div_2 = 1134 mesh_flag_div_3 = 1135 mesh_flag_div_4 = 1136 mesh_flag_div_5 = 1137 mesh_flag_div_6 = 1138 mesh_flag_div_7 = 1139 mesh_flag_div_8 = 1140 mesh_flag_div_9 = 1141 mesh_pic_battle_tile_2 = 1142 mesh_pic_battle_tile_3 = 1143 mesh_pic_battle_tile_4 = 1144 mesh_pic_battle_tile_5 = 1145 mesh_pic_battle_tile_6 = 1146 mesh_pic_battle_tile_7 = 1147 mesh_pic_battle_tile_8 = 1148 mesh_pic_battle_tile_9 = 1149 mesh_pic_battle_tile_10 = 1150 mesh_pic_battle_tile_11 = 1151 mesh_pic_battle_tile_s1 = 1152 mesh_pic_battle_tile_s2 = 1153 mesh_pic_battle_tile_s3 = 1154 mesh_pic_battle_tile_s4 = 1155 mesh_pic_battle_tile_n1 = 1156 mesh_pic_gameover = 1157 mesh_pic_cla_mercernary = 1158 mesh_pic_cla_merchant = 1159 mesh_pic_cla_adventurer = 1160 mesh_pic_cla_lord = 1161 mesh_pic_cla_bandit = 1162 mesh_pic_cla_pirate = 1163 mesh_pic_ptown_euro = 1164 mesh_pic_ptown_snow = 1165 mesh_pic_ptown_roman = 1166 mesh_pic_ptown_arab = 1167 mesh_pic_ptown_wooden = 1168 mesh_pic_ptown_asia = 1169 mesh_pic_ptown_asia_2 = 1170 mesh_pic_ptown_jap = 1171 mesh_pic_ptown_uurt = 1172 mesh_pic_ptown_teepee = 1173 mesh_pic_meetlady4 = 1174 mesh_pic_battle_formation_backriver = 1175 mesh_pic_battle_formation_sideattack = 1176 mesh_pic_battle_formation_backattack = 1177 mesh_pic_battle_formation_8door = 1178 mesh_pic_battle_formation_encampment = 1179 mesh_pic_battle_formation_lionheart = 1180 mesh_pic_battle_formation_mangudai = 1181 mesh_pic_battle_formation_pincer = 1182 mesh_pic_battle_formation_base = 1183 mesh_OrteliusWorldMap1570 = 1184 mesh_pic_portrait_yoritomo = 1185 mesh_pic_portrait_munemori = 1186 mesh_pic_portrait_xiaozong = 1187 mesh_pic_portrait_shizong = 1188 mesh_pic_portrait_genghiskhan = 1189 mesh_pic_portrait_philip_ii = 1190 mesh_pic_portrait_richard_i = 1191 mesh_pic_portrait_barbarossa = 1192 mesh_pic_portrait_alfonso_viii = 1193 mesh_pic_portrait_yaqub = 1194 mesh_pic_portrait_baldwin = 1195 mesh_pic_portrait_saladin = 1196 mesh_pic_portrait_tekish = 1197 mesh_pic_portrait_ghiyath = 1198 mesh_pic_portrait_akbar = 1199 mesh_pic_portrait_ivan = 1200 mesh_pic_portrait_frederick_ii = 1201 mesh_pic_portrait_maxi = 1202 mesh_pic_portrait_john_iii = 1203 mesh_pic_portrait_selimii = 1204 mesh_pic_portrait_stephen = 1205 mesh_pic_portrait_elizabeth = 1206 mesh_pic_portrait_philip = 1207 mesh_pic_portrait_sebastian = 1208 mesh_pic_portrait_william = 1209 mesh_pic_portrait_wanli = 1210 mesh_pic_portrait_oda = 1211 mesh_town_t_plain = 1212 mesh_town_t_water = 1213 mesh_town_t_hill = 1214 mesh_town_t_desert = 1215 mesh_town_t_snow = 1216 mesh_town_t_mountain = 1217 mesh_town_t_mil = 1218 mesh_town_t_ore = 1219 mesh_town_t_horse = 1220 mesh_town_t_holy = 1221 mesh_town_t_pasture = 1222 mesh_town_t_mine = 1223 mesh_town_t_market = 1224 mesh_town_t_barrack = 1225 mesh_town_t_farm = 1226 mesh_town_t_hall = 1227 mesh_town_t_prison = 1228 mesh_town_t_library = 1229 mesh_town_t_temple = 1230 mesh_town_t_smithy = 1231 mesh_white_plane_upper = 1232 mesh_white_plane_center = 1233 mesh_town_e_onehand = 1234 mesh_town_e_twohand = 1235 mesh_town_e_polearm = 1236 mesh_town_e_bow = 1237 mesh_town_e_crossbow = 1238 mesh_town_e_arquebus = 1239 mesh_town_e_ammo = 1240 mesh_town_e_light = 1241 mesh_town_e_heavy = 1242 mesh_town_e_horse = 1243 mesh_town_e_siege = 1244 mesh_town_e_wood = 1245 mesh_town_e_shipammo = 1246 mesh_town_d_onehand = 1247 mesh_town_d_twohand = 1248 
mesh_town_d_polearm = 1249 mesh_town_d_bow = 1250 mesh_town_d_crossbow = 1251 mesh_town_d_arquebus = 1252 mesh_town_d_ammo = 1253 mesh_town_d_light = 1254 mesh_town_d_heavy = 1255 mesh_town_d_horse = 1256 mesh_town_d_siege = 1257 mesh_town_d_wood = 1258 mesh_town_d_shipammo = 1259 mesh_status_troop_ratio_bar = 1260 mesh_status_troop_ratio_bar_button = 1261
25.528063
58
0.843619
a0cc84ea1f11da3af87cb6aff03136b234f94184
30,936
py
Python
q2_longitudinal/_vega.py
thermokarst/q2-longitudinal
1967617214417b7097ce96e4a7dfdfbb5fd17faf
[ "BSD-3-Clause" ]
null
null
null
q2_longitudinal/_vega.py
thermokarst/q2-longitudinal
1967617214417b7097ce96e4a7dfdfbb5fd17faf
[ "BSD-3-Clause" ]
null
null
null
q2_longitudinal/_vega.py
thermokarst/q2-longitudinal
1967617214417b7097ce96e4a7dfdfbb5fd17faf
[ "BSD-3-Clause" ]
null
null
null
# ----------------------------------------------------------------------------
# Copyright (c) 2017-2018, QIIME 2 development team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file LICENSE, distributed with this software.
# ----------------------------------------------------------------------------

import json

import pandas as pd
33.699346
79
0.230185
a0ce075406a832ed84007060dd79bad299dae4e6
11,696
py
Python
state_workflow_sdk/api/state_workflow/state_workflow_client.py
easyopsapis/easyops-api-python
adf6e3bad33fa6266b5fa0a449dd4ac42f8447d0
[ "Apache-2.0" ]
5
2019-07-31T04:11:05.000Z
2021-01-07T03:23:20.000Z
state_workflow_sdk/api/state_workflow/state_workflow_client.py
easyopsapis/easyops-api-python
adf6e3bad33fa6266b5fa0a449dd4ac42f8447d0
[ "Apache-2.0" ]
null
null
null
state_workflow_sdk/api/state_workflow/state_workflow_client.py
easyopsapis/easyops-api-python
adf6e3bad33fa6266b5fa0a449dd4ac42f8447d0
[ "Apache-2.0" ]
null
null
null
# -*- coding: utf-8 -*-
import os
import sys

import state_workflow_sdk.api.state_workflow.callback_pb2
import state_workflow_sdk.api.state_workflow.createStateWorkflow_pb2
import state_workflow_sdk.model.state_workflow.stateWorkflow_pb2
import state_workflow_sdk.api.state_workflow.deleteStateWorkflow_pb2
import google.protobuf.empty_pb2
import state_workflow_sdk.api.state_workflow.filterInstanceOfStateWorkflow_pb2
import state_workflow_sdk.api.state_workflow.searchStateWorkflow_pb2
import state_workflow_sdk.api.state_workflow.transitWorkflowStatus_pb2
import state_workflow_sdk.utils.http_util
import google.protobuf.json_format
41.183099
254
0.658516
a0ceec8ec85ef44ddb9d9cd56199a36790b171fc
4,171
py
Python
tests/contour_classifiers/test_randomforest.py
yamathcy/motif
3f43568e59f0879fbab5ef278e9e687b7cac3dd6
[ "MIT" ]
21
2016-08-22T22:00:49.000Z
2020-03-29T04:15:19.000Z
tests/contour_classifiers/test_randomforest.py
yamathcy/motif
3f43568e59f0879fbab5ef278e9e687b7cac3dd6
[ "MIT" ]
22
2016-08-28T01:07:08.000Z
2018-02-07T14:38:26.000Z
tests/contour_classifiers/test_randomforest.py
yamathcy/motif
3f43568e59f0879fbab5ef278e9e687b7cac3dd6
[ "MIT" ]
3
2017-01-12T10:04:27.000Z
2022-01-06T13:25:48.000Z
"""Test for motif.classify.mvgaussian """ from __future__ import print_function import unittest import numpy as np from motif.contour_classifiers import random_forest
32.585938
76
0.529369
a0cf8257e1729da63a070f7fb21ed2b3279418e3
7,365
py
Python
awsenv/profile.py
KensoDev/awsenv
4bf759106d2e0d79221d0ca9188ed7686e119b2c
[ "Apache-2.0" ]
6
2016-09-11T08:39:50.000Z
2018-10-22T13:41:34.000Z
awsenv/profile.py
KensoDev/awsenv
4bf759106d2e0d79221d0ca9188ed7686e119b2c
[ "Apache-2.0" ]
1
2017-01-09T23:58:20.000Z
2017-01-09T23:58:20.000Z
awsenv/profile.py
KensoDev/awsenv
4bf759106d2e0d79221d0ca9188ed7686e119b2c
[ "Apache-2.0" ]
5
2017-01-09T23:26:12.000Z
2021-09-08T09:35:59.000Z
""" Profile-aware session wrapper. """ from os import environ from botocore.exceptions import ProfileNotFound from botocore.session import Session from awsenv.cache import CachedSession def get_default_profile_name(): """ Get the default profile name from the environment. """ return environ.get("AWS_DEFAULT_PROFILE", "default") class AWSProfile(AWSSession): """ AWS profile configuration. """ def __init__(self, profile, session_duration, cached_session, account_id=None): """ Configure a session for a profile. :param profile: the name of the profile to use, if any :param session_duration: the duration of the session (in seconds) must be in the range 900-3600 :param cached_session: the cached session to use, if any :param account_id: the account id for profile auto-generation (if any) """ self.session_duration = session_duration self.cached_session = cached_session self.account_id = account_id super(AWSProfile, self).__init__(profile) def to_envvars(self): return { "AWS_ACCESS_KEY_ID": self.access_key_id, "AWS_DEFAULT_REGION": self.region_name, "AWS_PROFILE": self.profile, "AWS_SECRET_ACCESS_KEY": self.secret_access_key, "AWS_SESSION_NAME": self.session_name, "AWS_SESSION_TOKEN": self.session_token, } def update_credentials(self): """ Update the profile's credentials by assuming a role, if necessary. """ if not self.role_arn: return if self.cached_session is not None: # use current role access_key, secret_key = self.current_role() else: # assume role to get a new token access_key, secret_key = self.assume_role() if access_key and secret_key: self.session.set_credentials( access_key=access_key, secret_key=secret_key, token=self.cached_session.token if self.cached_session else None, ) def current_role(self): """ Load credentials for the current role. """ return ( environ.get("AWS_ACCESS_KEY_ID", self.access_key_id), environ.get("AWS_SECRET_ACCESS_KEY", self.secret_access_key), ) def assume_role(self): """ Assume a role. """ # we need to pass in the regions and keys because botocore does not # automatically merge configuration from the source_profile sts_client = self.session.create_client( service_name="sts", region_name=self.region_name, aws_access_key_id=self.access_key_id, aws_secret_access_key=self.secret_access_key, ) session_name = CachedSession.make_name() result = sts_client.assume_role(**{ "RoleArn": self.role_arn, "RoleSessionName": session_name, "DurationSeconds": self.session_duration, }) # update the cached session self.cached_session = CachedSession( name=session_name, token=result["Credentials"]["SessionToken"], profile=self.profile, ) return ( result["Credentials"]["AccessKeyId"], result["Credentials"]["SecretAccessKey"], )
31.075949
91
0.60611
a0d0d288568d1ad31c787944a756b68fdcfc394c
13,358
py
Python
cail/algo/twoiwil.py
Stanford-ILIAD/Confidence-Aware-Imitation-Learning
1d8af0e4ab87a025885133a2384d5a937329b2f5
[ "MIT" ]
16
2021-10-30T15:19:37.000Z
2022-03-23T12:57:49.000Z
cail/algo/twoiwil.py
syzhang092218-source/Confidence-Aware-Imitation-Learning
1d8af0e4ab87a025885133a2384d5a937329b2f5
[ "MIT" ]
null
null
null
cail/algo/twoiwil.py
syzhang092218-source/Confidence-Aware-Imitation-Learning
1d8af0e4ab87a025885133a2384d5a937329b2f5
[ "MIT" ]
2
2021-11-29T11:28:16.000Z
2022-03-06T14:12:47.000Z
import torch
import os
import torch.nn.functional as F
import numpy as np
import copy

from torch import nn
from torch.optim import Adam
from torch.autograd import Variable
from torch.utils.tensorboard import SummaryWriter
from tqdm import tqdm
from typing import Tuple

from .ppo import PPO, PPOExpert
from .utils import CULoss
from cail.network import AIRLDiscrim, Classifier
from cail.buffer import SerializedBuffer
34.786458
105
0.586166
a0d0f0826bf05af84c68e2d12e3788dc07ebfcd6
7,327
py
Python
data/generation_scripts/MantaFlow/scripts3D/compactifyData.py
tum-pbs/VOLSIM
795a31c813bf072eb88289126d7abd9fba8b0e54
[ "MIT" ]
7
2022-01-28T09:40:15.000Z
2022-03-07T01:52:00.000Z
data/generation_scripts/MantaFlow/scripts3D/compactifyData.py
tum-pbs/VOLSIM
795a31c813bf072eb88289126d7abd9fba8b0e54
[ "MIT" ]
null
null
null
data/generation_scripts/MantaFlow/scripts3D/compactifyData.py
tum-pbs/VOLSIM
795a31c813bf072eb88289126d7abd9fba8b0e54
[ "MIT" ]
1
2022-03-14T22:08:47.000Z
2022-03-14T22:08:47.000Z
import numpy as np import os, shutil import imageio baseDir = "data/train_verbose" outDir = "data/train" #baseDir = "data/test_verbose" #outDir = "data/test" outDirVidCopy = "data/videos" combineVidsAll = {"smoke" : ["densMean", "densSlice", "velMean", "velSlice", "presMean", "presSlice"], "liquid": ["flagsMean", "flagsSlice", "velMean", "velSlice", "phiMean", "phiSlice"] } convertData = True processVid = True copyVidOnly = False ignoreTop = ["shapes", "waves"] ignoreSim = [] ignoreFrameDict = {} excludeIgnoreFrame = False topDirs = os.listdir(baseDir) topDirs.sort() #shutil.rmtree(outDir) #os.makedirs(outDir) # top level folders for topDir in topDirs: mantaMsg("\n" + topDir) if ignoreTop and any( item in topDir for item in ignoreTop ) : mantaMsg("Ignored") continue simDir = os.path.join(baseDir, topDir) sims = os.listdir(simDir) sims.sort() # sim_000000 folders for sim in sims: if ignoreSim and any( item in sim for item in ignoreSim ) : mantaMsg(sim + " - Ignored") continue currentDir = os.path.join(simDir, sim) files = os.listdir(currentDir) files.sort() destDir = os.path.join(outDir, topDir, sim) #if os.path.isdir(destDir): # shutil.rmtree(destDir) if not os.path.isdir(destDir): os.makedirs(destDir) # single files for file in files: filePath = os.path.join(currentDir, file) # copy src folder to destination if os.path.isdir(filePath) and file == "src": dest = os.path.join(destDir, "src") if not os.path.isdir(dest): shutil.copytree(filePath, dest, symlinks=False) # combine video files elif os.path.isdir(filePath) and file == "render": if not processVid: continue dest = os.path.join(destDir, "render") if copyVidOnly: shutil.copytree(filePath, dest, symlinks=False) continue if not os.path.isdir(dest): os.makedirs(dest) #mantaMsg(file) renderDir = os.path.join(currentDir, "render") vidFiles = os.listdir(renderDir) if "smoke" in topDir: combineVids = combineVidsAll["smoke"] elif "liquid" in topDir: combineVids = combineVidsAll["liquid"] else: combineVids = [""] for vidFile in vidFiles: if combineVids[0] + "00.mp4" not in vidFile: continue vidLine = [] for combineVid in combineVids: # find all video part files corresponding to current one vidParts = [] i = 0 while os.path.exists(os.path.join(renderDir, vidFile.replace(combineVids[0]+"00.mp4", combineVid+"%02d.mp4" % i))): vidParts.append(vidFile.replace(combineVids[0]+"00.mp4", combineVid+"%02d.mp4" % i)) i += 1 assert len(vidParts) == 11 # combine each video part file loadedVids = [] for part in vidParts: currentFile = os.path.join(renderDir, part) loaded = imageio.mimread(currentFile) #mantaMsg(len(loaded)) #mantaMsg(loaded[0].shape) loadedVids.append(loaded) #temp1 = np.concatenate(loadedVids[0:4], axis=2) #temp2 = np.concatenate(loadedVids[4:8], axis=2) #temp3 = np.concatenate(loadedVids[8:11]+[np.zeros_like(loadedVids[0])], axis=2) #vidLine.append(np.concatenate([temp1, temp2, temp3], axis=1)) vidLine.append(np.concatenate(loadedVids, axis=2)) combined = np.concatenate(vidLine, axis=1) # save combined file if combineVids[0] == "": newName = os.path.join(dest, "%s_%s_%s.mp4" % (topDir, sim, vidFile.replace("00.mp4", ".mp4"))) else: newName = os.path.join(dest, "%s_%s.mp4" % (topDir, sim)) imageio.mimwrite(newName, combined, quality=6, fps=11, ffmpeg_log_level="error") # save copy if combineVids[0] == "": newNameCopy = os.path.join(outDirVidCopy, "%s_%s_%s.mp4" % (topDir, sim, vidFile.replace("00.mp4", ".mp4"))) else: newNameCopy = os.path.join(outDirVidCopy, "%s_%s.mp4" % (topDir, sim)) imageio.mimwrite(newNameCopy, combined, quality=6, 
fps=11, ffmpeg_log_level="error") # copy description files to destination elif os.path.splitext(filePath)[1] == ".json" or os.path.splitext(filePath)[1] == ".py" or os.path.splitext(filePath)[1] == ".log": shutil.copy(filePath, destDir) # ignore other dirs and non .npz files elif os.path.isdir(filePath) or os.path.splitext(filePath)[1] != ".npz" or "part00" not in file: continue # combine part files else: if not convertData: continue if ignoreFrameDict: filterFrames = [] for key, value in ignoreFrameDict.items(): if key in topDir: filterFrames = value break assert (filterFrames != []), "Keys in filterFrameDict don't match dataDir structure!" # continue for frames when excluding or including according to filter if excludeIgnoreFrame == any( item in file for item in filterFrames ): continue # find all part files corresponding to current one parts = [file] i = 1 while os.path.exists(os.path.join(currentDir, file.replace("part00", "part%02d" % i))): parts.append(file.replace("part00", "part%02d" % i)) i += 1 assert len(parts) == 11 # combine each part file domain = np.load(os.path.join(currentDir, parts[0]))['arr_0'] res = domain.shape[0] combined = np.zeros([len(parts), res, res, res, domain.shape[3]]) for f in range(len(parts)): currentFile = os.path.join(currentDir, parts[f]) loaded = np.load(currentFile)['arr_0'] combined[f] = loaded # save combined file newName = file.replace("_part00", "") np.savez_compressed( os.path.join(destDir, newName), combined ) loaded = np.load( os.path.join(destDir, newName) )['arr_0'] mantaMsg(os.path.join(sim, newName) + "\t" + str(loaded.shape))
43.613095
153
0.512079
a0d159678318f4de46108d8e3c19f4a355d8744f
14,238
py
Python
qiskit/aqua/operators/base_operator.py
Sahar2/qiskit-aqua
a228fbe6b9613cff43e47796a7e4843deba2b051
[ "Apache-2.0" ]
null
null
null
qiskit/aqua/operators/base_operator.py
Sahar2/qiskit-aqua
a228fbe6b9613cff43e47796a7e4843deba2b051
[ "Apache-2.0" ]
null
null
null
qiskit/aqua/operators/base_operator.py
Sahar2/qiskit-aqua
a228fbe6b9613cff43e47796a7e4843deba2b051
[ "Apache-2.0" ]
null
null
null
# -*- coding: utf-8 -*- # This code is part of Qiskit. # # (C) Copyright IBM 2019. # # This code is licensed under the Apache License, Version 2.0. You may # obtain a copy of this license in the LICENSE.txt file in the root directory # of this source tree or at http://www.apache.org/licenses/LICENSE-2.0. # # Any modifications or derivative works of this code must retain this # copyright notice, and modified files need to carry a notice indicating # that they have been altered from the originals. from abc import ABC, abstractmethod import warnings from qiskit import QuantumCircuit def _to_dia_matrix(self, mode=None): warnings.warn("_to_dia_matrix method is removed, use the `MatrixOperator` class to get diagonal matrix. And " "the current deprecated method does NOT modify the original object, it returns the dia_matrix", DeprecationWarning) from .op_converter import to_matrix_operator mat_op = to_matrix_operator(self) return mat_op.dia_matrix def enable_summarize_circuits(self): warnings.warn("enable_summarize_circuits method is removed. Enable the summary at QuantumInstance", DeprecationWarning) def disable_summarize_circuits(self): warnings.warn("disable_summarize_circuits method is removed. Disable the summary at QuantumInstance", DeprecationWarning) def find_Z2_symmetries(self): warnings.warn("The `find_Z2_symmetries` method is deprecated and it will be removed after 0.6, " "Use the class method in the `Z2Symmetries` class instead", DeprecationWarning) from .weighted_pauli_operator import Z2Symmetries from .op_converter import to_weighted_pauli_operator wp_op = to_weighted_pauli_operator(self) self._z2_symmetries = Z2Symmetries.find_Z2_symmetries(wp_op) return self._z2_symmetries.symmetries, self._z2_symmetries.sq_paulis, \ self._z2_symmetries.cliffords, self._z2_symmetries.sq_list def to_grouped_paulis(self): warnings.warn("to_grouped_paulis method is deprecated and it will be removed after 0.6. And the current " "deprecated method does NOT modify the original object, it returns the grouped weighted pauli " "operator. Please check the qiskit.aqua.operators.op_convertor for converting to different " "types of operators. For grouping paulis, you can create your own grouping func to create the " "class you need.", DeprecationWarning) from .op_converter import to_tpb_grouped_weighted_pauli_operator from .tpb_grouped_weighted_pauli_operator import TPBGroupedWeightedPauliOperator return to_tpb_grouped_weighted_pauli_operator(self, grouping_func=TPBGroupedWeightedPauliOperator.sorted_grouping) def to_paulis(self): warnings.warn("to_paulis method is deprecated and it will be removed after 0.6. And the current deprecated " "method does NOT modify the original object, it returns the weighted pauli operator." "Please check the qiskit.aqua.operators.op_convertor for converting to different types of " "operators", DeprecationWarning) from .op_converter import to_weighted_pauli_operator return to_weighted_pauli_operator(self) def to_matrix(self): warnings.warn("to_matrix method is deprecated and it will be removed after 0.6. And the current deprecated " "method does NOT modify the original object, it returns the matrix operator." "Please check the qiskit.aqua.operators.op_convertor for converting to different types of " "operators", DeprecationWarning) from .op_converter import to_matrix_operator return to_matrix_operator(self) def to_weighted_pauli_operator(self): warnings.warn("to_weighted_apuli_operator method is temporary helper method and it will be removed after 0.6. 
" "Please check the qiskit.aqua.operators.op_convertor for converting to different types of " "operators", DeprecationWarning) from .op_converter import to_weighted_pauli_operator return to_weighted_pauli_operator(self) def to_matrix_operator(self): warnings.warn("to_matrix_operator method is temporary helper method and it will be removed after 0.6. " "Please check the qiskit.aqua.operators.op_convertor for converting to different types of " "operators", DeprecationWarning) from .op_converter import to_matrix_operator return to_matrix_operator(self) def to_tpb_grouped_weighted_pauli_operator(self): warnings.warn("to_tpb_grouped_weighted_pauli_operator method is temporary helper method and it will be " "removed after 0.6. Please check the qiskit.aqua.operators.op_convertor for converting to " "different types of operators", DeprecationWarning) from .op_converter import to_tpb_grouped_weighted_pauli_operator from .tpb_grouped_weighted_pauli_operator import TPBGroupedWeightedPauliOperator return to_tpb_grouped_weighted_pauli_operator( self, grouping_func=TPBGroupedWeightedPauliOperator.sorted_grouping)
44.633229
122
0.666877
a0d37d7e9574c755f53a5c193de3f30cb81ee61a
4,447
py
Python
DataAnalysis/utils.py
Timlo512/AnomalyStockDetection
29f9aaef14f1d9823980d8022cdce1f7f6310813
[ "MIT" ]
2
2020-12-19T05:24:29.000Z
2021-05-15T19:35:40.000Z
DataAnalysis/utils.py
Timlo512/AnomalyStockDetection
29f9aaef14f1d9823980d8022cdce1f7f6310813
[ "MIT" ]
null
null
null
DataAnalysis/utils.py
Timlo512/AnomalyStockDetection
29f9aaef14f1d9823980d8022cdce1f7f6310813
[ "MIT" ]
5
2020-11-21T02:25:13.000Z
2022-01-31T12:46:02.000Z
import pandas as pd import numpy as np from sklearn.metrics import confusion_matrix import re def convert_data_sparse_matrix(df, row_label = 'stock_code', col_label = 'name_of_ccass_participant', value_label = 'shareholding'): """ Pivot table """ try: # Prepare zero matrix row_dim = len(df[row_label].unique()) col_dim = len(df[col_label].unique()) sparse_matrix = np.zeros((row_dim, col_dim)) # Prepare label to index dictionaries row_ind_dict = {label: ind for ind, label in enumerate(sorted(df[row_label].unique().tolist()))} col_ind_dict = {label: ind for ind, label in enumerate(sorted(df[col_label].unique().tolist()))} # Transform row_label column and col_label column to index df['row_ind'] = df[row_label].apply(lambda x: row_ind_dict[x]) df['col_ind'] = df[col_label].apply(lambda x: col_ind_dict[x]) for ind, row in df.iterrows(): # Get index and shareholding row_ind = row['row_ind'] col_ind = row['col_ind'] value = row[value_label] # Assign to sparse matrix sparse_matrix[row_ind, col_ind] += value return sparse_matrix, row_ind_dict, col_ind_dict except Exception as e: print(e) return None def cluster_predict(label, min_pts = 'auto'): """ Input: an array of clustered label for each instance return: an array of anomaly label for each instance """ try: # Get unique label and its counts (unique, counts) = np.unique(label, return_counts = True) # Define minimum points that it should have in a cluster, if auto, it will take the min count if min_pts == 'auto': min_pts = min(counts) print('Minimum points of a cluster among the clusters: ', min_pts) else: min_pts = int(min_pts) # Prepare label_dict for mapping label_dict = {label: 0 if count > min_pts else 1 for label, count in zip(unique, counts)} # Map label_dict to label return np.array([label_dict[i] for i in label]) except Exception as e: print(e) return None
32.698529
132
0.614796
a0d5155e320c1b2b6704a06d42d9b58088cb485b
1,429
py
Python
scripts/prepare_upload_files.py
MaayanLab/scAVI
7f3f83657d749520243535581db1080075e48aa5
[ "Apache-2.0" ]
3
2020-01-23T08:48:33.000Z
2021-07-21T02:42:28.000Z
scripts/prepare_upload_files.py
MaayanLab/scAVI
7f3f83657d749520243535581db1080075e48aa5
[ "Apache-2.0" ]
21
2019-10-25T15:38:37.000Z
2022-01-27T16:04:04.000Z
scripts/prepare_upload_files.py
MaayanLab/scAVI
7f3f83657d749520243535581db1080075e48aa5
[ "Apache-2.0" ]
1
2019-10-24T18:15:26.000Z
2019-10-24T18:15:26.000Z
''' Prepare some files to test the upload functionality. ''' import sys sys.path.append('../') from database import * from pymongo import MongoClient mongo = MongoClient(MONGOURI) db = mongo['SCV'] coll = db['dataset'] from gene_expression import * expr_df, meta_doc = load_read_counts_and_meta(organism='mouse', gse='GSE96870') # rename the samples expr_df.columns = ['sample_%d' % i for i in range(len(expr_df.columns))] meta_df = pd.DataFrame(meta_doc['meta_df']) meta_df.index = expr_df.columns meta_df.index.name = 'sample_ID' # parse the meta_df a bit meta_df['Sample_characteristics_ch1'] = meta_df['Sample_characteristics_ch1'].map(lambda x:x.split('\t')) keys_from_char_ch1 = [item.split(': ')[0] for item in meta_df['Sample_characteristics_ch1'][0]] for i, key in enumerate(keys_from_char_ch1): meta_df[key] = meta_df['Sample_characteristics_ch1'].map(lambda x:x[i].split(': ')[1]) # drop unnecessary columns in meta_df meta_df = meta_df.drop(['Sample_characteristics_ch1', 'Sample_relation', 'Sample_geo_accession', 'Sample_supplementary_file_1'], axis=1) # fake a column of continuous values meta_df['random_continuous_attr'] = np.random.randn(meta_df.shape[0]) meta_df.to_csv('../data/sample_metadata.csv') # raw read counts expr_df.to_csv('../data/sample_read_counts_%dx%d.csv' % expr_df.shape) # CPMs expr_df = compute_CPMs(expr_df) expr_df.to_csv('../data/sample_CPMs_%dx%d.csv' % expr_df.shape)
30.404255
105
0.751575
a0d646ba03a4465fe2514a5e2b0f73386fb45c4c
2,321
py
Python
app/api/V1/views/products.py
Paulvitalis200/Store-Manager-API
d61e91bff7fc242da2a93d1caf1012465c7c904a
[ "MIT" ]
null
null
null
app/api/V1/views/products.py
Paulvitalis200/Store-Manager-API
d61e91bff7fc242da2a93d1caf1012465c7c904a
[ "MIT" ]
4
2018-10-21T18:28:03.000Z
2018-10-24T12:48:24.000Z
app/api/V1/views/products.py
Paulstar200/Store-Manager-API
d61e91bff7fc242da2a93d1caf1012465c7c904a
[ "MIT" ]
null
null
null
from flask import Flask, request from flask_restful import Resource, reqparse from flask_jwt_extended import create_access_token, jwt_required from app.api.V1.models import Product, products # Get a single specific product
35.166667
111
0.616545
a0d68497a4530b9b9bb8366ff9da7d608dd9a751
1,155
py
Python
51-100/p87.py
YiWeiShen/Project-Euler-Hints
a79cacab075dd98d393516f083aaa7ffc6115a06
[ "MIT" ]
1
2019-02-25T13:00:31.000Z
2019-02-25T13:00:31.000Z
51-100/p87.py
YiWeiShen/Project-Euler-Hints
a79cacab075dd98d393516f083aaa7ffc6115a06
[ "MIT" ]
null
null
null
51-100/p87.py
YiWeiShen/Project-Euler-Hints
a79cacab075dd98d393516f083aaa7ffc6115a06
[ "MIT" ]
null
null
null
import time from multiprocessing.pool import Pool if __name__ == '__main__': t = time.time() p1 = Pool(processes=30) p2 = Pool(processes=30) p3 = Pool(processes=30) num1 = range(2, 7072) num2 = range(2, 369) num3 = range(2, 85) prime_list1 = p1.map(is_prime, num1) p1.close() p1.join() prime_list2 = p2.map(is_prime, num2) p2.close() p2.join() prime_list3 = p3.map(is_prime, num3) p3.close() p3.join() prime_list1_clear = [x for x in prime_list1 if x is not None] prime_list2_clear = [x for x in prime_list2 if x is not None] prime_list3_clear = [x for x in prime_list3 if x is not None] result_list = [] for i in prime_list1_clear: print(i) for j in prime_list2_clear: for k in prime_list3_clear: test_num = i**2 + j**3 + k**4 if test_num < 50000000: result_list.append(test_num) print(str(len(list(set(result_list))))) print('time:'+str(time.time()-t))
26.860465
65
0.587013
a0d6b47a07ed18120ebb9b10352d658a22a11ecb
267
py
Python
Clean Word/index.py
Sudani-Coder/python
9c35f04a0521789ba91b7058695139ed074f7796
[ "MIT" ]
null
null
null
Clean Word/index.py
Sudani-Coder/python
9c35f04a0521789ba91b7058695139ed074f7796
[ "MIT" ]
null
null
null
Clean Word/index.py
Sudani-Coder/python
9c35f04a0521789ba91b7058695139ed074f7796
[ "MIT" ]
null
null
null
# recursion function (Clean Word) print(CleanWord("wwwooooorrrrllddd"))
19.071429
44
0.58427
a0d7aa3f87b3b51ae56654591cba7faff73f9f8f
665
py
Python
commands/rotatecamera.py
1757WestwoodRobotics/mentorbot
3db344f3b35c820ada4e1aef3eca9b1fc4c5b85a
[ "MIT" ]
2
2021-11-13T20:18:44.000Z
2021-11-13T20:27:04.000Z
commands/rotatecamera.py
1757WestwoodRobotics/mentorbot
3db344f3b35c820ada4e1aef3eca9b1fc4c5b85a
[ "MIT" ]
null
null
null
commands/rotatecamera.py
1757WestwoodRobotics/mentorbot
3db344f3b35c820ada4e1aef3eca9b1fc4c5b85a
[ "MIT" ]
1
2021-11-14T01:38:53.000Z
2021-11-14T01:38:53.000Z
import typing from commands2 import CommandBase from subsystems.cameracontroller import CameraSubsystem
28.913043
70
0.667669
a0d85ead79155e87bca877ab2df552ddd4292930
8,188
py
Python
instapp/views.py
uwamahororachel/instagram
d5b7127e62047287dfadec15743676df48f278a9
[ "MIT" ]
null
null
null
instapp/views.py
uwamahororachel/instagram
d5b7127e62047287dfadec15743676df48f278a9
[ "MIT" ]
null
null
null
instapp/views.py
uwamahororachel/instagram
d5b7127e62047287dfadec15743676df48f278a9
[ "MIT" ]
null
null
null
from django.shortcuts import render,redirect from django.http import HttpResponse, Http404,HttpResponseRedirect import datetime as dt from .models import Post,Comment,Follow,Profile from django.contrib.auth.decorators import login_required from .forms import NewPostForm, NewCommentForm, AddProfileForm from django.contrib.auth.models import User def delete_post(request,post_id): post = Post.objects.get(pk=post_id) post.delete_post() return redirect('my_profile')
36.882883
151
0.626282
a0d898d83393f9e2a6f4299d21f948ceddccd556
238
py
Python
2008/wxpytris/wxpytris.py
mikiec84/code-for-blog
79b2264f9a808eb14f624cb3c5ae7624038c043a
[ "Unlicense" ]
1,199
2015-01-06T14:09:37.000Z
2022-03-29T19:39:51.000Z
2008/wxpytris/wxpytris.py
mikiec84/code-for-blog
79b2264f9a808eb14f624cb3c5ae7624038c043a
[ "Unlicense" ]
25
2016-07-29T15:44:01.000Z
2021-11-19T16:21:01.000Z
2008/wxpytris/wxpytris.py
mikiec84/code-for-blog
79b2264f9a808eb14f624cb3c5ae7624038c043a
[ "Unlicense" ]
912
2015-01-04T00:39:50.000Z
2022-03-29T06:50:22.000Z
import sys import wx sys.path.insert(0, 'lib.zip') from lib.TetrisGame import TetrisGame if __name__ == '__main__': app = wx.PySimpleApp() frame = TetrisGame(None) frame.Show(True) app.MainLoop()
11.9
38
0.617647
a0d89d58810bc392058c43540e5719fda8ed9934
6,822
py
Python
cfg.py
alexandonian/relational-set-abstraction
8af6a6a58883ce59c7b29e4161ff970e3bded642
[ "MIT" ]
9
2020-09-17T23:09:42.000Z
2021-12-29T09:56:24.000Z
cfg.py
alexandonian/relational-set-abstraction
8af6a6a58883ce59c7b29e4161ff970e3bded642
[ "MIT" ]
null
null
null
cfg.py
alexandonian/relational-set-abstraction
8af6a6a58883ce59c7b29e4161ff970e3bded642
[ "MIT" ]
1
2021-01-16T07:19:42.000Z
2021-01-16T07:19:42.000Z
import argparse import torch import logger import models import utils NUM_NODES = { 'moments': 391, 'multimoments': 391, 'kinetics': 608, } CRITERIONS = { 'CE': {'func': torch.nn.CrossEntropyLoss}, 'MSE': {'func': torch.nn.MSELoss}, 'BCE': {'func': torch.nn.BCEWithLogitsLoss}, } OPTIMIZERS = { 'SGD': { 'func': torch.optim.SGD, 'lr': 0.001, 'momentum': 0.9, 'weight_decay': 5e-4, }, 'Adam': {'func': torch.optim.Adam, 'weight_decay': 5e-4}, } SCHEDULER_DEFAULTS = {'CosineAnnealingLR': {'T_max': 100}} METAFILE_FILE = { 'moments': { 'train': 'metadata/moments_train_abstraction_sets.json', 'val': 'metadata/moments_val_abstraction_sets.json', }, 'kinetics': { 'train': 'metadata/kinetics_train_abstraction_sets.json', 'val': 'metadata/kinetics_val_abstraction_sets.json', }, } FEATURES_FILE = { 'moments': { 'train': 'metadata/resnet3d50_moments_train_features.pth', 'val': 'metadata/resnet3d50_moments_val_features.pth', 'test': 'metadata/resnet3d50_moments_test_features.pth', }, 'kinetics': { 'train': 'metadata/resnet3d50_kinetics_train_features.pth', 'val': 'metadata/resnet3d50_kinetics_val_features.pth', 'test': 'metadata/resnet3d50_kinetics_test_features.pth', }, } EMBEDDING_FILE = { 'moments': { 'train': 'metadata/moments_train_embeddings.pth', 'val': 'metadata/moments_val_embeddings.pth', }, 'kinetics': { 'train': 'metadata/kinetics_train_embeddings.pth', 'val': 'metadata/kinetics_val_embeddings.pth', 'test': 'metadata/kinetics_test_embeddings.pth', }, } EMBEDDING_CATEGORIES_FILE = { 'moments': 'metadata/moments_category_embeddings.pth', 'kinetics': 'metadata/kinetics_category_embeddings.pth', } LIST_FILE = { 'moments': { 'train': 'metadata/moments_train_listfile.txt', 'val': 'metadata/moments_val_listfile.txt', 'test': 'metadata/moments_test_listfile.txt', }, 'kinetics': { 'train': 'metadata/kinetics_train_listfile.txt', 'val': 'metadata/kinetics_val_listfile.txt', 'test': 'metadata/kinetics_test_listfile.txt', }, } RANKING_FILE = { 'moments': 'metadata/moments_human_abstraction_sets.json', 'kinetics': 'metadata/kinetics_human_abstraction_sets.json', } GRAPH_FILE = { 'moments': 'metadata/moments_graph.json', 'kinetics': 'metadata/kinetics_graph.json', }
32.956522
86
0.650836
a0dac9d01fbc63e4052a6ea761aeaa779debac1b
2,021
py
Python
Spider/SpiderLab/lab3/lab3/spiders/spider_msg.py
JimouChen/python-application
b7b16506a17e2c304d1c5fabd6385e96be211c56
[ "Apache-2.0" ]
1
2020-08-09T12:47:27.000Z
2020-08-09T12:47:27.000Z
Spider/SpiderLab/lab3/lab3/spiders/spider_msg.py
JimouChen/Python_Application
b7b16506a17e2c304d1c5fabd6385e96be211c56
[ "Apache-2.0" ]
null
null
null
Spider/SpiderLab/lab3/lab3/spiders/spider_msg.py
JimouChen/Python_Application
b7b16506a17e2c304d1c5fabd6385e96be211c56
[ "Apache-2.0" ]
null
null
null
import scrapy from bs4 import BeautifulSoup from lab3.items import Lab3Item
40.42
102
0.568036
a0db51a733ae0c8c54da89e34dba10cbd38f7150
1,236
py
Python
Aditya/Parametric_Models/WeiExpLog.py
cipheraxat/Survival-Analysis
fb7ecbe4a61fc72785a4327c86e0f81a58c5b3df
[ "Apache-2.0" ]
7
2020-06-14T20:43:55.000Z
2020-06-23T06:07:08.000Z
Aditya/Parametric_Models/WeiExpLog.py
Abhijit2505/Survival-Analysis
94c0c386aacfe03a9f2f018511236292f36c4ed9
[ "Apache-2.0" ]
14
2020-06-20T06:28:50.000Z
2020-09-08T15:54:29.000Z
Aditya/Parametric_Models/WeiExpLog.py
Abhijit2505/Survival-Analysis
94c0c386aacfe03a9f2f018511236292f36c4ed9
[ "Apache-2.0" ]
9
2020-06-19T03:50:21.000Z
2021-05-10T18:19:26.000Z
import matplotlib.pyplot as plt from lifelines import (WeibullFitter, ExponentialFitter, LogNormalFitter, LogLogisticFitter) import pandas as pd data = pd.read_csv('Dataset/telco_customer.csv') data['tenure'] = pd.to_numeric(data['tenure']) data = data[data['tenure'] > 0] # Replace yes and No in the Churn column to 1 and 0. 1 for the event and 0 for the censured data. data['Churn'] = data['Churn'].apply(lambda x: 1 if x == 'Yes' else 0) fig, axes = plt.subplots(2, 2, figsize=( 16, 12)) T = data['tenure'] E = data['Churn'] wbf = WeibullFitter().fit(T, E, label='WeibullFitter') ef = ExponentialFitter().fit(T, E, label='ExponentialFitter') lnf = LogNormalFitter().fit(T, E, label='LogNormalFitter') llf = LogLogisticFitter().fit(T, E, label='LogLogisticFitter') wbf.plot_cumulative_hazard(ax=axes[0][0]) ef.plot_cumulative_hazard(ax=axes[0][1]) lnf.plot_cumulative_hazard(ax=axes[1][0]) llf.plot_cumulative_hazard(ax=axes[1][1]) plt.suptitle( 'Parametric Model Implementation of cumulative hazard function on the Telco dataset') fig.text(0.5, 0.04, 'Timeline', ha='center') fig.text(0.04, 0.5, 'Probability', va='center', rotation='vertical') plt.savefig('Images/WeiExpLogx.jpeg') plt.show()
34.333333
97
0.711974
a0de95c4112c071280835a86de6b15a92fec2e83
2,260
py
Python
spoteno/steps/numbers.py
Z-80/spoteno
5d2ae7da437cfd8f9cf351b9602269c115dcd46f
[ "MIT" ]
2
2020-01-16T10:23:05.000Z
2021-11-17T15:44:29.000Z
spoteno/steps/numbers.py
Z-80/spoteno
5d2ae7da437cfd8f9cf351b9602269c115dcd46f
[ "MIT" ]
null
null
null
spoteno/steps/numbers.py
Z-80/spoteno
5d2ae7da437cfd8f9cf351b9602269c115dcd46f
[ "MIT" ]
2
2021-03-25T12:06:36.000Z
2021-11-17T15:44:30.000Z
import re import num2words INT_PATTERN = re.compile(r'^-?[0-9]+$') FLOAT_PATTERN = re.compile(r'^-?[0-9]+[,\.][0-9]+$') ORDINAL_PATTERN = re.compile(r'^[0-9]+\.?$') NUM_PATTERN = re.compile(r'^-?[0-9]+([,\.][0-9]+$)?')
23.541667
61
0.511504
a0e1d41f3732cef98c2895b100facec425069d9c
4,252
py
Python
src/django_website/django_website/tests/test_views.py
jdheinz/project-ordo_ab_chao
4063f93b297bab43cff6ca64fa5ba103f0c75158
[ "MIT" ]
2
2019-09-23T18:42:32.000Z
2019-09-27T00:33:38.000Z
src/django_website/django_website/tests/test_views.py
jdheinz/project-ordo_ab_chao
4063f93b297bab43cff6ca64fa5ba103f0c75158
[ "MIT" ]
6
2021-03-19T03:25:33.000Z
2022-02-10T08:48:14.000Z
src/django_website/django_website/tests/test_views.py
jdheinz/project-ordo_ab_chao
4063f93b297bab43cff6ca64fa5ba103f0c75158
[ "MIT" ]
6
2019-09-23T18:53:41.000Z
2020-02-06T00:20:06.000Z
from django.test import TransactionTestCase from django.test import TestCase from django.urls import reverse from home_page.models import Search from ebaysdk.finding import Connection as finding
38.654545
107
0.670508
a0e28476be0fa65ebedd554ed275a8386f751e73
869
py
Python
tests/string/generate_string.py
om719/Bloom-Filter-CPP
8093448b3ea357831b6de25aee9e0e7271b762fa
[ "MIT" ]
3
2021-05-31T18:41:34.000Z
2021-06-01T04:44:15.000Z
tests/string/generate_string.py
om719/Bloom-Filter-CPP
8093448b3ea357831b6de25aee9e0e7271b762fa
[ "MIT" ]
null
null
null
tests/string/generate_string.py
om719/Bloom-Filter-CPP
8093448b3ea357831b6de25aee9e0e7271b762fa
[ "MIT" ]
2
2021-05-31T18:41:48.000Z
2021-05-31T18:47:14.000Z
from key_generator.key_generator import generate all_sizes_required = [(100, '100'), (500, '500'), (1000, '1K'), (5000, '5K'), (10000, '10K'), (50000, '50K'), (100000, '100K'), (500000, '500K')] for file_size in all_sizes_required: OUTPUT_PATH = "./string_test_" + file_size[1] + ".txt" STRING_COUNT = file_size[0] output_file = open(OUTPUT_PATH, "w") for i in range(STRING_COUNT): string = "" recipient = generate( num_of_atom = 1, type_of_value = "hex", capital = "mix", extras = ["-", "_"], seed = i ).get_key() domain = generate( num_of_atom = 2, separator = ".", min_atom_len = 3, max_atom_len = 5, type_of_value = "hex", capital = "mix", extras = ["-"], seed = i ).get_key() string = recipient + "@" + domain output_file.write(string + "\n") output_file.close() print("Done with " + OUTPUT_PATH)
22.868421
145
0.611047
a0e444f5e01631d54753ab517309246502cc9089
4,950
py
Python
resources/portfolio_book.py
basgir/bibliotek
42456ced804a2c9570227b393de662847283c76f
[ "MIT" ]
null
null
null
resources/portfolio_book.py
basgir/bibliotek
42456ced804a2c9570227b393de662847283c76f
[ "MIT" ]
null
null
null
resources/portfolio_book.py
basgir/bibliotek
42456ced804a2c9570227b393de662847283c76f
[ "MIT" ]
null
null
null
########################################### # Author : Bastien Girardet, Deborah De Wolff # Date : 13.05.2018 # Course : Applications in Object-oriented Programming and Databases # Teachers : Binswanger Johannes, Zürcher Ruben # Project : Bibliotek # Name : portfolio_book.py Portfolio_book Flask_restful resource # ######################################### from flask_restful import Resource, reqparse from models.portfolio_book import PortfolioBookModel from models.book import BookModel
40.57377
149
0.625051
a0e4dae891748b8a01307ae7aac7bc7715d4cc4e
9,199
py
Python
examples/the-feeling-of-success/run_experiments.py
yujialuo/erdos
7a631b55895f1a473b0f4d38a0d6053851e65b5d
[ "Apache-2.0" ]
null
null
null
examples/the-feeling-of-success/run_experiments.py
yujialuo/erdos
7a631b55895f1a473b0f4d38a0d6053851e65b5d
[ "Apache-2.0" ]
null
null
null
examples/the-feeling-of-success/run_experiments.py
yujialuo/erdos
7a631b55895f1a473b0f4d38a0d6053851e65b5d
[ "Apache-2.0" ]
null
null
null
import logging from absl import app from sensor_msgs.msg import Image from insert_table_op import InsertTableOperator from insert_block_op import InsertBlockOperator from init_robot_op import InitRobotOperator from gel_sight_op import GelSightOperator from mock_loc_obj_op import MockLocateObjectOperator from goto_xyz_op import GoToXYZOperator from move_above_object_op import MoveAboveObjectOperator from mock_gripper_op import MockGripperOperator from mock_grasp_object_op import MockGraspObjectOperator from raise_object_op import RaiseObjectOperator from mock_predict_grip_op import MockPredictGripOperator from random_position_op import RandomPositionOperator from mock_ungrasp_object_op import MockUngraspObjectOperator import erdos.graph from erdos.ros.ros_subscriber_op import ROSSubscriberOp logger = logging.getLogger(__name__) table_init_arguments = {"_x": 0.75, "_y": 0.0, "_z": 0.0, "ref_frame": "world"} block_init_arguments = { "_x": 0.4225, "_y": 0.1265, "_z": 0.7725, "ref_frame": "world" } robot_init_arguments = { "joint_angles": { 'right_j0': -0.041662954890248294, 'right_j1': -1.0258291091425074, 'right_j2': 0.0293680414401436, 'right_j3': 2.17518162913313, 'right_j4': -0.06703022873354225, 'right_j5': 0.3968371433926965, 'right_j6': 1.7659649178699421 }, "limb_name": "right" } if __name__ == "__main__": app.run(main)
35.245211
79
0.655941
a0e5feb7c20a84c78be8423f81add0bb2c5c4589
2,686
py
Python
junction/tickets/migrations/0001_initial.py
theSage21/junction
ac713edcf56c41eb3f066da776a0a5d24e55b46a
[ "MIT" ]
192
2015-01-12T06:21:24.000Z
2022-03-10T09:57:37.000Z
junction/tickets/migrations/0001_initial.py
theSage21/junction
ac713edcf56c41eb3f066da776a0a5d24e55b46a
[ "MIT" ]
621
2015-01-01T09:19:17.000Z
2021-05-28T09:27:35.000Z
junction/tickets/migrations/0001_initial.py
theSage21/junction
ac713edcf56c41eb3f066da776a0a5d24e55b46a
[ "MIT" ]
207
2015-01-05T16:39:06.000Z
2022-02-15T13:18:15.000Z
# -*- coding: utf-8 -*- from __future__ import unicode_literals import jsonfield.fields from django.conf import settings from django.db import migrations, models
35.813333
87
0.44341
a0e63766143621d523ba6066faa521d14ec9c390
1,300
py
Python
src/bin/calc_stats.py
sw005320/PytorchWaveNetVocoder
b92d7af7d5f2794291e0d462694c0719f75ca469
[ "Apache-2.0" ]
1
2021-01-18T06:22:30.000Z
2021-01-18T06:22:30.000Z
src/bin/calc_stats.py
sw005320/PytorchWaveNetVocoder
b92d7af7d5f2794291e0d462694c0719f75ca469
[ "Apache-2.0" ]
null
null
null
src/bin/calc_stats.py
sw005320/PytorchWaveNetVocoder
b92d7af7d5f2794291e0d462694c0719f75ca469
[ "Apache-2.0" ]
null
null
null
#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright 2017 Tomoki Hayashi (Nagoya University) # Apache 2.0 (http://www.apache.org/licenses/LICENSE-2.0) from __future__ import print_function import argparse import numpy as np from sklearn.preprocessing import StandardScaler from utils import read_hdf5 from utils import read_txt from utils import write_hdf5 if __name__ == "__main__": main()
24.074074
60
0.665385
a0e69b24115a09b931149b369f1062a566ff2b61
727
py
Python
python/p002.py
RUiNtheExtinct/project-euler
5c3e64c7dfcbf52d5213df88d2310550f4ee9ce1
[ "MIT" ]
null
null
null
python/p002.py
RUiNtheExtinct/project-euler
5c3e64c7dfcbf52d5213df88d2310550f4ee9ce1
[ "MIT" ]
null
null
null
python/p002.py
RUiNtheExtinct/project-euler
5c3e64c7dfcbf52d5213df88d2310550f4ee9ce1
[ "MIT" ]
null
null
null
# from decimal import Decimal import collections as coll import sys import math as mt # import random as rd # import bisect as bi import time sys.setrecursionlimit(1000000) # import numpy as np # Starting Time time1 = time.time() ######## CODE STARTS FROM HERE ######## n = uno() a, b, c, ans = 0, 1, 0, 0 while c <= n: c = a + b if ~c & 1: ans += c b, a = c, b print(ans) # End Time time2 = time.time() print("\nTime Taken:", (time2 - time1) * 1000)
14.836735
57
0.612105
a0e7af4439dc68e76e3dc02f0c28bddc41d0fe5c
7,662
py
Python
robosuite/models/objects/xml_objects.py
ClaireLC/robosuite
b5c37f1110aefc02106ffd2aed0dfb106bc1bb33
[ "MIT" ]
1
2021-12-22T13:10:46.000Z
2021-12-22T13:10:46.000Z
robosuite/models/objects/xml_objects.py
wangcongrobot/robosuite-jr
738be7a3a83447e78763f6a082faafc8b479c95d
[ "MIT" ]
null
null
null
robosuite/models/objects/xml_objects.py
wangcongrobot/robosuite-jr
738be7a3a83447e78763f6a082faafc8b479c95d
[ "MIT" ]
1
2020-12-29T01:38:01.000Z
2020-12-29T01:38:01.000Z
from robosuite.models.objects import MujocoXMLObject from robosuite.utils.mjcf_utils import xml_path_completion, array_to_string, string_to_array
26.512111
111
0.658053
a0e9174ff5dee90055733752e0b8cd4f3423f64e
1,654
py
Python
SoftUni-Python-Programming-Course/Exam-Preparation/medicines_in_carton.py
vladislav-karamfilov/Python-Playground
ed83a693d37ff0c1565ece49d2a5d9ecd32c9aac
[ "MIT" ]
1
2019-04-07T23:10:27.000Z
2019-04-07T23:10:27.000Z
SoftUni-Python-Programming-Course/Exam-Preparation/medicines_in_carton.py
vladislav-karamfilov/Python-Playground
ed83a693d37ff0c1565ece49d2a5d9ecd32c9aac
[ "MIT" ]
null
null
null
SoftUni-Python-Programming-Course/Exam-Preparation/medicines_in_carton.py
vladislav-karamfilov/Python-Playground
ed83a693d37ff0c1565ece49d2a5d9ecd32c9aac
[ "MIT" ]
null
null
null
# Problem description: http://python3.softuni.bg/student/lecture/assignment/56b749af7e4f59b649b7e626/ if __name__ == '__main__': main()
29.535714
107
0.638452
a0e9473241e626ba8085d5563079fd7bc9d6eeb6
1,111
py
Python
var/app_template/views.py
michailbrynard/django-skeleton
772cd579cad1b8853ed6f1a2c14cbacac2ba41da
[ "MIT" ]
null
null
null
var/app_template/views.py
michailbrynard/django-skeleton
772cd579cad1b8853ed6f1a2c14cbacac2ba41da
[ "MIT" ]
null
null
null
var/app_template/views.py
michailbrynard/django-skeleton
772cd579cad1b8853ed6f1a2c14cbacac2ba41da
[ "MIT" ]
null
null
null
# LOGGING # ---------------------------------------------------------------------------------------------------------------------# import logging logger = logging.getLogger('django') # IMPORTS # ---------------------------------------------------------------------------------------------------------------------# # shortcuts from django.shortcuts import render # contrib.auth from django.contrib.auth.decorators import login_required # views.generic from django.views.generic import DetailView # from .models import * # GENERIC CLASS BASED VIEWS # ---------------------------------------------------------------------------------------------------------------------# # CUSTOM VIEWS # ---------------------------------------------------------------------------------------------------------------------#
32.676471
120
0.407741
a0e9bc2b96c3d8a0da5092d2ce1abf89a56a046d
858
py
Python
circuitpy_examples/week1/04_ramp_LED_brightness.py
WSU-Physics/phys150
043ebf8212b56a988ef8e41a4464400bec5a7dc1
[ "MIT" ]
null
null
null
circuitpy_examples/week1/04_ramp_LED_brightness.py
WSU-Physics/phys150
043ebf8212b56a988ef8e41a4464400bec5a7dc1
[ "MIT" ]
null
null
null
circuitpy_examples/week1/04_ramp_LED_brightness.py
WSU-Physics/phys150
043ebf8212b56a988ef8e41a4464400bec5a7dc1
[ "MIT" ]
null
null
null
# Adam Beardsley # starting from from adafruit example # https://learn.adafruit.com/welcome-to-circuitpython/creating-and-editing-code # import board import digitalio import time led = digitalio.DigitalInOut(board.LED) led.direction = digitalio.Direction.OUTPUT ramp_time = 3 # Time to ramp up, in seconds period = 0.01 # Time per cycle, in seconds step = period / ramp_time # how much to increment the brightness each cycle while True: brightness = 0 # Start off while brightness < 1: T_on = brightness * period T_off = period - T_on led.value = True time.sleep(T_on) led.value = False time.sleep(T_off) brightness += step # Convince yourself the expression for step (line 14) is correct # How can you *test* that step is correct? # Can you reverse the program (start bright, get dim)
28.6
79
0.698135
a0ead277852aac4f9b24d58dbb1630e69b9f9cac
1,099
py
Python
__main__.py
Makeeyaf/SiteChecker
969bdedd2d5df36220ff9fcc41e44cf1db0cca00
[ "MIT" ]
1
2021-01-06T01:45:41.000Z
2021-01-06T01:45:41.000Z
__main__.py
Makeeyaf/SiteChecker
969bdedd2d5df36220ff9fcc41e44cf1db0cca00
[ "MIT" ]
2
2021-01-03T13:25:39.000Z
2021-01-03T15:57:01.000Z
__main__.py
Makeeyaf/SiteChecker
969bdedd2d5df36220ff9fcc41e44cf1db0cca00
[ "MIT" ]
null
null
null
import argparse from site_checker import SiteChecker if __name__ == "__main__": parser = argparse.ArgumentParser(description="Check sites text.") parser.add_argument("config", type=str, nargs=1, help="Path to config json file.") parser.add_argument( "-a", dest="apiKey", type=str, nargs=1, required=True, help="Pushbullet API key.", ) parser.add_argument( "-m", dest="maxFailCount", type=int, nargs=1, help="Max fail count." ) parser.add_argument( "-u", dest="updateCycle", type=int, nargs=1, help="Update cycle in second" ) parser.add_argument( "-v", dest="isVerbose", action="store_true", help="Verbose mode." ) parser.add_argument( "-q", dest="isQuiet", action="store_true", help="Quiet mode. Does not call pushbullet", ) args = parser.parse_args() k = SiteChecker( args.config[0], args.apiKey[0], args.isQuiet, args.isVerbose, args.maxFailCount, args.updateCycle, ) k.check()
26.166667
86
0.586897
a0eb34e703fb20df0982cbdc1702ff56c69d7bb6
1,563
py
Python
autop-listener/autop-listener.py
yuriel-v/ansible
f6e8fcb1edfbef550da2fe217cfd84941523f692
[ "MIT" ]
null
null
null
autop-listener/autop-listener.py
yuriel-v/ansible
f6e8fcb1edfbef550da2fe217cfd84941523f692
[ "MIT" ]
null
null
null
autop-listener/autop-listener.py
yuriel-v/ansible
f6e8fcb1edfbef550da2fe217cfd84941523f692
[ "MIT" ]
null
null
null
import os from pathlib import Path from datetime import datetime from json import dumps import flask as fsk from flask import request, jsonify, Response app = fsk.Flask(__name__) app.config['DEBUG'] = False homedir = os.getenv('HOME') if __name__ == "__main__": app.run(host='0.0.0.0', port=4960)
32.5625
136
0.658989
a0ed35cd2a2fcaf79d84a20f492250006d069eb3
3,586
py
Python
dz_se_comm.py
strebrah/Solaredge_Domoticz_Modbus
802bfde4f4b458ad0d30d3a9433315e12e3aa837
[ "MIT" ]
null
null
null
dz_se_comm.py
strebrah/Solaredge_Domoticz_Modbus
802bfde4f4b458ad0d30d3a9433315e12e3aa837
[ "MIT" ]
null
null
null
dz_se_comm.py
strebrah/Solaredge_Domoticz_Modbus
802bfde4f4b458ad0d30d3a9433315e12e3aa837
[ "MIT" ]
null
null
null
#!/usr/bin/env python3 #################################################################################################### # Created by EH (NL) https://github.com/strebrah/Solaredge_Domoticz_Modbus # # Date: August 2020 # # Version: 0.1 # # Designed for python 3.7 (based on the requirements of the 'solaredge_modbus' library.) # # Thanks to Niels for the 'solaredge_modbus' library https://pypi.org/project/solaredge-modbus/ # # Capabilities: # # * Creating a hardware device in Domoticz # # * Creating sensors for the data types in Domoticz # # * Sending the solaredge modbus data to Domoticz # # How to use # # 1. Enter your configuration in the 'dz_se_settings.ini' file # # 2. configure crontab task for periodic data transfer to Domoticz. # # example: # # sudo crontab -e # # for example, every minute # # */1 * * * * /usr/bin/python3 /home/pi/domoticz/scripts/python/dz_se_comm.py # #################################################################################################### import requests import configparser import time import solaredge_modbus from dz_se_lib import domoticz_create_hardware from dz_se_lib import domoticz_create_devices from dz_se_lib import domoticz_retrieve_device_idx from dz_se_lib import domoticz_transceive_data from dz_se_lib import get_path_to_init_file if __name__ == "__main__": settings = configparser.ConfigParser() settings._interpolation = configparser.ExtendedInterpolation() settings.read(get_path_to_init_file()) domoticz_ip = settings.get('GENERAL SETTINGS', 'domoticz_ip') domoticz_port = settings.get('GENERAL SETTINGS', 'domoticz_port') inverter = solaredge_modbus.Inverter(host=settings.get('GENERAL SETTINGS', 'solaredge_inverter_ip'), port=settings.get('GENERAL SETTINGS', 'solaredge_inverter_port'), timeout=1, unit=1) # Get values from Solaredge inverter over TCP Modbus if settings.get('GENERAL SETTINGS', 'domoticz_solaredge_comm_init_done') == '0': session = requests.Session() # SET HARDWARE IN DOMOTICZ DOMOTICZ_HW_IDX = domoticz_create_hardware(domoticz_ip, domoticz_port, settings, session) # CREATE DEVICES IN DOMOTICZ domoticz_create_devices(domoticz_ip, domoticz_port, settings, session, DOMOTICZ_HW_IDX) # GET ALL SENSOR IDX VALUES AND STORE domoticz_retrieve_device_idx(domoticz_ip, domoticz_port, settings, session) session.close() else: time.sleep(0.5) session = requests.Session() domoticz_transceive_data(domoticz_ip, domoticz_port, settings, session, inverter) session.close()
59.766667
118
0.499721
a0edb39559fc23e931152b94ffea25ac01150fa0
10,632
py
Python
parse_mitchell.py
cfwelch/targeted_sentiment
1c1b063339cdead8f5860df784a0fa170bcdd3ef
[ "MIT" ]
1
2020-12-28T13:51:02.000Z
2020-12-28T13:51:02.000Z
parse_mitchell.py
cfwelch/targeted_sentiment
1c1b063339cdead8f5860df784a0fa170bcdd3ef
[ "MIT" ]
2
2018-04-23T02:13:44.000Z
2018-04-25T04:58:35.000Z
parse_mitchell.py
cfwelch/targeted_sentiment
1c1b063339cdead8f5860df784a0fa170bcdd3ef
[ "MIT" ]
null
null
null
import senti_lexis import datetime, string, numpy, spwrap, random, time, sys, re from sklearn import svm from sklearn import cross_validation from sklearn.feature_extraction.text import CountVectorizer from sklearn.cross_validation import KFold from scipy.sparse import csr_matrix if __name__ == "__main__": main()
28.891304
93
0.60506
a0ee65cec9b822e4705a0e2c457a3bbab820bf6b
1,314
py
Python
cryptographyMachine/cryptographyMachine.py
anuranjan08/CryptoMachine
5a1d68adbe88708f21902d1d44a636c043f6ed28
[ "MIT" ]
null
null
null
cryptographyMachine/cryptographyMachine.py
anuranjan08/CryptoMachine
5a1d68adbe88708f21902d1d44a636c043f6ed28
[ "MIT" ]
null
null
null
cryptographyMachine/cryptographyMachine.py
anuranjan08/CryptoMachine
5a1d68adbe88708f21902d1d44a636c043f6ed28
[ "MIT" ]
null
null
null
print(machine())
27.375
89
0.547945
a0ee8d887762a2061e866ff6d3e72e86639288e1
645
py
Python
tests/test_ioeeg_abf.py
wonambi-python/wonambi
4e2834cdd799576d1a231ecb48dfe4da1364fe3a
[ "BSD-3-Clause" ]
63
2017-12-30T08:11:17.000Z
2022-01-28T10:34:20.000Z
tests/test_ioeeg_abf.py
wonambi-python/wonambi
4e2834cdd799576d1a231ecb48dfe4da1364fe3a
[ "BSD-3-Clause" ]
23
2017-09-08T08:29:49.000Z
2022-03-17T08:19:13.000Z
tests/test_ioeeg_abf.py
wonambi-python/wonambi
4e2834cdd799576d1a231ecb48dfe4da1364fe3a
[ "BSD-3-Clause" ]
12
2017-09-18T12:48:36.000Z
2021-09-22T07:16:07.000Z
from numpy import isnan from wonambi import Dataset from .paths import axon_abf_file d = Dataset(axon_abf_file)
21.5
59
0.662016
a0f1fbf8cfec77c2b1ef56f17fd04592b977c305
9,115
py
Python
tests/Preprocessing_Test.py
Maxence-Labesse/MLKit
7f8d92b5d3e025dc3719c3bbaf1f2e55afda5107
[ "MIT" ]
1
2022-01-11T14:13:22.000Z
2022-01-11T14:13:22.000Z
tests/Preprocessing_Test.py
Maxence-Labesse/MLKit
7f8d92b5d3e025dc3719c3bbaf1f2e55afda5107
[ "MIT" ]
null
null
null
tests/Preprocessing_Test.py
Maxence-Labesse/MLKit
7f8d92b5d3e025dc3719c3bbaf1f2e55afda5107
[ "MIT" ]
1
2020-07-10T09:51:22.000Z
2020-07-10T09:51:22.000Z
from AutoMxL.Preprocessing.Categorical import * from AutoMxL.Preprocessing.Date import * from AutoMxL.Preprocessing.Outliers import * from AutoMxL.Preprocessing.Missing_Values import * import unittest import pandas as pd import math # test config df = pd.read_csv('tests/df_test_bis.csv') """ ------------------------------------------------------------------------------------------------ """ df_to_date = all_to_date(df, ['Date_nai', 'American_date_nai'], verbose=False) df_to_anc, new_var_list = date_to_anc(df_to_date, l_var=['American_date_nai', 'Date_nai'], date_ref='27/10/2010') """ ------------------------------------------------------------------------------------------------ """ """ ------------------------------------------------------------------------------------------------ """
43.822115
120
0.622929
a0f259a7948c591dd236fbcc2a29325e01018267
218
py
Python
PythonTutor/session-4/conditionIfelse.py
krishnamanchikalapudi/examples.py
7a373d24df06b8882d07b850435b268a24317b1e
[ "MIT" ]
null
null
null
PythonTutor/session-4/conditionIfelse.py
krishnamanchikalapudi/examples.py
7a373d24df06b8882d07b850435b268a24317b1e
[ "MIT" ]
1
2020-02-14T13:24:01.000Z
2020-02-14T13:24:01.000Z
PythonTutor/session-4/conditionIfelse.py
krishnamanchikalapudi/examples.py
7a373d24df06b8882d07b850435b268a24317b1e
[ "MIT" ]
2
2020-02-14T13:21:20.000Z
2021-06-30T00:50:33.000Z
""" Session: 4 Topic: Conditional: IF ELSE statement """ x = 20 y = 100 if (x > y): print ('x > y is true') print ('new line 1') else: print('x > y is false') print('new line 2') print ('new line 3')
13.625
37
0.550459
a0f3c7164fd5d0e03360ed4d29df99912a368e12
915
py
Python
day02/day02.py
pogross/adventofcode2021
33fc177d30e1104a6203e435f83594c4d3774cdb
[ "MIT" ]
null
null
null
day02/day02.py
pogross/adventofcode2021
33fc177d30e1104a6203e435f83594c4d3774cdb
[ "MIT" ]
null
null
null
day02/day02.py
pogross/adventofcode2021
33fc177d30e1104a6203e435f83594c4d3774cdb
[ "MIT" ]
null
null
null
if __name__ == "__main__": with open("input.txt") as f: raw = f.read() commands = [x for x in raw.split("\n")] horizontal, depth = chain_commands(commands) print(f"First answer is {horizontal*depth}") # print(f"Second answer is {count_increasing(measurements, 3)}")
26.911765
68
0.636066
a0f92a83ae88dda1724d8249cb3715aea8d6c4ad
2,073
py
Python
execute.py
r-kapoor/ranking-extractions
59ed7f23d120d1bc7f0ee2af48ffa61817fd1715
[ "MIT" ]
null
null
null
execute.py
r-kapoor/ranking-extractions
59ed7f23d120d1bc7f0ee2af48ffa61817fd1715
[ "MIT" ]
null
null
null
execute.py
r-kapoor/ranking-extractions
59ed7f23d120d1bc7f0ee2af48ffa61817fd1715
[ "MIT" ]
null
null
null
import codecs import json import rank import train_ranker #Files to be present in home dir TRAINING_FILE_CITIES = 'manual_7_cities.jl' TRAINING_FILE_NAMES = 'manual_50_names.jl' TRAINING_FILE_ETHNICITIES = 'manual_50_ethnicities.jl' ACTUAL_FILE_CITIES = 'manual_50_cities.jl' ACTUAL_FILE_NAMES = 'manual_50_names.jl' ACTUAL_FILE_ETHNICITIES = 'manual_50_ethnicities.jl' EMBEDDINGS_FILE = 'unigram-part-00000-v2.json' FIELD_NAMES_CITIES = { "text_field": "readability_text", "annotated_field":"annotated_cities", "correct_field":"correct_cities" } FIELD_NAMES_NAMES = { "text_field": "readability_text", "annotated_field":"annotated_names", "correct_field":"correct_names" } FIELD_NAMES_ETHNICITIES = { "text_field": "readability_text", "annotated_field":"annotated_ethnicities", "correct_field":"correct_ethnicities" } def get_texts(json_object): """ Parsing logic for getting texts """ texts = list() texts.append(json_object.get(FIELD_NAMES_CITIES['text_field'])) return texts def get_annotated_list(json_object): """ Parsing logic for getting annotated field """ return json_object.get(FIELD_NAMES_CITIES['annotated_field']) embeddings_dict = read_embedding_file(EMBEDDINGS_FILE) classifier = train_ranker.train_ranker(embeddings_dict, TRAINING_FILE_CITIES, FIELD_NAMES_CITIES) with codecs.open(ACTUAL_FILE_CITIES, 'r', 'utf-8') as f: for line in f: obj = json.loads(line) list_of_texts = get_texts(obj) annotated_list = get_annotated_list(obj) print "Annotated tokens:", print annotated_list ranked_list = rank.rank(embeddings_dict, list_of_texts, annotated_list, classifier) print "Ranked List:", print ranked_list
29.614286
97
0.721177
a0f9341f558e2700ed30e7586738a7942212308d
336
py
Python
Python-codes-CeV/32-Leap_year.py
engcristian/Python
726a53e9499fd5d0594572298e59e318f98e2d36
[ "MIT" ]
1
2021-02-22T03:53:23.000Z
2021-02-22T03:53:23.000Z
Python-codes-CeV/32-Leap_year.py
engcristian/Python
726a53e9499fd5d0594572298e59e318f98e2d36
[ "MIT" ]
null
null
null
Python-codes-CeV/32-Leap_year.py
engcristian/Python
726a53e9499fd5d0594572298e59e318f98e2d36
[ "MIT" ]
null
null
null
''' Calculate the leap year''' from datetime import date year = int(input('What year do you want to analyse? Type 0 for the current year.')) if year == 0: year = date.today().year if year % 4 == 0 and year % 100 != 0 or year % 400 == 0: print(F"The year {year} is a LEAP year.") else: print(F"The year {year} isn't a LEAP year.")
37.333333
84
0.645833
a0f9bbfc405c03e8dff904c969ce60482f1a635c
567
py
Python
thesis/code/fairness/gen.py
fz1989/master-thesis
e47af8c90d8d18d87f906a7a4bcadb64669e70db
[ "MIT" ]
null
null
null
thesis/code/fairness/gen.py
fz1989/master-thesis
e47af8c90d8d18d87f906a7a4bcadb64669e70db
[ "MIT" ]
null
null
null
thesis/code/fairness/gen.py
fz1989/master-thesis
e47af8c90d8d18d87f906a7a4bcadb64669e70db
[ "MIT" ]
null
null
null
#!/usr/bin/env python #coding=utf-8 import numpy if __name__ == "__main__": task_list = get_task_list() for task in task_list: print "%d\t%d" % (task.cpu, task.mem)
21
47
0.589065
a0fa30f527e6c86b6cb9dc5b7f38c0821721deb9
71
py
Python
tests/routes/__init__.py
Bachhofer/spottydata
e9334c2a32bb65018b57d83fc4522ae241427db7
[ "MIT" ]
null
null
null
tests/routes/__init__.py
Bachhofer/spottydata
e9334c2a32bb65018b57d83fc4522ae241427db7
[ "MIT" ]
null
null
null
tests/routes/__init__.py
Bachhofer/spottydata
e9334c2a32bb65018b57d83fc4522ae241427db7
[ "MIT" ]
null
null
null
# This is an empty python file to expose this directory to it's parent
35.5
70
0.774648
a0fccc7e51abcecde4662d4c35aa618544e6087c
7,500
py
Python
Perceptual Hash -Asher/ex1/example_solution.py
kidist-amde/image-search-engine
467d022f7248a74822dd9ae938b5b86333ce417a
[ "MIT" ]
null
null
null
Perceptual Hash -Asher/ex1/example_solution.py
kidist-amde/image-search-engine
467d022f7248a74822dd9ae938b5b86333ce417a
[ "MIT" ]
null
null
null
Perceptual Hash -Asher/ex1/example_solution.py
kidist-amde/image-search-engine
467d022f7248a74822dd9ae938b5b86333ce417a
[ "MIT" ]
null
null
null
import os import cv2 from sklearn.cluster import KMeans, DBSCAN, MiniBatchKMeans from scipy import spatial from sklearn.preprocessing import StandardScaler import numpy as np from tqdm import tqdm import argparse parser = argparse.ArgumentParser(description='Challenge presentation example') parser.add_argument('--data_path', '-d', type=str, default='dataset', help='Dataset path') parser.add_argument('--output_dim', '-o', type=int, default=20, help='Descriptor length') parser.add_argument('--save_dir', '-s', type=str, default=None, help='Save or not gallery/query feats') parser.add_argument('--random', '-r', action='store_true', help='Random run') args = parser.parse_args() def topk_accuracy(gt_label, matched_label, k=1): matched_label = matched_label[:, :k] total = matched_label.shape[0] correct = 0 for q_idx, q_lbl in enumerate(gt_label): correct+= np.any(q_lbl == matched_label[q_idx, :]).item() acc_tmp = correct/total return acc_tmp def main(): data_path = 'C:/Users/21032/Desktop/dataset' # we define training dataset training_path = os.path.join(data_path, 'training') # we define validation dataset validation_path = os.path.join(data_path, 'validation') gallery_path = os.path.join(validation_path, 'gallery') query_path = os.path.join(validation_path, 'query') training_dataset = Dataset(data_path=training_path) gallery_dataset = Dataset(data_path=gallery_path) query_dataset = Dataset(data_path=query_path) # get training data and classes training_paths, training_classes = training_dataset.get_data_paths() # we get validation gallery and query data gallery_paths, gallery_classes = gallery_dataset.get_data_paths() query_paths, query_classes = query_dataset.get_data_paths() if not args.random: feature_extractor = cv2.SIFT_create() # we define model for clustering model = KMeans(n_clusters=args.output_dim, n_init=10, max_iter=5000, verbose=False) # model = MiniBatchKMeans(n_clusters=args.output_dim, random_state=0, batch_size=100, max_iter=100, verbose=False) scale = StandardScaler() # we define the feature extractor providing the model extractor = FeatureExtractor(feature_extractor=feature_extractor, model=model, scale=scale, out_dim=args.output_dim) # we fit the KMeans clustering model extractor.fit_model(training_paths) extractor.fit_scaler(training_paths) # now we can use features # we get query features query_features = extractor.extract_features(query_paths) query_features = extractor.scale_features(query_features) # we get gallery features gallery_features = extractor.extract_features(gallery_paths) gallery_features = extractor.scale_features(gallery_features) print(gallery_features.shape, query_features.shape) pairwise_dist = spatial.distance.cdist(query_features, gallery_features, 'minkowski', p=2.) print('--> Computed distances and got c-dist {}'.format(pairwise_dist.shape)) indices = np.argsort(pairwise_dist, axis=-1) else: indices = np.random.randint(len(gallery_paths), size=(len(query_paths), len(gallery_paths))) gallery_matches = gallery_classes[indices] print('########## RESULTS ##########') for k in [1, 3, 10]: topk_acc = topk_accuracy(query_classes, gallery_matches, k) print('--> Top-{:d} Accuracy: {:.3f}'.format(k, topk_acc)) if __name__ == '__main__': main()
34.246575
122
0.608133
a0fd132d4d35c39d83a7f211d5d4e4443ddf2030
1,399
py
Python
src/modeling/train_test.py
samsonq/Macroeconomic-Default-Analysis
1a155873f951b1584c33c2d91bd525b67f78136d
[ "MIT" ]
4
2020-06-12T22:20:48.000Z
2021-08-08T15:49:38.000Z
src/modeling/train_test.py
samsonq/Macroeconomic-Default-Analysis
1a155873f951b1584c33c2d91bd525b67f78136d
[ "MIT" ]
1
2020-04-15T07:11:43.000Z
2020-04-15T07:11:43.000Z
src/modeling/train_test.py
samsonq/Macroeconomic-Default-Analysis
1a155873f951b1584c33c2d91bd525b67f78136d
[ "MIT" ]
3
2020-09-18T02:27:58.000Z
2021-10-30T21:22:10.000Z
""" Prepare training, validation, and testing data after preprocessing of the large dataset. Used in training and evaluating models. """ import numpy as np import pandas as pd from sklearn.model_selection import train_test_split def feature_selection(data, features): """ Choose which features to use for training. :param data: preprocessed dataset :param features: list of features to use :return: data with selected features """ return data[features] def prepare_data(data, label="loan_status", valid_split=0.2, test_split=0.3): """ Splits and returns the training and validation sets for the data. :param data: preprocessed dataset :param label: label of data :param valid_split: percentage to use as validation data :param test_split: percentage to use as test data :returns: training, validation, testing sets """ X_train = data.drop(label, axis=1) # define training features set y_train = data[label] # define training label set # use part of the data as testing data X_train, X_test, y_train, y_test = train_test_split(X_train, y_train, test_size=test_split, random_state=0) # use part of the training data as validation data X_train, X_valid, y_train, y_valid = train_test_split(X_train, y_train, test_size=valid_split, random_state=0) return X_train, X_valid, X_test, y_train, y_valid, y_test
35.871795
114
0.735525
a0fd2af6803ffa9be2e8f4bfae48a6a7e68eb4ea
179,927
py
Python
cyberradiodriver/CyberRadioDriver/radio.py
CyberRadio/CyberRadioDriver
44e6fc0e805981981514e6edc18d11d5fa33e659
[ "MIT" ]
null
null
null
cyberradiodriver/CyberRadioDriver/radio.py
CyberRadio/CyberRadioDriver
44e6fc0e805981981514e6edc18d11d5fa33e659
[ "MIT" ]
null
null
null
cyberradiodriver/CyberRadioDriver/radio.py
CyberRadio/CyberRadioDriver
44e6fc0e805981981514e6edc18d11d5fa33e659
[ "MIT" ]
null
null
null
#!/usr/bin/env python ############################################################### # \package CyberRadioDriver.radio # # \brief Defines basic functionality for radio handler objects. # # \note This module defines basic behavior only. To customize # a radio handler class for a particular radio, derive a new # class from the appropriate base class. It is recommended # that behavior specific to a given radio be placed in the # module that supports that radio. # # \author NH # \author DA # \author MN # \copyright Copyright (c) 2014-2021 CyberRadio Solutions, Inc. # All rights reserved. # ############################################################### # Imports from other modules in this package from . import command from . import components from . import configKeys from . import log from . import transport # Imports from external modules # Python standard library imports import ast import copy import datetime import json import math import sys import time import traceback import threading ## # \internal # \brief Returns the MAC address and IP address for a given Ethernet interface. # # \param ifname The name of t# \author DA # \param ifname The Ethernet system interface ("eth0", for example). # \returns A 2-tuple: (MAC Address, IP Address). ## # \internal # \brief VITA 49 interface specification class. # # The _ifSpec class describes how the VITA 49 interface is set up for # a particular radio. Each radio should have its own interface # specification, implemented as a subclass of _ifSpec. # # Radio handler classes need to set static member "ifSpec" to the interface # specification class that the radio uses. #-- Radio Handler Objects ---------------------------------------------# ## # \brief Base radio handler class. # # This class implements the CyberRadioDriver.IRadio interface. # # To add a supported radio to this driver, derive a class from # _radio and change the static members of the new class to describe the # capabilities of that particular radio. Each supported radio should # have its own module under the CyberRadioDriver.radios package tree. # # A radio handler object maintains a series of component objects, one # per component of each type (tuner, WBDDC, NBDDC, etc.). Each component # object is responsible for managing the hardware object that it represents. # Each component object is also responsible for querying the component's # current configuration and for maintaining the object's configuration # as it changes during radio operation. # # A radio handler object also maintains its own configuration, for settings # that occur at the radio level and are not managed by a component object. # # \note Several static members of this class have no function within the # code, but instead help CyberRadioDriver.getRadioObjectDocstring() generate # appropriate documentation for derived radio handler classes. # # \implements CyberRadioDriver::IRadio ## # \brief Gets the pulse-per-second (PPS) rising edge from the radio. # # \copydetails CyberRadioDriver::IRadio::getPps() def getPps(self): if self.ppsCmd is not None: cmd = command.pps(parent=self,query=True, verbose=self.verbose, logFile=self.logFile) cmd.send(self.sendCommand, timeout=cmd.timeout) return cmd.success else: return False ## # \brief Sets the time for the next PPS rising edge on the radio. # # \copydetails CyberRadioDriver::IRadio::setTimeNextPps() ## # \brief Gets the current radio time. # # \copydetails CyberRadioDriver::IRadio::getTimeNow() ## # \brief Gets the time for the next PPS rising edge on the radio. 
# # \copydetails CyberRadioDriver::IRadio::getTimeNextPps() ## # \brief Gets the status from the radio. # # \copydetails CyberRadioDriver::IRadio::getStatus() ## # \brief Gets the RF tuner status from the radio. # # \copydetails CyberRadioDriver::IRadio::getTstatus() ## # \brief Sets the reference mode on the radio. # # \copydetails CyberRadioDriver::IRadio::setReferenceMode() ## # \brief Sets the reference bypass mode on the radio. # # \copydetails CyberRadioDriver::IRadio::setBypassMode() ## # \brief Sets the time adjustment for tuners on the radio. # # \copydetails CyberRadioDriver::IRadio::setTimeAdjustment() ## # \brief Sets the calibration frequency on the radio. # # \copydetails CyberRadioDriver::IRadio::setCalibrationFrequency() ## # \brief Gets the current GPS position. # # \copydetails CyberRadioDriver::IRadio::getGpsPosition() ## # \brief Gets the current radio temperature. # # \copydetails CyberRadioDriver::IRadio::getTemperature() ## # \brief Gets the current GPIO output bits. # # \copydetails CyberRadioDriver::IRadio::getGpioOutput() ## # \brief Gets the GPIO output settings for a given sequence index. # # \copydetails CyberRadioDriver::IRadio::getGpioOutputByIndex() ## # \brief Sets the current GPIO output bits. # # \copydetails CyberRadioDriver::IRadio::setGpioOutput() ## # \brief Sets the GPIO output settings for a given sequence index. # # \copydetails CyberRadioDriver::IRadio::setGpioOutputByIndex() ## # \brief Gets the current bandwith of the given tuner. # \copydetails CyberRadioDriver::IRadio::getTunerBandwidth() ## # \brief Gets the name of the radio. # # \copydetails CyberRadioDriver::IRadio::getName() ## # \brief Gets the number of tuners on the radio. # # \copydetails CyberRadioDriver::IRadio::getNumTuner() ## # \brief Gets the number of tuner boards on the radio. # # \copydetails CyberRadioDriver::IRadio::getNumTunerBoards() ## # \brief Gets the index range for the tuners on the radio. # # \copydetails CyberRadioDriver::IRadio::getTunerIndexRange() ## # \brief Gets the frequency range for the tuners on the radio. # # \copydetails CyberRadioDriver::IRadio::getTunerFrequencyRange() ## # \brief Gets the frequency resolution for tuners on the radio. # # \copydetails CyberRadioDriver::IRadio::getTunerFrequencyRes() ## # \brief Gets the frequency unit for tuners on the radio. # # \copydetails CyberRadioDriver::IRadio::getTunerFrequencyUnit() ## # \brief Gets the attenuation range for the tuners on the radio. # # \copydetails CyberRadioDriver::IRadio::getTunerAttenuationRange() ## # \brief Gets the attenuation resolution for tuners on the radio. # # \copydetails CyberRadioDriver::IRadio::getTunerAttenuationRes() ## # \brief Gets the ifFilter list for the tuners of the radio # # \copydetails CyberRadioDriver::IRadio::getTunerIfFilterList() ## # \brief Gets whether or not the radio supports setting tuner # bandwidth # # \copydetails CyberRadioDriver::IRadio::isTunerBandwidthSettable() ## # \brief Gets the number of wideband DDCs on the radio. # # \copydetails CyberRadioDriver::IRadio::getNumWbddc() ## # \brief Gets whether the DDCs on the radio have selectable sources. # # \copydetails CyberRadioDriver::IRadio::isDdcSelectableSource() ## # \brief Gets whether the wideband or narrowband DDCs on the radio are tunable. # # \copydetails CyberRadioDriver::IRadio::isNbddcTunable() ## # \brief Gets the index range for the wideband DDCs on the radio. 
# # \copydetails CyberRadioDriver::IRadio::getWbddcIndexRange() ## # \brief Gets whether the wideband DDCs on the radio are tunable. # # \copydetails CyberRadioDriver::IRadio::isWbddcSelectableSource() ## # \brief Gets whether the wideband DDCs on the radio have selectable # sources. # # \copydetails CyberRadioDriver::IRadio::isWbddcTunable() ## # \brief Gets the frequency offset range for the wideband DDCs on the radio. # # \copydetails CyberRadioDriver::IRadio::getWbddcFrequencyRange() ## # \brief Gets the frequency offset resolution for wideband DDCs on the radio. # # \copydetails CyberRadioDriver::IRadio::getWbddcFrequencyRes() ## # \brief Gets the allowed rate set for the wideband DDCs on the radio. # # \copydetails CyberRadioDriver::IRadio::getWbddcRateSet() ## # \brief Gets the allowed rate list for the wideband DDCs on the radio. # # \copydetails CyberRadioDriver::IRadio::getWbddcRateList() ## # \brief Gets the allowed rate set for the wideband DDCs on the radio. # # \copydetails CyberRadioDriver::IRadio::getWbddcBwSet() ## # \brief Gets the allowed rate list for the wideband DDCs on the radio. # # \copydetails CyberRadioDriver::IRadio::getWbddcBwList() ## # \brief Gets the number of narrowband DDCs on the radio. # # \copydetails CyberRadioDriver::IRadio::getNumNbddc() ## # \brief Gets the index range for the narrowband DDCs on the radio. # # \copydetails CyberRadioDriver::IRadio::getNbddcIndexRange() ## # \brief Gets whether the narrowband DDCs on the radio are tunable. # # \copydetails CyberRadioDriver::IRadio::isNbddcTunable() ## # \brief Gets whether the narrowband DDCs on the radio have selectable # sources. # # \copydetails CyberRadioDriver::IRadio::isNbddcSelectableSource() ## # \brief Gets the frequency offset range for the narrowband DDCs on the radio. # # \copydetails CyberRadioDriver::IRadio::getNbddcFrequencyRange() ## # \brief Gets the frequency offset resolution for narrowband DDCs on the radio. # # \copydetails CyberRadioDriver::IRadio::getNbddcFrequencyRes() ## # \brief Gets the allowed rate set for the narrowband DDCs on the radio. # # \copydetails CyberRadioDriver::IRadio::getNbddcRateSet() ## # \brief Gets the allowed rate list for the narrowband DDCs on the radio. # # \copydetails CyberRadioDriver::IRadio::getNbddcRateList() ## # \brief Gets the allowed rate set for the narrowband DDCs on the radio. # # \copydetails CyberRadioDriver::IRadio::getNbddcBwSet() ## # \brief Gets the allowed rate list for the narrowband DDCs on the radio. # # \copydetails CyberRadioDriver::IRadio::getNbddcBwList() ## # \brief Gets the number of narrowband DDCs on the radio. # # \copydetails CyberRadioDriver::IRadio::getNumFftStream() ## # \brief Gets the index range for the narrowband DDCs on the radio. # # \copydetails CyberRadioDriver::IRadio::getFftStreamIndexRange() ## # \brief Gets the allowed rate set for the FFTs on the radio. # # \copydetails CyberRadioDriver::IRadio::getFftStreamRateSet() ## # \brief Gets the allowed rate list for the FFTs on the radio. # # \copydetails CyberRadioDriver::IRadio::getFftStreamRateList() ## # \brief Gets the allowed window set for the FFTs on the radio. # # \copydetails CyberRadioDriver::IRadio::getFftStreamWindowSet() ## # \brief Gets the allowed window list for the FFTs on the radio. # # \copydetails CyberRadioDriver::IRadio::getFftStreamWindowList() ## # \brief Gets the allowed size set for the FFTs on the radio. # # \copydetails CyberRadioDriver::IRadio::getFftStreamSizeSet() ## # \brief Gets the allowed size list for the FFTs on the radio. 
# # \copydetails CyberRadioDriver::IRadio::getFftStreamSizeList() ## # \brief Gets the ADC sample rate for the radio. # # \copydetails CyberRadioDriver::IRadio::getAdcRate() ## # \brief Gets the VITA 49 header size for the radio. # # \copydetails CyberRadioDriver::IRadio::getVitaHeaderSize() ## # \brief Gets the VITA 49 payload size for the radio. # # \copydetails CyberRadioDriver::IRadio::getVitaPayloadSize() ## # \brief Gets the VITA 49 tail size for the radio. # # \copydetails CyberRadioDriver::IRadio::getVitaTailSize() ## # \brief Gets dictionary with information about VITA 49 framing. # # \copydetails CyberRadioDriver::IRadio::getVitaFrameInfoDict() # \brief Gets whether data coming from the radio is byte-swapped with # respect to the endianness of the host operating system. # # \copydetails CyberRadioDriver::IRadio::isByteswapped() ## # \brief Gets whether data coming from the radio has I and Q data swapped. # # \copydetails CyberRadioDriver::IRadio::isIqSwapped() ## # \brief Gets the byte order for data coming from the radio. # # \copydetails CyberRadioDriver::IRadio::getByteOrder() ## # \brief Gets the number of Gigabit Ethernet interfaces on the radio. # # \copydetails CyberRadioDriver::IRadio::getNumGigE() ## # \brief Gets the index range for the Gigabit Ethernet interfaces on the radio. # # \copydetails CyberRadioDriver::IRadio::getGigEIndexRange() ## # \brief Gets the number of destination IP address table entries available for # each Gigabit Ethernet interface on the radio. # # \copydetails CyberRadioDriver::IRadio::getNumGigEDipEntries() ## # \brief Gets the index range for the destination IP address table entries # available for the Gigabit Ethernet interfaces on the radio. # # \copydetails CyberRadioDriver::IRadio::getGigEDipEntryIndexRange() ## # \brief Gets the list of connection modes that the radio supports. # # \copydetails CyberRadioDriver::IRadio::getConnectionModeList() ## # \brief Gets whether the radio supports a given connection mode. # # \copydetails CyberRadioDriver::IRadio::isConnectionModeSupported() ## # \brief Gets the radio's default baud rate. # # \copydetails CyberRadioDriver::IRadio::getDefaultBaudrate() ## # \brief Gets the radio's default control port. # # \copydetails CyberRadioDriver::IRadio::getDefaultControlPort() ## # \brief Gets the allowed VITA enable options set for the radio. # # \copydetails CyberRadioDriver::IRadio::getVitaEnableOptionSet() ## # \brief Gets the number of transmitters on the radio. # # \copydetails CyberRadioDriver::IRadio::getNumTransmitters() ## # \brief Gets the index range for the transmitters on the radio. # # \copydetails CyberRadioDriver::IRadio::getTransmitterIndexRange() ## # \brief Gets the frequency range for the transmitters on the radio. # # \copydetails CyberRadioDriver::IRadio::getTransmitterFrequencyRange() ## # \brief Gets the frequency resolution for transmitters on the radio. # # \copydetails CyberRadioDriver::IRadio::getTransmitterFrequencyRes() ## # \brief Gets the frequency unit for transmitters on the radio. # # \copydetails CyberRadioDriver::IRadio::getTransmitterFrequencyUnit() ## # \brief Gets the attenuation range for the transmitters on the radio. # # \copydetails CyberRadioDriver::IRadio::getTransmitterAttenuationRange() ## # \brief Gets the attenuation resolution for transmitters on the radio. # # \copydetails CyberRadioDriver::IRadio::getTransmitterAttenuationRes() ## # \brief Gets whether transmitters on the radio support continuous-wave # (CW) tone generation. 
# # \copydetails CyberRadioDriver::IRadio::transmitterSupportsCW() ## # \brief Gets the number of CW tone generators for each transmitter. # # \copydetails CyberRadioDriver::IRadio::getTransmitterCWNum() ## # \brief Gets the CW tone generator index range for transmitters on the radio. # # \copydetails CyberRadioDriver::IRadio::getTransmitterCWIndexRange() ## # \brief Gets the CW tone generator frequency range for transmitters on the radio. # # \copydetails CyberRadioDriver::IRadio::getTransmitterCWFrequencyRange() ## # \brief Gets the CW tone generator frequency resolution for transmitters on the radio. # # \copydetails CyberRadioDriver::IRadio::getTransmitterCWFrequencyRes() ## # \brief Gets the CW tone generator amplitude range for transmitters on the radio. # # \copydetails CyberRadioDriver::IRadio::getTransmitterCWAmplitudeRange() ## # \brief Gets the CW tone generator amplitude resolution for transmitters on the radio. # # \copydetails CyberRadioDriver::IRadio::getTransmitterCWAmplitudeRes() ## # \brief Gets the CW tone generator phase range for transmitters on the radio. # # \copydetails CyberRadioDriver::IRadio::getTransmitterCWPhaseRange() ## # \brief Gets the CW tone generator phase resolution for transmitters on the radio. # # \copydetails CyberRadioDriver::IRadio::getTransmitterCWPhaseRes() ## # \brief Gets whether transmitters on the radio support sweep functions # during continuous-wave (CW) tone generation. # # \copydetails CyberRadioDriver::IRadio::getTransmitterCWPhaseRes() ## # \brief Gets the CW tone generator sweep start frequency range for # transmitters on the radio. # # \copydetails CyberRadioDriver::IRadio::getTransmitterCWSweepStartRange() ## # \brief Gets the CW tone generator sweep start frequency resolution for # transmitters on the radio. # # \copydetails CyberRadioDriver::IRadio::getTransmitterCWSweepStartRes() ## # \brief Gets the CW tone generator sweep stop frequency range for # transmitters on the radio. # # \copydetails CyberRadioDriver::IRadio::getTransmitterCWSweepStopRange() ## # \brief Gets the CW tone generator sweep stop frequency resolution for # transmitters on the radio. # # \copydetails CyberRadioDriver::IRadio::getTransmitterCWSweepStopRes() ## # \brief Gets the CW tone generator sweep step frequency range for # transmitters on the radio. # # \copydetails CyberRadioDriver::IRadio::getTransmitterCWSweepStepRange() ## # \brief Gets the CW tone generator sweep step frequency resolution for # transmitters on the radio. # # \copydetails CyberRadioDriver::IRadio::getTransmitterCWSweepStepRes() ## # \brief Gets the CW tone generator sweep dwell time range for # transmitters on the radio. # # \copydetails CyberRadioDriver::IRadio::getTransmitterCWSweepDwellRange() ## # \brief Gets the CW tone generator sweep dwell time resolution for # transmitters on the radio. # # \copydetails CyberRadioDriver::IRadio::getTransmitterCWSweepDwellRes() ## # \brief Gets the number of wideband DUCs on the radio. # # \copydetails CyberRadioDriver::IRadio::getNumWbduc() ## # \brief Gets the index range for the wideband DUCs on the radio. # # \copydetails CyberRadioDriver::IRadio::getWbducIndexRange() ## # \brief Gets the frequency offset range for the wideband DUCs on the radio. # # \copydetails CyberRadioDriver::IRadio::getWbducFrequencyRange() ## # \brief Gets the frequency resolution for wideband DUCs on the radio. # # \copydetails CyberRadioDriver::IRadio::getWbducFrequencyRes() ## # \brief Gets the frequency unit for wideband DUCs on the radio. 
# # \copydetails CyberRadioDriver::IRadio::getWbducFrequencyUnit() ## # \brief Gets the attenuation range for the wideband DUCs on the radio. # # \copydetails CyberRadioDriver::IRadio::getWbducAttenuationRange() ## # \brief Gets the attenuation resolution for wideband DUCs on the radio. # # \copydetails CyberRadioDriver::IRadio::getWbducAttenuationRes() ## # \brief Gets the allowed rate set for the wideband DUCs on the radio. # # \copydetails CyberRadioDriver::IRadio::getWbducRateSet() ## # \brief Gets the allowed rate list for the wideband DUCs on the radio. # # \copydetails CyberRadioDriver::IRadio::getWbducRateList() ## # \brief Gets whether or not the wideband DUCs on the radio support loading # sample snapshots. # # \copydetails CyberRadioDriver::IRadio::wbducSupportsSnapshotLoad() ## # \brief Gets whether or not the wideband DUCs on the radio support # transmitting sample snapshots. # # \copydetails CyberRadioDriver::IRadio::wbducSupportsSnapshotTransmit() ## # \brief Gets the index range for the DDC groups on the radio. # # \copydetails CyberRadioDriver::IRadio::getWbddcGroupIndexRange() ## # \brief Gets the number of wideband DDC groups on the radio. # \copydetails CyberRadioDriver::IRadio::getNumWbddcGroups() ## # \brief Gets the index range for the wideband DDC groups on the radio. # # \copydetails CyberRadioDriver::IRadio::getWbddcGroupIndexRange() ## # \brief Gets the number of narrowband DDC groups on the radio. # \copydetails CyberRadioDriver::IRadio::getNumNbddcGroups() ## # \brief Gets the index range for the narrowband DDC groups on the radio. # # \copydetails CyberRadioDriver::IRadio::getNbddcGroupIndexRange() ## # \brief Gets the number of combined DDC groups on the radio. # \copydetails CyberRadioDriver::IRadio::getNumCombinedDdcGroups() ## # \brief Gets the index range for the combined DDC groups on the radio. # # \copydetails CyberRadioDriver::IRadio::getCombinedDdcGroupIndexRange() ## # \brief Gets the number of wideband DUC groups on the radio. # # \copydetails CyberRadioDriver::IRadio::getNumWbducGroups() ## # \brief Gets the index range for the wideband DUC groups on the radio. # # \copydetails CyberRadioDriver::IRadio::getWbducGroupIndexRange() # ------------- Deprecated/Helper Methods ----------------- # ## # \internal # \brief Define this object's string representation. def __str__(self): return self.name ## # \internal # \brief Helper function that returns an index list. def _getIndexList(self,objIndex,objDict): if objIndex is None: return list(objDict.keys()) elif type(objIndex) is int: return [objIndex,] if objIndex in list(objDict.keys()) else [] elif type(objIndex) is list: return [i for i in objIndex if i in list(objDict.keys())] else: return [] ## # \internal # \brief Helper function that "normalizes" an input configuration dictionary # section by doing the following: # <ul> # <li> Ensuring that keys for any enumerated entries are integers # <li> Expanding sub-dictionaries with the special "all" key # <li> Performing specialization for individual entries # # \param configDict The incoming configuration dictionary. # \param entryIndexList The list of entry indices (used in expanding "all" keys). # \return The new configuration dictionary. 
## # \internal # \brief Helper function that "normalizes" an input configuration dictionary # by doing the following: # <ul> # <li> Ensuring that keys for component enumerations are integers # <li> Expanding sub-dictionaries with the special "all" key # <li> Performing specialization for individual components or entries # \param configDict The incoming configuration dictionary. # \return The new configuration dictionary. ## # \brief Gets the radio configuration. # # \deprecated Use getConfiguration() instead. # # \return The dictionary of radio settings. ## # \internal # \brief Helper function for setting the tuner configuration. # # Deprecated in favor of setConfiguration(). ## # \internal # \brief Helper function for getting the tuner configuration. # # Deprecated in favor of getConfiguration(). ## # \internal # \brief Helper function for querying the tuner configuration. # # Deprecated in favor of queryConfiguration(). ## # \internal # \brief Helper function for setting the DDC configuration. # # Deprecated in favor of setConfiguration(). ## # \internal # \brief Helper function for getting the DDC configuration. # # Deprecated in favor of getConfiguration(). ## # \internal # \brief Helper function for querying the DDC configuration. # # Deprecated in favor of queryConfiguration(). ## # \internal # \brief Helper function for setting the IP configuration. # # Deprecated in favor of setConfiguration(). ## # \internal # \brief Helper function for querying the IP configuration. # \param gigEPortIndex 10-Gig data port index, or None to query all data ports. ## # \internal # \brief Helper function for querying the IP configuration for radios without # 10-Gig Ethernet interfaces. ## # \internal # \brief Helper function for querying the IP configuration for radios with # 10-Gig Ethernet interfaces. # \param gigEPortIndex 10-Gig data port index, or None to query all data ports. ## # \internal # \brief Helper function for setting the transmitter configuration. # # Deprecated in favor of setConfiguration(). ## # \internal # \brief Helper function for getting the transmitter configuration. # # Deprecated in favor of getConfiguration(). ## # \internal # \brief Helper function for querying the transmitter configuration. # # Deprecated in favor of getConfiguration(). ## # \internal # \brief Helper function for setting the DUC configuration. # # Deprecated in favor of setConfiguration(). ## # \internal # \brief Helper function for getting the DUC configuration. # # Deprecated in favor of getConfiguration(). ## # \internal # \brief Helper function for querying the DUC configuration. # # Deprecated in favor of getConfiguration(). ## # \internal # \brief Helper function for getting the DDC group configuration. # # Deprecated in favor of getConfiguration(). ## # \internal # \brief Helper function for querying the DDC group configuration. # # Deprecated in favor of queryConfiguration(). ## # \internal # \brief Helper function for setting the DDC group configuration. # # Deprecated in favor of setConfiguration(). ## # \internal # \brief Helper function for getting the combined DDC group configuration. # # Deprecated in favor of getConfiguration(). ## # \internal # \brief Helper function for querying the combined DDC group configuration. # # Deprecated in favor of queryConfiguration(). ## # \internal # \brief Helper function for setting the combined DDC group configuration. # # Deprecated in favor of setConfiguration(). ## # \internal # \brief Helper function for getting the DUC group configuration. 
# # Deprecated in favor of getConfiguration(). ## # \internal # \brief Helper function for querying the DUC group configuration. # # Deprecated in favor of queryConfiguration(). ## # \internal # \brief Helper function for setting the DUC group configuration. # # Deprecated in favor of setConfiguration(). ## # \internal # \brief Helper function for getting the tuner group configuration. # # Deprecated in favor of getConfiguration(). ## # \internal # \brief Helper function for querying the tuner group configuration. # # Deprecated in favor of queryConfiguration(). ## # \internal # \brief Helper function for setting the tuner group configuration. # # Deprecated in favor of setConfiguration(). ## # \internal # \brief Helper function for setting the FFT stream configuration. # # Deprecated in favor of setConfiguration(). # ## # \internal # \brief Helper function for getting the FFT stream configuration. # # Deprecated in favor of getConfiguration(). ## # \internal # \brief Helper function for querying the FFT stream configuration. # # Deprecated in favor of queryConfiguration(). ## # \internal # \brief Helper function for configuring the IP addresses. ## # \brief Gets the number of DDCs on the radio. # # \copydetails CyberRadioDriver::IRadio::getNumDdc() ## # \brief Gets the allowed rate set for the DDCs on the radio. # # \copydetails CyberRadioDriver::IRadio::getDdcRateSet() ## # \brief Gets the allowed rate list for the DDCs on the radio. # # \copydetails CyberRadioDriver::IRadio::getDdcRateList() ## # \brief Gets the allowed bandwidth set for the DDCs on the radio. # # \copydetails CyberRadioDriver::IRadio::getDdcBwSet() ## # \brief Gets the allowed bandwidth list for the DDCs on the radio. # # \copydetails CyberRadioDriver::IRadio::getDdcBwList() ## # \brief Gets the set of available DDC data formats. # # \copydetails CyberRadioDriver::IRadio::getDdcDataFormat() ## # \brief Gets the frequency offset range for the narrowband DDCs on the radio. # # \copydetails CyberRadioDriver::IRadio::getNbddcFrequencyRange() ## # \brief Gets the list of DDC indexes for a specified type. # # \copydetails CyberRadioDriver::IRadio::getDdcIndexRange() ## # \internal # \brief Convenience method for configuring the Ethernet addresses on a radio that does not # have Gigabit Ethernet ports. # # \param sip The source IP address. If this is None, the source IP address will not # be changed. # \param dip The destination IP address. If this is None, the destination IP address # will not be changed. # \param dmac The destination MAC address. If this is None, the destination MAC address # will not be changed. # \return True if the configuration succeeded, False otherwise. def setIpConfiguration(self, sip=None, dip=None, dmac=None): configDict = { configKeys.CONFIG_IP: { } } if sip is not None: configDict[configKeys.CONFIG_IP][configKeys.IP_SOURCE] = copy.deepcopy(sip) if dip is not None: configDict[configKeys.CONFIG_IP][configKeys.IP_DEST] = copy.deepcopy(dip) if dmac is not None: configDict[configKeys.CONFIG_IP][configKeys.MAC_DEST] = copy.deepcopy(dmac) return self._setConfiguration(configDict) ## # \internal ## # \internal # \brief Sets tuner configuration (old-style). # # \deprecated Use setConfiguration() instead. # # \param frequency Tuner frequency. # \param attenuation Tuner attenuation. # \param tunerIndex Either None (configure all tuners), an index number (configure # a specific tuner), or a list of index numbers (configure a set of tuners). # \return True if successful, False otherwise. 
## # \internal # \brief Gets tuner configuration (old-style). # # \deprecated Use getConfiguration() instead. # # \param tunerIndex Either None (get for all tuners), an index number (get for # a specific tuner), or a list of index numbers (get for a set of tuners). # \return A dictionary with configuration information. ## # \internal # \brief Sets tuner frequency (old-style). # # \deprecated Use setConfiguration() instead. # # \param frequency Tuner frequency. # \param tunerIndex Either None (configure all tuners), an index number (configure # a specific tuner), or a list of index numbers (configure a set of tuners). # \return True if successful, False otherwise. ## # \internal # \brief Gets tuner frequency information (old-style). # # \deprecated Use getConfiguration() instead. # # \param tunerIndex Either None (get for all tuners), an index number (get for # a specific tuner), or a list of index numbers (get for a set of tuners). # \return A dictionary with frequency information. ## # \internal # \brief Sets tuner attenuation (old-style). # # \deprecated Use setConfiguration() instead. # # \param attenuation Tuner attenuation. # \param tunerIndex Either None (configure all tuners), an index number (configure # a specific tuner), or a list of index numbers (configure a set of tuners). # \return True if successful, False otherwise. ## # \internal # \brief Gets tuner attenuation information (old-style). # # \deprecated Use getConfiguration() instead. # # \param tunerIndex Either None (get for all tuners), an index number (get for # a specific tuner), or a list of index numbers (get for a set of tuners). # \return A dictionary with attenuation information. ## # \internal # \brief Sets DDC configuration (old-style). # # \deprecated Use setConfiguration() instead. # # \param wideband Whether the DDC is a wideband DDC. # \param ddcIndex Either None (configure all DDCs), an index number (configure # a specific DDC), or a list of index numbers (configure a set of DDCs). # \param rfIndex DDC RF index number. # \param rateIndex DDC rate index number. # \param udpDest UDP destination. # \param frequency Frequency offset. # \param enable 1 if DDC is enabled, 0 if not. # \param vitaEnable VITA 49 streaming option, as appropriate for the radio. # \param streamId VITA 49 stream ID. # \return True if successful, False otherwise. ## # \brief Disables ethernet flow control on the radio. # # \copydetails CyberRadioDriver::IRadio::disableTenGigFlowControl() ## # \brief Enables ethernet flow control on the radio. # # \copydetails CyberRadioDriver::IRadio::enableTenGigFlowControl() ## # \brief method to enable or disable ethernet flow control on the radio. # # \copydetails CyberRadioDriver::IRadio::getTenGigFlowControlStatus() ## # \brief Queries status of flow control handling. # # \copydetails CyberRadioDriver::IRadio::getTenGigFlowControlStatus() ## # \brief Performs coherent tuning. # # \copydetails CyberRadioDriver::IRadio::coherentTune() ## # \brief Gets the current FPGA state. # # \copydetails CyberRadioDriver::IRadio::getFpgaState() ## # \brief Sets the current FPGA state. # # \copydetails CyberRadioDriver::IRadio::setFpgaState() # OVERRIDE ## # \brief Sets whether or not the object is in verbose mode. # # \copydetails CyberRadioDriver::IRadio::setVerbose() ## # \brief Sets the log file. # # \copydetails CyberRadioDriver::IRadio::setLogFile() ## # \brief Gets the list of connected data port interface indices. 
# # \copydetails CyberRadioDriver::IRadio::getConnectedDataPorts() ## # \internal # \brief Converts a user-specified time string into a number of seconds # since 1/1/70. # # The time string can be either: # \li Absolute time, in any supported format # \li Relative time specified as now{-n}, where n is a number of seconds # \li Relative time specified as now{-[[H:]MM:]SS} # \li "begin", which is the beginning of known time (1/1/70) # \li "end", which is the end of trackable time and far beyond the # useful life of this utility (01/18/2038) # # \throws RuntimeException if the time string format cannot be understood. # \param timestr The time string. # \param utc Whether or not the user's time string is in UTC time. # \return The time, in number of seconds since the Epoch ## # Converts a time string ([+-][[H:]M:]S) to a time in seconds. # # \note Hours and minutes are not bounded in any way. These strings provide the # same result: # \li "7200" # \li "120:00" # \li "2:00:00" # # \throws RuntimeError if the time is formatted improperly. # \param timeStr The time string. # \return The number of seconds. ## # \internal # \brief Radio handler class that supports nothing more complicated than # identifying a connected radio. # # Used internally to support radio auto-detection. # # This class implements the CyberRadioDriver.IRadio interface. # ## # \brief Radio function (mode) command used by JSON-based radios. # ## # \internal # \brief Radio handler class that supports nothing more complicated than # identifying a connected radio. # # Used internally to support radio auto-detection. # # This class implements the CyberRadioDriver.IRadio interface. # #-- End Radio Handler Objects --------------------------------------------------# #-- NOTE: Radio handler objects for supporting specific radios need to be # implemented under the CyberRadioDriver.radios package tree.
43.884634
168
0.591156
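The record above documents a long list of capability getters on the radio handler object (getNumWbddc, getWbddcIndexRange, isWbddcTunable, getWbddcRateSet, isByteswapped, getConnectionModeList, and so on). A minimal usage sketch follows; obtaining and connecting the handler instance ("radio") is not shown in this excerpt and is assumed here, and the helper function name is invented for illustration.

# Sketch only: assumes "radio" is an already-connected CyberRadioDriver radio
# handler instance (how it is created and connected is outside this excerpt).
# The getter names are the ones referenced in the \copydetails blocks above.
def describe_wbddc_capabilities(radio):
    print("Wideband DDCs:", radio.getNumWbddc())
    print("  index range:", radio.getWbddcIndexRange())
    print("  tunable:", radio.isWbddcTunable())
    print("  allowed rates:", radio.getWbddcRateSet())
    print("Byte-swapped data:", radio.isByteswapped())
    print("Connection modes:", radio.getConnectionModeList())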
a0fde969f3e2acaa6481f6fe003e765cdca46b4c
1,686
py
Python
alpha_zero/NeuralNet.py
blekinge/alpha-zero-general
7cc33e9b2e40602549b59fe753956e69a56f51f1
[ "MIT" ]
null
null
null
alpha_zero/NeuralNet.py
blekinge/alpha-zero-general
7cc33e9b2e40602549b59fe753956e69a56f51f1
[ "MIT" ]
null
null
null
alpha_zero/NeuralNet.py
blekinge/alpha-zero-general
7cc33e9b2e40602549b59fe753956e69a56f51f1
[ "MIT" ]
null
null
null
from typing import List, Tuple import numpy as np from alpha_zero.Board import Board
31.222222
102
0.6293
a0fef1eaf1459e3aa6754a55ca8204b402a0ab05
785
py
Python
server/app/forms.py
zhancongc/bugaboo
ac78e7e0274492273554b089122196b7869e8bfb
[ "Apache-2.0" ]
null
null
null
server/app/forms.py
zhancongc/bugaboo
ac78e7e0274492273554b089122196b7869e8bfb
[ "Apache-2.0" ]
null
null
null
server/app/forms.py
zhancongc/bugaboo
ac78e7e0274492273554b089122196b7869e8bfb
[ "Apache-2.0" ]
null
null
null
""" Project : bugaboo Filename : forms.py Author : zhancongc Description : """ from flask_wtf import FlaskForm from wtforms import StringField, BooleanField, TextAreaField, SelectField, FileField, IntegerField, PasswordField, SubmitField from wtforms.validators import DataRequired
23.787879
126
0.718471
9d006b0d7e89fe26f4e43d422a80339277272355
3,836
py
Python
synthdid/variance.py
MasaAsami/pysynthdid
01afe33ae22f513c65f9cfdec56a4b21ca547c28
[ "Apache-2.0" ]
null
null
null
synthdid/variance.py
MasaAsami/pysynthdid
01afe33ae22f513c65f9cfdec56a4b21ca547c28
[ "Apache-2.0" ]
null
null
null
synthdid/variance.py
MasaAsami/pysynthdid
01afe33ae22f513c65f9cfdec56a4b21ca547c28
[ "Apache-2.0" ]
2
2022-03-11T03:13:36.000Z
2022-03-20T22:55:13.000Z
import pandas as pd import numpy as np from tqdm import tqdm
36.884615
88
0.516945
9d01bb83bee5f2c4612c59332de6ea7b9e34ac2f
681
py
Python
todo/views.py
arascch/Todo_list
a4c88abaa4e6c1e158135b4fce4bcfbf64cb86e2
[ "Apache-2.0" ]
1
2020-03-24T09:26:23.000Z
2020-03-24T09:26:23.000Z
todo/views.py
arascch/Todo_list
a4c88abaa4e6c1e158135b4fce4bcfbf64cb86e2
[ "Apache-2.0" ]
null
null
null
todo/views.py
arascch/Todo_list
a4c88abaa4e6c1e158135b4fce4bcfbf64cb86e2
[ "Apache-2.0" ]
null
null
null
from django.shortcuts import render from django.utils import timezone from todo.models import Todo from django.http import HttpResponseRedirect
35.842105
82
0.737151
9d02e73cfc6d5e0a0462f594bbcafd9199cb2c88
816
py
Python
Easy/Hangman/HangMan - Stage 6.py
michael-act/HyperSkill
ce16eb3b6f755f7f8f21a57ef2679fcb8a4bd55c
[ "MIT" ]
1
2020-11-17T18:09:30.000Z
2020-11-17T18:09:30.000Z
Easy/Hangman/HangMan - Stage 6.py
michael-act/HyperSkill
ce16eb3b6f755f7f8f21a57ef2679fcb8a4bd55c
[ "MIT" ]
null
null
null
Easy/Hangman/HangMan - Stage 6.py
michael-act/HyperSkill
ce16eb3b6f755f7f8f21a57ef2679fcb8a4bd55c
[ "MIT" ]
null
null
null
import random

category = ['python', 'java', 'kotlin', 'javascript']
computer = random.choice(category)
hidden = list(len(computer) * "-")

print("H A N G M A N")
counter = 8

while counter > 0:
    print()
    print("".join(hidden))
    letter = input("Input a letter: ")
    if letter in hidden:
        # The letter is already revealed, so this guess adds nothing new.
        counter -= 1
        print("No improvements")
    elif letter in set(computer):
        # Reveal every occurrence of the guessed letter.
        for position, char in enumerate(computer):
            if char == letter:
                hidden[position] = letter
        if "-" not in hidden:
            print()
            print("".join(hidden))
            print("You guessed the word!")
            print("You survived!")
            break
    else:
        counter -= 1
        print("No such letter in the word")
        print(counter)
else:
    print("You are hanged!")
24
61
0.616422
9d03157b2910202ba3c53d84197f7000003a404d
6,536
py
Python
sklcc/taskEdit.py
pyxuweitao/MSZ_YCL
23323c4660f44af0a45d6ab81cd496b81976f5a0
[ "Apache-2.0" ]
null
null
null
sklcc/taskEdit.py
pyxuweitao/MSZ_YCL
23323c4660f44af0a45d6ab81cd496b81976f5a0
[ "Apache-2.0" ]
null
null
null
sklcc/taskEdit.py
pyxuweitao/MSZ_YCL
23323c4660f44af0a45d6ab81cd496b81976f5a0
[ "Apache-2.0" ]
null
null
null
# -*- coding: utf-8 -*- """ task """ __author__ = "XuWeitao" import CommonUtilities import rawSql def getTasksList(UserID): """ :param UserID:IDALL :return:{ "SerialNo":, "CreateTime":, "LastModifiedTime":, "ProductNo":, "ColorNo":, "ArriveTime":, "Name":, "GongYingShang":{"id":, "name":}, "WuLiao":{"id":ID, "name":, "cata":}, "DaoLiaoZongShu":, "DanWei":{"id":ID, "name":} "DaoLiaoZongShu2":, "DanWei":{"id":ID, "name":}, "XieZuoRen": } """ raw = rawSql.Raw_sql() raw.sql = """SELECT SerialNo, CONVERT(VARCHAR(16), CreateTime, 20) CreateTime, CONVERT(VARCHAR(16), LastModifiedTime, 20) LastModifiedTime, ProductNo, ColorNo, CONVERT(VARCHAR(10), ArriveTime, 20) ArriveTime, dbo.getUserNameByUserID(UserID), SupplierID, dbo.getSupplierNameByID(SupplierID), MaterialID, dbo.getMaterialNameByID(MaterialID), dbo.getMaterialTypeNameByID(dbo.getMaterialTypeIDByMaterialID(MaterialID)), DaoLiaoZongShu, UnitID, dbo.getUnitNameByID(UnitID), DaoLiaoZongShu2, UnitID2, dbo.getUnitNameByID(UnitID2) AS DanWei2, Inspectors, UserID FROM RMI_TASK WITH(NOLOCK)""" # if UserID != 'ALL': raw.sql += " WHERE CHARINDEX('%s', Inspectors) > 0 AND State = 2" % UserID else: raw.sql += " WHERE State = 0" res = raw.query_all() jsonReturn = list() for row in res: #@ Inspectors = row[18].split('@') InspectorList = list() for inspectorNo in Inspectors: if inspectorNo == row[19]: continue raw.sql = "SELECT DBO.getUserNameByUserID('%s')"%inspectorNo inspectorName = raw.query_one() if inspectorName: inspectorName = inspectorName[0] InspectorList.append({'Name':inspectorName, 'ID':inspectorNo}) jsonReturn.append({ "SerialNo":row[0], "CreateTime":row[1], "LastModifiedTime":row[2], "ProductNo":row[3], "ColorNo":row[4], "ArriveTime":row[5], "Name":row[6], "GongYingShang":{"id":row[7], "name":row[8]}, "WuLiao":{"id":row[9], "name":row[10], "cata":row[11]}, "DaoLiaoZongShu":row[12], "DanWei":{"id":row[13], "name":row[14]}, "DaoLiaoZongShu2":row[15], "DanWei2":{"id":row[16], "name":row[17]}, "XieZuoRen":InspectorList }) return jsonReturn def editTaskInfo(taskInfo, userID): """ isNew :param taskInfo: :param userID:ID :return: """ raw = rawSql.Raw_sql() # if "isReturn" in taskInfo: raw.sql = "UPDATE RMI_TASK WITH(ROWLOCK) SET State = 2 WHERE SerialNo = '%s'"%taskInfo['SerialNo'] raw.update() else: isNew = True if taskInfo['isNew'] == "True" else False #NonenullJSONNone taskInfo['DaoLiaoZongShu2'] = False if 'DaoLiaoZongShu2' not in taskInfo else taskInfo['DaoLiaoZongShu2'] taskInfo['DanWei2'] = {'id':None} if 'DanWei2' not in taskInfo else taskInfo['DanWei2'] #ID if 'XieZuoRen' in taskInfo: taskInfo['XieZuoRen'].append({'ID':userID}) taskInfo['Inspectors'] = "@".join([User['ID'] for User in taskInfo['XieZuoRen']]) else: taskInfo['Inspectors'] = userID if isNew: raw.sql = """INSERT INTO RMI_TASK WITH(ROWLOCK) (CreateTime, LastModifiedTime, ProductNo, ColorNo, ArriveTime, UserID, FlowID, MaterialID, SupplierID, UnitID, DaoLiaoZongShu, DaoLiaoZongShu2, UnitID2, Inspectors) VALUES ( getdate(), getdate(),'%s','%s', '%s', '%s', '%s', '%s', '%s', '%s', '%s', %s, %s, '%s' );""" % ( taskInfo['ProductNo'], taskInfo['ColorNo'], taskInfo['ArriveTime'][:10], userID, taskInfo['FlowID'], taskInfo['WuLiao']['id'], taskInfo['GongYingShang']['id'], taskInfo['DanWei']['id'], taskInfo['DaoLiaoZongShu'], "'"+unicode(taskInfo['DaoLiaoZongShu2'])+"'" if taskInfo['DaoLiaoZongShu2'] else "NULL", "'"+unicode(taskInfo['DanWei2']['id'])+"'" if taskInfo['DanWei2']['id'] else "NULL", taskInfo['Inspectors'] ) raw.update() # raw.sql = "SELECT TOP 1 SerialNo FROM RMI_TASK 
WHERE UserID = '%s' AND State = 2 ORDER BY CreateTime desc"%userID return raw.query_one()[0] else: raw.sql = """UPDATE RMI_TASK WITH(ROWLOCK) SET MaterialID = '%s',SupplierID = '%s', UnitID = '%s', DaoLiaoZongShu = '%s', ProductNo = '%s', ColorNo = '%s', ArriveTime = '%s', DaoLiaoZongShu2 = %s, UnitID2 = %s, Inspectors = '%s' WHERE SerialNo = '%s'""" % ( taskInfo['WuLiao']['id'], taskInfo['GongYingShang']['id'], taskInfo['DanWei']['id'], taskInfo['DaoLiaoZongShu'], taskInfo['ProductNo'], taskInfo['ColorNo'], taskInfo['ArriveTime'][:10].replace('-',''), "'"+unicode(taskInfo['DaoLiaoZongShu2'])+"'" if taskInfo['DaoLiaoZongShu2'] else "NULL", "'"+unicode(taskInfo['DanWei2']['id'])+"'" if taskInfo['DanWei2']['id'] else "NULL", taskInfo['Inspectors'], taskInfo['SerialNo']) raw.update() def getFlowList(): """ :return:{"name":FlowName,"value":FlowID} """ raw = rawSql.Raw_sql() raw.sql = "SELECT FlowID AS value, FlowName AS name FROM RMI_WORK_FLOW WITH(NOLOCK)" res, columns = raw.query_all(needColumnName=True) return CommonUtilities.translateQueryResIntoDict(columns, res) def commitTaskBySerialNo(SerialNo): """ :param SerialNo: :return: """ raw = rawSql.Raw_sql() raw.sql = "UPDATE RMI_TASK SET State = 0 WHERE SerialNo = '%s'"%SerialNo raw.update() return def deleteTaskBySerialNo(SerialNo): """ RMI_TASK :param SerialNo: :return: """ #TODOupdate_other_tables_when_delete_rmi_taskF01 raw = rawSql.Raw_sql() raw.sql = "DELETE FROM RMI_TASK WHERE SerialNo='%s'"%SerialNo raw.update() #call trigger delete all task info in rmi_task_process... return def getAllMaterialByName(fuzzyName): """ :param fuzzyName: :return:{'id':ID,'name':,'cata':} """ raw = rawSql.Raw_sql() raw.sql = """SELECT MaterialID AS id, MaterialName AS name, dbo.getMaterialTypeNameByID(MaterialTypeID) AS cata FROM RMI_MATERIAL_NAME WITH(NOLOCK)""" if fuzzyName: raw.sql += """ WHERE MaterialName LIKE '%%%%%s%%%%'"""%fuzzyName res, cols = raw.query_all(needColumnName=True) return CommonUtilities.translateQueryResIntoDict(cols, res) else: # return [{"name":u'', "id":"", "cata":""}]
41.106918
140
0.671665
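The taskEdit.py record above builds every SQL statement by interpolating user-supplied values straight into the query text. As a hedged alternative, the sketch below shows the same kind of update written with bound parameters; it assumes a standard DB-API driver such as pyodbc is available, which the rawSql wrapper used in that file does not appear to expose in this excerpt.

# Sketch only, assuming a pyodbc (DB-API) connection is available; this is
# not how the rawSql.Raw_sql wrapper above works.
# Bound parameters let the driver escape values, avoiding SQL injection.
import pyodbc

def commit_task_by_serial_no(conn, serial_no):
    cursor = conn.cursor()
    # "?" placeholders are filled in by the driver, not by string formatting.
    cursor.execute("UPDATE RMI_TASK SET State = 0 WHERE SerialNo = ?", (serial_no,))
    conn.commit()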
9d07e918f729733a967e2d67e465e2cf7ce7d2a4
11,417
py
Python
tensor2tensor/models/revnet.py
ysglh/tensor2tensor
f55462a9928f3f8af0b1275a4fb40d13cae6cc79
[ "Apache-2.0" ]
null
null
null
tensor2tensor/models/revnet.py
ysglh/tensor2tensor
f55462a9928f3f8af0b1275a4fb40d13cae6cc79
[ "Apache-2.0" ]
null
null
null
tensor2tensor/models/revnet.py
ysglh/tensor2tensor
f55462a9928f3f8af0b1275a4fb40d13cae6cc79
[ "Apache-2.0" ]
null
null
null
# coding=utf-8 # Copyright 2017 The Tensor2Tensor Authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Creates a RevNet with the bottleneck residual function. Implements the following equations described in the RevNet paper: y1 = x1 + f(x2) y2 = x2 + g(y1) However, in practice, the authors use the following equations to downsample tensors inside a RevNet block: y1 = h(x1) + f(x2) y2 = h(x2) + g(y1) In this case, h is the downsampling function used to change number of channels. These modified equations are evident in the authors' code online: https://github.com/renmengye/revnet-public For reference, the original paper can be found here: https://arxiv.org/pdf/1707.04585.pdf """ # Dependency imports from tensor2tensor.layers import common_hparams from tensor2tensor.layers import rev_block from tensor2tensor.utils import registry from tensor2tensor.utils import t2t_model import tensorflow as tf CONFIG = {'2d': {'conv': tf.layers.conv2d, 'max_pool': tf.layers.max_pooling2d, 'avg_pool': tf.layers.average_pooling2d, 'split_axis': 3, 'reduction_dimensions': [1, 2] }, '3d': {'conv': tf.layers.conv3d, 'max_pool': tf.layers.max_pooling3d, 'avg_pool': tf.layers.average_pooling2d, 'split_axis': 4, 'reduction_dimensions': [1, 2, 3] } } def f(x, depth1, depth2, dim='2d', first_batch_norm=True, layer_stride=1, training=True, padding='SAME'): """Applies bottleneck residual function for 104-layer RevNet. Args: x: input tensor depth1: Number of output channels for the first and second conv layers. depth2: Number of output channels for the third conv layer. dim: '2d' if 2-dimensional, '3d' if 3-dimensional. first_batch_norm: Whether to keep the first batch norm layer or not. Typically used in the first RevNet block. layer_stride: Stride for the first conv filter. Note that this particular 104-layer RevNet architecture only varies the stride for the first conv filter. The stride for the second conv filter is always set to 1. training: True for train phase, False for eval phase. padding: Padding for each conv layer. Returns: Output tensor after applying residual function for 104-layer RevNet. """ conv = CONFIG[dim]['conv'] with tf.variable_scope('f'): if first_batch_norm: net = tf.layers.batch_normalization(x, training=training) net = tf.nn.relu(net) else: net = x net = conv(net, depth1, 1, strides=layer_stride, padding=padding, activation=None) net = tf.layers.batch_normalization(net, training=training) net = tf.nn.relu(net) net = conv(net, depth1, 3, strides=1, padding=padding, activation=None) net = tf.layers.batch_normalization(net, training=training) net = tf.nn.relu(net) net = conv(net, depth2, 1, strides=1, padding=padding, activation=None) return net def h(x, output_channels, dim='2d', layer_stride=1, scope='h'): """Downsamples 'x' using a 1x1 convolution filter and a chosen stride. Args: x: input tensor of size [N, H, W, C] output_channels: Desired number of output channels. dim: '2d' if 2-dimensional, '3d' if 3-dimensional. layer_stride: What stride to use. Usually 1 or 2. 
scope: Optional variable scope for the h function. This function uses a 1x1 convolution filter and a chosen stride to downsample the input tensor x. Returns: A downsampled tensor of size [N, H/2, W/2, output_channels] if layer_stride is 2, else returns a tensor of size [N, H, W, output_channels] if layer_stride is 1. """ conv = CONFIG[dim]['conv'] with tf.variable_scope(scope): x = conv(x, output_channels, 1, strides=layer_stride, padding='SAME', activation=None) return x def init(images, num_channels, dim='2d', training=True, scope='init'): """Standard ResNet initial block used as first RevNet block. Args: images: [N, H, W, 3] tensor of input images to the model. num_channels: Output depth of convolutional layer in initial block. dim: '2d' if 2-dimensional, '3d' if 3-dimensional. training: True for train phase, False for eval phase. scope: Optional scope for the init block. Returns: Two [N, H, W, C] output activations from input images. """ conv = CONFIG[dim]['conv'] pool = CONFIG[dim]['max_pool'] with tf.variable_scope(scope): net = conv(images, num_channels, 7, strides=2, padding='SAME', activation=None) net = tf.layers.batch_normalization(net, training=training) net = tf.nn.relu(net) net = pool(net, pool_size=3, strides=2) x1, x2 = tf.split(net, 2, axis=CONFIG[dim]['split_axis']) return x1, x2 def unit(x1, x2, block_num, depth1, depth2, num_layers, dim='2d', first_batch_norm=True, stride=1, training=True): """Implements bottleneck RevNet unit from authors' RevNet-104 architecture. Args: x1: [N, H, W, C] tensor of network activations. x2: [N, H, W, C] tensor of network activations. block_num: integer ID of block depth1: First depth in bottleneck residual unit. depth2: Second depth in bottleneck residual unit. num_layers: Number of layers in the RevNet block. dim: '2d' if 2-dimensional, '3d' if 3-dimensional. first_batch_norm: Whether to keep the first batch norm layer or not. Typically used in the first RevNet block. stride: Stride for the residual function. training: True for train phase, False for eval phase. Returns: Two [N, H, W, C] output activation tensors. """ scope_name = 'unit_%d' % block_num with tf.variable_scope(scope_name): # Manual implementation of downsampling with tf.variable_scope('downsampling'): with tf.variable_scope('x1'): hx1 = h(x1, depth2, dim=dim, layer_stride=stride) fx2 = f(x2, depth1, depth2, dim=dim, layer_stride=stride, first_batch_norm=first_batch_norm, training=training) x1 = hx1 + fx2 with tf.variable_scope('x2'): hx2 = h(x2, depth2, dim=dim, layer_stride=stride) fx1 = f(x1, depth1, depth2, dim=dim, training=training) x2 = hx2 + fx1 # Full block using memory-efficient rev_block implementation. with tf.variable_scope('full_block'): residual_func = lambda x: f(x, depth1, depth2, dim=dim, training=training) x1, x2 = rev_block.rev_block(x1, x2, residual_func, residual_func, num_layers=num_layers) return x1, x2 def final_block(x1, x2, dim='2d', training=True, scope='final_block'): """Converts activations from last RevNet block to pre-logits. Args: x1: [NxHxWxC] tensor of network activations. x2: [NxHxWxC] tensor of network activations. dim: '2d' if 2-dimensional, '3d' if 3-dimensional. training: True for train phase, False for eval phase. scope: Optional variable scope for the final block. Returns: [N, hidden_dim] pre-logits tensor from activations x1 and x2. 
""" # Final batch norm and relu with tf.variable_scope(scope): y = tf.concat([x1, x2], axis=CONFIG[dim]['split_axis']) y = tf.layers.batch_normalization(y, training=training) y = tf.nn.relu(y) # Global average pooling net = tf.reduce_mean(y, CONFIG[dim]['reduction_dimensions'], name='final_pool', keep_dims=True) return net def revnet104(inputs, hparams, reuse=None): """Uses Tensor2Tensor memory optimized RevNet block to build a RevNet. Args: inputs: [NxHxWx3] tensor of input images to the model. hparams: HParams object that contains the following parameters, in addition to the parameters contained in the basic_params1() object in the common_hparams module: num_channels_first - A Python list where each element represents the depth of the first and third convolutional layers in the bottleneck residual unit for a given block. num_channels_second - A Python list where each element represents the depth of the second convolutional layer in the bottleneck residual unit for a given block. num_layers_per_block - A Python list containing the number of RevNet layers for each block. first_batch_norm - A Python list containing booleans representing the presence of a batch norm layer at the beginning of a given block. strides - A Python list containing integers representing the stride of the residual function for each block. num_channels_init_block - An integer representing the number of channels for the convolutional layer in the initial block. dimension - A string (either "2d" or "3d") that decides if the RevNet is 2-dimensional or 3-dimensional. reuse: Whether to reuse the default variable scope. Returns: [batch_size, hidden_dim] pre-logits tensor from the bottleneck RevNet. """ training = hparams.mode == tf.estimator.ModeKeys.TRAIN with tf.variable_scope('RevNet104', reuse=reuse): x1, x2 = init(inputs, num_channels=hparams.num_channels_init_block, dim=hparams.dim, training=training) for block_num in range(1, len(hparams.num_layers_per_block)): block = {'depth1': hparams.num_channels_first[block_num], 'depth2': hparams.num_channels_second[block_num], 'num_layers': hparams.num_layers_per_block[block_num], 'first_batch_norm': hparams.first_batch_norm[block_num], 'stride': hparams.strides[block_num]} x1, x2 = unit(x1, x2, block_num, dim=hparams.dim, training=training, **block) pre_logits = final_block(x1, x2, dim=hparams.dim, training=training) return pre_logits
38.441077
80
0.681177
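The revnet.py record above states the coupling equations y1 = x1 + f(x2) and y2 = x2 + g(y1). The short NumPy sketch below is illustrative only (it is not part of the tensor2tensor sources) and shows why those equations are exactly invertible, which is what lets a RevNet recompute activations during the backward pass instead of storing them.

# Illustrative sketch; f and g here are arbitrary stand-ins for the bottleneck
# residual functions defined in the module above.
import numpy as np

def f(x):
    return np.tanh(x)

def g(x):
    return 0.5 * x

x1, x2 = np.random.randn(4), np.random.randn(4)

# Forward pass of one reversible block.
y1 = x1 + f(x2)
y2 = x2 + g(y1)

# Exact inversion: x1 and x2 are recovered from y1 and y2 alone.
x2_rec = y2 - g(y1)
x1_rec = y1 - f(x2_rec)
assert np.allclose(x1, x1_rec) and np.allclose(x2, x2_rec)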
9d08e38fa29119640133acdff959362b1c00409d
4,166
py
Python
tests/unit/test_services.py
BlooAM/Online-shopping-app
aa68d258fe32bf5a792e534dddd9def7c25460e2
[ "MIT" ]
null
null
null
tests/unit/test_services.py
BlooAM/Online-shopping-app
aa68d258fe32bf5a792e534dddd9def7c25460e2
[ "MIT" ]
null
null
null
tests/unit/test_services.py
BlooAM/Online-shopping-app
aa68d258fe32bf5a792e534dddd9def7c25460e2
[ "MIT" ]
null
null
null
import pytest from datetime import date, timedelta from adapters import repository from domain.model import Batch, OrderLine, allocate, OutOfStock from domain import model from service_layer import handlers, unit_of_work today = date.today() tomorrow = today + timedelta(days=1) later = tomorrow + timedelta(days=10)
32.046154
89
0.702112
9d08ebe64750ed4ee86af0207bca624b0391ff75
1,786
py
Python
DQMOffline/L1Trigger/python/L1TEGammaOffline_cfi.py
pasmuss/cmssw
566f40c323beef46134485a45ea53349f59ae534
[ "Apache-2.0" ]
null
null
null
DQMOffline/L1Trigger/python/L1TEGammaOffline_cfi.py
pasmuss/cmssw
566f40c323beef46134485a45ea53349f59ae534
[ "Apache-2.0" ]
null
null
null
DQMOffline/L1Trigger/python/L1TEGammaOffline_cfi.py
pasmuss/cmssw
566f40c323beef46134485a45ea53349f59ae534
[ "Apache-2.0" ]
null
null
null
import FWCore.ParameterSet.Config as cms electronEfficiencyThresholds = [36, 68, 128, 176] electronEfficiencyBins = [] electronEfficiencyBins.extend(list(xrange(0, 120, 10))) electronEfficiencyBins.extend(list(xrange(120, 180, 20))) electronEfficiencyBins.extend(list(xrange(180, 300, 40))) electronEfficiencyBins.extend(list(xrange(300, 400, 100))) # just copy for now photonEfficiencyThresholds = electronEfficiencyThresholds photonEfficiencyBins = electronEfficiencyBins l1tEGammaOfflineDQM = cms.EDAnalyzer( "L1TEGammaOffline", electronCollection=cms.InputTag("gedGsfElectrons"), photonCollection=cms.InputTag("photons"), caloJetCollection=cms.InputTag("ak4CaloJets"), caloMETCollection=cms.InputTag("caloMet"), conversionsCollection=cms.InputTag("allConversions"), PVCollection=cms.InputTag("offlinePrimaryVerticesWithBS"), beamSpotCollection=cms.InputTag("offlineBeamSpot"), TriggerEvent=cms.InputTag('hltTriggerSummaryAOD', '', 'HLT'), TriggerResults=cms.InputTag('TriggerResults', '', 'HLT'), # last filter of HLTEle27WP80Sequence TriggerFilter=cms.InputTag('hltEle27WP80TrackIsoFilter', '', 'HLT'), TriggerPath=cms.string('HLT_Ele27_WP80_v13'), stage2CaloLayer2EGammaSource=cms.InputTag("caloStage2Digis", "EGamma"), histFolder=cms.string('L1T/L1TEGamma'), electronEfficiencyThresholds=cms.vdouble(electronEfficiencyThresholds), electronEfficiencyBins=cms.vdouble(electronEfficiencyBins), photonEfficiencyThresholds=cms.vdouble(photonEfficiencyThresholds), photonEfficiencyBins=cms.vdouble(photonEfficiencyBins), ) l1tEGammaOfflineDQMEmu = l1tEGammaOfflineDQM.clone( stage2CaloLayer2EGammaSource=cms.InputTag("simCaloStage2Digis"), histFolder=cms.string('L1TEMU/L1TEGamma'), )
37.208333
75
0.783875
9d092f6e945eea14883d51652329fcd4951dee46
18,548
py
Python
ion_networks/numba_functions.py
swillems/ion_networks
5304a92248ec007ac2253f246a3d44bdb58ae110
[ "MIT" ]
2
2020-10-28T16:11:56.000Z
2020-12-03T13:19:18.000Z
ion_networks/numba_functions.py
swillems/ion_networks
5304a92248ec007ac2253f246a3d44bdb58ae110
[ "MIT" ]
null
null
null
ion_networks/numba_functions.py
swillems/ion_networks
5304a92248ec007ac2253f246a3d44bdb58ae110
[ "MIT" ]
null
null
null
#!python # external import numpy as np import numba
34.864662
82
0.630418
9d099c325b8e8eb13555bc61afea2a208b9050c9
241
py
Python
Programming Fundamentals/Dictionaries/bakery.py
antonarnaudov/SoftUniProjects
01cbdce2b350b57240045d1bc3e21d34f9d0351d
[ "MIT" ]
null
null
null
Programming Fundamentals/Dictionaries/bakery.py
antonarnaudov/SoftUniProjects
01cbdce2b350b57240045d1bc3e21d34f9d0351d
[ "MIT" ]
null
null
null
Programming Fundamentals/Dictionaries/bakery.py
antonarnaudov/SoftUniProjects
01cbdce2b350b57240045d1bc3e21d34f9d0351d
[ "MIT" ]
null
null
null
tokens = input().split(' ') print(result(tokens))
18.538462
40
0.564315
9d0ab807d87d356a4a4fb529654e22486400f676
1,525
py
Python
vtrace/const.py
rnui2k/vivisect
b7b00f2d03defef28b4b8c912e3a8016e956c5f7
[ "ECL-2.0", "Apache-2.0" ]
716
2015-01-01T14:41:11.000Z
2022-03-28T06:51:50.000Z
vtrace/const.py
rnui2k/vivisect
b7b00f2d03defef28b4b8c912e3a8016e956c5f7
[ "ECL-2.0", "Apache-2.0" ]
266
2015-01-01T15:07:27.000Z
2022-03-30T15:19:26.000Z
vtrace/const.py
rnui2k/vivisect
b7b00f2d03defef28b4b8c912e3a8016e956c5f7
[ "ECL-2.0", "Apache-2.0" ]
159
2015-01-01T16:19:44.000Z
2022-03-21T21:55:34.000Z
# Order must match format junk # NOTIFY_ALL is kinda special, if you registerNotifier # with it, you get ALL notifications. NOTIFY_ALL = 0 # Get all notifications NOTIFY_SIGNAL = 1 # Callback on signal/exception NOTIFY_BREAK = 2 # Callback on breakpoint / sigtrap NOTIFY_STEP = 3 # Callback on singlestep complete NOTIFY_SYSCALL = 4 # Callback on syscall (linux only for now) NOTIFY_CONTINUE = 5 # Callback on continue (not done for step) NOTIFY_EXIT = 6 # Callback on process exit NOTIFY_ATTACH = 7 # Callback on successful attach NOTIFY_DETACH = 8 # Callback on impending process detach # The following notifiers are *only* available on some platforms # (and may be kinda faked out ala library load events on posix) NOTIFY_LOAD_LIBRARY = 9 NOTIFY_UNLOAD_LIBRARY = 10 NOTIFY_CREATE_THREAD = 11 NOTIFY_EXIT_THREAD = 12 NOTIFY_DEBUG_PRINT = 13 # Some platforms support this (win32). NOTIFY_MAX = 20 # File Descriptor / Handle Types FD_UNKNOWN = 0 # Unknown or we don't have a type for it FD_FILE = 1 FD_SOCKET = 2 FD_PIPE = 3 FD_LOCK = 4 # Win32 Mutant/Lock/Semaphore FD_EVENT = 5 # Win32 Event/KeyedEvent FD_THREAD = 6 # Win32 Thread FD_REGKEY = 7 # Win32 Registry Key # Vtrace Symbol Types SYM_MISC = -1 SYM_GLOBAL = 0 # Global (mostly vars) SYM_LOCAL = 1 # Locals SYM_FUNCTION = 2 # Functions SYM_SECTION = 3 # Binary section SYM_META = 4 # Info that we enumerate # Vtrace Symbol Offsets VSYM_NAME = 0 VSYM_ADDR = 1 VSYM_SIZE = 2 VSYM_TYPE = 3 VSYM_FILE = 4
33.152174
66
0.733115
9d0d12599f8d63386d38681b6e12a10636886357
3,248
py
Python
src/ezdxf/groupby.py
jkjt/ezdxf
2acc5611b81476ea16b98063b9f55446a9182b81
[ "MIT" ]
515
2017-01-25T05:46:52.000Z
2022-03-29T09:52:27.000Z
src/ezdxf/groupby.py
jkjt/ezdxf
2acc5611b81476ea16b98063b9f55446a9182b81
[ "MIT" ]
417
2017-01-25T10:01:17.000Z
2022-03-29T09:22:04.000Z
src/ezdxf/groupby.py
jkjt/ezdxf
2acc5611b81476ea16b98063b9f55446a9182b81
[ "MIT" ]
149
2017-02-01T15:52:02.000Z
2022-03-17T10:33:38.000Z
# Purpose: Grouping entities by DXF attributes or a key function. # Copyright (c) 2017-2021, Manfred Moitzi # License: MIT License from typing import Iterable, Hashable, Dict, List, TYPE_CHECKING from ezdxf.lldxf.const import DXFValueError, DXFAttributeError if TYPE_CHECKING: from ezdxf.eztypes import DXFEntity, KeyFunc def groupby( entities: Iterable["DXFEntity"], dxfattrib: str = "", key: "KeyFunc" = None ) -> Dict[Hashable, List["DXFEntity"]]: """ Groups a sequence of DXF entities by a DXF attribute like ``'layer'``, returns a dict with `dxfattrib` values as key and a list of entities matching this `dxfattrib`. A `key` function can be used to combine some DXF attributes (e.g. layer and color) and should return a hashable data type like a tuple of strings, integers or floats, `key` function example:: def group_key(entity: DXFEntity): return entity.dxf.layer, entity.dxf.color For not suitable DXF entities return ``None`` to exclude this entity, in this case it's not required, because :func:`groupby` catches :class:`DXFAttributeError` exceptions to exclude entities, which do not provide layer and/or color attributes, automatically. Result dict for `dxfattrib` = ``'layer'`` may look like this:: { '0': [ ... list of entities ], 'ExampleLayer1': [ ... ], 'ExampleLayer2': [ ... ], ... } Result dict for `key` = `group_key`, which returns a ``(layer, color)`` tuple, may look like this:: { ('0', 1): [ ... list of entities ], ('0', 3): [ ... ], ('0', 7): [ ... ], ('ExampleLayer1', 1): [ ... ], ('ExampleLayer1', 2): [ ... ], ('ExampleLayer1', 5): [ ... ], ('ExampleLayer2', 7): [ ... ], ... } All entity containers (modelspace, paperspace layouts and blocks) and the :class:`~ezdxf.query.EntityQuery` object have a dedicated :meth:`groupby` method. Args: entities: sequence of DXF entities to group by a DXF attribute or a `key` function dxfattrib: grouping DXF attribute like ``'layer'`` key: key function, which accepts a :class:`DXFEntity` as argument and returns a hashable grouping key or ``None`` to ignore this entity """ if all((dxfattrib, key)): raise DXFValueError( "Specify a dxfattrib or a key function, but not both." ) if dxfattrib != "": key = lambda entity: entity.dxf.get_default(dxfattrib) if key is None: raise DXFValueError( "no valid argument found, specify a dxfattrib or a key function, " "but not both." ) result: Dict[Hashable, List["DXFEntity"]] = dict() for dxf_entity in entities: if not dxf_entity.is_alive: continue try: group_key = key(dxf_entity) except DXFAttributeError: # ignore DXF entities, which do not support all query attributes continue if group_key is not None: group = result.setdefault(group_key, []) group.append(dxf_entity) return result
35.692308
79
0.601293
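The groupby.py record above documents the function signature and the key-function contract in full, so a short usage sketch follows; the DXF file name is a made-up example, everything else mirrors the docstring.

# Usage sketch for ezdxf.groupby.groupby ("sample.dxf" is hypothetical).
import ezdxf
from ezdxf.groupby import groupby

doc = ezdxf.readfile("sample.dxf")
msp = doc.modelspace()

# Group modelspace entities by a single DXF attribute.
by_layer = groupby(msp, dxfattrib="layer")

# Group by a combined key; entities lacking layer/color are skipped
# automatically because groupby() catches DXFAttributeError.
def layer_and_color(entity):
    return entity.dxf.layer, entity.dxf.color

for (layer, color), entities in groupby(msp, key=layer_and_color).items():
    print(layer, color, len(entities))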
9d0e38af685d991cde1a6a41f4c243ad673af7b8
1,839
py
Python
tests/test_basic.py
nk412/companycase
5b93478a79293a4bc93112b805eff56c44756f18
[ "MIT" ]
7
2016-09-08T15:25:33.000Z
2022-02-01T13:21:40.000Z
tests/test_basic.py
nk412/companycase
5b93478a79293a4bc93112b805eff56c44756f18
[ "MIT" ]
1
2016-07-12T10:36:02.000Z
2016-07-12T10:36:02.000Z
tests/test_basic.py
nk412/companycase
5b93478a79293a4bc93112b805eff56c44756f18
[ "MIT" ]
2
2016-09-17T17:41:28.000Z
2020-02-29T22:58:09.000Z
# coding=utf-8 import unittest from companycase import CompanyCase if __name__ == '__main__': unittest.main()
39.12766
113
0.659598
9d0eed15b3c0630d157c26b0aac4e458a282e19f
8,527
py
Python
main_single.py
wang-chen/AirLoop
12fb442c911002427a51f00d43f747ef593bd186
[ "BSD-3-Clause" ]
39
2021-09-28T19:48:13.000Z
2022-03-17T06:44:19.000Z
main_single.py
wang-chen/AirLoop
12fb442c911002427a51f00d43f747ef593bd186
[ "BSD-3-Clause" ]
null
null
null
main_single.py
wang-chen/AirLoop
12fb442c911002427a51f00d43f747ef593bd186
[ "BSD-3-Clause" ]
3
2021-10-04T01:26:17.000Z
2022-02-12T04:48:50.000Z
#!/usr/bin/env python3 import os import tqdm import torch import random import numpy as np import torch.nn as nn import configargparse import torch.optim as optim from tensorboard import program from torch.utils.tensorboard import SummaryWriter import yaml from models import FeatureNet from datasets import get_dataset from losses import MemReplayLoss from utils.evaluation import RecognitionEvaluator from utils.misc import save_model, load_model, GlobalStepCounter, ProgressBarDescription if __name__ == "__main__": run()
45.844086
137
0.673273
9d0fc4d37e8008ce4ffedc8ff1748729bd11a8f1
271
py
Python
skilletlib/skillet/__init__.py
annabarone/skilletlib
d1298218a1a0be35eb9fac2ae79323df600d8900
[ "Apache-2.0" ]
6
2020-04-27T18:08:02.000Z
2022-01-14T13:27:19.000Z
skilletlib/skillet/__init__.py
annabarone/skilletlib
d1298218a1a0be35eb9fac2ae79323df600d8900
[ "Apache-2.0" ]
85
2019-10-28T19:13:55.000Z
2021-07-14T13:00:28.000Z
skilletlib/skillet/__init__.py
annabarone/skilletlib
d1298218a1a0be35eb9fac2ae79323df600d8900
[ "Apache-2.0" ]
7
2019-12-05T20:17:16.000Z
2021-12-09T01:16:58.000Z
# from .panos import PanosSkillet # from .docker import DockerSkillet # from .pan_validation import PanValidationSkillet # from .python3 import Python3Skillet # from .rest import RestSkillet # from .template import TemplateSkillet # from .workflow import WorkflowSkillet
33.875
50
0.819188
9d10f233df729f37438c93bc6d49f9504b03d459
1,192
py
Python
Part-03-Understanding-Software-Crafting-Your-Own-Tools/models/edx-platform/lms/djangoapps/rss_proxy/views.py
osoco/better-ways-of-thinking-about-software
83e70d23c873509e22362a09a10d3510e10f6992
[ "MIT" ]
3
2021-12-15T04:58:18.000Z
2022-02-06T12:15:37.000Z
Part-03-Understanding-Software-Crafting-Your-Own-Tools/models/edx-platform/lms/djangoapps/rss_proxy/views.py
osoco/better-ways-of-thinking-about-software
83e70d23c873509e22362a09a10d3510e10f6992
[ "MIT" ]
null
null
null
Part-03-Understanding-Software-Crafting-Your-Own-Tools/models/edx-platform/lms/djangoapps/rss_proxy/views.py
osoco/better-ways-of-thinking-about-software
83e70d23c873509e22362a09a10d3510e10f6992
[ "MIT" ]
1
2019-01-02T14:38:50.000Z
2019-01-02T14:38:50.000Z
""" Views for the rss_proxy djangoapp. """ import requests from django.conf import settings from django.core.cache import cache from django.http import HttpResponse, HttpResponseNotFound from lms.djangoapps.rss_proxy.models import WhitelistedRssUrl CACHE_KEY_RSS = "rss_proxy.{url}" def proxy(request): """ Proxy requests for the given RSS url if it has been whitelisted. """ url = request.GET.get('url') if url and WhitelistedRssUrl.objects.filter(url=url).exists(): # Check cache for RSS if the given url is whitelisted cache_key = CACHE_KEY_RSS.format(url=url) status_code = 200 rss = cache.get(cache_key, '') print(cache_key) print('Cached rss: %s' % rss) if not rss: # Go get the RSS from the URL if it was not cached resp = requests.get(url) status_code = resp.status_code if status_code == 200: # Cache RSS rss = resp.content cache.set(cache_key, rss, settings.RSS_PROXY_CACHE_TIMEOUT) return HttpResponse(rss, status=status_code, content_type='application/xml') return HttpResponseNotFound()
29.8
84
0.653523
9d1115c99ef6af6ee80e12df2bf5eac7ff811ea7
149
py
Python
CorePythonProg/ch02/0206.py
mallius/CppPrimer
0285fabe5934492dfed0a9cf67ba5650982a5f76
[ "MIT" ]
null
null
null
CorePythonProg/ch02/0206.py
mallius/CppPrimer
0285fabe5934492dfed0a9cf67ba5650982a5f76
[ "MIT" ]
null
null
null
CorePythonProg/ch02/0206.py
mallius/CppPrimer
0285fabe5934492dfed0a9cf67ba5650982a5f76
[ "MIT" ]
1
2022-01-25T15:51:34.000Z
2022-01-25T15:51:34.000Z
#!/usr/bin/env python

numTemp = raw_input('Enter a number: ')
num = int(numTemp)
if num > 0:
    print '>0'
elif num == 0:
    print '0'
else:
    print '<0'
13.545455
39
0.61745
9d123f052b89aece17eb457b8ad9cafa6d71e501
314
py
Python
bootcamp/accounts/urls.py
elbakouchi/bootcamp
2c7a0cd2ddb7632acb3009f94d728792ddf9644f
[ "MIT" ]
null
null
null
bootcamp/accounts/urls.py
elbakouchi/bootcamp
2c7a0cd2ddb7632acb3009f94d728792ddf9644f
[ "MIT" ]
null
null
null
bootcamp/accounts/urls.py
elbakouchi/bootcamp
2c7a0cd2ddb7632acb3009f94d728792ddf9644f
[ "MIT" ]
null
null
null
from django.conf.urls import url from .views import * app_name = "accounts" urlpatterns = [ url(r"^signup/$", CustomSignupView.as_view(), name="custom_signup"), url(r"^destroy/$", AjaxLogoutView.as_view(), name="destroy"), url(r"^(?P<username>[\w.@+-]+)/$", ProfileView.as_view(), name="profile"), ]
28.545455
78
0.652866
9d1277aded11ab70c99a610d14fb0758ed951638
8,195
py
Python
utils/mininet/mininet_builder.py
jstavr/SDN_Project
9fe5a65f46eadf15e1da43d9f8125b8c15161bbd
[ "Apache-2.0" ]
null
null
null
utils/mininet/mininet_builder.py
jstavr/SDN_Project
9fe5a65f46eadf15e1da43d9f8125b8c15161bbd
[ "Apache-2.0" ]
null
null
null
utils/mininet/mininet_builder.py
jstavr/SDN_Project
9fe5a65f46eadf15e1da43d9f8125b8c15161bbd
[ "Apache-2.0" ]
null
null
null
#!/usr/bin/env python
'''
Description: Load topology in Mininet
Author: James Hongyi Zeng (hyzeng_at_stanford.edu)
'''
from argparse import ArgumentParser
from socket import gethostbyname
from os import getuid

from mininet.log import lg, info
from mininet.cli import CLI
from mininet.net import Mininet
from mininet.topo import Topo
from mininet.link import Link, Intf
from mininet.node import Host, OVSKernelSwitch, Controller, RemoteController

class StanfordTopo( Topo ):
    "Topology for Stanford backbone"

    PORT_ID_MULTIPLIER = 1
    INTERMEDIATE_PORT_TYPE_CONST = 1
    OUTPUT_PORT_TYPE_CONST = 2
    PORT_TYPE_MULTIPLIER = 10000
    SWITCH_ID_MULTIPLIER = 100000

    DUMMY_SWITCH_BASE = 1000

    PORT_MAP_FILENAME = "data/port_map.txt"
    TOPO_FILENAME = "data/backbone_topology.tf"

    dummy_switches = set()

    def __init__( self ):
        # Read topology info
        ports = self.load_ports(self.PORT_MAP_FILENAME)
        links = self.load_topology(self.TOPO_FILENAME)
        switches = ports.keys()

        # Add default members to class.
        super( StanfordTopo, self ).__init__()

        # Create switch nodes
        for s in switches:
            self.add_switch( "s%s" % s )

        # Wire up switches
        self.create_links(links, ports)

        # Wire up hosts
        host_id = len(switches) + 1
        for s in switches:
            # Edge ports
            for port in ports[s]:
                self.add_host( "h%s" % host_id )
                self.add_link( "h%s" % host_id, "s%s" % s, 0, port )
                host_id += 1

        # Consider all switches and hosts 'on'
        # self.enable_all()

    def load_ports(self, filename):
        ports = {}
        f = open(filename, 'r')
        for line in f:
            if not line.startswith("$") and line != "":
                tokens = line.strip().split(":")
                port_flat = int(tokens[1])
                dpid = port_flat / self.SWITCH_ID_MULTIPLIER
                port = port_flat % self.PORT_TYPE_MULTIPLIER
                if dpid not in ports.keys():
                    ports[dpid] = set()
                if port not in ports[dpid]:
                    ports[dpid].add(port)
        f.close()
        return ports

    def load_topology(self, filename):
        links = set()
        f = open(filename, 'r')
        for line in f:
            if line.startswith("link"):
                tokens = line.split('$')
                src_port_flat = int(tokens[1].strip('[]').split(', ')[0])
                dst_port_flat = int(tokens[7].strip('[]').split(', ')[0])
                links.add((src_port_flat, dst_port_flat))
        f.close()
        return links

    def create_links(self, links, ports):
        '''Generate dummy switches
        For example, interface A1 connects to B1 and C1 at the same time. Since
        Mininet uses veth, which supports point to point communication only,
        we need to manually create dummy switches

        @param links link info from the file
        @param ports port info from the file
        '''
        # First pass, find special ports with more than 1 peer port
        first_pass = {}
        for (src_port_flat, dst_port_flat) in links:
            src_dpid = src_port_flat / self.SWITCH_ID_MULTIPLIER
            dst_dpid = dst_port_flat / self.SWITCH_ID_MULTIPLIER
            src_port = src_port_flat % self.PORT_TYPE_MULTIPLIER
            dst_port = dst_port_flat % self.PORT_TYPE_MULTIPLIER

            if (src_dpid, src_port) not in first_pass.keys():
                first_pass[(src_dpid, src_port)] = set()
            first_pass[(src_dpid, src_port)].add((dst_dpid, dst_port))
            if (dst_dpid, dst_port) not in first_pass.keys():
                first_pass[(dst_dpid, dst_port)] = set()
            first_pass[(dst_dpid, dst_port)].add((src_dpid, src_port))

        # Second pass, create new links for those special ports
        dummy_switch_id = self.DUMMY_SWITCH_BASE
        for (dpid, port) in first_pass.keys():
            # Special ports!
            if(len(first_pass[(dpid,port)])>1):
                self.add_switch( "s%s" % dummy_switch_id )
                self.dummy_switches.add(dummy_switch_id)
                self.add_link( node1="s%s" % dpid, node2="s%s" % dummy_switch_id, port1=port, port2=1 )
                dummy_switch_port = 2
                for (dst_dpid, dst_port) in first_pass[(dpid,port)]:
                    first_pass[(dst_dpid, dst_port)].discard((dpid,port))
                    self.add_link( node1="s%s" % dummy_switch_id, node2="s%s" % dst_dpid, port1=dummy_switch_port, port2=dst_port)
                    ports[dst_dpid].discard(dst_port)
                    dummy_switch_port += 1
                dummy_switch_id += 1
                first_pass[(dpid,port)] = set()
            ports[dpid].discard(port)

        # Third pass, create the remaining links
        for (dpid, port) in first_pass.keys():
            for (dst_dpid, dst_port) in first_pass[(dpid,port)]:
                self.add_link( node1="s%s" % dpid, node2="s%s" % dst_dpid, port1=port, port2=dst_port )
                ports[dst_dpid].discard(dst_port)
            ports[dpid].discard(port)

class StanfordMininet ( Mininet ):

    def build( self ):
        super( StanfordMininet, self ).build()

        # FIXME: One exception... Dual links between yoza and yozb
        # Need _manual_ modification for different topology files!!!
        self.topo.add_link( node1="s%s" % 15, node2="s%s" % 16, port1=7, port2=4 )

def StanfordTopoTest( controller_ip, controller_port, dummy_controller_ip, dummy_controller_port ):
    topo = StanfordTopo()

    main_controller = lambda a: RemoteController( a, ip=controller_ip, port=controller_port)
    net = StanfordMininet( topo=topo, switch=OVSKernelSwitch, controller=main_controller)

    net.start()

    # These switches should be set to a local controller..
    dummy_switches = topo.dummy_switches
    dummyClass = lambda a: RemoteController( a, ip=dummy_controller_ip, port=dummy_controller_port)
    dummy_controller = net.addController( name='dummy_controller', controller=dummyClass)
    dummy_controller.start()

    for dpid in dummy_switches:
        switch = net.nameToNode["s%s" % dpid]
        switch.pause()
        switch.start( [dummy_controller] )

    # Turn on STP
    for switchName in topo.switches():
        switch = net.nameToNode[switchName]
        cmd = "ovs-vsctl set Bridge %s stp_enable=true" % switch.name
        switch.cmd(cmd)

    switch.cmd('ovs-vsctl set Bridge s1 other_config:stp-priority=0x10')

    CLI( net )

    net.stop()

if __name__ == '__main__':
    if getuid()!=0:
        print "Please run this script as root / use sudo."
        exit(-1)

    lg.setLogLevel( 'info')

    description = "Put Stanford backbone in Mininet"
    parser = ArgumentParser(description=description)
    parser.add_argument("-c", dest="controller_name", default="localhost", help="Controller's hostname or IP")
    parser.add_argument("-p", dest="controller_port",type=int, default=6633, help="Controller's port")
    parser.add_argument("-c2", dest="dummy_controller_name", default="localhost", help="Dummy controller's hostname or IP")
    parser.add_argument("-p2", dest="dummy_controller_port",type=int, default=6633, help="Dummy ontroller's port")
    args = parser.parse_args()

    print description
    print "Starting with primary controller %s:%d" % (args.controller_name, args.controller_port)
    print "Starting with dummy controller %s:%d" % (args.dummy_controller_name, args.dummy_controller_port)

    Mininet.init()
    StanfordTopoTest(gethostbyname(args.controller_name), args.controller_port, gethostbyname(args.dummy_controller_name), args.dummy_controller_port)
39.210526
150
0.598292
9d1338f96592532b4f49b0f4d8c0180dee99ffe0
1,833
py
Python
tests/integration/test_translated_content.py
asmeurer/nikola
ea1c651bfed0fd6337f1d22cf8dd99899722912c
[ "MIT" ]
1,901
2015-01-02T02:49:51.000Z
2022-03-30T23:31:35.000Z
tests/integration/test_translated_content.py
asmeurer/nikola
ea1c651bfed0fd6337f1d22cf8dd99899722912c
[ "MIT" ]
1,755
2015-01-01T08:17:16.000Z
2022-03-24T18:02:22.000Z
tests/integration/test_translated_content.py
asmeurer/nikola
ea1c651bfed0fd6337f1d22cf8dd99899722912c
[ "MIT" ]
421
2015-01-02T18:06:37.000Z
2022-03-28T23:18:54.000Z
""" Test a site with translated content. Do not test titles as we remove the translation. """ import io import os import shutil import lxml.html import pytest import nikola.plugins.command.init from nikola import __main__ from .helper import cd from .test_empty_build import ( # NOQA test_archive_exists, test_avoid_double_slash_in_rss, test_check_files, test_check_links, test_index_in_sitemap, ) def test_translated_titles(build, output_dir, other_locale): """Check that translated title is picked up.""" normal_file = os.path.join(output_dir, "pages", "1", "index.html") translated_file = os.path.join(output_dir, other_locale, "pages", "1", "index.html") # Files should be created assert os.path.isfile(normal_file) assert os.path.isfile(translated_file) # And now let's check the titles with io.open(normal_file, "r", encoding="utf8") as inf: doc = lxml.html.parse(inf) assert doc.find("//title").text == "Foo | Demo Site" with io.open(translated_file, "r", encoding="utf8") as inf: doc = lxml.html.parse(inf) assert doc.find("//title").text == "Bar | Demo Site"
29.095238
88
0.681942
9d13de1d5fcb7bb17eb81bbe83f7d14929b0ec78
8,826
py
Python
src/train.py
weiyi1991/UA_Concurrent
11238c778c60095abf326800d6e6a13a643bf071
[ "MIT" ]
null
null
null
src/train.py
weiyi1991/UA_Concurrent
11238c778c60095abf326800d6e6a13a643bf071
[ "MIT" ]
1
2020-09-02T12:24:59.000Z
2020-09-02T12:24:59.000Z
src/train.py
weiyi1991/UA_Concurrent
11238c778c60095abf326800d6e6a13a643bf071
[ "MIT" ]
null
null
null
import argparse
import os

import torch
import torch.nn.functional as F
from model_ST import *
import data
import numpy as np
import matplotlib.pyplot as plt
from torch.utils.data import Dataset, DataLoader
import sys
from predict import evaluate_MA
from tensorboardX import SummaryWriter

# print model parameter

# Training settings
parser = argparse.ArgumentParser(description='Relation network for concurrent activity detection')
parser.add_argument('--BATCH_SIZE', type=int, default=256, help='Training batch size. Default=256')
parser.add_argument('--save_every', type=int, default=5, help='Save model every save_every epochs. Defualt=5')
parser.add_argument('--EPOCH', type=int, default=500, help='Number of epochs to train. Default=600')
parser.add_argument('--LR', type=float, default=0.001, help='Learning Rate. Default=0.001')
parser.add_argument('--TRAIN', action='store_true', default=True, help='Train or test? ')
parser.add_argument('--DEBUG', action='store_true', default=False, help='Debug mode (load less data)? Defualt=False')
parser.add_argument('--clip_grad', type=float, default=5.0, help='Gradient clipping parameter. Default=5,0')
parser.add_argument('--dataPath', type=str, default='/home/yi/PycharmProjects/relation_network/data/UCLA/new273', help='path to the data folder')
parser.add_argument('--checkpoint', type=str, help='Checkpoint folder name under ./model/')
parser.add_argument('--verbose', type=int, default=1, help='Print verbose information? Default=True')
# model parameters
parser.add_argument('--n_input', type=int, default=37, help='Input feature vector size. Default=37')
parser.add_argument('--n_hidden', type=int, default=128, help='Hidden units for LSTM baseline. Default=128')
parser.add_argument('--n_layers', type=int, default=2, help='LSTM layer number. Default=2')
parser.add_argument('--n_class', type=int, default=12, help='Class label number. Default=12')
parser.add_argument('--use_lstm', action='store_true', default=True, help='Use LSTM for relation network classifier. Default=True')
parser.add_argument('--df', type=int, default=64, help='Relation feature dimension. Default=64')
parser.add_argument('--dk', type=int, default=8, help='Key feature dim. Default=8')
parser.add_argument('--nr', type=int, default=4, help='Multihead number. Default=4')

opt = parser.parse_args()

checkpoint_dir = './model/{}/'.format(opt.checkpoint)
if not os.path.exists(checkpoint_dir):
    os.makedirs(checkpoint_dir)
orig_stdout = sys.stdout
f = open(checkpoint_dir + '/parameter.txt', 'w')
sys.stdout = f
print(opt)
f.close()
sys.stdout = orig_stdout

# data preparation
train_dataset = data.ConActDataset(opt.dataPath)
test_dataset = data.ConActDataset(opt.dataPath, train=not opt.TRAIN)

writer = SummaryWriter()

# only take few sequences for debuging
debug_seq = 3

if opt.DEBUG:
    train_data = []
    for i in range(debug_seq):
        input, labels = train_dataset[i]
        train_data.append((input, labels))
        print("%s loaded." % train_dataset.seq_list[i])
else:
    print('Loading training data ----------------------')
    train_data = []
    train_labels = []
    for i, (input, labels) in enumerate(train_dataset):
        train_data.append((input, labels))
        train_labels.append(labels)
        print("%s loaded." % train_dataset.seq_list[i])

print('Loading testing data ----------------------')
test_data = []
for i, (input, labels) in enumerate(test_dataset):
    test_data.append((input, labels))
    print("%s loaded." % test_dataset.seq_list[i])

# for model_lstm
if opt.use_lstm:
    rnn = RNN(opt.n_input, opt.n_hidden, opt.n_layers, opt.n_class, opt.BATCH_SIZE, opt.df, opt.dk, opt.nr).cuda()  # use lstm as classifier
else:
    rnn = RNN(opt.n_input, opt.n_hidden, opt.n_layers, opt.n_class, opt.use_lstm).cuda()  # use fc as classifier
print(rnn.state_dict().keys())

optimizer = torch.optim.Adam(rnn.parameters(), lr=opt.LR)
scheduler = torch.optim.lr_scheduler.ReduceLROnPlateau(optimizer, 'min', factor=0.5)  # set up scheduler

# Keep track of losses for plotting
best_loss = 10000
all_losses = []
current_loss = 3
FAA = []  # false area ration on test set
INTAP = []  # overall interval AP on test set
save_epoch = []  # list to save the model saving epoch

# train model
total_step = len(train_data)
for epoch in range(opt.EPOCH):
    all_losses.append(current_loss)
    current_loss = 0
    for i, (input, labels) in enumerate(train_data):
        optimizer.zero_grad()
        feats = torch.from_numpy(input).float()
        nframes, _ = input.shape
        feats = feats.reshape(-1, nframes, 273).cuda()
        #feats = feats.reshape(-1, nframes, opt.n_input*6).cuda()

        # change label 0 to -1
        labels[labels<1]=-1
        labels = torch.from_numpy(labels)
        labels = labels.float().cuda()

        # Forward pass
        outputs = rnn(feats)
        outputs = torch.squeeze(outputs)
        loss = F.mse_loss(outputs, labels)

        # print model parameter if loss is NaN
        if opt.verbose > 0:
            if torch.isnan(loss):
                print_model(rnn)
                print('Epoch {}, step {}'.format(epoch+1, i+1))
                raw_input("Press Enter to continue ...")

        # Backward and optimize
        loss.backward()
        # This line is used to prevent the vanishing / exploding gradient problem
        torch.nn.utils.clip_grad_norm_(rnn.parameters(), opt.clip_grad)
        optimizer.step()

        print('Epoch [{}/{}], Step [{}/{}], Loss: {:.4f}'
              .format(epoch + 1, opt.EPOCH, i + 1, total_step, loss.item()))
        current_loss = current_loss + loss.item()

    writer.add_scalar('loss/loss', current_loss, epoch)
    scheduler.step(current_loss)  # update lr if needed

    # save model parameters and loss figure
    if ((epoch+1) % opt.save_every) == 0:
        # compute false area on test set
        if not opt.DEBUG:
            false_area, overall_IAPlist = evaluate_MA(rnn, test_data)
            FAA.append(torch.sum(false_area).item())
            INTAP.append(overall_IAPlist[-2])  # get the interval AP at threshold 0.8
            save_epoch.append(epoch+1)

            if FAA[-1] == min(FAA):
                # if has the minimum test error, save model
                checkpoint_dir = './model/{}/'.format(opt.checkpoint)
                if not os.path.exists(checkpoint_dir):
                    os.makedirs(checkpoint_dir)
                if epoch > 100:
                    model_str = checkpoint_dir + 'net-best.pth'
                    torch.save(rnn, model_str)

        checkpoint_dir = './model/{}/'.format(opt.checkpoint)
        if not os.path.exists(checkpoint_dir):
            os.makedirs(checkpoint_dir)
            if opt.verbose == 2:
                print('Making dir: {}'.format(checkpoint_dir))
        model_str = checkpoint_dir + 'net-{}'.format(str(epoch+1))
        if opt.verbose > 0:
            print('Model saved to: {}.pth'.format(model_str))
        if epoch >= 100:
            torch.save(rnn, model_str+'.pth')

        # save interval AP
        np.savetxt(model_str + 'AP.csv', np.asarray(overall_IAPlist), fmt='%0.5f')
        # save miss detection
        np.savetxt(model_str + 'MD.txt', np.asarray(FAA), fmt='%0.5f')

        # draw miss detection v.s. epoch figure
        fig, ax1 = plt.subplots()
        color = 'tab:red'
        ax1.plot(range(epoch+1), all_losses, color=color)
        ax1.set_xlabel('Epochs')
        ax1.set_ylabel('Loss', color=color)
        ax2 = ax1.twinx()
        color = 'tab:blue'
        ax2.set_ylabel('Miss detection area ratio', color=color)
        ax2.plot(save_epoch, FAA, 'bd')
        fig.savefig(model_str+'.png')
        plt.close()

        # draw intervalAP v.s. epoch figure
        fig1, ax3 = plt.subplots()
        color = 'tab:red'
        ax3.plot(range(epoch+1), all_losses, color=color)
        ax3.set_xlabel('Epochs')
        ax3.set_ylabel('Loss', color=color)
        ax4 = ax3.twinx()
        color = 'tab:blue'
        ax4.set_ylabel('Overall interval AP', color=color)
        ax4.plot(save_epoch, INTAP, 'bd')
        fig1.savefig(model_str+'_AP.png')
        plt.close()

# print the loss on training set and evaluation metrics on test set to file
orig_stdout = sys.stdout
f = open(checkpoint_dir + '/loss.txt', 'w')
sys.stdout = f
print('Loss over epochs:')
print(all_losses)
if not opt.DEBUG:
    print('Miss detection area ratio:')
    print(FAA)
f.close()
sys.stdout = orig_stdout
41.051163
140
0.643327
9d192ebb1226024bcb7fe7faa5cd19ef549419f8
130
py
Python
illud/exceptions/quit_exception.py
AustinScola/illud
a6aca1de38bbe9d5a795aaa084bcbd6731767d18
[ "MIT" ]
1
2020-12-05T00:59:15.000Z
2020-12-05T00:59:15.000Z
illud/exceptions/quit_exception.py
AustinScola/illud
a6aca1de38bbe9d5a795aaa084bcbd6731767d18
[ "MIT" ]
112
2021-01-15T21:42:27.000Z
2021-04-17T19:11:21.000Z
illud/exceptions/quit_exception.py
AustinScola/illud
a6aca1de38bbe9d5a795aaa084bcbd6731767d18
[ "MIT" ]
null
null
null
"""Raised to quit.""" from illud.exception import IlludException
18.571429
42
0.723077
9d19f0ff06adc850dcf2436e1f6a4aeadf9e7144
1,130
py
Python
example/undistort_ir_images.py
greeknerd1/stereo-rectify
98a23c3ff96dd4344ecad13d4ff145060c8fb992
[ "MIT" ]
null
null
null
example/undistort_ir_images.py
greeknerd1/stereo-rectify
98a23c3ff96dd4344ecad13d4ff145060c8fb992
[ "MIT" ]
null
null
null
example/undistort_ir_images.py
greeknerd1/stereo-rectify
98a23c3ff96dd4344ecad13d4ff145060c8fb992
[ "MIT" ]
null
null
null
#!/usr/bin/env python

import cv2
import numpy as np
import os
import glob
import itertools
import json
from numpy.core.fromnumeric import argmax

#SECTION 1: UNDISTORT FISHEYE
#Read in OpenCV compatible instrinsics & distortion coeffs
COLOR_INTRINSIC = np.load('./savedCoeff/colorIntr.npy')
COLOR_DIST = np.load('./savedCoeff/colorDist.npy')
IR_INTRINSIC = np.load('./savedCoeff/irIntr.npy')
IR_DIST = np.load('./savedCoeff/irDist.npy')

print('Undistorting images-----------------')
imageDir = 'december_callibration_images'
ir_images = glob.glob('./' + imageDir + '/ir-*.png')
DIMS = (1024, 1024)
IDENTITY = np.eye(3)

for i in range(len(ir_images)):
    ir_img = cv2.imread(ir_images[i], cv2.IMREAD_UNCHANGED)
    new_K, roi = cv2.getOptimalNewCameraMatrix(IR_INTRINSIC, IR_DIST, DIMS, 1)
    map1, map2 = cv2.initUndistortRectifyMap(IR_INTRINSIC, IR_DIST, IDENTITY, new_K, DIMS, cv2.CV_32FC1)
    undistorted_ir_img = cv2.remap(ir_img, map1, map2, interpolation=cv2.INTER_LINEAR, borderMode=cv2.BORDER_CONSTANT)

    #save the undistorted image
    cv2.imwrite('./undistorted_december_ir_images/' + 'ir-' + str(i) + '.png', undistorted_ir_img)
36.451613
115
0.752212
9d1ab6609be43e89cc309b21cfc303cd71c0ffae
5,617
py
Python
tests/tensor/test_tensor_data.py
aspfohl/tinytorch
99ac1847b798f755d12876667ec7c3a6c7149857
[ "MIT" ]
null
null
null
tests/tensor/test_tensor_data.py
aspfohl/tinytorch
99ac1847b798f755d12876667ec7c3a6c7149857
[ "MIT" ]
null
null
null
tests/tensor/test_tensor_data.py
aspfohl/tinytorch
99ac1847b798f755d12876667ec7c3a6c7149857
[ "MIT" ]
null
null
null
import pytest
from hypothesis import given
from hypothesis.strategies import data
from numpy import array, array_equal

from tests.strategies import indices, tensor_data
from tinytorch.tensor.data import (
    IndexingError,
    TensorData,
    broadcast_index,
    shape_broadcast,
)

# Check basic properties of layout and strides.


def test_layout():
    "Test basis properties of layout and strides"
    data = [0] * 3 * 5
    tensor_data = TensorData(data, (3, 5), (5, 1))

    assert tensor_data.is_contiguous()
    assert tensor_data.shape == (3, 5)
    assert tensor_data.index((1, 0)) == 5
    assert tensor_data.index((1, 2)) == 7

    tensor_data = TensorData(data, (5, 3), (1, 5))
    assert tensor_data.shape == (5, 3)
    assert not tensor_data.is_contiguous()

    data = [0] * 4 * 2 * 2
    tensor_data = TensorData(data, (4, 2, 2))
    assert tensor_data.strides == (4, 2, 1)


# Check basic properties of broadcasting.


def test_broadcast_index_smaller():
    "Tests broadcast mapping between higher and lower dim tensors"
    out_index = array([0, 0])
    for big_index, expected_out_index in (
        ([0, 0, 0], [0, 0]),
        ([0, 0, 1], [0, 0]),
        ([0, 0, 2], [0, 0]),
        ([0, 1, 0], [1, 0]),
        ([0, 1, 1], [1, 0]),
        ([0, 1, 2], [1, 0]),
        ([1, 0, 0], [0, 0]),
        ([1, 0, 1], [0, 0]),
        ([1, 0, 2], [0, 0]),
        ([1, 1, 0], [1, 0]),
        ([1, 1, 1], [1, 0]),
        ([1, 1, 2], [1, 0]),
    ):
        print(big_index, expected_out_index)
        _broadcast_index(big_index=array(big_index))
        assert array_equal(out_index, expected_out_index)
27.534314
88
0.574862
9d1aff1bfb4da29713d9d7f9b89454bc608165f8
359
py
Python
terra_layer/apps.py
Terralego/terra-layer
6564a63d389503d3ae1f63ce46e674b228d6764b
[ "MIT" ]
1
2019-08-08T15:17:32.000Z
2019-08-08T15:17:32.000Z
terra_layer/apps.py
Terralego/terra-layer
6564a63d389503d3ae1f63ce46e674b228d6764b
[ "MIT" ]
65
2019-10-21T10:05:00.000Z
2022-03-08T14:08:27.000Z
terra_layer/apps.py
Terralego/terra-layer
6564a63d389503d3ae1f63ce46e674b228d6764b
[ "MIT" ]
null
null
null
from django.apps import AppConfig

from terra_accounts.permissions_mixins import PermissionRegistrationMixin
29.916667
73
0.740947
9d1d92e0aac0102261fb87134d9195f41601abbb
2,813
py
Python
aps/tokenizer/word.py
ishine/aps
c814dc5a8b0bff5efa7e1ecc23c6180e76b8e26c
[ "Apache-2.0" ]
117
2021-02-02T13:38:16.000Z
2022-03-16T05:40:25.000Z
aps/tokenizer/word.py
ishine/aps
c814dc5a8b0bff5efa7e1ecc23c6180e76b8e26c
[ "Apache-2.0" ]
3
2021-11-11T07:07:31.000Z
2021-11-20T15:25:42.000Z
aps/tokenizer/word.py
ishine/aps
c814dc5a8b0bff5efa7e1ecc23c6180e76b8e26c
[ "Apache-2.0" ]
19
2021-02-04T10:04:25.000Z
2022-02-16T05:24:44.000Z
#!/usr/bin/env python

# Copyright 2021 Jian Wu
# License: Apache 2.0 (http://www.apache.org/licenses/LICENSE-2.0)

from typing import List, Union

from aps.tokenizer.base import TokenizerAbc, ApsTokenizer
30.247312
77
0.539637
9d1d953211acad0e8c4ba6634015c410a59e3522
1,736
py
Python
tests/test_session.py
StenSipma/astrometry-client
11d5b0cd0ae41a18b5bbd7f5570af60dbfbd9cc6
[ "MIT" ]
1
2020-08-06T17:55:52.000Z
2020-08-06T17:55:52.000Z
tests/test_session.py
StenSipma/astrometry-client
11d5b0cd0ae41a18b5bbd7f5570af60dbfbd9cc6
[ "MIT" ]
1
2021-12-18T17:03:21.000Z
2021-12-19T12:33:16.000Z
tests/test_session.py
StenSipma/astrometry-client
11d5b0cd0ae41a18b5bbd7f5570af60dbfbd9cc6
[ "MIT" ]
null
null
null
import os
from unittest import mock

import pytest
import requests
from constants import VALID_KEY
from utils import FunctionCalledException, function_called_raiser

from astrometry_net_client import Session
from astrometry_net_client.exceptions import APIKeyError, LoginFailedException

some_key = "somekey"


# Start of tests
27.555556
78
0.75
9d1e173ec4f6da5495185d4e64e6ce6be159c672
2,184
py
Python
all_repos_depends/lang/python.py
mxr/all-repos-depends
dcf715dbfb7182899e2412dbfaaf1ef4cc50865c
[ "MIT" ]
11
2018-04-23T06:41:55.000Z
2022-01-27T13:37:59.000Z
all_repos_depends/lang/python.py
mxr/all-repos-depends
dcf715dbfb7182899e2412dbfaaf1ef4cc50865c
[ "MIT" ]
2
2018-04-23T06:03:18.000Z
2018-04-23T06:03:51.000Z
all_repos_depends/lang/python.py
mxr/all-repos-depends
dcf715dbfb7182899e2412dbfaaf1ef4cc50865c
[ "MIT" ]
2
2021-02-01T15:02:14.000Z
2021-09-25T15:49:44.000Z
import ast
import os.path
from typing import Iterable

from packaging.requirements import InvalidRequirement
from packaging.requirements import Requirement
from packaging.utils import canonicalize_name

from all_repos_depends.errors import DependsError
from all_repos_depends.types import Depends

NAME = 'python'
29.513514
79
0.617674
9d1fd039657947bcd1efbe3cb094639c4aa0c630
2,829
py
Python
mac/macos_app_audit.py
airdata/scripts
b24d62d70bbc70f02b3758ea14e47cc2b34646a9
[ "Apache-2.0" ]
null
null
null
mac/macos_app_audit.py
airdata/scripts
b24d62d70bbc70f02b3758ea14e47cc2b34646a9
[ "Apache-2.0" ]
null
null
null
mac/macos_app_audit.py
airdata/scripts
b24d62d70bbc70f02b3758ea14e47cc2b34646a9
[ "Apache-2.0" ]
null
null
null
from os import listdir
from os.path import isfile, join

default_applications = ['Utilities','App Store.app','Automator.app','Calculator.app','Calendar.app','Chess.app','Contacts.app','Dashboard.app','Dictionary.app','DVD Player.app','FaceTime.app','Font Book.app','iBooks.app','Image Capture.app','iTunes.app','Launchpad.app','Mail.app','Maps.app','Messages.app','Mission Control.app','Notes.app','Paste.app','Photo Booth.app','Photos.app','Preview.app','QuickTime Player.app','Reminders.app','Safari.app','Siri.app','Stickies.app','System Preferences.app','TextEdit.app','Time Machine.app','Utilities.app']

remaps = {
    "iTerm.app": "iTerm2", # brew cask install iterm2 gives iTerm.app
    "Alfred 3.app": "Alfred" # brew cask install alfred gives Alfred 3.app
}

mypath = "/Applications"
installed_applications = [f for f in listdir(mypath) if not isfile(join(mypath, f))]
cask_packages = Command('brew cask list').run().output.split()
mac_app_store_apps = Command('mas list').run().output.splitlines()

# collect applications that are not default ones.
user_applications = []
for x in installed_applications:
    #first remap the names
    if(x in remaps):
        name = remaps[x]
    else:
        name = x
    #then check if they are defaults
    if name not in default_applications:
        user_applications.append(name)

# determine which applications weren't installed via brew cask
unmanged_applications = []
for x in user_applications:
    strip_dotapp = x[:-4] if (".app" in x) else x
    trimmed = strip_dotapp.replace(" ", "-").lower()
    is_casked = trimmed in cask_packages
    is_mas = any(strip_dotapp in s for s in mac_app_store_apps)
    # print('{} -> {}: {}|{}'.format(x, trimmed, is_casked, is_mas))
    if(not is_casked and not is_mas):
        unmanged_applications.append(x)

# print("-------------------")
print("You have {} default applications.".format(len(default_applications)))
print("Tou have {} brew cask applications.".format(len(cask_packages)))
print("Tou have {} app store applications.".format(len(mac_app_store_apps)))
print("You have {} user applications Applications not managed by brew cask or app store...\n------".format(len(unmanged_applications)))
for x in unmanged_applications:
    print(x)
# print(mac_app_store_apps)
41.602941
551
0.70555
9d208e0e14d75f5e83f5d7ca01135d1ab258d6e8
317
py
Python
src/hark_lang/machine/stdout_item.py
krrome/teal-lang
594ac0f0baae047fdb19ac9126d174408d487905
[ "Apache-2.0" ]
85
2020-04-29T13:51:33.000Z
2020-08-28T04:40:11.000Z
src/hark_lang/machine/stdout_item.py
krrome/teal-lang
594ac0f0baae047fdb19ac9126d174408d487905
[ "Apache-2.0" ]
15
2020-05-06T07:58:18.000Z
2020-08-28T10:29:28.000Z
src/hark_lang/machine/stdout_item.py
krrome/teal-lang
594ac0f0baae047fdb19ac9126d174408d487905
[ "Apache-2.0" ]
4
2020-05-31T09:42:08.000Z
2020-08-27T17:04:26.000Z
"""StdoutItem class""" from dataclasses import asdict, dataclass from .hark_serialisable import HarkSerialisable, now_str
19.8125
56
0.690852
9d20e8c21375abfa3aefb4fb09790b9ecbec1d58
6,911
py
Python
compress/algorithms/lzw.py
ShellCode33/CompressionAlgorithms
3b2e7b497ef0af4ba7ac8bc6f4d6e77ea4c4aedc
[ "MIT" ]
null
null
null
compress/algorithms/lzw.py
ShellCode33/CompressionAlgorithms
3b2e7b497ef0af4ba7ac8bc6f4d6e77ea4c4aedc
[ "MIT" ]
null
null
null
compress/algorithms/lzw.py
ShellCode33/CompressionAlgorithms
3b2e7b497ef0af4ba7ac8bc6f4d6e77ea4c4aedc
[ "MIT" ]
null
null
null
# coding: utf-8
38.825843
120
0.64911
9d20f94306c2d2e2215af2edce02e11edf2054d9
1,322
py
Python
app/models.py
ariqfadlan/donorojo-db-api
dd1a3241ead5738c94eb77ed0bbb23b26582618f
[ "MIT" ]
null
null
null
app/models.py
ariqfadlan/donorojo-db-api
dd1a3241ead5738c94eb77ed0bbb23b26582618f
[ "MIT" ]
null
null
null
app/models.py
ariqfadlan/donorojo-db-api
dd1a3241ead5738c94eb77ed0bbb23b26582618f
[ "MIT" ]
null
null
null
""" Contains database models """ from sqlalchemy import Column, ForeignKey, Integer, String, Float from sqlalchemy.orm import relationship from .database import Base
33.05
98
0.746596
9d2612bdf9b9d5fe13c734ed2826b9452f048d19
1,096
py
Python
hackerrank_contests/101Hack44/prime.py
rishabhiitbhu/hackerrank
acc300851c81a29472177f15fd8b56ebebe853ea
[ "MIT" ]
null
null
null
hackerrank_contests/101Hack44/prime.py
rishabhiitbhu/hackerrank
acc300851c81a29472177f15fd8b56ebebe853ea
[ "MIT" ]
null
null
null
hackerrank_contests/101Hack44/prime.py
rishabhiitbhu/hackerrank
acc300851c81a29472177f15fd8b56ebebe853ea
[ "MIT" ]
1
2020-01-30T06:47:09.000Z
2020-01-30T06:47:09.000Z
# a = rwh_primes2(100)
# print(a)

# http://stackoverflow.com/questions/2068372/fastest-way-to-list-all-primes-below-n-in-python/3035188#3035188

""" Input n>=6, Returns a list of primes, 2 <= p < n """

print(sieve_for_primes_to(3))
print(sieve_for_primes_to(1))
print(sieve_for_primes_to(100))
33.212121
110
0.519161
9d26ca6234d4434fd99a9aa1e9b161d86a72613c
2,649
py
Python
competitive_k_means.py
QLightman/competitive_k_means
264a3da409177e40f150da1107d00e149ff1e125
[ "MIT" ]
1
2019-09-03T09:56:43.000Z
2019-09-03T09:56:43.000Z
competitive_k_means.py
QLightman/competitive_k_means
264a3da409177e40f150da1107d00e149ff1e125
[ "MIT" ]
null
null
null
competitive_k_means.py
QLightman/competitive_k_means
264a3da409177e40f150da1107d00e149ff1e125
[ "MIT" ]
null
null
null
import numpy as np
import matplotlib.pyplot as plt
import copy

k = 4
ratio = 0.95  # push the competitive center

if __name__ == '__main__':
    competitive_k_means()
33.1125
104
0.609287
9d280cecbd0d584acd8037cf6b0f18c473484417
3,031
py
Python
shiftmanager/redshift.py
whitmo/shiftmanager
49cd461854a9e8bc270b5cc6f9a2303cf87c2fb3
[ "BSD-2-Clause" ]
null
null
null
shiftmanager/redshift.py
whitmo/shiftmanager
49cd461854a9e8bc270b5cc6f9a2303cf87c2fb3
[ "BSD-2-Clause" ]
null
null
null
shiftmanager/redshift.py
whitmo/shiftmanager
49cd461854a9e8bc270b5cc6f9a2303cf87c2fb3
[ "BSD-2-Clause" ]
1
2020-09-02T04:37:37.000Z
2020-09-02T04:37:37.000Z
""" Defines a Redshift class which encapsulates a database connection and utility functions for managing that database. """ from __future__ import (absolute_import, division, print_function, unicode_literals) import os import psycopg2 from shiftmanager.mixins import AdminMixin, ReflectionMixin, S3Mixin from shiftmanager.memoized_property import memoized_property
32.244681
75
0.629165
9d2bc7d987bd63f2af30edb8519069c52527c5c7
387
py
Python
General Data Preprocessing/copyFile.py
yuxiawang1992/Python-Code
d457a1fd61742dfac08a82a26b66703e5ff6f780
[ "Apache-2.0" ]
null
null
null
General Data Preprocessing/copyFile.py
yuxiawang1992/Python-Code
d457a1fd61742dfac08a82a26b66703e5ff6f780
[ "Apache-2.0" ]
null
null
null
General Data Preprocessing/copyFile.py
yuxiawang1992/Python-Code
d457a1fd61742dfac08a82a26b66703e5ff6f780
[ "Apache-2.0" ]
null
null
null
#Python 3.4.3
#coding=gbk
# copy file wangyuxia 20160920
import sys, shutil, os, string

path = "E:\\test for qgis\\"
target_path = "E:\\test for qgis\\HourScale\\"

for i in range(2,31):
    for j in range(0,24):
        filename = 'N'+str(i).zfill(2)+str(j).zfill(2)
        shutil.copyfile(path+'d_02.hdr',target_path+filename+'.hdr')

print("------------finished---------")
25.8
68
0.596899
9d2c26cb802d2c6da46e391e982eacb22cc6b08d
3,581
py
Python
convert_to_onnx.py
bhahn2004/FaceBoxes.PyTorch
be01c2449c6efa2a976a701dd8a052aa903a32b4
[ "MIT" ]
null
null
null
convert_to_onnx.py
bhahn2004/FaceBoxes.PyTorch
be01c2449c6efa2a976a701dd8a052aa903a32b4
[ "MIT" ]
null
null
null
convert_to_onnx.py
bhahn2004/FaceBoxes.PyTorch
be01c2449c6efa2a976a701dd8a052aa903a32b4
[ "MIT" ]
null
null
null
import sys

from scipy.special import softmax
import torch.onnx
import onnxruntime as ort
import numpy as np
import tensorflow as tf
from tensorflow.keras import backend as K
from pytorch2keras.converter import pytorch_to_keras

from models.faceboxes import FaceBoxes

input_dim = 1024
num_classes = 2
model_path = "weights/FaceBoxesProd.pth"
net = FaceBoxes('train', input_dim, num_classes)


def remove_prefix(state_dict, prefix):
    ''' Old style model is stored with all names of parameters sharing common prefix 'module.' '''
    print('remove prefix \'{}\''.format(prefix))
    f = lambda x: x.split(prefix, 1)[-1] if x.startswith(prefix) else x
    return {f(key): value for key, value in state_dict.items()}


net = load_model(net, model_path, False)
net.eval()
net.to("cuda")

model_name = model_path.split("/")[-1].split(".")[0]
onnx_model_path = f"models/onnx/base-model.onnx"

# export ONNX model
dummy_input = torch.randn(1, 3, input_dim, input_dim).to("cuda")
torch.onnx.export(net, dummy_input, onnx_model_path, verbose=False, input_names=['input'], output_names=['output'])

"""
# try using pytorch2keras
keras_model = pytorch_to_keras(net, dummy_input, [(3, input_dim, input_dim)])
keras_model_path = f"models/onnx/base-model"
#keras_model.save(model_path)

# 0. print PyTorch outputs
out = net(dummy_input)
dummy_input = dummy_input.cpu().detach().numpy()
out = out.cpu().detach().numpy()
loc = out[:, :, 2:]
conf = out[:, :, :2]
scores = softmax(conf, axis=-1)
print(scores)

# 1. check if ONNX outputs are the same
ort_session = ort.InferenceSession(onnx_model_path)
input_name = ort_session.get_inputs()[0].name
out = ort_session.run(None, {input_name: dummy_input})[0]
loc = out[:, :, 2:]
conf = out[:, :, :2]
scores = softmax(conf, axis=-1)
print(scores)

# 2. check if Keras outputs are the same
keras_model_path = f"models/onnx/base-model"
keras_model = tf.keras.models.load_model(keras_model_path)
out = keras_model.predict(dummy_input)
loc = out[:, :, 2:]
conf = out[:, :, :2]
scores = softmax(conf, axis=-1)
print(scores)

# 3. check if intermediate results of Keras are the same
test_fn = K.function([keras_model.input], [keras_model.get_layer('334').output[0]])
test_out = test_fn(dummy_input)
print(np.round(np.array(test_out), 4)[:30])
"""
33.46729
115
0.729405
9d2c9923a0dda16187c578d67868231654968587
358
py
Python
setup.py
kckaiwei/pysteamcmd
273f114352975268b01cb8007cc2336115aea4fc
[ "MIT" ]
null
null
null
setup.py
kckaiwei/pysteamcmd
273f114352975268b01cb8007cc2336115aea4fc
[ "MIT" ]
null
null
null
setup.py
kckaiwei/pysteamcmd
273f114352975268b01cb8007cc2336115aea4fc
[ "MIT" ]
null
null
null
from setuptools import setup

setup(name='pysteamcmd',
      version='0.1.2',
      description='Python package to install and utilize steamcmd',
      url='http://github.com/f0rkz/pysteamcmd',
      author='f0rkz',
      author_email='f0rkz@f0rkznet.net',
      license='MIT',
      packages=['pysteamcmd'],
      install_requires=[],
      zip_safe=False)
27.538462
67
0.648045
9d2f4723ec751e23b2b4a9d81dfaceee08d127d9
3,292
py
Python
x2py/links/strategies/buffer_transform_strategy.py
jaykang920/x2py
b8bd473f94ff4b9576e984cc384f4159ab71278d
[ "MIT" ]
null
null
null
x2py/links/strategies/buffer_transform_strategy.py
jaykang920/x2py
b8bd473f94ff4b9576e984cc384f4159ab71278d
[ "MIT" ]
1
2019-06-05T09:35:09.000Z
2020-07-02T09:46:46.000Z
x2py/links/strategies/buffer_transform_strategy.py
jaykang920/x2py
b8bd473f94ff4b9576e984cc384f4159ab71278d
[ "MIT" ]
null
null
null
# Copyright (c) 2017, 2018 Jae-jun Kang
# See the file LICENSE for details.

from x2py.event_factory import EventFactory
from x2py.links.link_events import *
from x2py.links.strategy import ChannelStrategy
from x2py.util.trace import Trace
33.591837
82
0.637303
9d2ffa602fd2739373ede0b55f827179feb8572a
5,632
py
Python
ignite_trainer/_visdom.py
jinczing/AudioCLIP
b080fc946599290c91f9d3b203295e5968af1bf6
[ "MIT" ]
304
2021-06-28T09:59:13.000Z
2022-03-30T17:33:52.000Z
ignite_trainer/_visdom.py
AK391/AudioCLIP
45327aa203839bfeb58681dd36c04fd493ee72f4
[ "MIT" ]
176
2021-07-23T08:30:21.000Z
2022-03-14T12:29:06.000Z
ignite_trainer/_visdom.py
AK391/AudioCLIP
45327aa203839bfeb58681dd36c04fd493ee72f4
[ "MIT" ]
34
2021-06-29T11:50:19.000Z
2022-03-02T12:01:36.000Z
import os
import sys
import json
import time
import tqdm
import socket
import subprocess

import numpy as np

import visdom

from typing import Tuple
from typing import Optional
29.333333
109
0.552734
9d3007ae1a0b21a2c5b82a4a63774e81f6aa5a00
4,960
py
Python
anonybot.py
sp0oks/anonybot
864688f04231e3088737b12caed76f61a5128993
[ "MIT" ]
5
2019-12-17T17:53:51.000Z
2020-09-06T07:51:23.000Z
anonybot.py
CptSpookz/anonybot
864688f04231e3088737b12caed76f61a5128993
[ "MIT" ]
null
null
null
anonybot.py
CptSpookz/anonybot
864688f04231e3088737b12caed76f61a5128993
[ "MIT" ]
2
2020-01-20T01:01:20.000Z
2020-09-06T07:51:25.000Z
import os
import time

from sqlalchemy import create_engine, BigInteger, UnicodeText, Column, Integer
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker, scoped_session
from sqlalchemy.exc import SQLAlchemyError

from aiogram import Bot, Dispatcher, executor, types
from aiogram.utils.exceptions import ChatNotFound

from dotenv import load_dotenv

load_dotenv()

# Database configuration
DB = os.getenv('DB_ADDR')
ENGINE = create_engine(DB)
Base = declarative_base()
Session = scoped_session(sessionmaker(bind=ENGINE))

# Bot configuration
USAGE = """\
/status -- show how many messages are pending
/receive -- receive pending messages
/send [user_id] -- reply to message to send it to given user
/drop -- drop all pending messages
/help -- shows this message
"""

TOKEN = os.getenv('BOT_TOKEN')
bot = Bot(token=TOKEN)
dp = Dispatcher(bot)


if __name__ == '__main__':
    Base.metadata.create_all(ENGINE)
    executor.start_polling(dp)
36.20438
121
0.626008
9d303166d818d8f8f693a98022e31dfc5961d444
2,912
py
Python
tests/test_doc_cvnn_example.py
saugatkandel/cvnn
f6d7b5c17fd064a7eaa60e7af922914a974eb69a
[ "MIT" ]
38
2020-09-16T14:47:36.000Z
2022-03-30T13:35:05.000Z
tests/test_doc_cvnn_example.py
saugatkandel/cvnn
f6d7b5c17fd064a7eaa60e7af922914a974eb69a
[ "MIT" ]
25
2020-10-03T19:30:16.000Z
2022-03-29T15:24:44.000Z
tests/test_doc_cvnn_example.py
saugatkandel/cvnn
f6d7b5c17fd064a7eaa60e7af922914a974eb69a
[ "MIT" ]
9
2021-01-18T10:48:57.000Z
2022-02-11T10:34:52.000Z
import numpy as np
import cvnn.layers as complex_layers
import tensorflow as tf
from pdb import set_trace


if __name__ == '__main__':
    test_functional_api()
    test_regression()
    test_cifar()
45.5
109
0.730426
9d31c3b53c5a416e56a025e297cf9e335432c27b
2,580
py
Python
gkutils/commonutils/getCSVColumnSubset.py
genghisken/gkutils
0c8aa06d813de72b1cd9cba11219a78952799420
[ "MIT" ]
null
null
null
gkutils/commonutils/getCSVColumnSubset.py
genghisken/gkutils
0c8aa06d813de72b1cd9cba11219a78952799420
[ "MIT" ]
1
2021-11-19T19:28:52.000Z
2021-11-19T19:29:57.000Z
gkutils/commonutils/getCSVColumnSubset.py
genghisken/gkutils
0c8aa06d813de72b1cd9cba11219a78952799420
[ "MIT" ]
null
null
null
"""Write a subset of keys from one CSV to another. Don't use lots of memory. Usage: %s <filename> <outputfile> [--columns=<columns>] [--htm] [--racol=<racol>] [--deccol=<deccol>] [--filtercol=<filtercol>] %s (-h | --help) %s --version Options: -h --help Show this screen. --version Show version. --columns=<columns> Comma separated (no spaces) columns. --htm Generate HTM IDs and add to the column subset. --racol=<racol> RA column, ignored if htm not specified [default: ra] --deccol=<deccol> Declination column, ignored if htm not specified [default: dec] --filtercol=<filtercol> Only write the row when this column is not blank. """ import sys __doc__ = __doc__ % (sys.argv[0], sys.argv[0], sys.argv[0]) from docopt import docopt from gkutils.commonutils import Struct, readGenericDataFile, cleanOptions import csv from gkhtm._gkhtm import htmName if __name__ == '__main__': main()
35.342466
122
0.605039
9d3448187e277186c37746a8eee21eed655db199
1,030
py
Python
questions/univalued-binary-tree/Solution.py
marcus-aurelianus/leetcode-solutions
8b43e72fe1f51c84abc3e89b181ca51f09dc7ca6
[ "MIT" ]
141
2017-12-12T21:45:53.000Z
2022-03-25T07:03:39.000Z
questions/univalued-binary-tree/Solution.py
marcus-aurelianus/leetcode-solutions
8b43e72fe1f51c84abc3e89b181ca51f09dc7ca6
[ "MIT" ]
32
2015-10-05T14:09:52.000Z
2021-05-30T10:28:41.000Z
questions/univalued-binary-tree/Solution.py
marcus-aurelianus/leetcode-solutions
8b43e72fe1f51c84abc3e89b181ca51f09dc7ca6
[ "MIT" ]
56
2015-09-30T05:23:28.000Z
2022-03-08T07:57:11.000Z
""" A binary tree is univalued if every node in the tree has the same value. Return trueif and only if the given tree is univalued. Example 1: Input: [1,1,1,1,1,null,1] Output: true Example 2: Input: [2,2,2,5,2] Output: false Note: The number of nodes in the given tree will be in the range [1, 100]. Each node's value will be an integer in the range [0, 99]. """ # Definition for a binary tree node. # class TreeNode(object): # def __init__(self, x): # self.val = x # self.left = None # self.right = None
20.196078
77
0.586408
9d35852cc4326c58c6eb53f1d5a84c6b35a5e6fb
1,006
py
Python
src/python/WMComponent/DBS3Buffer/MySQL/DBSBufferFiles/GetParentStatus.py
khurtado/WMCore
f74e252412e49189a92962945a94f93bec81cd1e
[ "Apache-2.0" ]
21
2015-11-19T16:18:45.000Z
2021-12-02T18:20:39.000Z
src/python/WMComponent/DBS3Buffer/MySQL/DBSBufferFiles/GetParentStatus.py
khurtado/WMCore
f74e252412e49189a92962945a94f93bec81cd1e
[ "Apache-2.0" ]
5,671
2015-01-06T14:38:52.000Z
2022-03-31T22:11:14.000Z
src/python/WMComponent/DBS3Buffer/MySQL/DBSBufferFiles/GetParentStatus.py
khurtado/WMCore
f74e252412e49189a92962945a94f93bec81cd1e
[ "Apache-2.0" ]
67
2015-01-21T15:55:38.000Z
2022-02-03T19:53:13.000Z
#!/usr/bin/env python
"""
_GetParentStatus_

MySQL implementation of DBSBufferFile.GetParentStatus
"""

from WMCore.Database.DBFormatter import DBFormatter
27.189189
74
0.614314