hexsha
stringlengths 40
40
| size
int64 5
2.06M
| ext
stringclasses 11
values | lang
stringclasses 1
value | max_stars_repo_path
stringlengths 3
251
| max_stars_repo_name
stringlengths 4
130
| max_stars_repo_head_hexsha
stringlengths 40
78
| max_stars_repo_licenses
listlengths 1
10
| max_stars_count
int64 1
191k
⌀ | max_stars_repo_stars_event_min_datetime
stringlengths 24
24
⌀ | max_stars_repo_stars_event_max_datetime
stringlengths 24
24
⌀ | max_issues_repo_path
stringlengths 3
251
| max_issues_repo_name
stringlengths 4
130
| max_issues_repo_head_hexsha
stringlengths 40
78
| max_issues_repo_licenses
listlengths 1
10
| max_issues_count
int64 1
116k
⌀ | max_issues_repo_issues_event_min_datetime
stringlengths 24
24
⌀ | max_issues_repo_issues_event_max_datetime
stringlengths 24
24
⌀ | max_forks_repo_path
stringlengths 3
251
| max_forks_repo_name
stringlengths 4
130
| max_forks_repo_head_hexsha
stringlengths 40
78
| max_forks_repo_licenses
listlengths 1
10
| max_forks_count
int64 1
105k
⌀ | max_forks_repo_forks_event_min_datetime
stringlengths 24
24
⌀ | max_forks_repo_forks_event_max_datetime
stringlengths 24
24
⌀ | content
stringlengths 1
1.05M
| avg_line_length
float64 1
1.02M
| max_line_length
int64 3
1.04M
| alphanum_fraction
float64 0
1
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
32638416d54a115fde42bba19086c99e40948e61
| 802
|
py
|
Python
|
backend/events/tests/test_views.py
|
trfoss/parrot
|
2f120ee1ab82368f85b2b5a7f1c45afc26aa8963
|
[
"BSD-2-Clause"
] | 5
|
2019-02-25T02:24:51.000Z
|
2019-04-21T00:56:43.000Z
|
backend/events/tests/test_views.py
|
trfoss/parrot
|
2f120ee1ab82368f85b2b5a7f1c45afc26aa8963
|
[
"BSD-2-Clause"
] | 51
|
2019-02-06T03:36:27.000Z
|
2021-06-10T21:11:24.000Z
|
backend/events/tests/test_views.py
|
trfoss/parrot
|
2f120ee1ab82368f85b2b5a7f1c45afc26aa8963
|
[
"BSD-2-Clause"
] | 7
|
2019-02-06T04:37:10.000Z
|
2019-03-28T07:52:26.000Z
|
"""
backend/events/tests/test_views.py
Tests for the events page views. We use the test client. Read more at
https://docs.djangoproject.com/en/2.1/topics/testing/tools/
"""
import json
from django.test import TestCase
| 26.733333
| 69
| 0.63591
|
32649f15ad311acc51f598d331270d3f4fb588d6
| 497
|
py
|
Python
|
instructors/lessons/practical_utils/examples/os-path-walk.py
|
mgadagin/PythonClass
|
70b370362d75720b3fb0e1d6cc8158f9445e9708
|
[
"MIT"
] | 46
|
2017-09-27T20:19:36.000Z
|
2020-12-08T10:07:19.000Z
|
instructors/lessons/practical_utils/examples/os-path-walk.py
|
mgadagin/PythonClass
|
70b370362d75720b3fb0e1d6cc8158f9445e9708
|
[
"MIT"
] | 6
|
2018-01-09T08:07:37.000Z
|
2020-09-07T12:25:13.000Z
|
instructors/lessons/practical_utils/examples/os-path-walk.py
|
mgadagin/PythonClass
|
70b370362d75720b3fb0e1d6cc8158f9445e9708
|
[
"MIT"
] | 18
|
2017-10-10T02:06:51.000Z
|
2019-12-01T10:18:13.000Z
|
import os
import os.path
# NOTE(review): this lesson example targets Python 2 only -- os.path.walk
# was removed in Python 3 (os.walk is the replacement), and the `visit`
# callback passed below is not defined anywhere in this snippet.
# Build a tiny directory tree to walk over.
os.mkdir('example')
os.mkdir('example/one')
f = open('example/one/file.txt', 'wt')
f.write('contents')
f.close()
f = open('example/two.txt', 'wt')
f.write('contents')
f.close()
# Walk the tree, calling visit(arg, dirname, names) for each directory.
os.path.walk('example', visit, '(User data)')
| 22.590909
| 45
| 0.591549
|
326587ea3dd2af6a3849b34225b40c151ddc17b4
| 532
|
py
|
Python
|
tikplay/provider/tests/retriever_test.py
|
tietokilta-saato/tikplay
|
8061451c21f06bd07129a8a42543ea86b7518d4a
|
[
"MIT"
] | 2
|
2015-01-15T14:14:50.000Z
|
2015-10-23T05:37:34.000Z
|
tikplay/provider/tests/retriever_test.py
|
tietokilta-saato/tikplay
|
8061451c21f06bd07129a8a42543ea86b7518d4a
|
[
"MIT"
] | 8
|
2015-01-12T10:27:27.000Z
|
2015-05-11T12:05:03.000Z
|
tikplay/provider/tests/retriever_test.py
|
tietokilta-saato/tikplay
|
8061451c21f06bd07129a8a42543ea86b7518d4a
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# Part of tikplay
# Yes, this is a bit of a non-test.
from nose.tools import *
from tikplay.provider.retriever import Retriever
| 25.333333
| 73
| 0.682331
|
3265c12d40cc56aa2b76c483dff904dc52c43391
| 11,333
|
py
|
Python
|
myfunds/web/views/crypto/views.py
|
anzodev/myfunds
|
9f6cda99f443cec064d15d7ff7780f297cbdfe10
|
[
"MIT"
] | null | null | null |
myfunds/web/views/crypto/views.py
|
anzodev/myfunds
|
9f6cda99f443cec064d15d7ff7780f297cbdfe10
|
[
"MIT"
] | null | null | null |
myfunds/web/views/crypto/views.py
|
anzodev/myfunds
|
9f6cda99f443cec064d15d7ff7780f297cbdfe10
|
[
"MIT"
] | null | null | null |
import csv
import io
from datetime import datetime
import peewee as pw
from flask import Blueprint
from flask import g
from flask import make_response
from flask import redirect
from flask import render_template
from flask import request
from flask import url_for
from wtforms import Form
from wtforms import IntegerField
from wtforms import validators as vals
from myfunds.core.constants import CryptoDirection
from myfunds.core.models import CryptoActionLog
from myfunds.core.models import CryptoBalance
from myfunds.core.models import CryptoCurrency
from myfunds.core.models import CryptoTransaction
from myfunds.core.models import db_proxy
from myfunds.modules import cmc
from myfunds.web import ajax
from myfunds.web import auth
from myfunds.web import notify
from myfunds.web import utils
from myfunds.web.constants import DATETIME_FORMAT
from myfunds.web.forms import AddCryptoBalanceForm
from myfunds.web.forms import AddCyptoTransactionForm
from myfunds.web.forms import DeleteCryptoBalanceForm
from myfunds.web.forms import UpdateCryptoBalanceQuantityForm
USD_CODE = "USD"
USD_PRECISION = 2
CRYPTO_PRECISION = 8
bp = Blueprint("crypto", __name__, template_folder="templates")
class ActionsFilterForm(Form):
    """Query-string form with optional offset/limit integers.

    Presumably used to page through the crypto action log -- both fields
    may be omitted (vals.Optional), in which case the view picks defaults.
    """

    # Number of records to skip; optional.
    offset = IntegerField(validators=[vals.Optional()])
    # Maximum number of records to return; optional.
    limit = IntegerField(validators=[vals.Optional()])
| 29.667539
| 88
| 0.682344
|
32665f5e99814a1ca419ee599a7bb327ba8ffbf0
| 9,115
|
py
|
Python
|
src/modeci_mdf/interfaces/pytorch/mod_torch_builtins.py
|
29riyasaxena/MDF
|
476e6950d0f14f29463eb4f6e3be518dfb2160a5
|
[
"Apache-2.0"
] | 12
|
2021-01-18T20:38:21.000Z
|
2022-03-29T15:01:10.000Z
|
src/modeci_mdf/interfaces/pytorch/mod_torch_builtins.py
|
29riyasaxena/MDF
|
476e6950d0f14f29463eb4f6e3be518dfb2160a5
|
[
"Apache-2.0"
] | 101
|
2020-12-14T15:23:07.000Z
|
2022-03-31T17:06:19.000Z
|
src/modeci_mdf/interfaces/pytorch/mod_torch_builtins.py
|
29riyasaxena/MDF
|
476e6950d0f14f29463eb4f6e3be518dfb2160a5
|
[
"Apache-2.0"
] | 15
|
2020-12-04T22:37:14.000Z
|
2022-03-31T09:48:03.000Z
|
"""
Wrap commonly-used torch builtins in nn.Module subclass
for easier automatic construction of script
"""
import torch
import torch.nn as nn
import torch.nn.functional as F
# TODO: Many more to be implemented
__all__ = [
"argmax",
"argmin",
"matmul",
"add",
"sin",
"cos",
"abs",
"flatten",
"clip",
"shape",
"det",
"And",
"Or",
"Xor",
"concat",
"ceil",
"floor",
"bitshift",
"conv",
"elu",
"hardsigmoid",
"hardswish",
"compress",
]
| 23.798956
| 88
| 0.501042
|
326698864c4df87b158debf66bd86b994c325aa0
| 8,068
|
py
|
Python
|
taf/testlib/snmphelpers.py
|
stepanandr/taf
|
75cb85861f8e9703bab7dc6195f3926b8394e3d0
|
[
"Apache-2.0"
] | 10
|
2016-12-16T00:05:58.000Z
|
2018-10-30T17:48:25.000Z
|
taf/testlib/snmphelpers.py
|
stepanandr/taf
|
75cb85861f8e9703bab7dc6195f3926b8394e3d0
|
[
"Apache-2.0"
] | 40
|
2017-01-04T23:07:05.000Z
|
2018-04-16T19:52:02.000Z
|
taf/testlib/snmphelpers.py
|
stepanandr/taf
|
75cb85861f8e9703bab7dc6195f3926b8394e3d0
|
[
"Apache-2.0"
] | 23
|
2016-12-30T05:03:53.000Z
|
2020-04-01T08:40:24.000Z
|
# Copyright (c) 2011 - 2017, Intel Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""``snmphelpers.py``
`SNMP specific helpers functions`
"""
import sys
import os
import shutil
import tarfile
from subprocess import Popen, PIPE
import pytest
import paramiko as paramiko
from . import helpers
from . import loggers
# create logger for module
def is_mibs_folder_empty(path):
    """Check whether the MIBs folder contains any ONS MIB files.

    Args:
        path(str): path to MIBs folder

    Returns:
        bool: True if no file name containing 'ONS'/'ons' is present
        (or the folder does not exist), False otherwise.

    Examples::

        is_mibs_folder_empty(path)

    """
    if not os.path.exists(path):
        # A missing folder counts as empty, matching the original behavior.
        return True
    # any() short-circuits on the first ONS file instead of scanning the rest.
    return not any('ONS' in file_n or "ons" in file_n
                   for file_n in os.listdir(path))
def clear_mibs_folder(path):
    """Delete the MIBs folder and everything below it, if it exists.

    Args:
        path(str): path to MIBs folder

    Examples::

        clear_mibs_folder(path)

    """
    # Guard clause: nothing to do for a non-existent folder.
    if not os.path.exists(path):
        return
    shutil.rmtree(path)
def get_remote_file(hostname, port, username, password, remotepath, localpath):
    """Download a remote file to the local machine over SFTP (paramiko).

    Args:
        hostname(str): Remote IP-address
        port(int): Remote SSH port
        username(str): Remote host username for authentication
        password(str): Remote host password for authentication
        remotepath(str): Remote file to download location path
        localpath(str): Local path to save remote file

    Examples::

        get_remote_file(host, port, username, password, tar_remotepath, tar_localpath)

    """
    transport = paramiko.Transport((hostname, port))
    transport.connect(username=username, password=password)
    sftp = paramiko.SFTPClient.from_transport(transport)
    try:
        sftp.get(remotepath=remotepath, localpath=localpath)
    finally:
        # Always release both the SFTP channel and the SSH transport,
        # even when the download fails.
        sftp.close()
        transport.close()
def untar_file(tar_path, untar_path):
    """Unpack a tar archive into *untar_path* and delete the archive.

    Args:
        tar_path(str): Path to tar file
        untar_path(str): Path where to unpack

    Examples::

        untar_file(tar_localpath, mib_path_txt)

    """
    # Remove a stale 'mibs' entry only if it is a plain file; a leftover
    # directory does not match os.path.isfile and is kept as-is.
    # NOTE(review): 'mibs' is normally a directory -- confirm this check
    # is intentional.
    old_folder = os.path.join(untar_path, 'mibs')
    if os.path.isfile(old_folder):
        os.remove(old_folder)
    # Context manager closes the archive handle even if extraction raises
    # (the original leaked the handle on error).
    with tarfile.open(tar_path) as tar:
        tar.extractall(untar_path)
    os.remove(tar_path)
def file_convert(mib_txt_path, mib_py_path):
    """Convert a .txt MIB to a pysnmp .py MIB via smidump + libsmi2pysnmp.

    Args:
        mib_txt_path(str): Full path to .txt MIB.
        mib_py_path(str): Full path to directory for the .py MIB.

    Returns:
        str: File name of the generated .py MIB, or the string "Fail"
        when smidump produced no output.

    Examples::

        file_convert(mib_txt_path, mib_py_path)

    """
    mod_logger_snmp = loggers.module_logger(name=__name__)
    # translate .txt mib into python format using 3rd party tool 'smidump'
    smidump = Popen(['smidump', '-k', '-f', 'python', mib_txt_path], stdout=PIPE)
    list_stdout = smidump.communicate()[0]
    if len(list_stdout) == 0:
        # smidump emitted nothing: the MIB could not be converted.
        return "Fail"
    # create tmp directory for filling MIBs dictionary
    mib_path_tmp = os.path.join(mib_py_path, 'tmp')
    if not os.path.exists(mib_path_tmp):
        os.makedirs(mib_path_tmp)
    # add tmp path to sys.path so the converted MIB can be imported below
    sys.path.append(mib_path_tmp)
    # get file without extension
    file_name = os.path.splitext(os.path.basename(mib_txt_path))[0]
    # create .py name
    temp_file_name = "{0}.py".format(file_name)
    # create .tmp file path for imports
    temp_file_path = os.path.join(mib_path_tmp, temp_file_name)
    # save and import converted MIB's
    with open(temp_file_path, "ab") as a:
        a.write(list_stdout)
    temp_module = __import__(os.path.splitext(os.path.basename(mib_txt_path))[0])
    # update helpers.MIBS_DICT with MIB data
    if "moduleName" in list(temp_module.MIB.keys()) and "nodes" in list(temp_module.MIB.keys()):
        helpers.MIBS_DICT.update({temp_module.MIB["moduleName"]: list(temp_module.MIB["nodes"].keys())})
    # clear tmp file path
    sys.path.remove(mib_path_tmp)
    os.remove(temp_file_path)
    # translate MIB from .py into pysnmp format using 3rd party tool 'libsmi2pysnmp'
    pipe = Popen(['libsmi2pysnmp', '--no-text'], stdout=PIPE, stdin=PIPE)
    stdout = pipe.communicate(input=list_stdout)
    # get MIB name from itself, add .py and save it.
    mib_name = "{0}.py".format(temp_module.MIB["moduleName"])
    mib_py_path = os.path.join(mib_py_path, mib_name)
    mod_logger_snmp.debug("Convert %s to %s" % (file_name, temp_file_name))
    with open(mib_py_path, 'a') as py_file:
        for string in stdout:
            if string is not None:
                str_dict = string.decode('utf-8').split('\n')
                # NOTE(review): only lines mentioning ModuleCompliance are
                # filtered/written here -- verify the original indentation;
                # it may have intended an outer else writing all other lines.
                for each_str in str_dict:
                    if "ModuleCompliance" in each_str:
                        if "ObjectGroup" in each_str:
                            py_file.write(each_str + '\n')
                        elif "Compliance)" in each_str:
                            pass
                        else:
                            py_file.write(each_str + '\n')
    return mib_name
def convert_to_py(txt_dir_path, py_dir_path):
    """Convert every .txt MIB under ``txt_dir_path/MIB`` to .py, with retries.

    Each MIB is run through :func:`file_convert` up to three times; both the
    source .txt tree and the temporary 'tmp' directory are removed afterwards.

    Args:
        txt_dir_path(str): Path to dir with .txt MIB's.
        py_dir_path(str): Path to dir with .py MIB's

    Examples::

        convert_to_py(mib_path_tmp, mib_path)

    """
    mod_logger_snmp = loggers.module_logger(name=__name__)
    txt_dir_path = os.path.join(txt_dir_path, "MIB")
    mod_logger_snmp.debug("Converts .txt MIB's to .py")
    # smidump resolves MIB imports via the SMIPATH environment variable.
    os.environ['SMIPATH'] = txt_dir_path
    for mib in os.listdir(txt_dir_path):
        mib_txt_path = os.path.join(txt_dir_path, mib)
        retry_count = 3
        retry = 1
        while retry <= retry_count:
            mib_py = file_convert(mib_txt_path, py_dir_path)
            # Success is judged by the converted file appearing on disk.
            if mib_py not in os.listdir(py_dir_path):
                mod_logger_snmp.debug("Converted MIB %s is not present at %s" % (mib, py_dir_path))
                retry += 1
                if retry > retry_count:
                    mod_logger_snmp.debug("Can not convert %s" % (mib, ))
            else:
                mod_logger_snmp.debug("Converted MIB %s is present at %s" % (mib, py_dir_path))
                # Force loop exit on success.
                retry = retry_count + 1
    # Clean up the source tree and the temporary import directory.
    shutil.rmtree(txt_dir_path)
    shutil.rmtree(os.path.join(py_dir_path, "tmp"))
def create_mib_folder(config, path, env):
    """Download, unpack and convert the ONS MIB archive into *path*.

    Creates ``path`` plus its ``tmp`` and ``txt`` subdirectories, fetches the
    MIB tarball over SFTP, untars it into ``txt`` and converts the MIBs to
    pysnmp .py modules in ``path``.

    Args:
        config(dict): Configuration dictionary.
        path(str): Path to MIB folder.
        env(Environment): Environment object.

    Examples::

        create_mib_folder(config, path, env)

    """
    if config is None:
        pytest.fail("UI settings not found in environment configuration.")
    host = config['host']
    port = int(config['port'])
    username = config['username']
    password = config['password']
    tar_folder = config['tar_remotepath']
    tar_file = os.path.split(tar_folder)[1]
    # branch and platform look unused, but they are consumed by the
    # format(**locals()) call below -- do not remove or rename them.
    branch = env.env_prop['switchppVersion']
    platform = getattr(getattr(env.switch[1], 'hw', None), 'snmp_path', None)
    tar_remotepath = tar_folder.format(**locals())
    if not os.path.exists(path):
        os.makedirs(path)
    tar_localpath = os.path.join(path, tar_file)
    mib_path_tmp = os.path.join(path, 'tmp')
    if not os.path.exists(mib_path_tmp):
        os.makedirs(mib_path_tmp)
    mib_path_txt = os.path.join(path, 'txt')
    if not os.path.exists(mib_path_txt):
        os.makedirs(mib_path_txt)
    get_remote_file(host, port, username, password, tar_remotepath, tar_localpath)
    untar_file(tar_localpath, mib_path_txt)
    convert_to_py(mib_path_txt, path)
| 29.992565
| 104
| 0.649603
|
3266f7d31cc045815dafabe76a68d2f3cebde4da
| 6,843
|
py
|
Python
|
cadence/apps/backend/views.py
|
BitLooter/Cadence
|
3adbe51f042120f7154711a58a614ce0e8b3664b
|
[
"BSD-2-Clause"
] | null | null | null |
cadence/apps/backend/views.py
|
BitLooter/Cadence
|
3adbe51f042120f7154711a58a614ce0e8b3664b
|
[
"BSD-2-Clause"
] | null | null | null |
cadence/apps/backend/views.py
|
BitLooter/Cadence
|
3adbe51f042120f7154711a58a614ce0e8b3664b
|
[
"BSD-2-Clause"
] | null | null | null |
import json
import logging
from django.http import HttpResponse, HttpResponseBadRequest, HttpResponseNotFound
from django.core.exceptions import ObjectDoesNotExist
from django.views.decorators.csrf import csrf_exempt # See note below on saveplaylist
import models
# Set up logging
logger = logging.getLogger("apps")
# View function decorators
##########################
def log_request(f):
    """Decorator: record basic request info to the log, then call the view.

    NOTE(review): the original body referenced an undefined ``wrapper``
    (a guaranteed NameError), so the implementation was evidently lost.
    This is a minimal reconstruction -- confirm the intended log format
    against the project's history.
    """
    def wrapper(request, *args, **kwargs):
        # Log method and path before dispatching to the wrapped view.
        logging.getLogger("apps").info("%s %s", request.method, request.path)
        return f(request, *args, **kwargs)
    wrapper.__doc__ = f.__doc__
    return wrapper
def handle_not_found(f):
    """
    For views that request a specific object (e.g. a playlist), return a 404
    page and log an error if the object was not found.

    Assumes the object being looked for is passed as a kwarg named 'item_id'.
    If this view does not fit this pattern, you will not be able to handle
    404 errors for it with this decorator.

    NOTE(review): the original body referenced an undefined ``wrapper``
    (a guaranteed NameError), so the implementation was evidently lost.
    This is a minimal reconstruction -- confirm against project history.
    """
    def wrapper(request, *args, **kwargs):
        try:
            return f(request, *args, **kwargs)
        except ObjectDoesNotExist:
            # Log which object was missing and hand back a 404 response.
            logging.getLogger("apps").error(
                "Object not found (item_id=%s)", kwargs.get("item_id"))
            return HttpResponseNotFound()
    wrapper.__doc__ = f.__doc__
    return wrapper
# View functions
################
#TODO: check for external errors like database access problems
def playlists(request):
    """
    Dispatch view for /data/playlists/, selecting a handler by request type.

    A POST request is routed to saveplaylist(), which stores a new playlist
    built from the JSON request body; any other method falls through to
    playlistlist(), which returns the available playlists in JSON format.
    This function only dispatches -- it performs no work of its own.
    """
    # POST means "save"; everything else defaults to listing.
    if request.method == "POST":
        return saveplaylist(request)
    return playlistlist(request)
# Utility methods
#################
def json_response(output):
    """Serialize *output* to JSON and wrap it in an HTTP response."""
    payload = json.dumps(output)
    return HttpResponse(payload, mimetype="application/json")
| 34.736041
| 153
| 0.672366
|
32675e661c420861aca3a72ce984ac5043cdeab4
| 2,868
|
py
|
Python
|
elexon_api/utils.py
|
GiorgioBalestrieri/elexon_api_tool
|
5b271e9d4a52dec5585a232833a699b8392ee6b0
|
[
"MIT"
] | 4
|
2019-06-07T11:14:46.000Z
|
2021-04-01T14:15:14.000Z
|
elexon_api/utils.py
|
GiorgioBalestrieri/elexon_api_tool
|
5b271e9d4a52dec5585a232833a699b8392ee6b0
|
[
"MIT"
] | null | null | null |
elexon_api/utils.py
|
GiorgioBalestrieri/elexon_api_tool
|
5b271e9d4a52dec5585a232833a699b8392ee6b0
|
[
"MIT"
] | 6
|
2019-02-28T20:24:26.000Z
|
2021-03-30T18:08:23.000Z
|
import os
from pathlib import Path
import pandas as pd
from collections import defaultdict
from typing import Dict, List
from .config import REQUIRED_D, API_KEY_FILENAME
import logging
logger = logging.getLogger(__name__)
logger.addHandler(logging.NullHandler())
def get_required_parameters(service_code: str) -> List[str]:
    """Return the list of required query parameters for *service_code*.

    Raises:
        KeyError: if *service_code* is not a known service in REQUIRED_D.
    """
    return REQUIRED_D[service_code]
def _get_path_to_module() -> Path:
    """Return the directory that contains this module file."""
    # Resolve symlinks first so the parent is the real on-disk location.
    module_file = os.path.realpath(__file__)
    return Path(module_file).parent
def get_api_key_path(filename=API_KEY_FILENAME) -> Path:
    """Return the path to the API key file next to this module.

    Note: this only builds the path -- it does not check that the file
    exists, nor read it.

    Args:
        filename: API key file name (defaults to config.API_KEY_FILENAME).
    """
    path_to_dir = _get_path_to_module()
    return path_to_dir / filename
def extract_df(r_dict: dict) -> pd.DataFrame:
    """Extract DataFrame from dictionary.

    Parameters
    ----------
    r_dict
        Obtained from response through xmltodict.

    Returns
    -------
    pd.DataFrame
        One row per response item.

    Raises
    ------
    Exception
        Re-raises the underlying error when neither construction works.
    """
    r_body = r_dict['responseBody']
    r_items_list = r_body['responseList']['item']
    try:
        df_items = pd.DataFrame(r_items_list)
    except Exception:
        # A single item arrives as a plain dict rather than a list of
        # dicts; pandas then needs an explicit index to build one row.
        logger.warning("Failed to create DataFrame.", exc_info=True)
        try:
            df_items = pd.DataFrame(r_items_list, index=[0])
        except Exception:
            logger.error("Failed to create DataFrame.")
            # Bare raise preserves the original traceback.
            raise
    return df_items
def split_list_of_dicts(dict_list: List[dict], key: str) -> Dict[str,List[dict]]:
    """Split a list of dictionaries into multiple lists based on a specific key.

    Output lists are stored in a dictionary keyed by each distinct value of
    ``d[key]``.  (The returned mapping is a ``collections.defaultdict``.)

    Example:
        >>> dict_list = [
        ...     {"recordType": "a", "foo": 1, "bar": 1},
        ...     {"recordType": "b", "foo": 2, "bar": 2},
        ...     {"recordType": "b", "foo": 3, "bar": 3},
        ... ]
        >>> split_list_of_dicts(dict_list, 'recordType')
        {
            "a": [{"recordType": "a", "foo": 1, "bar": 1}],
            "b": [{"recordType": "b", "foo": 2, "bar": 2},
                  {"recordType": "b", "foo": 3, "bar": 3}],
        }

    Raises:
        KeyError: if any dictionary in *dict_list* lacks *key*.
    """
    result = defaultdict(list)
    for d in dict_list:
        result[d[key]].append(d)
    return result
| 25.380531
| 81
| 0.540098
|
326881582afe0e7d4f36578fa52df6c3b487641d
| 1,608
|
py
|
Python
|
relative_connectivity_of_subgraphs.py
|
doberse/RRI
|
e2fdc085d8040efc230a25eec670dd6839cbf1f7
|
[
"MIT"
] | null | null | null |
relative_connectivity_of_subgraphs.py
|
doberse/RRI
|
e2fdc085d8040efc230a25eec670dd6839cbf1f7
|
[
"MIT"
] | null | null | null |
relative_connectivity_of_subgraphs.py
|
doberse/RRI
|
e2fdc085d8040efc230a25eec670dd6839cbf1f7
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# Computes the relative connectivity (RC) of subgraphs of the BC-related
# RNA-RNA interaction (RRI) network: nodes are added in descending degree
# order and, at each step, RC = |largest connected component| / |nodes so far|.
import networkx as nx
import pandas as pd
#Other nodes connected by one node
# Build an adjacency list (node_to_nodes) from tab-separated edge pairs.
r=open('input_data/BC-related_RRI_network.txt')
ll=r.readlines()
r.close()
rna_pairs=[]
node_to_nodes={}
for l in ll:
    ws=l.strip().split('\t')
    # Sort endpoints so each undirected pair has a canonical order.
    qx=sorted(ws[0:2])
    rna_pairs.append((qx[0],qx[1]))
    for i in [0,1]:
        if i==0:
            j=1
        else:
            j=0
        if qx[i] not in node_to_nodes:
            node_to_nodes[qx[i]]=[qx[j]]
        else:
            node_to_nodes[qx[i]].append(qx[j])
#Dictionary of Node No.
# Map node id (first CSV column) to a '~'-joined label of columns 2-7.
r=open('input_data/RRI_node.csv')
r.readline()
no2node={}
for l in r:
    ws=l.strip().split(',')
    no2node[ws[0]]='~'.join(ws[1:7])
r.close()
#Sort nodes by node degree
node_degree={}
for k in node_to_nodes:
    node_degree[k]=len(node_to_nodes[k])
df=pd.DataFrame(node_degree,index=['Degree'])
df=df.sort_values(by='Degree',axis=1,ascending=False)
nodes=df.columns.values
#Compute the relative conectivity of subgraphs
G=nx.Graph()
node_G=[]
w=open('RC_in_BC-related_RRI_network.csv','w')
w.write('Node,No.,Relative connectivity\n')
k=0
lim=len(nodes)
while k<lim:
    node_key=nodes[k]
    node_G.append(node_key)
    G.add_node(node_key)#Add the node in subgraphs
    for node in node_G:
        if node in set(node_to_nodes[node_key]):
            G.add_edge(node_key,node)#Add the edge in subgraphs
    largest_components=max(nx.connected_components(G),key=len)
    k+=1
    w.write(no2node[node_key]+','+str(k)+','+str(len(largest_components)/float(len(node_G)))+'\n')
w.close()
| 26.360656
| 99
| 0.625622
|
326bc9a28ede548053a0104238484ec204f3ccb0
| 1,518
|
py
|
Python
|
macdaily/cmd/install.py
|
JarryShaw/MacDaily
|
853b841dd1f1f7e6aae7bf2c305ff008bc76055c
|
[
"BSD-3-Clause"
] | 10
|
2018-09-20T19:57:56.000Z
|
2021-11-14T18:28:10.000Z
|
macdaily/cmd/install.py
|
JarryShaw/jsdaily
|
3ca7aa7c75a12dc08ab44f78af2b089e1ed41d3d
|
[
"BSD-3-Clause"
] | 2
|
2020-05-31T08:49:47.000Z
|
2021-12-28T16:57:42.000Z
|
macdaily/cmd/install.py
|
JarryShaw/jsdaily
|
3ca7aa7c75a12dc08ab44f78af2b089e1ed41d3d
|
[
"BSD-3-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
import abc
from macdaily.cls.command import Command
from macdaily.util.tools.print import print_info
| 25.3
| 99
| 0.607378
|
326dd27e7ff223645c2d0bf5d397fdea5ed20af2
| 2,632
|
py
|
Python
|
src/piotr/cmdline/fs.py
|
orangecms/piotr
|
f892ce6eaaa08ea81eb01943a388b64fbf3ccc44
|
[
"MIT"
] | 47
|
2021-07-02T08:39:02.000Z
|
2021-11-08T22:21:39.000Z
|
src/piotr/cmdline/fs.py
|
orangecms/piotr
|
f892ce6eaaa08ea81eb01943a388b64fbf3ccc44
|
[
"MIT"
] | 2
|
2021-07-08T09:25:30.000Z
|
2021-07-12T10:06:51.000Z
|
src/piotr/cmdline/fs.py
|
orangecms/piotr
|
f892ce6eaaa08ea81eb01943a388b64fbf3ccc44
|
[
"MIT"
] | 5
|
2021-07-08T08:29:17.000Z
|
2021-10-18T13:35:11.000Z
|
"""
FS commandline module.
Allows to:
- list host filesystems
- remove a specific host filesystem
- add a specific host filesystem
"""
from os.path import basename
from piotr.cmdline import CmdlineModule, module, command
from piotr.user import UserDirectory as ud
from piotr.util import confirm
| 32.9
| 112
| 0.549392
|
32717c3bd131867ffad78e96d71e4ee21ce9b1c6
| 61
|
py
|
Python
|
mct_logging/src/mct_logging/__init__.py
|
iorodeo/mct
|
fa8b85f36533c9b1486ca4f6b0c40c3daa6f4e11
|
[
"Apache-2.0"
] | null | null | null |
mct_logging/src/mct_logging/__init__.py
|
iorodeo/mct
|
fa8b85f36533c9b1486ca4f6b0c40c3daa6f4e11
|
[
"Apache-2.0"
] | null | null | null |
mct_logging/src/mct_logging/__init__.py
|
iorodeo/mct
|
fa8b85f36533c9b1486ca4f6b0c40c3daa6f4e11
|
[
"Apache-2.0"
] | null | null | null |
import tracking_pts_logger_master
import tracking_pts_logger
| 20.333333
| 33
| 0.934426
|
32723b5595559318393e20a40362e1d61e41c415
| 3,133
|
py
|
Python
|
singlecellmultiomics/modularDemultiplexer/demultiplexModules/scartrace.py
|
zztin/SingleCellMultiOmics
|
d3035c33eb1375f0703cc49537417b755ad8a693
|
[
"MIT"
] | 17
|
2019-05-21T09:12:16.000Z
|
2022-02-14T19:26:58.000Z
|
singlecellmultiomics/modularDemultiplexer/demultiplexModules/scartrace.py
|
zztin/SingleCellMultiOmics
|
d3035c33eb1375f0703cc49537417b755ad8a693
|
[
"MIT"
] | 70
|
2019-05-20T08:08:45.000Z
|
2021-06-22T15:58:01.000Z
|
singlecellmultiomics/modularDemultiplexer/demultiplexModules/scartrace.py
|
zztin/SingleCellMultiOmics
|
d3035c33eb1375f0703cc49537417b755ad8a693
|
[
"MIT"
] | 7
|
2020-04-09T15:11:12.000Z
|
2022-02-14T15:23:31.000Z
|
from singlecellmultiomics.modularDemultiplexer.baseDemultiplexMethods import UmiBarcodeDemuxMethod, NonMultiplexable
# ScarTrace
| 34.054348
| 125
| 0.605171
|
3272a27a8fc6fa3c964e19b20bd692f8755a0dee
| 6,151
|
py
|
Python
|
tests/models.py
|
intellineers/django-bridger
|
ed097984a99df7da40a4d01bd00c56e3c6083056
|
[
"BSD-3-Clause"
] | 2
|
2020-03-17T00:53:23.000Z
|
2020-07-16T07:00:33.000Z
|
tests/models.py
|
intellineers/django-bridger
|
ed097984a99df7da40a4d01bd00c56e3c6083056
|
[
"BSD-3-Clause"
] | 76
|
2019-12-05T01:15:57.000Z
|
2021-09-07T16:47:27.000Z
|
tests/models.py
|
intellineers/django-bridger
|
ed097984a99df7da40a4d01bd00c56e3c6083056
|
[
"BSD-3-Clause"
] | 1
|
2020-02-05T15:09:47.000Z
|
2020-02-05T15:09:47.000Z
|
from datetime import date, time
from django.contrib.postgres.fields import ArrayField
from django.db import models
from django.utils import timezone
from django_fsm import FSMField, transition
from rest_framework.reverse import reverse
from simple_history.models import HistoricalRecords
from bridger.buttons import ActionButton
from bridger.display import FieldSet, InstanceDisplay, Section
from bridger.enums import RequestType
from bridger.search import register as search_register
from bridger.tags import TagModelMixin
| 31.22335
| 123
| 0.677451
|
327639bba2a2aa36c47d30fbf67b64ee714db74b
| 2,975
|
py
|
Python
|
RNAstructure_Source/RNAstructure_python_interface/Error_handling.py
|
mayc2/PseudoKnot_research
|
33e94b84435d87aff3d89dbad970c438ac173331
|
[
"MIT"
] | null | null | null |
RNAstructure_Source/RNAstructure_python_interface/Error_handling.py
|
mayc2/PseudoKnot_research
|
33e94b84435d87aff3d89dbad970c438ac173331
|
[
"MIT"
] | null | null | null |
RNAstructure_Source/RNAstructure_python_interface/Error_handling.py
|
mayc2/PseudoKnot_research
|
33e94b84435d87aff3d89dbad970c438ac173331
|
[
"MIT"
] | null | null | null |
#automated error checking for RNAstructure python interface
from __future__ import print_function
import inspect
from functools import wraps
from collections import defaultdict
debug = False
lookup_exceptions = defaultdict(lambda:RuntimeError,
{ 1:IOError,
2:IOError,
3:IndexError,
4:IndexError,
5:EnvironmentError,
6:StructureError,
7:StructureError,
8:StructureError,
9:StructureError,
10:ValueError,
11:ValueError,
12:ValueError,
13:IOError,
14:RNAstructureInternalError,
15:ValueError,
16:ValueError,
17:ValueError,
18:ValueError,
19:ValueError,
20:ValueError,
21:RNAstructureInternalError,
22:RNAstructureInternalError,
23:ValueError,
24:ValueError,
25:ValueError,
26:ValueError
})
| 35.416667
| 110
| 0.621176
|
3276b79a61cf27161c545de376944d5851538c10
| 52,691
|
py
|
Python
|
Src/si_figs.py
|
jomimc/FoldAsymCode
|
1896e5768e738bb5d1921a3f4c8eaf7f66c06be9
|
[
"MIT"
] | 1
|
2020-10-07T14:24:06.000Z
|
2020-10-07T14:24:06.000Z
|
Src/si_figs.py
|
jomimc/FoldAsymCode
|
1896e5768e738bb5d1921a3f4c8eaf7f66c06be9
|
[
"MIT"
] | null | null | null |
Src/si_figs.py
|
jomimc/FoldAsymCode
|
1896e5768e738bb5d1921a3f4c8eaf7f66c06be9
|
[
"MIT"
] | null | null | null |
from collections import defaultdict, Counter
from itertools import product, permutations
from glob import glob
import json
import os
from pathlib import Path
import pickle
import sqlite3
import string
import sys
import time
import matplotlib as mpl
from matplotlib import colors
from matplotlib import pyplot as plt
from matplotlib.gridspec import GridSpec
from matplotlib.lines import Line2D
import matplotlib.patches as mpatches
from multiprocessing import Pool
import numpy as np
import pandas as pd
from palettable.colorbrewer.qualitative import Paired_12
from palettable.colorbrewer.diverging import PuOr_5, RdYlGn_6, PuOr_10, RdBu_10
from palettable.scientific.diverging import Cork_10
from scipy.spatial import distance_matrix, ConvexHull, convex_hull_plot_2d
from scipy.stats import linregress, pearsonr, lognorm
import seaborn as sns
import svgutils.compose as sc
import asym_io
from asym_io import PATH_BASE, PATH_ASYM, PATH_ASYM_DATA
import asym_utils as utils
import folding_rate
import paper_figs
import structure
PATH_FIG = PATH_ASYM.joinpath("Figures")
PATH_FIG_DATA = PATH_FIG.joinpath("Data")
custom_cmap = sns.diverging_palette(230, 22, s=100, l=47, n=13)
c_helix = custom_cmap[2]
c_sheet = custom_cmap[10]
col = [c_helix, c_sheet, "#CB7CE6", "#79C726"]
####################################################################
### SI Figures
####################################################################
### FIG 1
####################################################################
### FIG 2
####################################################################
### FIG 3
####################################################################
### FIG 4
####################################################################
### FIG 5
####################################################################
### FIG 6
####################################################################
### FIG 7
####################################################################
### FIG 8
####################################################################
### FIG 9
####################################################################
### FIG 10
####################################################################
### FIG 11
# To create this figure, you need to download the complete
# Human and E. coli proteomes at:
# https://alphafold.ebi.ac.uk/download
# and then change the code so that "base" points to the
# folder that contains the downloaded ".pdb" files
| 42.085463
| 162
| 0.557723
|
327808782f63cb50deaafbd843fb0446afafa40c
| 81
|
py
|
Python
|
release_ce.py
|
BTW-Community/BTW-MCP
|
4422e153525265029754dec222fc0c0064e03962
|
[
"MIT"
] | 2
|
2021-12-12T17:14:53.000Z
|
2021-12-25T04:03:18.000Z
|
release_ce.py
|
BTW-Community/BTW-MCP
|
4422e153525265029754dec222fc0c0064e03962
|
[
"MIT"
] | null | null | null |
release_ce.py
|
BTW-Community/BTW-MCP
|
4422e153525265029754dec222fc0c0064e03962
|
[
"MIT"
] | null | null | null |
from btw_mcp import *
package_release("vanilla", "main", directory="ce_release")
| 27
| 58
| 0.765432
|
327857254668f20b13612c825f93043e95b1c5c9
| 3,449
|
py
|
Python
|
test_beam_search.py
|
slegroux/slgBeam
|
733049ad4a97f582bc169623941cfbdf3efea207
|
[
"Apache-2.0"
] | null | null | null |
test_beam_search.py
|
slegroux/slgBeam
|
733049ad4a97f582bc169623941cfbdf3efea207
|
[
"Apache-2.0"
] | null | null | null |
test_beam_search.py
|
slegroux/slgBeam
|
733049ad4a97f582bc169623941cfbdf3efea207
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
# (c) 2020 Sylvain Le Groux <slegroux@ccrma.stanford.edu>
import pytest
from pytest import approx
import numpy as np
import torch
from IPython import embed
from beam_search import Tokenizer, Score, BeamSearch
| 33.813725
| 98
| 0.632647
|
327872875221fcfb18f1db81613c4a83884de390
| 3,404
|
py
|
Python
|
src/main/python/hydra/kafkatest/maxrate_test.py
|
bopopescu/hydra
|
ec0793f8c1f49ceb93bf1f1a9789085b68d55f08
|
[
"Apache-2.0"
] | 10
|
2016-05-28T15:56:43.000Z
|
2018-01-03T21:30:58.000Z
|
src/main/python/hydra/kafkatest/maxrate_test.py
|
bopopescu/hydra
|
ec0793f8c1f49ceb93bf1f1a9789085b68d55f08
|
[
"Apache-2.0"
] | 17
|
2016-06-06T22:15:28.000Z
|
2020-07-22T20:28:12.000Z
|
src/main/python/hydra/kafkatest/maxrate_test.py
|
bopopescu/hydra
|
ec0793f8c1f49ceb93bf1f1a9789085b68d55f08
|
[
"Apache-2.0"
] | 5
|
2016-06-01T22:01:44.000Z
|
2020-07-22T20:12:49.000Z
|
__author__ = 'annyz'
from pprint import pprint, pformat # NOQA
import logging
import os
import sys
from datetime import datetime
from hydra.lib import util
from hydra.kafkatest.runtest import RunTestKAFKA
from hydra.lib.boundary import Scanner
from optparse import OptionParser
l = util.createlogger('runSuitMaxRate', logging.INFO)
| 38.247191
| 94
| 0.595476
|
327a37a67a58b314caa95c02379bd85e44d7216f
| 722
|
py
|
Python
|
src/api/v1/villains/serializers.py
|
reiniervdwindt/power-ranger-api
|
13ce639a7f5e9d4b106ce5f094c076db0aad398e
|
[
"MIT"
] | null | null | null |
src/api/v1/villains/serializers.py
|
reiniervdwindt/power-ranger-api
|
13ce639a7f5e9d4b106ce5f094c076db0aad398e
|
[
"MIT"
] | null | null | null |
src/api/v1/villains/serializers.py
|
reiniervdwindt/power-ranger-api
|
13ce639a7f5e9d4b106ce5f094c076db0aad398e
|
[
"MIT"
] | null | null | null |
from rest_framework import serializers
from series.models import Series
from villains.models import Villain
| 26.740741
| 88
| 0.688366
|
327a4fc033970cf2fec138ab6d2ea6fa9e580d97
| 1,574
|
py
|
Python
|
map_report.py
|
porcpine1967/aoe2stats
|
52965e437b8471753186ba1fc34cb773807eb496
|
[
"MIT"
] | null | null | null |
map_report.py
|
porcpine1967/aoe2stats
|
52965e437b8471753186ba1fc34cb773807eb496
|
[
"MIT"
] | null | null | null |
map_report.py
|
porcpine1967/aoe2stats
|
52965e437b8471753186ba1fc34cb773807eb496
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
""" Writes out map popularity of last two pools."""
from datetime import datetime, timedelta
from utils.map_pools import map_type_filter, pools
from utils.tools import execute_sql, last_time_breakpoint, map_name_lookup
SQL = """SELECT map_type, COUNT(*) as cnt
FROM matches
WHERE started BETWEEN {:0.0f} AND {:0.0f}
{}
AND team_size = {}
GROUP BY map_type
ORDER BY cnt DESC"""
def run():
""" Run the report."""
map_names = map_name_lookup()
weeks = pools()[-2:]
for size in (1, 2):
print("TEAM" if size > 1 else "1v1")
week_infos = []
for idx, week in enumerate(weeks):
week_info = []
year = int(week[:4])
month = int(week[4:6])
day = int(week[6:])
start = last_time_breakpoint(datetime(year, month, day))
end = start + timedelta(days=14)
sql = SQL.format(
start.timestamp(), end.timestamp(), map_type_filter(week, size), size
)
total = 0
for map_type, count in execute_sql(sql):
week_info.append((map_names[map_type], count,))
total += count
hold = []
for name, count in week_info:
hold.append("{:17}: {:4.1f}%".format(name, 100.0 * count / total))
week_infos.append(hold)
print("{:^24} {:^24}".format(*weeks))
for idx in range(len(week_infos[0])):
print("{} {}".format(week_infos[0][idx], week_infos[1][idx]))
if __name__ == "__main__":
run()
| 32.122449
| 85
| 0.560991
|
327c981e0a47fcedcb62aea60362f8adb3c7ccec
| 5,277
|
py
|
Python
|
common/xrd-ui-tests-qautomate/pagemodel/ss_keys_and_cert_dlg_subject_dname.py
|
ria-ee/XTM
|
6103f3f5bbba387b8b59b050c0c4f1fb2180fc37
|
[
"MIT"
] | 3
|
2018-03-15T14:22:50.000Z
|
2021-11-08T10:30:35.000Z
|
common/xrd-ui-tests-qautomate/pagemodel/ss_keys_and_cert_dlg_subject_dname.py
|
ria-ee/XTM
|
6103f3f5bbba387b8b59b050c0c4f1fb2180fc37
|
[
"MIT"
] | 11
|
2017-04-06T09:25:41.000Z
|
2018-06-04T09:08:48.000Z
|
common/xrd-ui-tests-qautomate/pagemodel/ss_keys_and_cert_dlg_subject_dname.py
|
ria-ee/XTM
|
6103f3f5bbba387b8b59b050c0c4f1fb2180fc37
|
[
"MIT"
] | 20
|
2017-03-14T07:21:58.000Z
|
2019-05-21T09:26:30.000Z
|
# -*- coding: utf-8 -*-
# Example for using WebDriver object: driver = get_driver() e.g driver.current_url
from webframework import TESTDATA
from variables import strings
from selenium.webdriver.common.by import By
from webframework.extension.util.common_utils import *
from webframework.extension.util.webtimings import get_measurements
from webframework.extension.parsers.parameter_parser import get_parameter
from time import sleep
| 53.30303
| 250
| 0.675005
|
327cb6d4121abb0fa5a0265759fdf829da140dce
| 6,303
|
py
|
Python
|
tempdb/postgres.py
|
runfalk/tempdb
|
a19f7568db1795025c9ec8adfd84a9544f9a6966
|
[
"MIT"
] | 2
|
2021-01-17T00:01:14.000Z
|
2021-01-18T09:26:56.000Z
|
tempdb/postgres.py
|
runfalk/tempdb
|
a19f7568db1795025c9ec8adfd84a9544f9a6966
|
[
"MIT"
] | null | null | null |
tempdb/postgres.py
|
runfalk/tempdb
|
a19f7568db1795025c9ec8adfd84a9544f9a6966
|
[
"MIT"
] | null | null | null |
import getpass
import os
import platform
import psycopg2
import sys
import tempfile
from glob import glob
from psycopg2.extensions import ISOLATION_LEVEL_AUTOCOMMIT, quote_ident
from subprocess import check_output, PIPE, Popen
from time import sleep
from ._compat import ustr
from .utils import is_executable, Uri, Version
__all__ = [
"PostgresFactory",
"PostgresCluster",
]
| 30.597087
| 79
| 0.58369
|
327ee9780e46ebbfd9024596b22934ad7011175f
| 426
|
py
|
Python
|
nymph/modules/tool.py
|
smilelight/nymph
|
c8da2211f7a8f58d1c6d327b243e419ed9e64ead
|
[
"Apache-2.0"
] | 1
|
2020-08-10T00:58:14.000Z
|
2020-08-10T00:58:14.000Z
|
nymph/modules/tool.py
|
smilelight/nymph
|
c8da2211f7a8f58d1c6d327b243e419ed9e64ead
|
[
"Apache-2.0"
] | null | null | null |
nymph/modules/tool.py
|
smilelight/nymph
|
c8da2211f7a8f58d1c6d327b243e419ed9e64ead
|
[
"Apache-2.0"
] | 1
|
2021-07-03T07:06:41.000Z
|
2021-07-03T07:06:41.000Z
|
# -*- coding: utf-8 -*-
import pandas as pd
| 30.428571
| 64
| 0.666667
|
327fa5382ee48b811835bb16249bdcc124edd278
| 1,187
|
py
|
Python
|
apps/core/serializers.py
|
jfterpstra/onepercentclub-site
|
43e8e01ac4d3d1ffdd5959ebd048ce95bb2dba0e
|
[
"BSD-3-Clause"
] | 7
|
2015-01-02T19:31:14.000Z
|
2021-03-22T17:30:23.000Z
|
apps/core/serializers.py
|
jfterpstra/onepercentclub-site
|
43e8e01ac4d3d1ffdd5959ebd048ce95bb2dba0e
|
[
"BSD-3-Clause"
] | 1
|
2015-03-06T08:34:59.000Z
|
2015-03-06T08:34:59.000Z
|
apps/core/serializers.py
|
jfterpstra/onepercentclub-site
|
43e8e01ac4d3d1ffdd5959ebd048ce95bb2dba0e
|
[
"BSD-3-Clause"
] | null | null | null |
from rest_framework import serializers
from bluebottle.utils.model_dispatcher import get_donation_model
from bluebottle.bb_projects.serializers import ProjectPreviewSerializer as BaseProjectPreviewSerializer
from bluebottle.bb_accounts.serializers import UserPreviewSerializer
DONATION_MODEL = get_donation_model()
| 42.392857
| 125
| 0.754844
|
328065cc7a0c80c52a732c0213b03b1281db7d57
| 1,035
|
py
|
Python
|
Python/rockpaperscissors/rockpaperscissors.py
|
rvrheenen/OpenKattis
|
7fd59fcb54e86cdf10f56c580c218c62e584f391
|
[
"MIT"
] | 12
|
2016-10-03T20:43:43.000Z
|
2021-06-12T17:18:42.000Z
|
Python/rockpaperscissors/rockpaperscissors.py
|
rvrheenen/OpenKattis
|
7fd59fcb54e86cdf10f56c580c218c62e584f391
|
[
"MIT"
] | null | null | null |
Python/rockpaperscissors/rockpaperscissors.py
|
rvrheenen/OpenKattis
|
7fd59fcb54e86cdf10f56c580c218c62e584f391
|
[
"MIT"
] | 10
|
2017-11-14T19:56:37.000Z
|
2021-02-02T07:39:57.000Z
|
# WORKS BUT ISN'T FAST ENOUGH
first_run = True
while(True):
inp = input().split()
if len(inp) == 1:
break
if first_run:
first_run = False
else:
print()
nPlayers, nGames = [int(x) for x in inp]
resultsW = [0] * nPlayers
resultsL = [0] * nPlayers
for i in range( int( ((nGames*nPlayers)*(nPlayers - 1)) / 2 ) ):
p1, p1move, p2, p2move = [int(x) if x.isdigit() else x for x in input().split()]
if p1move == p2move:
continue
if (p1move == "scissors" and p2move == "paper") or (p1move == "paper" and p2move == "rock") or (p1move == "rock" and p2move == "scissors"):
resultsW[p1-1] += 1
resultsL[p2-1] += 1
else:
resultsW[p2-1] += 1
resultsL[p1-1] += 1
for i in range(nPlayers):
w_plus_l = resultsL[i] + resultsW[i]
if w_plus_l == 0:
print("-")
else:
print("%.3f" % (resultsL[i] / w_plus_l))
print("\n\n\n\n\n\n\n")
print(resultsW)
| 32.34375
| 147
| 0.510145
|
3280c700cb467b6fd44a96a8f003a083cb2e0a5f
| 9,460
|
py
|
Python
|
monitorcontrol/monitor_control.py
|
klwlau/monitorcontrol
|
92d07c7a93585de14551ba1f1dd8bb3a009c4842
|
[
"MIT"
] | null | null | null |
monitorcontrol/monitor_control.py
|
klwlau/monitorcontrol
|
92d07c7a93585de14551ba1f1dd8bb3a009c4842
|
[
"MIT"
] | null | null | null |
monitorcontrol/monitor_control.py
|
klwlau/monitorcontrol
|
92d07c7a93585de14551ba1f1dd8bb3a009c4842
|
[
"MIT"
] | null | null | null |
###############################################################################
# Copyright 2019 Alex M.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
###############################################################################
from . import vcp
import sys
from typing import Type, List, Union, Iterable
def get_vcps() -> List[Type[vcp.VCP]]:
"""
Discovers virtual control panels.
This function should not be used directly in most cases, use
:py:meth:`get_monitors()` or :py:meth:`iterate_monitors()` to
get monitors with VCPs.
Returns:
List of VCPs in a closed state.
Raises:
NotImplementedError: not implemented for your operating system
VCPError: failed to list VCPs
"""
if sys.platform == "win32" or sys.platform.startswith("linux"):
return vcp.get_vcps()
else:
raise NotImplementedError(f"not implemented for {sys.platform}")
def get_monitors() -> List[Monitor]:
"""
Creates a list of all monitors.
Returns:
List of monitors in a closed state.
Raises:
NotImplementedError: not implemented for your operating system
VCPError: failed to list VCPs
Example:
Setting the power mode of all monitors to standby::
for monitor in get_monitors():
try:
monitor.open()
# put monitor in standby mode
monitor.power_mode = "standby"
except VCPError:
print("uh-oh")
raise
finally:
monitor.close()
Setting all monitors to the maximum brightness using the
context manager::
for monitor in get_monitors():
with monitor as m:
# set back-light luminance to 100%
m.luminance = 100
"""
return [Monitor(v) for v in get_vcps()]
| 30.031746
| 79
| 0.595455
|
328135201e01cdb2208c77c5703c4b619db0d327
| 6,201
|
py
|
Python
|
algorithms/vae.py
|
ENSP-AI-Mentoring/machine-learning-algorithms
|
d53d5342f79d08066e158228cab6240872f61f72
|
[
"Apache-2.0"
] | 1
|
2021-11-14T19:46:46.000Z
|
2021-11-14T19:46:46.000Z
|
algorithms/vae.py
|
ENSP-AI-Mentoring/machine-learning-algorithms
|
d53d5342f79d08066e158228cab6240872f61f72
|
[
"Apache-2.0"
] | null | null | null |
algorithms/vae.py
|
ENSP-AI-Mentoring/machine-learning-algorithms
|
d53d5342f79d08066e158228cab6240872f61f72
|
[
"Apache-2.0"
] | null | null | null |
import numpy as np
import torch
from torch.optim import Adam
from torch.utils.data import DataLoader
from tqdm import tqdm
| 31.8
| 94
| 0.588776
|
32837c01862960b0796752083e66eefb2afb0c24
| 1,244
|
py
|
Python
|
qfig.py
|
mth1haha/BlockchainQueueingNetwork
|
611dc84b857efbec22edfe5f3a1bb8f7052a39aa
|
[
"Apache-2.0"
] | 1
|
2021-11-30T08:22:43.000Z
|
2021-11-30T08:22:43.000Z
|
qfig.py
|
mth1haha/BlockchainQueueingNetwork
|
611dc84b857efbec22edfe5f3a1bb8f7052a39aa
|
[
"Apache-2.0"
] | null | null | null |
qfig.py
|
mth1haha/BlockchainQueueingNetwork
|
611dc84b857efbec22edfe5f3a1bb8f7052a39aa
|
[
"Apache-2.0"
] | 1
|
2020-11-25T08:48:25.000Z
|
2020-11-25T08:48:25.000Z
|
import simpy as sp
import numpy as np
import seaborn as sns
import matplotlib.pyplot as plt
from scipy import stats, integrate
lamda = 75
alpha = 0.333
mu1 = 370
mu2 = 370*(0.666)
num_bins = 50
runtime = 1000 #
tic = [] #
toc = [] #
env = sp.Environment()
q = sp.Store(env)
env.process(client(env, lamda, q, tic))
env.process(server(env, alpha, mu1, mu2, q, toc))
env.run(until=runtime)
l = len(tic)
a = toc
b = toc
#b = toc[0:l:40]
histdata = [b[i] - b[i-1] for i in range(1, len(b))]
sns.distplot(histdata, kde=False, fit=stats.expon)
plt.xlabel("inter departure time (s)")
plt.xlim(0,0.15)
#plt.ylim(0,100)
plt.savefig('dist1.png')
plt.show()
#plt.hist(histdata, num_bins)
#plt.show()
| 20.393443
| 52
| 0.619775
|
328382e2d62ec49094cab44e02a8b760c1f9a700
| 4,756
|
py
|
Python
|
all_words.py
|
secureterminal/100-Days-of-Code
|
04383ae541938d8a551b5aac9a0dad3348a6ef23
|
[
"MIT"
] | 1
|
2022-01-28T13:55:39.000Z
|
2022-01-28T13:55:39.000Z
|
Day 7/all_words.py
|
secureterminal/100-Days-of-Code
|
04383ae541938d8a551b5aac9a0dad3348a6ef23
|
[
"MIT"
] | 1
|
2022-02-02T00:13:18.000Z
|
2022-02-03T11:32:53.000Z
|
Day 7/all_words.py
|
secureterminal/100-Days-of-Code
|
04383ae541938d8a551b5aac9a0dad3348a6ef23
|
[
"MIT"
] | 2
|
2022-02-07T20:49:36.000Z
|
2022-02-19T21:22:15.000Z
|
word_list = ['pseudolamellibranchiate',
'microcolorimetrically',
'pancreaticoduodenostomy',
'theologicoastronomical',
'pancreatoduodenectomy',
'tetraiodophenolphthalein',
'choledocholithotripsy',
'hematospectrophotometer',
'deintellectualization',
'pharyngoepiglottidean',
'psychophysiologically',
'pathologicopsychological',
'pseudomonocotyledonous',
'philosophicohistorical',
'Pseudolamellibranchia',
'chlamydobacteriaceous',
'cholecystoduodenostomy',
'anemometrographically',
'duodenopancreatectomy',
'dacryocystoblennorrhea',
'thymolsulphonephthalein',
'aminoacetophenetidine',
'ureterocystanastomosis',
'undistinguishableness',
'disestablishmentarian',
'cryptocrystallization',
'scientificogeographical',
'chemicopharmaceutical',
'overindustrialization',
'counterinterpretation',
'superincomprehensible',
'dacryocystorhinostomy',
'choledochoduodenostomy',
'cholecystogastrostomy',
'photochronographically',
'philosophicoreligious',
'scleroticochoroiditis',
'pyopneumocholecystitis',
'crystalloluminescence',
'phoneticohieroglyphic',
'historicogeographical',
'counterreconnaissance',
'pathologicoanatomical',
'omnirepresentativeness',
'establishmentarianism',
'glossolabiopharyngeal',
'pseudohermaphroditism',
'anthropoclimatologist',
'cholecystojejunostomy',
'epididymodeferentectomy',
'pericardiomediastinitis',
'cholecystolithotripsy',
'tessarescaedecahedron',
'electrotelethermometer',
'pharmacoendocrinology',
'poliencephalomyelitis',
'duodenocholedochotomy',
'cholecystonephrostomy',
'formaldehydesulphoxylate',
'dacryocystosyringotomy',
'counterpronunciamento',
'cholecystenterorrhaphy',
'deanthropomorphization',
'microseismometrograph',
'pseudoparthenogenesis',
'Pseudolamellibranchiata',
'ureteropyelonephritis',
'electroencephalography',
'anticonstitutionalist',
'electroencephalograph',
'hypsidolichocephalism',
'mandibulosuspensorial',
'acetylphenylhydrazine',
'hexanitrodiphenylamine',
'historicocabbalistical',
'hexachlorocyclohexane',
'anatomicophysiological',
'pseudoanthropological',
'microcryptocrystalline',
'lymphangioendothelioma',
'nonrepresentationalism',
'blepharoconjunctivitis',
'hydropneumopericardium',
'stereoroentgenography',
'otorhinolaryngologist',
'scientificohistorical',
'phenolsulphonephthalein',
'mechanicointellectual',
'counterexcommunication',
'duodenocholecystostomy',
'noninterchangeability',
'thermophosphorescence',
'naphthylaminesulphonic',
'polioencephalomyelitis',
'stereophotomicrograph',
'philosophicotheological',
'theologicometaphysical',
'benzalphenylhydrazone',
'scleroticochorioiditis',
'anthropomorphologically',
'thyroparathyroidectomize',
'disproportionableness',
'heterotransplantation',
'membranocartilaginous',
'scientificophilosophical',
'thyroparathyroidectomy',
'enterocholecystostomy',
'Prorhipidoglossomorpha',
'constitutionalization',
'poluphloisboiotatotic',
'anatomicopathological',
'zoologicoarchaeologist',
'protransubstantiation',
'labioglossopharyngeal',
'pneumohydropericardium',
'choledochoenterostomy',
'zygomaticoauricularis',
'anthropomorphological',
'stereophotomicrography',
'aquopentamminecobaltic',
'hexamethylenetetramine',
'macracanthrorhynchiasis',
'palaeodendrologically',
'intertransformability',
'hyperconscientiousness',
'laparocolpohysterotomy',
'indistinguishableness',
'formaldehydesulphoxylic',
'blepharosphincterectomy',
'transubstantiationalist',
'transubstantiationite',
'prostatovesiculectomy',
'pathologicohistological',
'platydolichocephalous',
'pneumoventriculography',
'photochromolithograph',
'gastroenteroanastomosis',
'chromophotolithograph',
'pentamethylenediamine',
'historicophilosophica',
'intellectualistically',
'gastroenterocolostomy',
'pancreaticogastrostomy',
'appendorontgenography',
'photospectroheliograph']
stages = ['''
+---+
| |
O |
/|\ |
/ \ |
|
=========
''', '''
+---+
| |
O |
/|\ |
/ |
|
=========
''', '''
+---+
| |
O |
/|\ |
|
|
=========
''', '''
+---+
| |
O |
/| |
|
|
=========''', '''
+---+
| |
O |
| |
|
|
=========
''', '''
+---+
| |
O |
|
|
|
=========
''', '''
+---+
| |
|
|
|
|
=========
''']
logo = '''
_
| |
| |__ __ _ _ __ __ _ _ __ ___ __ _ _ __
| '_ \ / _` | '_ \ / _` | '_ ` _ \ / _` | '_ \
| | | | (_| | | | | (_| | | | | | | (_| | | | |
|_| |_|\__,_|_| |_|\__, |_| |_| |_|\__,_|_| |_|
__/ |
|___/
'''
| 21.716895
| 47
| 0.676409
|
32839d586b1955e1c6b167959e736b233c1def5e
| 363
|
py
|
Python
|
vandal/objects/__init__.py
|
vandal-dev/vandal
|
1981c86f4de6632776a4132ecbc206fac5188f32
|
[
"Apache-2.0"
] | 1
|
2022-02-22T18:39:57.000Z
|
2022-02-22T18:39:57.000Z
|
vandal/objects/__init__.py
|
vandal-dev/vandal
|
1981c86f4de6632776a4132ecbc206fac5188f32
|
[
"Apache-2.0"
] | null | null | null |
vandal/objects/__init__.py
|
vandal-dev/vandal
|
1981c86f4de6632776a4132ecbc206fac5188f32
|
[
"Apache-2.0"
] | null | null | null |
# import all relevant contents from the associated module.
from vandal.objects.montecarlo import (
MonteCarlo,
MCapp,
)
from vandal.objects.eoq import(
EOQ,
EOQapp,
)
from vandal.objects.dijkstra import Dijkstra
# all relevant contents.
__all__ = [
'MonteCarlo',
'EOQ',
'Dijkstra',
'MCapp',
'EOQapp',
]
| 16.5
| 59
| 0.628099
|
3283d11b9d4cf8bd45f4150291dcecd926809bd7
| 124
|
py
|
Python
|
authentication/admin.py
|
jatingupta14/cruzz
|
9a00f1555cdd5c76c9ef250d7037d72d725de367
|
[
"MIT"
] | 7
|
2018-11-09T14:40:54.000Z
|
2019-12-20T08:10:17.000Z
|
authentication/admin.py
|
jatingupta14/cruzz
|
9a00f1555cdd5c76c9ef250d7037d72d725de367
|
[
"MIT"
] | 25
|
2018-11-30T17:38:36.000Z
|
2018-12-27T17:21:09.000Z
|
authentication/admin.py
|
jatingupta14/cruzz
|
9a00f1555cdd5c76c9ef250d7037d72d725de367
|
[
"MIT"
] | 6
|
2018-12-03T14:44:29.000Z
|
2018-12-26T11:49:43.000Z
|
# Django
from django.contrib import admin
# local Django
from authentication.models import User
admin.site.register(User)
| 15.5
| 38
| 0.806452
|
3287cec655cdef3ec14897e557822dfcd28c5019
| 84
|
py
|
Python
|
nv/__init__.py
|
3stack-software/nv
|
7b00fb857aea238ed060a9eb017e351aac19258e
|
[
"Apache-2.0"
] | null | null | null |
nv/__init__.py
|
3stack-software/nv
|
7b00fb857aea238ed060a9eb017e351aac19258e
|
[
"Apache-2.0"
] | 1
|
2017-06-19T00:52:37.000Z
|
2017-06-19T00:52:37.000Z
|
nv/__init__.py
|
3stack-software/nv
|
7b00fb857aea238ed060a9eb017e351aac19258e
|
[
"Apache-2.0"
] | null | null | null |
from .__version__ import __version__
from .core import create, remove, launch_shell
| 28
| 46
| 0.833333
|
32885105782d33bbebe4c4cc904fbc2149735713
| 784
|
py
|
Python
|
app/live/tests.py
|
B-ROY/TESTGIT
|
40221cf254c90d37d21afb981635740aebf11949
|
[
"Apache-2.0"
] | 2
|
2017-12-02T13:58:30.000Z
|
2018-08-02T17:07:59.000Z
|
app/live/tests.py
|
B-ROY/TESTGIT
|
40221cf254c90d37d21afb981635740aebf11949
|
[
"Apache-2.0"
] | null | null | null |
app/live/tests.py
|
B-ROY/TESTGIT
|
40221cf254c90d37d21afb981635740aebf11949
|
[
"Apache-2.0"
] | null | null | null |
import os
from django.conf import settings
from django.contrib.auth.models import User
from django.contrib.auth.tests.utils import skipIfCustomUser
from django.contrib.flatpages.models import FlatPage
from django.test import TestCase
from django.test.utils import override_settings
if __name__ == '__main__':
unittest.main()
| 27.034483
| 60
| 0.632653
|
328b211073d9f2b0d84385aebf512b9639d8569d
| 1,133
|
py
|
Python
|
application/utils/data_transfer_objects.py
|
charles-crawford/sentiment
|
38cfd6af1cc81ad1858621a182cd76dc3e5f04db
|
[
"MIT"
] | null | null | null |
application/utils/data_transfer_objects.py
|
charles-crawford/sentiment
|
38cfd6af1cc81ad1858621a182cd76dc3e5f04db
|
[
"MIT"
] | null | null | null |
application/utils/data_transfer_objects.py
|
charles-crawford/sentiment
|
38cfd6af1cc81ad1858621a182cd76dc3e5f04db
|
[
"MIT"
] | null | null | null |
from flask_restx.fields import String, Boolean, Raw, List, Float, Nested
| 33.323529
| 112
| 0.529568
|
328faff3ddad6381d560dd2330552d383362af7f
| 91
|
py
|
Python
|
utils.py
|
Spratiher9/newsnuggets
|
1147e55a9a0c8a483384711840462b1526cf7681
|
[
"MIT"
] | 1
|
2021-11-17T19:18:42.000Z
|
2021-11-17T19:18:42.000Z
|
utils.py
|
Spratiher9/newsnuggets
|
1147e55a9a0c8a483384711840462b1526cf7681
|
[
"MIT"
] | null | null | null |
utils.py
|
Spratiher9/newsnuggets
|
1147e55a9a0c8a483384711840462b1526cf7681
|
[
"MIT"
] | null | null | null |
from gnews import GNews
| 18.2
| 25
| 0.725275
|
329003760fc6877a5fb340f8c2de344d9c2c4d3e
| 13,284
|
py
|
Python
|
grover.py
|
raulillo82/TFG-Fisica-2021
|
8acfd748c7f49ea294606a9c185227927ec2e256
|
[
"MIT"
] | null | null | null |
grover.py
|
raulillo82/TFG-Fisica-2021
|
8acfd748c7f49ea294606a9c185227927ec2e256
|
[
"MIT"
] | null | null | null |
grover.py
|
raulillo82/TFG-Fisica-2021
|
8acfd748c7f49ea294606a9c185227927ec2e256
|
[
"MIT"
] | null | null | null |
#!/usr/bin/python3
'''
* Copyright (C) 2021 Ral Osuna Snchez-Infante
*
* This software may be modified and distributed under the terms
* of the MIT license. See the LICENSE.txt file for details.
'''
##################
#Needed libraries#
##################
import matplotlib as mpl
mpl.use('TkAgg')
import matplotlib.pyplot as plt
import qiskit as q
import sys
from qiskit.visualization import plot_histogram
from qiskit.providers.ibmq import least_busy
from random import getrandbits
'''
Grover's algorithim. Intro
'''
#######################
#Functions definitions#
#######################
'''
Usage function
calling the program with "-h" or "--help" will display the help without returning an error (help was intended)
calling the progam with no options or wrong ones, will display the same help but returning an error
Please bear in mind that some combination of options are simply ignored, see the text of this function itself
'''
'''
Check whether parameter is an integer
'''
'''
Initialization:
Simply apply an H gate to every qubit
'''
'''
Implement multi controlled Z-gate, easy to reutilize
'''
'''
Oracle metaimplementation
This function will simply call one of the possibles oracles functions
'''
'''
Oracle implementation for 2 qubits.
Simply a controlled-Z gate (cz in qiskit).
For qubits different to 1, an x-gate is needed before and after the cz-gate
'''
'''
Oracle implementation for 3 qubits and single solution.
Reference for oracles: https://www.nature.com/articles/s41467-017-01904-7 (table 1)
'''
'''
Oracle implementation for 3 qubits and two possible solutions.
Reference for oracles: https://www.nature.com/articles/s41467-017-01904-7 (table 2)
'''
'''
Diffusion operator: Flip sign and amplify
For 2 qubits, simply apply H and Z to each qubit, then cz, and then apply H again to each qubit:
'''
'''
Add measurements and plot the quantum circuit:
'''
'''
Generate results from quantum simulator (no plotting)
'''
'''
Generate results from real quantum hardware (no plotting)
'''
def results_qhw(qc):
'''
#Only needed if credentials are not stored (e.g., deleted and regeneration is needed
token='XXXXXXXX' #Use token from ibm quantum portal if needed to enable again, should be stored under ~/.qiskit directory
q.IBMQ.save_account(token)
'''
provider = q.IBMQ.load_account()
provider = q.IBMQ.get_provider()
device = q.providers.ibmq.least_busy(provider.backends(filters=lambda x: x.configuration().n_qubits >= 3 and
not x.configuration().simulator and x.status().operational==True))
print("Running on current least busy device: ", device)
transpiled_grover_circuit = q.transpile(qc, device, optimization_level=3)
qobj = q.assemble(transpiled_grover_circuit)
job = device.run(qobj)
q.tools.monitor.job_monitor(job, interval=2)
return job
'''
Plot results
'''
##############################
#End of functions definitions#
##############################
################################
#Program actually starts here!!#
################################
#Initialization
grover_circuit = initialize()
#Generate the oracle randomly according to the command line arguments
oracle(grover_circuit)
#Diffusion
if (not(int(sys.argv[1]) == 3 and int(sys.argv[2]) == 1)):
diffusion(grover_circuit)
#Add measurements
measure(grover_circuit)
#Generate results in simulator
job_sim = results_qsim(grover_circuit)
#Plot these results
draw_job(job_sim, "Quantum simulator output")
#Generate results in quantum hw if requested
if int(sys.argv[4]) == 1:
plt.show(block=False)
plt.draw()
#Next line needed for keeping computations in background while still seeing the previous plots
plt.pause(0.001)
#Generate results in real quantum hardware
job_qhw = results_qhw(grover_circuit)
#Plot these results as well
draw_job(job_qhw, "Quantum hardware output")
#Keep plots active when done till they're closed, used for explanations during presentations
plt.show()
| 29.851685
| 187
| 0.546522
|
3291b0fa03bb75af83a902f66fc3f91285f8e9a3
| 9,147
|
py
|
Python
|
TM1py/Services/GitService.py
|
adscheevel/tm1py
|
8a53c7a63e3c0e2c6198c2cd0c2f57d10a7cfe43
|
[
"MIT"
] | 113
|
2019-03-12T19:42:39.000Z
|
2022-03-31T22:40:05.000Z
|
TM1py/Services/GitService.py
|
adscheevel/tm1py
|
8a53c7a63e3c0e2c6198c2cd0c2f57d10a7cfe43
|
[
"MIT"
] | 459
|
2019-01-25T09:32:18.000Z
|
2022-03-24T21:57:16.000Z
|
TM1py/Services/GitService.py
|
adscheevel/tm1py
|
8a53c7a63e3c0e2c6198c2cd0c2f57d10a7cfe43
|
[
"MIT"
] | 107
|
2019-01-31T15:08:34.000Z
|
2022-03-16T14:58:38.000Z
|
# -*- coding: utf-8 -*-
import json
from typing import List
from TM1py.Objects.Git import Git
from TM1py.Objects.GitCommit import GitCommit
from TM1py.Objects.GitPlan import GitPushPlan, GitPullPlan, GitPlan
from TM1py.Services.ObjectService import ObjectService
from TM1py.Services.RestService import RestService, Response
from TM1py.Utils.Utils import format_url
| 44.619512
| 117
| 0.608396
|
3294741b0f8e1bf0eeabf4019d19a68a63e99c23
| 1,419
|
py
|
Python
|
tests/bind_tests/diagram_tests/strategies.py
|
lycantropos/voronoi
|
977e0b3e5eff2dd294e2e6ce1a8030c763e86233
|
[
"MIT"
] | null | null | null |
tests/bind_tests/diagram_tests/strategies.py
|
lycantropos/voronoi
|
977e0b3e5eff2dd294e2e6ce1a8030c763e86233
|
[
"MIT"
] | null | null | null |
tests/bind_tests/diagram_tests/strategies.py
|
lycantropos/voronoi
|
977e0b3e5eff2dd294e2e6ce1a8030c763e86233
|
[
"MIT"
] | null | null | null |
from hypothesis import strategies
from hypothesis_geometry import planar
from tests.bind_tests.hints import (BoundCell,
BoundDiagram,
BoundEdge,
BoundVertex)
from tests.bind_tests.utils import (bound_source_categories,
to_bound_multipoint,
to_bound_multisegment)
from tests.strategies import (doubles,
integers_32,
sizes)
from tests.utils import to_maybe
booleans = strategies.booleans()
coordinates = doubles
empty_diagrams = strategies.builds(BoundDiagram)
source_categories = strategies.sampled_from(bound_source_categories)
cells = strategies.builds(BoundCell, sizes,
source_categories)
vertices = strategies.builds(BoundVertex, coordinates, coordinates)
edges = strategies.builds(BoundEdge, to_maybe(vertices), cells,
booleans, booleans)
cells_lists = strategies.lists(cells)
edges_lists = strategies.lists(edges)
vertices_lists = strategies.lists(vertices)
diagrams = strategies.builds(BoundDiagram, cells_lists, edges_lists,
vertices_lists)
multipoints = planar.multipoints(integers_32).map(to_bound_multipoint)
multisegments = planar.multisegments(integers_32).map(to_bound_multisegment)
| 44.34375
| 76
| 0.653982
|
32955f3ecdc5ec46e6e7127a3ed57f1411af2c54
| 2,381
|
py
|
Python
|
apps/blog/serializers.py
|
yc19890920/dble_fastapi_blog
|
dd9b8984d849df893d4fea270e8b75ac12d01241
|
[
"Apache-2.0"
] | null | null | null |
apps/blog/serializers.py
|
yc19890920/dble_fastapi_blog
|
dd9b8984d849df893d4fea270e8b75ac12d01241
|
[
"Apache-2.0"
] | 2
|
2021-03-31T19:56:46.000Z
|
2021-04-30T21:19:15.000Z
|
apps/blog/serializers.py
|
yc19890920/dble_fastapi_blog
|
dd9b8984d849df893d4fea270e8b75ac12d01241
|
[
"Apache-2.0"
] | null | null | null |
"""
@Author: YangCheng
@contact: 1248644045@qq.com
@Software: Y.C
@Time: 2020/7/21 15:22
"""
from typing import List
from pydantic import BaseModel, Field
from tortoise import Tortoise
from tortoise.contrib.pydantic import pydantic_model_creator, pydantic_queryset_creator
from lib.tortoise.pydantic import json_encoders
from .models import Tag, Category, Article
Tortoise.init_models(["apps.blog.models"], "models")
# -*- tag -*-
# Tag create/update
TagCreateRequest = pydantic_model_creator(
Tag, name="TagCreateRequest", exclude_readonly=True
)
TagCreateResponse = pydantic_model_creator(
Category, name="TagCreateResponse", exclude=["articles"]
)
TagCreateResponse.Config.json_encoders = json_encoders
# Tag List
TagListSerializer = pydantic_queryset_creator(
Tag, name="TagListSerializer", exclude=["articles"]
)
# -*- Category -*-
# Category create/update
CategoryCreateRequest = pydantic_model_creator(
Category, name="CategoryCreateRequest", exclude_readonly=True
)
CategoryCreateResponse = pydantic_model_creator(
Category, name="CategoryCreateResponse", exclude=("articles",)
)
CategoryCreateResponse.Config.json_encoders = json_encoders
# Category List
CategoryListSerializer = pydantic_queryset_creator(
Category, name="CategoryListSerializer", exclude=("articles",)
)
# -*- Article -*-
# Article create/update
ArticleCreateResponse = pydantic_model_creator(
Article, name="ArticleCreateResponse"
)
ArticleCreateResponse.Config.json_encoders = json_encoders
ArticleListSerializer = pydantic_queryset_creator(
Article, name="ArticleListSerializer"
)
# Article List
| 25.063158
| 87
| 0.761025
|
329a1a34027b83c6621340af222a98c0d43067e0
| 1,102
|
py
|
Python
|
Python/image_analysis_centerlines/analysis_example.py
|
fromenlab/guides
|
ac9831265f8219d5b5a8ee3a441fc77c7ae4fe3b
|
[
"MIT"
] | null | null | null |
Python/image_analysis_centerlines/analysis_example.py
|
fromenlab/guides
|
ac9831265f8219d5b5a8ee3a441fc77c7ae4fe3b
|
[
"MIT"
] | null | null | null |
Python/image_analysis_centerlines/analysis_example.py
|
fromenlab/guides
|
ac9831265f8219d5b5a8ee3a441fc77c7ae4fe3b
|
[
"MIT"
] | null | null | null |
from skimage import img_as_bool, io, color, morphology
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
# Testing process
# Import images
one = img_as_bool(color.rgb2gray(io.imread('1.jpg')))
cross = img_as_bool(color.rgb2gray(io.imread('cross.jpg')))
grid = img_as_bool(color.rgb2gray(io.imread('grid.jpg')))
# Get skeleton
one_skel = morphology.skeletonize(one)
cross_skel = morphology.skeletonize(cross)
grid_skel = morphology.skeletonize(grid)
# Get medial axis
one_med, one_med_distance = morphology.medial_axis(one, return_distance=True)
cross_med, cross_med_distance = morphology.medial_axis(cross, return_distance=True)
grid_med, grid_med_distance = morphology.medial_axis(grid, return_distance=True)
# Get skeleton distance
one_skel_distance = one_med_distance*one_skel
# Data processing for "1.jpg"
one_skel_nonzero = one_skel_distance.nonzero()
trans = np.transpose(one_skel_nonzero)
df_coords = pd.DataFrame(data = trans, columns = ["y", "x"])
df_dist = pd.DataFrame(data = one_skel_distance[one_skel_nonzero])
combined = pd.concat([df_coords, df_dist], axis=1)
| 34.4375
| 83
| 0.791289
|
329a5ba2f15a3280c3c7c2b2a6a0114abcec0cf9
| 485
|
py
|
Python
|
resources/settings.py
|
Miriel-py/Room-Wizard
|
83d86fe8e8fed8bb073b38465cd0e97b1a6113b8
|
[
"MIT"
] | null | null | null |
resources/settings.py
|
Miriel-py/Room-Wizard
|
83d86fe8e8fed8bb073b38465cd0e97b1a6113b8
|
[
"MIT"
] | null | null | null |
resources/settings.py
|
Miriel-py/Room-Wizard
|
83d86fe8e8fed8bb073b38465cd0e97b1a6113b8
|
[
"MIT"
] | null | null | null |
# global_data.py
import os
from dotenv import load_dotenv
# Read the bot token from the .env file
load_dotenv()
TOKEN = os.getenv('DISCORD_TOKEN')
DEBUG_MODE = os.getenv('DEBUG_MODE')
BOT_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
DB_FILE = os.path.join(BOT_DIR, 'database/room_wizard_db.db')
LOG_FILE = os.path.join(BOT_DIR, 'logs/discord.log')
DEV_GUILDS = [730115558766411857]
# Embed color
EMBED_COLOR = 0x6C48A7
DEFAULT_FOOTER = 'Just pinning things.'
| 24.25
| 69
| 0.764948
|
329eec6934c9b0ff2824d0ffd01a1902dae80850
| 1,767
|
py
|
Python
|
detection_algorithms/temporal_anomaly_detection/model_def.py
|
hanahs-deepfake-detection/detection-algorithms
|
6d7ec53eaf333adb10a1aba448f80fceaf7722be
|
[
"MIT"
] | null | null | null |
detection_algorithms/temporal_anomaly_detection/model_def.py
|
hanahs-deepfake-detection/detection-algorithms
|
6d7ec53eaf333adb10a1aba448f80fceaf7722be
|
[
"MIT"
] | null | null | null |
detection_algorithms/temporal_anomaly_detection/model_def.py
|
hanahs-deepfake-detection/detection-algorithms
|
6d7ec53eaf333adb10a1aba448f80fceaf7722be
|
[
"MIT"
] | null | null | null |
"""
Model Definition
"""
from tensorflow import keras
from tensorflow.keras.applications import ResNet101V2
from tensorflow.keras.layers import (
BatchNormalization, Conv2D, Dense, Dropout, Flatten, LSTM, MaxPool2D,
TimeDistributed, Lambda
)
import tensorflow as tf
from .spatial_transformer.bilinear_sampler import BilinearSampler
| 42.071429
| 84
| 0.654782
|
329f38947acdd5b4c36b6e62995a1a5be5206f16
| 1,515
|
py
|
Python
|
scripts/lwtnn-build-dummy-inputs.py
|
aghoshpub/lwtnn
|
979069b372f8c3d001d08fb0c756ff98954db644
|
[
"MIT"
] | 98
|
2016-11-27T04:05:56.000Z
|
2022-02-28T17:14:19.000Z
|
scripts/lwtnn-build-dummy-inputs.py
|
aghoshpub/lwtnn
|
979069b372f8c3d001d08fb0c756ff98954db644
|
[
"MIT"
] | 90
|
2016-11-24T15:13:31.000Z
|
2021-11-29T14:09:34.000Z
|
scripts/lwtnn-build-dummy-inputs.py
|
aghoshpub/lwtnn
|
979069b372f8c3d001d08fb0c756ff98954db644
|
[
"MIT"
] | 46
|
2016-12-15T17:21:43.000Z
|
2022-01-27T22:45:42.000Z
|
#!/usr/bin/env python3
"""Generate fake serialized NNs to test the lightweight classes"""
import argparse
import json
import h5py
import numpy as np
if __name__ == "__main__":
_run()
| 27.545455
| 72
| 0.634323
|
329f8f1e2538fb2f56b719613eee2ed54216347d
| 4,884
|
py
|
Python
|
osspeak/platforms/windows.py
|
OSSpeak/OSSpeak
|
327c38a37684165f87bf8d76ab2ca135b43b8ab7
|
[
"MIT"
] | 1
|
2020-03-17T10:24:41.000Z
|
2020-03-17T10:24:41.000Z
|
osspeak/platforms/windows.py
|
OSSpeak/OSSpeak
|
327c38a37684165f87bf8d76ab2ca135b43b8ab7
|
[
"MIT"
] | 12
|
2016-09-28T05:16:00.000Z
|
2020-11-27T22:32:40.000Z
|
osspeak/platforms/windows.py
|
OSSpeak/OSSpeak
|
327c38a37684165f87bf8d76ab2ca135b43b8ab7
|
[
"MIT"
] | null | null | null |
'''
Collection of Windows-specific I/O functions
'''
import msvcrt
import time
import ctypes
from platforms import winconstants, winclipboard
EnumWindows = ctypes.windll.user32.EnumWindows
EnumWindowsProc = ctypes.WINFUNCTYPE(ctypes.c_bool, ctypes.POINTER(ctypes.c_int), ctypes.POINTER(ctypes.c_int))
GetWindowText = ctypes.windll.user32.GetWindowTextW
GetWindowTextLength = ctypes.windll.user32.GetWindowTextLengthW
IsWindowVisible = ctypes.windll.user32.IsWindowVisible
| 37.282443
| 123
| 0.719287
|
32a0d30f56c4a1916c5ad0aef5a7b50495e1860b
| 715
|
py
|
Python
|
sudokusolver/common/messenger.py
|
Blondberg/SudokuSolver
|
4a6f1f927d41f7a39a953b9784b28d570edf1f09
|
[
"MIT"
] | null | null | null |
sudokusolver/common/messenger.py
|
Blondberg/SudokuSolver
|
4a6f1f927d41f7a39a953b9784b28d570edf1f09
|
[
"MIT"
] | null | null | null |
sudokusolver/common/messenger.py
|
Blondberg/SudokuSolver
|
4a6f1f927d41f7a39a953b9784b28d570edf1f09
|
[
"MIT"
] | null | null | null |
# messenger.py - contains functions to create different kinds of messages like info or error
# color the text, usage: print bcolors.WARNING + "Warning: No active frommets remain. Continue?" + bcolors.ENDC
BCOLORS = {
'HEADER': '\033[95m',
'OKBLUE': '\033[94m',
'OKGREEN': '\033[92m',
'WARNING': '\033[93m',
'FAIL': '\033[91m',
'ENDC': '\033[0m',
'BOLD': '\033[1m',
'UNDERLINE': '\033[4m'
}
# Information message
# Action message
# Error message
| 23.833333
| 111
| 0.633566
|
32a23291b7486cbc9a87ce5a914dd735071b20e4
| 554
|
py
|
Python
|
test.py
|
w0w/miniPFC
|
63b1bf608de03efada2a1b57c0370b6a7c2bf1ad
|
[
"MIT"
] | null | null | null |
test.py
|
w0w/miniPFC
|
63b1bf608de03efada2a1b57c0370b6a7c2bf1ad
|
[
"MIT"
] | null | null | null |
test.py
|
w0w/miniPFC
|
63b1bf608de03efada2a1b57c0370b6a7c2bf1ad
|
[
"MIT"
] | null | null | null |
import json
import RPi.GPIO as GPIO
from modules.sensor import getTempC, getHumidity
currentPins = loadConfig().values()
def bootActuators():
'''Assumes that pi is booting and set off all the relays'''
GPIO.setmode(GPIO.BOARD)
for i, p in enumerate(currentPins):
GPIO.setup(p, GPIO.OUT)
GPIO.output(p, GPIO.HIGH)
print(p, GPIO.input(p))
print('Actuators turned off')
bootActuators()
| 25.181818
| 63
| 0.66426
|
32a426fd1c9efac97183a6c708ae91ac77c14062
| 1,170
|
py
|
Python
|
example.py
|
clagraff/habu
|
28d05c2fa2204b26177bbaed969648b92b89c735
|
[
"MIT"
] | null | null | null |
example.py
|
clagraff/habu
|
28d05c2fa2204b26177bbaed969648b92b89c735
|
[
"MIT"
] | null | null | null |
example.py
|
clagraff/habu
|
28d05c2fa2204b26177bbaed969648b92b89c735
|
[
"MIT"
] | null | null | null |
import json
import habu
if __name__ == "__main__":
main()
| 23.4
| 105
| 0.417949
|
32a62b611ae086d7c010dc8106960f0f8f3738b2
| 1,162
|
py
|
Python
|
notify_tweet.py
|
mkaraki/WatchTweets
|
9b0a4ef66e38311453fff99d02091758b1bd0df5
|
[
"MIT"
] | null | null | null |
notify_tweet.py
|
mkaraki/WatchTweets
|
9b0a4ef66e38311453fff99d02091758b1bd0df5
|
[
"MIT"
] | 1
|
2022-01-26T18:03:15.000Z
|
2022-01-26T18:03:35.000Z
|
notify_tweet.py
|
mkaraki/WatchTweets
|
9b0a4ef66e38311453fff99d02091758b1bd0df5
|
[
"MIT"
] | null | null | null |
import json
import os
import requests
from dotenv import load_dotenv
# You have to configure in this file to notify other services
load_dotenv(override=True)
| 26.409091
| 87
| 0.553356
|
32aa7faedb604f995e124967e180cd9dc0c8087d
| 2,245
|
py
|
Python
|
credentials.py
|
Ken-mbira/Trust_Password_Protector
|
7d4d25e6d10582c21cc84ce0ffdffe45d45c0d63
|
[
"MIT"
] | null | null | null |
credentials.py
|
Ken-mbira/Trust_Password_Protector
|
7d4d25e6d10582c21cc84ce0ffdffe45d45c0d63
|
[
"MIT"
] | null | null | null |
credentials.py
|
Ken-mbira/Trust_Password_Protector
|
7d4d25e6d10582c21cc84ce0ffdffe45d45c0d63
|
[
"MIT"
] | 1
|
2021-09-07T05:08:02.000Z
|
2021-09-07T05:08:02.000Z
|
import random
import string
| 32.071429
| 128
| 0.632962
|
32ac15da27e5771cb19e9b355fd09244b1a2fee3
| 561
|
py
|
Python
|
misprogs/sensor_Luz_LCD.py
|
dacocube/CursoGalileo
|
1dac903031d9ff61174cb0c5e00e3f3795ea60de
|
[
"Apache-2.0"
] | null | null | null |
misprogs/sensor_Luz_LCD.py
|
dacocube/CursoGalileo
|
1dac903031d9ff61174cb0c5e00e3f3795ea60de
|
[
"Apache-2.0"
] | null | null | null |
misprogs/sensor_Luz_LCD.py
|
dacocube/CursoGalileo
|
1dac903031d9ff61174cb0c5e00e3f3795ea60de
|
[
"Apache-2.0"
] | null | null | null |
import signal
import sys
import time
import pyupm_grove as grove
import pyupm_i2clcd as lcd
if __name__=='__main__':
signal.signal(signal.SIGINT, interruptHandler)
myLcd = lcd.Jhd1313m1(0, 0x3E,0x62)
sensorluz=grove.GroveLight(0)
coloR=255
colorG=200
colorB=100
myLcd.setColor(coloR,colorG,colorB)
#read the input and print, waiting 1/2 seconds between reading
while True:
valorSensor=sensorluz.value()
myLcd.setCursor(0,0)
myLcd.write('%6d'% valorSensor)
time.sleep(0.5)
del sensorluz
| 20.777778
| 63
| 0.761141
|
32b0d4c387e53daeda7939c3bdfe5d3e18cb6dbb
| 210
|
py
|
Python
|
setup.py
|
cogsy23/pyfsm
|
22236994f7455a39489d1438b7c8bbcd081352be
|
[
"MIT"
] | null | null | null |
setup.py
|
cogsy23/pyfsm
|
22236994f7455a39489d1438b7c8bbcd081352be
|
[
"MIT"
] | null | null | null |
setup.py
|
cogsy23/pyfsm
|
22236994f7455a39489d1438b7c8bbcd081352be
|
[
"MIT"
] | null | null | null |
from setuptools import setup, find_packages
setup(
name='FSM',
version='0.1',
author='Ben Coughlan',
author_email='ben@cgsy.com.au',
packages=find_packages(),
license_file='LICENSE',
)
| 19.090909
| 43
| 0.666667
|
32b26100558c8d0079fd4f055056d994cd62c099
| 9,553
|
py
|
Python
|
clustviz/clarans.py
|
barbarametzler/ClustViz
|
a460e1ffb5195dfe1e12bca106366901d169a690
|
[
"MIT"
] | 6
|
2019-11-14T11:22:54.000Z
|
2020-03-01T09:14:21.000Z
|
clustviz/clarans.py
|
barbarametzler/ClustViz
|
a460e1ffb5195dfe1e12bca106366901d169a690
|
[
"MIT"
] | 2
|
2020-07-21T07:49:07.000Z
|
2021-04-06T16:16:09.000Z
|
clustviz/clarans.py
|
barbarametzler/ClustViz
|
a460e1ffb5195dfe1e12bca106366901d169a690
|
[
"MIT"
] | 5
|
2020-07-14T15:22:00.000Z
|
2022-03-19T19:45:32.000Z
|
import random
from typing import Tuple, Dict, Any
import scipy
import itertools
import graphviz
import numpy as np
import pandas as pd
from clustviz.pam import plot_pam
from pyclustering.utils import euclidean_distance_square
from pyclustering.cluster.clarans import clarans as clarans_pyclustering
def compute_cost_clarans(data: pd.DataFrame, _cur_choice: list) -> Tuple[float, Dict[Any, list]]:
"""
A function to compute the configuration cost. (modified from that of CLARA)
:param data: The input dataframe.
:param _cur_choice: The current set of medoid choices.
:return: The total configuration cost, the medoids.
"""
total_cost = 0.0
medoids = {}
for idx in _cur_choice:
medoids[idx] = []
for i in list(data.index):
choice = -1
min_cost = np.inf
for m in medoids:
# fast_euclidean from CLARA
tmp = np.linalg.norm(data.loc[m] - data.loc[i])
if tmp < min_cost:
choice = m
min_cost = tmp
medoids[choice].append(i)
total_cost += min_cost
# print("total_cost: ", total_cost)
return total_cost, medoids
def plot_tree_clarans(data: pd.DataFrame, k: int) -> None:
"""
plot G_{k,n} as in the paper of CLARANS; only to use with small input data.
:param data: input DataFrame.
:param k: number of points in each combination (possible set of medoids).
"""
n = len(data)
num_points = int(scipy.special.binom(n, k))
num_neigh = k * (n - k)
if (num_points > 50) or (num_neigh > 10):
print(
"Either graph nodes are more than 50 or neighbors are more than 10, the graph would be too big"
)
return
# all possibile combinations of k elements from input data
name_nodes = list(itertools.combinations(list(data.index), k))
dot = graphviz.Digraph(comment="Clustering")
# draw nodes, also adding the configuration cost
for i in range(num_points):
tot_cost, meds = compute_cost_clarans(data, list(name_nodes[i]))
tc = round(tot_cost, 3)
dot.node(str(name_nodes[i]), str(name_nodes[i]) + ": " + str(tc))
# only connect nodes if they have k-1 common elements
for i in range(num_points):
for j in range(num_points):
if i != j:
if (
len(set(list(name_nodes[i])) & set(list(name_nodes[j])))
== k - 1
):
dot.edge(str(name_nodes[i]), str(name_nodes[j]))
graph = graphviz.Source(dot) # .view()
display(graph)
| 36.185606
| 114
| 0.539098
|
32b489e63deb6a7323ecb9996f33d06edac172bd
| 1,507
|
py
|
Python
|
bin/demo_findit_backup_url.py
|
cariaso/metapub
|
bfa361dd6e5de8ee0859e596d490fb478f7dcfba
|
[
"Apache-2.0"
] | 28
|
2019-09-09T08:12:31.000Z
|
2021-12-17T00:09:14.000Z
|
bin/demo_findit_backup_url.py
|
cariaso/metapub
|
bfa361dd6e5de8ee0859e596d490fb478f7dcfba
|
[
"Apache-2.0"
] | 33
|
2019-11-07T05:36:04.000Z
|
2022-01-29T01:14:57.000Z
|
bin/demo_findit_backup_url.py
|
cariaso/metapub
|
bfa361dd6e5de8ee0859e596d490fb478f7dcfba
|
[
"Apache-2.0"
] | 10
|
2019-09-09T10:04:05.000Z
|
2021-06-08T16:00:14.000Z
|
from __future__ import absolute_import, print_function, unicode_literals
import os
import requests
from metapub.findit import FindIt
from metapub.exceptions import *
from requests.packages import urllib3
urllib3.disable_warnings()
OUTPUT_DIR = 'findit'
CURL_TIMEOUT = 4000
if __name__=='__main__':
import sys
try:
start_pmid = int(sys.argv[1])
except (IndexError, TypeError) as err:
print("Supply a pubmed ID as the starting point for this script.")
sys.exit()
for pmid in range(start_pmid, start_pmid+1000):
try_backup_url(pmid)
| 28.433962
| 98
| 0.666224
|
32b5c206b4bd2dca61a6557018af529be9b8ba2f
| 3,939
|
py
|
Python
|
kgcnn/layers/conv/dmpnn_conv.py
|
the16thpythonist/gcnn_keras
|
27d794095b684333d93149c825d84b85df8c30ff
|
[
"MIT"
] | 47
|
2021-03-10T10:15:42.000Z
|
2022-03-14T00:53:40.000Z
|
kgcnn/layers/conv/dmpnn_conv.py
|
the16thpythonist/gcnn_keras
|
27d794095b684333d93149c825d84b85df8c30ff
|
[
"MIT"
] | 36
|
2021-05-06T15:06:51.000Z
|
2022-03-02T13:06:16.000Z
|
kgcnn/layers/conv/dmpnn_conv.py
|
the16thpythonist/gcnn_keras
|
27d794095b684333d93149c825d84b85df8c30ff
|
[
"MIT"
] | 11
|
2021-04-05T02:14:27.000Z
|
2022-03-02T03:25:52.000Z
|
import tensorflow as tf
from kgcnn.layers.base import GraphBaseLayer
from kgcnn.layers.gather import GatherNodesOutgoing, GatherNodesIngoing
from kgcnn.layers.pooling import PoolingLocalEdges
from kgcnn.layers.modules import LazySubtract
| 43.766667
| 117
| 0.67276
|
32b80da9076a6963ab2a24a72478920a41611e59
| 181
|
py
|
Python
|
src/keys_management/secret_key/types.py
|
nielsen-oss/keys-management
|
ddeeceb19dae68516272fe13dfc6521dcbe295f2
|
[
"Apache-2.0"
] | 6
|
2021-06-25T17:21:18.000Z
|
2021-07-13T17:31:28.000Z
|
src/keys_management/secret_key/types.py
|
nielsen-oss/keys-management
|
ddeeceb19dae68516272fe13dfc6521dcbe295f2
|
[
"Apache-2.0"
] | null | null | null |
src/keys_management/secret_key/types.py
|
nielsen-oss/keys-management
|
ddeeceb19dae68516272fe13dfc6521dcbe295f2
|
[
"Apache-2.0"
] | null | null | null |
from typing import Callable, Tuple, Union
StrOrBytes = Union[str, bytes]
StrOrBytesPair = Tuple[StrOrBytes, StrOrBytes]
KeysStore = Callable[[], Union[StrOrBytes, StrOrBytesPair]]
| 30.166667
| 59
| 0.78453
|
32b877d4916dd5d40bd6976997b7ef7d01823785
| 349
|
py
|
Python
|
api/admin.py
|
jchmura/suchary-django
|
af2e8a62d222fd6eb18f29af95c23ab098ccc2a6
|
[
"MIT"
] | null | null | null |
api/admin.py
|
jchmura/suchary-django
|
af2e8a62d222fd6eb18f29af95c23ab098ccc2a6
|
[
"MIT"
] | 2
|
2021-03-19T21:54:17.000Z
|
2021-06-10T19:20:12.000Z
|
api/admin.py
|
jchmura/suchary-django
|
af2e8a62d222fd6eb18f29af95c23ab098ccc2a6
|
[
"MIT"
] | null | null | null |
from django.contrib import admin
from api.models import Device
admin.site.register(Device, DeviceAdmin)
| 26.846154
| 110
| 0.713467
|
32b93fe289994ee8aa84e901e1536e526ce09b82
| 169
|
py
|
Python
|
project/help/urls.py
|
samuraii/otus_python_backend
|
1bc7c8953a03008c94dd4b0ca89a7c830772f79a
|
[
"MIT"
] | null | null | null |
project/help/urls.py
|
samuraii/otus_python_backend
|
1bc7c8953a03008c94dd4b0ca89a7c830772f79a
|
[
"MIT"
] | null | null | null |
project/help/urls.py
|
samuraii/otus_python_backend
|
1bc7c8953a03008c94dd4b0ca89a7c830772f79a
|
[
"MIT"
] | null | null | null |
# from django.contrib import admin
# from django.urls import path
from django.conf.urls import url
from help import views
urlpatterns = [
url(r'^$', views.index)
]
| 18.777778
| 34
| 0.727811
|
32b9a1053b526032d5d6c19f20fe7c9cbc1b1859
| 5,299
|
py
|
Python
|
social_network/utils.py
|
diana-gv/django-social-network
|
48bafca81f28874ceead59e263ce5b7e3853dbfb
|
[
"BSD-3-Clause"
] | 3
|
2015-01-13T05:45:04.000Z
|
2020-01-10T19:05:35.000Z
|
social_network/utils.py
|
diana-gv/django-social-network
|
48bafca81f28874ceead59e263ce5b7e3853dbfb
|
[
"BSD-3-Clause"
] | null | null | null |
social_network/utils.py
|
diana-gv/django-social-network
|
48bafca81f28874ceead59e263ce5b7e3853dbfb
|
[
"BSD-3-Clause"
] | 6
|
2015-01-13T04:40:53.000Z
|
2021-08-13T01:07:40.000Z
|
# coding=utf-8
import random
from django.core.cache import cache
from django.core.exceptions import ObjectDoesNotExist
from django.utils.translation import ugettext as _
from notifications.models import EventType
from social_graph import EdgeType
try:
from hashlib import sha1 as sha_constructor, md5 as md5_constructor
except ImportError:
pass
#---------------------NOTIFICATIONS---------------------------------
#---------------------EDGES-----------------------------------------
#---------------------GENERAL-----------------------------------------
def generate_sha1(string, salt=None):
"""
Generates a sha1 hash for supplied string. Doesn't need to be very secure
because it's not used for password checking. We got Django for that.
:param string:
The string that needs to be encrypted.
:param salt:
Optionally define your own salt. If none is supplied, will use a random
string of 5 characters.
:return: Tuple containing the salt and hash.
"""
if not isinstance(string, (str, unicode)):
string = str(string)
if isinstance(string, unicode):
string = string.encode("utf-8")
if not salt:
salt = sha_constructor(str(random.random())).hexdigest()[:5]
hash = sha_constructor(salt+string).hexdigest()
return (salt, hash)
# A tuple of standard large number to their converters
intword_converters = (
(3, lambda number: _('%(value)dK')),
(6, lambda number: _('%(value)dM')),
(9, lambda number: _('%(value)dG')),
)
def intmin(value):
"""
"""
try:
value = int(value)
except (TypeError, ValueError):
return value
if value < 1000:
return value
for exponent, converter in intword_converters:
large_number = 10 ** exponent
if value < large_number * 1000:
new_value = value / large_number
tpl = "+%s" if value > large_number else "%s"
return tpl % converter(new_value) % {'value': new_value}
return value
| 31.35503
| 93
| 0.670881
|
32ba91d9753d50c77b106fbc0d73eade94889fbb
| 219
|
py
|
Python
|
datavis/urls.py
|
poulomihore/iot-hackathon
|
4f90c12c164f3ee09341fc1381b1f7898a5d3055
|
[
"MIT"
] | null | null | null |
datavis/urls.py
|
poulomihore/iot-hackathon
|
4f90c12c164f3ee09341fc1381b1f7898a5d3055
|
[
"MIT"
] | null | null | null |
datavis/urls.py
|
poulomihore/iot-hackathon
|
4f90c12c164f3ee09341fc1381b1f7898a5d3055
|
[
"MIT"
] | null | null | null |
from django.urls import path
from . import views
urlpatterns = [
path('', views.get_percentage, name='get_percentage'),
path('get_percentage_value', views.get_percentage_value, name='get_percentage_value'),
]
| 24.333333
| 90
| 0.748858
|
32bb0cd05fa6989d453a40177c162d1a6d206545
| 10,866
|
py
|
Python
|
datafiles/migrations/0001_initial.py
|
ChalkLab/SciFlow
|
5bf021007d6184402ebfe6cefc2111d99160cb69
|
[
"MIT"
] | 1
|
2021-04-26T20:03:11.000Z
|
2021-04-26T20:03:11.000Z
|
datafiles/migrations/0001_initial.py
|
ChalkLab/SciFlow
|
5bf021007d6184402ebfe6cefc2111d99160cb69
|
[
"MIT"
] | 17
|
2021-04-23T16:51:59.000Z
|
2021-12-13T21:17:41.000Z
|
datafiles/migrations/0001_initial.py
|
ChalkLab/SciFlow
|
5bf021007d6184402ebfe6cefc2111d99160cb69
|
[
"MIT"
] | null | null | null |
# Generated by Django 3.2.8 on 2021-10-12 15:54
from django.db import migrations, models
| 43.119048
| 117
| 0.510307
|
32bd83533b8a10d702670e0618e12d21f2714992
| 712
|
py
|
Python
|
f8a_jobs/handlers/flow.py
|
sawood14012/fabric8-analytics-jobs
|
a7d850dfef5785144676b9a3b4e29942161e5347
|
[
"Apache-2.0"
] | 5
|
2017-05-04T11:22:31.000Z
|
2018-08-24T16:12:30.000Z
|
f8a_jobs/handlers/flow.py
|
sawood14012/fabric8-analytics-jobs
|
a7d850dfef5785144676b9a3b4e29942161e5347
|
[
"Apache-2.0"
] | 325
|
2017-05-03T08:44:03.000Z
|
2021-12-13T21:03:49.000Z
|
f8a_jobs/handlers/flow.py
|
sawood14012/fabric8-analytics-jobs
|
a7d850dfef5785144676b9a3b4e29942161e5347
|
[
"Apache-2.0"
] | 28
|
2017-05-02T05:09:32.000Z
|
2021-03-11T09:42:34.000Z
|
"""Schedule multiple flows of a type."""
from .base import BaseHandler
| 33.904762
| 76
| 0.651685
|
32bdf6c9f66952e90bfd46bcfa58f2ec034c3c0d
| 1,032
|
py
|
Python
|
mako/stats/notifier.py
|
zer0tonin/mako
|
12420056e13e1acd333e686537d5ebc909450620
|
[
"MIT"
] | null | null | null |
mako/stats/notifier.py
|
zer0tonin/mako
|
12420056e13e1acd333e686537d5ebc909450620
|
[
"MIT"
] | 1
|
2021-06-02T04:22:46.000Z
|
2021-06-02T04:22:46.000Z
|
mako/stats/notifier.py
|
zer0tonin/mako
|
12420056e13e1acd333e686537d5ebc909450620
|
[
"MIT"
] | null | null | null |
import logging
logger = logging.getLogger(__name__)
| 30.352941
| 86
| 0.631783
|
32be27b57feb5ea94289c2693437fff5fe254149
| 286
|
py
|
Python
|
app/models/users.py
|
muzzammilh/valid-voice
|
7e5f8211471cfeb1f404de6b0b715196e8276b41
|
[
"MIT"
] | null | null | null |
app/models/users.py
|
muzzammilh/valid-voice
|
7e5f8211471cfeb1f404de6b0b715196e8276b41
|
[
"MIT"
] | null | null | null |
app/models/users.py
|
muzzammilh/valid-voice
|
7e5f8211471cfeb1f404de6b0b715196e8276b41
|
[
"MIT"
] | null | null | null |
from app.helpers.sqlalchemy import db
| 35.75
| 62
| 0.706294
|
32c012e2243ac30d8702a0e4c7e1a09c458c9ec8
| 12,819
|
py
|
Python
|
pysnmp/HUAWEI-CDP-COMPLIANCE-MIB.py
|
agustinhenze/mibs.snmplabs.com
|
1fc5c07860542b89212f4c8ab807057d9a9206c7
|
[
"Apache-2.0"
] | 11
|
2021-02-02T16:27:16.000Z
|
2021-08-31T06:22:49.000Z
|
pysnmp/HUAWEI-CDP-COMPLIANCE-MIB.py
|
agustinhenze/mibs.snmplabs.com
|
1fc5c07860542b89212f4c8ab807057d9a9206c7
|
[
"Apache-2.0"
] | 75
|
2021-02-24T17:30:31.000Z
|
2021-12-08T00:01:18.000Z
|
pysnmp/HUAWEI-CDP-COMPLIANCE-MIB.py
|
agustinhenze/mibs.snmplabs.com
|
1fc5c07860542b89212f4c8ab807057d9a9206c7
|
[
"Apache-2.0"
] | 10
|
2019-04-30T05:51:36.000Z
|
2022-02-16T03:33:41.000Z
|
#
# PySNMP MIB module HUAWEI-CDP-COMPLIANCE-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/HUAWEI-CDP-COMPLIANCE-MIB
# Produced by pysmi-0.3.4 at Mon Apr 29 19:31:50 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
ObjectIdentifier, OctetString, Integer = mibBuilder.importSymbols("ASN1", "ObjectIdentifier", "OctetString", "Integer")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ConstraintsIntersection, ConstraintsUnion, ValueRangeConstraint, SingleValueConstraint, ValueSizeConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "ConstraintsIntersection", "ConstraintsUnion", "ValueRangeConstraint", "SingleValueConstraint", "ValueSizeConstraint")
hwDatacomm, = mibBuilder.importSymbols("HUAWEI-MIB", "hwDatacomm")
InterfaceIndex, = mibBuilder.importSymbols("IF-MIB", "InterfaceIndex")
EnabledStatus, = mibBuilder.importSymbols("P-BRIDGE-MIB", "EnabledStatus")
ZeroBasedCounter32, TimeFilter = mibBuilder.importSymbols("RMON2-MIB", "ZeroBasedCounter32", "TimeFilter")
ModuleCompliance, NotificationGroup = mibBuilder.importSymbols("SNMPv2-CONF", "ModuleCompliance", "NotificationGroup")
Unsigned32, TimeTicks, Counter32, IpAddress, iso, NotificationType, ObjectIdentity, ModuleIdentity, Counter64, Bits, Gauge32, MibScalar, MibTable, MibTableRow, MibTableColumn, MibIdentifier, Integer32 = mibBuilder.importSymbols("SNMPv2-SMI", "Unsigned32", "TimeTicks", "Counter32", "IpAddress", "iso", "NotificationType", "ObjectIdentity", "ModuleIdentity", "Counter64", "Bits", "Gauge32", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "MibIdentifier", "Integer32")
TextualConvention, TruthValue, TimeStamp, DisplayString = mibBuilder.importSymbols("SNMPv2-TC", "TextualConvention", "TruthValue", "TimeStamp", "DisplayString")
hwCdpComplianceMIB = ModuleIdentity((1, 3, 6, 1, 4, 1, 2011, 5, 25, 198))
if mibBuilder.loadTexts: hwCdpComplianceMIB.setLastUpdated('200905050000Z')
if mibBuilder.loadTexts: hwCdpComplianceMIB.setOrganization('Huawei Technologies co.,Ltd.')
hwCdpComplianceObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 2011, 5, 25, 198, 1))
hwCdpComplianceNotifications = MibIdentifier((1, 3, 6, 1, 4, 1, 2011, 5, 25, 198, 2))
hwCdpComplianceConformance = MibIdentifier((1, 3, 6, 1, 4, 1, 2011, 5, 25, 198, 3))
hwCdpComplianceConfiguration = MibIdentifier((1, 3, 6, 1, 4, 1, 2011, 5, 25, 198, 1, 1))
hwCdpComplianceStatistics = MibIdentifier((1, 3, 6, 1, 4, 1, 2011, 5, 25, 198, 1, 2))
hwCdpComplianceRemoteSystemsData = MibIdentifier((1, 3, 6, 1, 4, 1, 2011, 5, 25, 198, 1, 3))
hwCdpComplianceEnable = MibScalar((1, 3, 6, 1, 4, 1, 2011, 5, 25, 198, 1, 1, 1), EnabledStatus().clone()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwCdpComplianceEnable.setStatus('current')
hwCdpComplianceNotificationInterval = MibScalar((1, 3, 6, 1, 4, 1, 2011, 5, 25, 198, 1, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 10)).clone(5)).setUnits('seconds').setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwCdpComplianceNotificationInterval.setStatus('current')
hwCdpCompliancePortConfigTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 5, 25, 198, 1, 1, 3), )
if mibBuilder.loadTexts: hwCdpCompliancePortConfigTable.setStatus('current')
hwCdpCompliancePortConfigEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 5, 25, 198, 1, 1, 3, 1), ).setIndexNames((0, "HUAWEI-CDP-COMPLIANCE-MIB", "hwCdpCompliancePortConfigIfIndex"))
if mibBuilder.loadTexts: hwCdpCompliancePortConfigEntry.setStatus('current')
hwCdpCompliancePortConfigIfIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 198, 1, 1, 3, 1, 1), InterfaceIndex())
if mibBuilder.loadTexts: hwCdpCompliancePortConfigIfIndex.setStatus('current')
hwCdpCompliancePortConfigAdminStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 198, 1, 1, 3, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("disabled", 1), ("rxOnly", 2))).clone('disabled')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwCdpCompliancePortConfigAdminStatus.setStatus('current')
hwCdpCompliancePortConfigHoldTime = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 198, 1, 1, 3, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(10, 254)).clone(180)).setUnits('seconds').setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwCdpCompliancePortConfigHoldTime.setStatus('current')
hwCdpCompliancePortConfigNotificationEnable = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 198, 1, 1, 3, 1, 4), TruthValue().clone('false')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwCdpCompliancePortConfigNotificationEnable.setStatus('current')
hwCdpCompliancePortStatsReset = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 198, 1, 1, 3, 1, 5), EnabledStatus()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwCdpCompliancePortStatsReset.setStatus('current')
hwCdpComplianceStatsRemTablesLastChangeTime = MibScalar((1, 3, 6, 1, 4, 1, 2011, 5, 25, 198, 1, 2, 1), TimeStamp()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwCdpComplianceStatsRemTablesLastChangeTime.setStatus('current')
hwCdpComplianceStatsRemTablesAgeouts = MibScalar((1, 3, 6, 1, 4, 1, 2011, 5, 25, 198, 1, 2, 2), ZeroBasedCounter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwCdpComplianceStatsRemTablesAgeouts.setStatus('current')
hwCdpComplianceStatsRxPortTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 5, 25, 198, 1, 2, 3), )
if mibBuilder.loadTexts: hwCdpComplianceStatsRxPortTable.setStatus('current')
hwCdpComplianceStatsRxPortEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 5, 25, 198, 1, 2, 3, 1), ).setIndexNames((0, "HUAWEI-CDP-COMPLIANCE-MIB", "hwCdpComplianceStatsRxPortIfIndex"))
if mibBuilder.loadTexts: hwCdpComplianceStatsRxPortEntry.setStatus('current')
hwCdpComplianceStatsRxPortIfIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 198, 1, 2, 3, 1, 1), InterfaceIndex())
if mibBuilder.loadTexts: hwCdpComplianceStatsRxPortIfIndex.setStatus('current')
hwCdpComplianceStatsRxPortFramesTotal = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 198, 1, 2, 3, 1, 2), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwCdpComplianceStatsRxPortFramesTotal.setStatus('current')
hwCdpComplianceStatsRxPortAgeoutsTotal = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 198, 1, 2, 3, 1, 3), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwCdpComplianceStatsRxPortAgeoutsTotal.setStatus('current')
hwCdpComplianceRemoteTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 5, 25, 198, 1, 3, 1), )
if mibBuilder.loadTexts: hwCdpComplianceRemoteTable.setStatus('current')
hwCdpComplianceRemoteEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 5, 25, 198, 1, 3, 1, 1), ).setIndexNames((0, "HUAWEI-CDP-COMPLIANCE-MIB", "hwCdpComplianceRemLocalPortIfIndex"))
if mibBuilder.loadTexts: hwCdpComplianceRemoteEntry.setStatus('current')
hwCdpComplianceRemLocalPortIfIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 198, 1, 3, 1, 1, 1), InterfaceIndex())
if mibBuilder.loadTexts: hwCdpComplianceRemLocalPortIfIndex.setStatus('current')
hwCdpComplianceRemTimeMark = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 198, 1, 3, 1, 1, 2), TimeFilter()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwCdpComplianceRemTimeMark.setStatus('current')
hwCdpComplianceRemoteInfo = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 198, 1, 3, 1, 1, 3), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 1600))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwCdpComplianceRemoteInfo.setStatus('current')
hwCdpComplianceNotificationPrefix = MibIdentifier((1, 3, 6, 1, 4, 1, 2011, 5, 25, 198, 2, 1))
hwCdpComplianceRemTablesChange = NotificationType((1, 3, 6, 1, 4, 1, 2011, 5, 25, 198, 2, 1, 1)).setObjects(("HUAWEI-CDP-COMPLIANCE-MIB", "hwCdpComplianceStatsRemTablesLastChangeTime"), ("HUAWEI-CDP-COMPLIANCE-MIB", "hwCdpComplianceStatsRemTablesAgeouts"))
if mibBuilder.loadTexts: hwCdpComplianceRemTablesChange.setStatus('current')
hwCdpComplianceCompliances = MibIdentifier((1, 3, 6, 1, 4, 1, 2011, 5, 25, 198, 3, 1))
hwCdpComplianceGroups = MibIdentifier((1, 3, 6, 1, 4, 1, 2011, 5, 25, 198, 3, 2))
hwCdpComplianceCompliance = ModuleCompliance((1, 3, 6, 1, 4, 1, 2011, 5, 25, 198, 3, 1, 1)).setObjects(("HUAWEI-CDP-COMPLIANCE-MIB", "hwCdpComplianceConfigGroup"), ("HUAWEI-CDP-COMPLIANCE-MIB", "hwCdpComplianceStatsGroup"), ("HUAWEI-CDP-COMPLIANCE-MIB", "hwCdpComplianceRemSysGroup"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
hwCdpComplianceCompliance = hwCdpComplianceCompliance.setStatus('current')
hwCdpComplianceConfigGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 2011, 5, 25, 198, 3, 2, 1)).setObjects(("HUAWEI-CDP-COMPLIANCE-MIB", "hwCdpComplianceEnable"), ("HUAWEI-CDP-COMPLIANCE-MIB", "hwCdpComplianceNotificationInterval"), ("HUAWEI-CDP-COMPLIANCE-MIB", "hwCdpCompliancePortConfigAdminStatus"), ("HUAWEI-CDP-COMPLIANCE-MIB", "hwCdpCompliancePortConfigHoldTime"), ("HUAWEI-CDP-COMPLIANCE-MIB", "hwCdpCompliancePortConfigNotificationEnable"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
hwCdpComplianceConfigGroup = hwCdpComplianceConfigGroup.setStatus('current')
hwCdpComplianceStatsGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 2011, 5, 25, 198, 3, 2, 2)).setObjects(("HUAWEI-CDP-COMPLIANCE-MIB", "hwCdpComplianceStatsRxPortFramesTotal"), ("HUAWEI-CDP-COMPLIANCE-MIB", "hwCdpCompliancePortStatsReset"), ("HUAWEI-CDP-COMPLIANCE-MIB", "hwCdpComplianceStatsRemTablesLastChangeTime"), ("HUAWEI-CDP-COMPLIANCE-MIB", "hwCdpComplianceStatsRemTablesAgeouts"), ("HUAWEI-CDP-COMPLIANCE-MIB", "hwCdpComplianceStatsRxPortAgeoutsTotal"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
hwCdpComplianceStatsGroup = hwCdpComplianceStatsGroup.setStatus('current')
hwCdpComplianceRemSysGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 2011, 5, 25, 198, 3, 2, 3)).setObjects(("HUAWEI-CDP-COMPLIANCE-MIB", "hwCdpComplianceRemoteInfo"), ("HUAWEI-CDP-COMPLIANCE-MIB", "hwCdpComplianceRemTimeMark"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
hwCdpComplianceRemSysGroup = hwCdpComplianceRemSysGroup.setStatus('current')
hwCdpComplianceTrapGroup = NotificationGroup((1, 3, 6, 1, 4, 1, 2011, 5, 25, 198, 3, 2, 4)).setObjects(("HUAWEI-CDP-COMPLIANCE-MIB", "hwCdpComplianceRemTablesChange"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
hwCdpComplianceTrapGroup = hwCdpComplianceTrapGroup.setStatus('current')
mibBuilder.exportSymbols("HUAWEI-CDP-COMPLIANCE-MIB", hwCdpComplianceRemoteTable=hwCdpComplianceRemoteTable, hwCdpCompliancePortConfigAdminStatus=hwCdpCompliancePortConfigAdminStatus, hwCdpComplianceRemoteInfo=hwCdpComplianceRemoteInfo, hwCdpComplianceGroups=hwCdpComplianceGroups, hwCdpComplianceRemoteEntry=hwCdpComplianceRemoteEntry, hwCdpCompliancePortConfigIfIndex=hwCdpCompliancePortConfigIfIndex, hwCdpComplianceEnable=hwCdpComplianceEnable, hwCdpComplianceNotifications=hwCdpComplianceNotifications, hwCdpComplianceCompliance=hwCdpComplianceCompliance, hwCdpCompliancePortConfigTable=hwCdpCompliancePortConfigTable, hwCdpComplianceNotificationPrefix=hwCdpComplianceNotificationPrefix, hwCdpComplianceStatsGroup=hwCdpComplianceStatsGroup, hwCdpComplianceStatsRemTablesAgeouts=hwCdpComplianceStatsRemTablesAgeouts, hwCdpComplianceStatsRemTablesLastChangeTime=hwCdpComplianceStatsRemTablesLastChangeTime, hwCdpComplianceStatsRxPortIfIndex=hwCdpComplianceStatsRxPortIfIndex, hwCdpComplianceRemTimeMark=hwCdpComplianceRemTimeMark, hwCdpComplianceRemoteSystemsData=hwCdpComplianceRemoteSystemsData, hwCdpComplianceStatsRxPortAgeoutsTotal=hwCdpComplianceStatsRxPortAgeoutsTotal, hwCdpCompliancePortStatsReset=hwCdpCompliancePortStatsReset, hwCdpComplianceRemTablesChange=hwCdpComplianceRemTablesChange, hwCdpComplianceConfiguration=hwCdpComplianceConfiguration, hwCdpComplianceTrapGroup=hwCdpComplianceTrapGroup, hwCdpComplianceMIB=hwCdpComplianceMIB, hwCdpComplianceRemLocalPortIfIndex=hwCdpComplianceRemLocalPortIfIndex, hwCdpComplianceObjects=hwCdpComplianceObjects, hwCdpComplianceNotificationInterval=hwCdpComplianceNotificationInterval, hwCdpComplianceStatsRxPortEntry=hwCdpComplianceStatsRxPortEntry, hwCdpCompliancePortConfigEntry=hwCdpCompliancePortConfigEntry, PYSNMP_MODULE_ID=hwCdpComplianceMIB, hwCdpComplianceCompliances=hwCdpComplianceCompliances, hwCdpComplianceRemSysGroup=hwCdpComplianceRemSysGroup, hwCdpCompliancePortConfigHoldTime=hwCdpCompliancePortConfigHoldTime, 
hwCdpComplianceStatsRxPortTable=hwCdpComplianceStatsRxPortTable, hwCdpComplianceConformance=hwCdpComplianceConformance, hwCdpComplianceConfigGroup=hwCdpComplianceConfigGroup, hwCdpComplianceStatistics=hwCdpComplianceStatistics, hwCdpCompliancePortConfigNotificationEnable=hwCdpCompliancePortConfigNotificationEnable, hwCdpComplianceStatsRxPortFramesTotal=hwCdpComplianceStatsRxPortFramesTotal)
| 140.868132
| 2,381
| 0.791715
|
32c0b9c3ba62988df85d3108c0c4b36be8f563b9
| 1,223
|
py
|
Python
|
pybb/contrib/mentions/processors.py
|
thoas/pybbm
|
0e7ab7ef60f15951660015f2b9be0ff7192f1095
|
[
"BSD-2-Clause"
] | 1
|
2015-05-18T09:19:30.000Z
|
2015-05-18T09:19:30.000Z
|
pybb/contrib/mentions/processors.py
|
ulule/pybbm
|
0e7ab7ef60f15951660015f2b9be0ff7192f1095
|
[
"BSD-2-Clause"
] | 5
|
2017-06-13T16:25:34.000Z
|
2018-07-17T20:30:56.000Z
|
pybb/contrib/mentions/processors.py
|
ulule/pybbm
|
0e7ab7ef60f15951660015f2b9be0ff7192f1095
|
[
"BSD-2-Clause"
] | 1
|
2018-10-29T13:12:59.000Z
|
2018-10-29T13:12:59.000Z
|
import re
from pybb.processors import BaseProcessor
from pybb.compat import get_user_model
from . import settings
| 25.479167
| 98
| 0.562551
|
32c304191982cf35da8aed8e53fd875c3bef3ba2
| 1,505
|
py
|
Python
|
PageObjectModel/Test/addAndEditionData.py
|
lblaszkowski/Arena
|
61f924bc7c3994ec7714fe68f60b02b35ccd286b
|
[
"Apache-2.0"
] | null | null | null |
PageObjectModel/Test/addAndEditionData.py
|
lblaszkowski/Arena
|
61f924bc7c3994ec7714fe68f60b02b35ccd286b
|
[
"Apache-2.0"
] | null | null | null |
PageObjectModel/Test/addAndEditionData.py
|
lblaszkowski/Arena
|
61f924bc7c3994ec7714fe68f60b02b35ccd286b
|
[
"Apache-2.0"
] | null | null | null |
import unittest
from selenium import webdriver
from PageObjectModel.Pages.addAndEditionDataPage import AddAndEditionData_Page
from time import sleep
url = 'https://buggy-testingcup.pgs-soft.com/'
| 32.717391
| 106
| 0.67907
|
32c40b429ba7f1090b72fd13e36b8055346940c3
| 827
|
py
|
Python
|
q2_api_client/clients/mobile_ws/calendar_client.py
|
jcook00/q2-api-client
|
4431af164eb4baf52e26e8842e017cad1609a279
|
[
"BSD-2-Clause"
] | null | null | null |
q2_api_client/clients/mobile_ws/calendar_client.py
|
jcook00/q2-api-client
|
4431af164eb4baf52e26e8842e017cad1609a279
|
[
"BSD-2-Clause"
] | null | null | null |
q2_api_client/clients/mobile_ws/calendar_client.py
|
jcook00/q2-api-client
|
4431af164eb4baf52e26e8842e017cad1609a279
|
[
"BSD-2-Clause"
] | null | null | null |
from q2_api_client.clients.base_q2_client import BaseQ2Client
from q2_api_client.endpoints.mobile_ws_endpoints import CalendarEndpoint
| 33.08
| 108
| 0.718259
|
32c4baf38f537ef55e48bae1faabe6aee1fe7ca3
| 11,477
|
py
|
Python
|
cg_token.py
|
gmnicke2/GISolve-API-Util
|
74d10d2ae60c1f000ef151a394ef9276b284867a
|
[
"MIT"
] | null | null | null |
cg_token.py
|
gmnicke2/GISolve-API-Util
|
74d10d2ae60c1f000ef151a394ef9276b284867a
|
[
"MIT"
] | null | null | null |
cg_token.py
|
gmnicke2/GISolve-API-Util
|
74d10d2ae60c1f000ef151a394ef9276b284867a
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
"""
Set of utilities to issue/verify/revoke a CG token with REST calls
Requires valid username and password either in bash environment or
given at the command line.
Issue Token:
Token can be easily created (and stored to env) with the folloing:
# create token using CG_USERNAME, CG_PASSWORD, and CG_API env variables
./cg_token.py
# create token specifying all the parameters on command line
./cg_token.py --username <login> --password <password> --endpoint <url>
# create token using CG_USERNAME, CG_API, but prompt for password
./cg_token.py --password -
# add token to environmental variables
export CG_TOKEN=`./cg_token.py`
# add token to environmental variable, specify extra parameters
export CG_TOKEN=`./cg_token.py --username <login> --endpoint <newurl>`
Verify or Revoke Token:
Verifying or Revoking requires the positional 'verify' or 'revoke'
command line argument.
User can still override env variables with command-line arguments.
Uses CG_API, and CG_TOKEN env variables for both.
Verify uses CG_CLIENT_ID and CG_CLIENT_IP for consumer ID & user client IP,
Revoke uses CG_USERNAME and CG_PASSWORD for security purposes :
# Verify token, overriding CG_CLIENT_ID and CG_CLIENT_IP with command
# line (Upon success, it will print the remaining lifetime of the token
# in seconds)
./cg_token.py verify --clientid <ID> --clientip <IP>
# Revoke token, overriding CG_TOKEN with command line
./cg_token.py revoke --token <token>
Print debug info to stderr:
Append the flag "--debug" or "-d" :
./cg_token.py --debug
"""
import sys, os, getpass
import json
import logging
import requests
import argparse
from requests import exceptions as rex
# This is used sed to disable InsecureRequestWarning.
requests.packages.urllib3.disable_warnings()
logger = logging.getLogger(__name__)
def logger_initialize(debug) :
    """Configure root logging output for this tool.

    Args:
        debug (bool): when True, log at DEBUG level; otherwise only
            warnings and errors are emitted.
    """
    _format = ("%(levelname)s - %(asctime)s\n%(message)s\n")
    # Single call site: pick the level first, configure once.
    level = logging.DEBUG if debug else logging.WARNING
    logging.basicConfig(format=_format, level=level)
def log_response(method, url, response, request) :
    """Write one REST request/response exchange to the debug log.

    A non-empty ``password`` entry in *request* is masked in place
    before logging so credentials never appear in log output.
    """
    if request.get('password', '') :
        request['password'] = '*******'

    def _pretty(obj):
        # One consistent pretty-printer for both payloads.
        return json.dumps(obj, indent=4, separators=(',', ': '))

    logger.debug("URL: " + url)
    logger.debug("Request: " + method)
    logger.debug("Request Data (in JSON format)"
        ": " + _pretty(request))
    logger.debug("Response (in JSON format)"
        ": " + _pretty(response))
def parse_args() :
    """Defines command line positional and optional arguments and checks
    for valid action input if present. Additionally prompts with getpass
    if user specifies "--password -" to override CG_PASSWORD
    Args: none
    Returns: A (tuple) containing the following:
        args (namespace) : used to overwrite env variables when necessary
        action (string) : for main to use as a switch for calls to perform
    """
    parser = argparse.ArgumentParser()
    parser.add_argument("-d", "--debug",
        action="store_true",
        help='Allow debug info to be written to stderr')
    parser.add_argument("-e", "--endpoint",
        default=os.getenv('CG_API',''),
        help="Set API url")
    parser.add_argument("-p", "--password",
        default=os.getenv('CG_PASSWORD',''),
        help="Set password. '-' for secure prompting")
    parser.add_argument("-u", "--username",
        default=os.getenv('CG_USERNAME',''),
        help="Set Username")
    parser.add_argument("-t", "--token",
        default=os.getenv('CG_TOKEN',''),
        help="Set Token for Verify/Revoke")
    # BUGFIX: was `type=long`, which raises NameError on Python 3.
    # `int` is unbounded in Python 3 (and auto-promotes in Python 2),
    # so the accepted value range is unchanged.
    parser.add_argument("-l", "--lifetime",
        type=int,
        default=43200,
        help="Set Lifetime for Token Issue in seconds"
            ". minimum=3600 (1hr), maximum=12*3600 (12hr)")
    parser.add_argument("-b", "--binding",
        type=int,
        default=1,
        help="1: Bind with IP Address, 0: Don't Bind")
    parser.add_argument("-c", "--clientid",
        default=os.getenv('CG_CLIENT_ID',''),
        help="Set Client ID for Verify")
    parser.add_argument("-i", "--clientip",
        default=os.getenv('CG_CLIENT_IP',''),
        help="Set Client IP for Verify")
    parser.add_argument("action", nargs='?', type=str, default='issue',
        help='issue/verify/revoke')
    args = parser.parse_args()
    logger_initialize(args.debug)
    # '-' is the sentinel for "prompt securely instead of using env/CLI".
    if args.password and args.password == '-' :
        args.password = getpass.getpass("Enter desired CG Password: ")
    if not args.endpoint :
        logger.error('CG_API (API url for REST calls) '
            'not specified\n')
        sys.exit(1)
    if args.action.lower() not in ['issue','verify','revoke'] :
        logger.error('Invalid Action')
        sys.exit(1)
    return (args,args.action.lower())
def cg_rest(method, endpoint, headers={}, **kwargs) :
    """Call a CG REST endpoint and return the decoded JSON response.

    Thin wrapper over the HTTP request that logs the exchange, maps
    gateway-reported errors to CGException, and lets transport-level
    exceptions propagate to the caller after a debug message.

    Args:
        method (str): the HTTP method that will be called
        endpoint (str, URL): the REST endpoint
        headers (dict, optional): HTTP headers
        kwargs (optional): request payload (username, password, token, ...)
    Returns:
        (dict): the JSON-decoded response body
    Raises:
        CGException when the gateway server returns an error status.
        Transport errors from the `requests` library are re-raised
        unchanged (ConnectionError, HTTPError, Timeout, ...).
    """
    verb = method.upper()
    # POST/PUT carry the payload in the body; GET/DELETE in the query string.
    body_methods = ('POST', 'PUT')
    try :
        if verb in body_methods :
            r = requests.request(verb, endpoint, timeout=50,
                verify=False, headers=headers, data=kwargs)
        else :
            r = requests.request(verb, endpoint, timeout=50,
                verify=False, headers=headers, params=kwargs)
        r.raise_for_status()
    except (rex.ConnectionError, rex.HTTPError, rex.MissingSchema) as e :
        logger.debug("Problem with API endpoint '%s', "
            "is it entered correctly?" %endpoint)
        raise
    except (rex.Timeout) as e :
        logger.debug('Request timed out, the service may be '
            'temporarily unavailable')
        raise
    response = r.json()
    log_response(method, endpoint, response, kwargs)
    # A missing 'status' key is treated as success; only an explicit
    # 'error' status raises.
    if response.get('status','') == 'error' :
        logger.debug("Call fails with '%s'" %response['result']['message'])
        raise CGException(response['result'])
    return response
def issue_token(endpoint, username, password, lifetime, binding) :
    """Request a new token from the gateway's issueToken endpoint.

    Args:
        endpoint (string, URL): the REST endpoint
        username (string): the user's login
        password (string): the user's password
        lifetime (int): token lifetime in seconds
            (3600 <= lifetime <= 12*3600)
        binding (int): 1 to bind the token to the user's IP, 0 otherwise
    Returns:
        (string): Open Service API token
    Raises:
        Passes any exceptions raised in cg_rest.
    """
    url = endpoint.rstrip('/') + '/token'
    logger.debug('Issuing token from %s' %url)
    payload = {
        'username' : username,
        'password' : password,
        'lifetime' : lifetime,
        'binding' : binding
    }
    return cg_rest('POST', url, **payload)['result']['token']
def verify_token(endpoint, username, token, client_id, client_ip) :
    """Verify a token and return its remaining lifetime in seconds.

    Args:
        endpoint (string, URL): the REST endpoint
        username (string): the user's login
        token (string): token to verify
        client_id (string): consumer ID
        client_ip (string): user client's IP address
    Returns:
        (int): remaining lifetime of the token (in seconds)
    Raises:
        Passes any exceptions raised in cg_rest.
    """
    url = endpoint.rstrip('/') + '/token'
    logger.debug("Verifying token '%s' from '%s'" %(token,url))
    payload = {
        'token' : token,
        'consumer' : client_id,
        'remote_addr' : client_ip,
        'username' : username
    }
    # The gateway expects an explicit Content-Length for this PUT.
    headers = {'Content-Length' : str(len(json.dumps(payload)))}
    response = cg_rest('PUT', url, headers=headers, **payload)
    return response['result']['lifetime']
def revoke_token(endpoint, username, password, token) :
    """Revoke a token via the gateway's revokeToken endpoint.

    Credentials are required again here as a security measure.

    Args:
        endpoint (string, URL): the REST endpoint
        username (string): the user's login
        password (string): the user's password
        token (string): the token to be revoked
    Returns: void
    Raises:
        Passes any exceptions raised in cg_rest.
    """
    url = endpoint.rstrip('/') + "/token"
    logger.debug("Revoking token '%s' from '%s'" %(token,url))
    cg_rest('DELETE', url, token=token, username=username,
            password=password)
# NOTE(review): no main() is defined anywhere in this module as shown;
# running the script directly would raise NameError. Confirm main() exists
# (or is expected to be added) before shipping.
if __name__ == '__main__' :
    main()
| 33.55848
| 81
| 0.611658
|
32c57ec480ef32335403cba14fba78c713f0eb97
| 741
|
py
|
Python
|
azext_script/compilers/az/handlers/HDInsight.py
|
yorek/adl
|
d9da1b7d46c71415e38a6efe5b1c8d45b02b3704
|
[
"MIT"
] | null | null | null |
azext_script/compilers/az/handlers/HDInsight.py
|
yorek/adl
|
d9da1b7d46c71415e38a6efe5b1c8d45b02b3704
|
[
"MIT"
] | 1
|
2018-10-15T05:51:38.000Z
|
2018-10-15T05:51:38.000Z
|
azext_script/compilers/az/handlers/HDInsight.py
|
yorek/adl
|
d9da1b7d46c71415e38a6efe5b1c8d45b02b3704
|
[
"MIT"
] | 1
|
2018-10-18T18:41:02.000Z
|
2018-10-18T18:41:02.000Z
|
from .Generic import GenericHandler
| 30.875
| 70
| 0.618084
|
32c59fc06a151e5b5740b23fbb1aff371ee1d8f2
| 30,841
|
py
|
Python
|
a2-py-beta/erd_converter.py
|
francisgerman70/CSC370
|
0682ea5abdfdbc87b76efd18f98e27a6c49d2b45
|
[
"MIT"
] | null | null | null |
a2-py-beta/erd_converter.py
|
francisgerman70/CSC370
|
0682ea5abdfdbc87b76efd18f98e27a6c49d2b45
|
[
"MIT"
] | null | null | null |
a2-py-beta/erd_converter.py
|
francisgerman70/CSC370
|
0682ea5abdfdbc87b76efd18f98e27a6c49d2b45
|
[
"MIT"
] | null | null | null |
from audioop import add
from erd import *
from table import *
# This function converts an ERD object into a Database object
# The Database object should correspond to a fully correct implementation
# of the ERD, including both data structure and constraints, such that the
# CREATE TABLE statements generated by the Database object will populate an
# empty MySQL database to exactly implement the conceptual design communicated
# by the ERD.
#
# @TODO: Implement me!
| 55.171735
| 303
| 0.642489
|
32c6b6ee54440932d94dc43f2f2f342cc123a082
| 1,848
|
py
|
Python
|
ObitSystem/ObitSD/scripts/scriptResidCal.py
|
sarrvesh/Obit
|
e4ce6029e9beb2a8c0316ee81ea710b66b2b7986
|
[
"Linux-OpenIB"
] | 5
|
2019-08-26T06:53:08.000Z
|
2020-10-20T01:08:59.000Z
|
ObitSystem/ObitSD/scripts/scriptResidCal.py
|
sarrvesh/Obit
|
e4ce6029e9beb2a8c0316ee81ea710b66b2b7986
|
[
"Linux-OpenIB"
] | null | null | null |
ObitSystem/ObitSD/scripts/scriptResidCal.py
|
sarrvesh/Obit
|
e4ce6029e9beb2a8c0316ee81ea710b66b2b7986
|
[
"Linux-OpenIB"
] | 8
|
2017-08-29T15:12:32.000Z
|
2022-03-31T12:16:08.000Z
|
# Program to self calibrate OTF data
# Residual-calibration script for Obit single-dish (OTF) data: derives a
# solution table from the source-subtracted data set and applies it to the
# full data set's calibration table chain.
# NOTE(review): this is Python 2 code (print statement on the last line).
import Obit, OTF, Image, OSystem, OErr, OTFGetSoln, InfoList, Table
# Init Obit
err=OErr.OErr()
ObitSys=OSystem.OSystem ("Python", 1, 103, 1, ["None"], 1, ["./"], 1, 0, err)
OErr.printErrMsg(err, "Error with Obit startup")
# Files
disk = 1
# Dirty
inFullFile = "OTFDirtyFull.fits" # input Full OTF data
inSubFile = "OTFDirtySub.fits" # input Full OTF data
#Clean
#inFullFile = "OTFCleanFull.fits" # input Full OTF data
#inSubFile = "OTFCleanSub.fits" # input Full OTF data
# Set data
fullData = OTF.newPOTF("Input data", inFullFile, disk, 1, err)
subData = OTF.newPOTF("Input data", inSubFile, disk, 1, err)
OErr.printErrMsg(err, "Error creating input data object")
# Calibration parameters
calType = "Filter"
solint = 5.0 / 86400.0  # solution interval: 5 seconds expressed in days
minRMS = 0.0
minEl = 0.0  # minimum elevation cutoff (0 disables the cut)
calJy = [1.0,1.0]
# dim is a shared shape descriptor reused for each InfoList entry below;
# dim[0] is set to the entry's length before each Put call.
dim = OTF.dim
dim[0] = 1
inInfo = OTF.POTFGetList(subData)
InfoList.PInfoListAlwaysPutFloat(inInfo, "SOLINT", dim, [solint])
InfoList.PInfoListAlwaysPutFloat(inInfo, "MINRMS", dim, [minRMS])
InfoList.PInfoListAlwaysPutFloat(inInfo, "MINEL", dim, [minEl])
dim[0] = len(calJy)
InfoList.PInfoListAlwaysPutFloat(inInfo, "CALJY", dim, calJy)
dim[0] = len(calType)
InfoList.PInfoListAlwaysPutString(inInfo, "calType", dim, [calType])
dim[0] = 1
# Derive the solution table from the residual (subtracted) data.
solnTable = OTFGetSoln.POTFGetSolnFilter (subData, fullData, err)
soln = Table.PTableGetVer(solnTable)
# Update Cal table
# Soln2Cal parameters (most defaulted)
OTF.Soln2CalInput["InData"] = fullData
OTF.Soln2CalInput["soln"] = soln
# Use highest extant Cal table as input
oldCal = Obit.OTFGetHighVer(fullData.me, "OTFCal")
if oldCal == 0: # Must not be one
    oldCal = -1
OTF.Soln2CalInput["oldCal"] = oldCal
OTF.Soln2CalInput["newCal"] = 0
OTF.Soln2Cal(err,OTF.Soln2CalInput)
# Shutdown
OErr.printErr(err)
print 'Done, calibrated',inFullFile
| 31.862069
| 77
| 0.715368
|
32c6c31592e8107e78ef2bb52771dcffacd50781
| 393
|
py
|
Python
|
html_mining/twitter.py
|
sourceperl/sandbox
|
bbe1be52c3e51906a8ec94411c4df6a95dcbb39c
|
[
"MIT"
] | null | null | null |
html_mining/twitter.py
|
sourceperl/sandbox
|
bbe1be52c3e51906a8ec94411c4df6a95dcbb39c
|
[
"MIT"
] | null | null | null |
html_mining/twitter.py
|
sourceperl/sandbox
|
bbe1be52c3e51906a8ec94411c4df6a95dcbb39c
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Scrape the most recent tweets from a public Twitter profile page and
# print their text. Relies on the legacy HTML page markup (class
# "tweet-text"); NOTE(review): this selector is tied to an old Twitter
# page layout — verify it still matches before relying on the output.
import requests
from bs4 import BeautifulSoup
# Empty User-Agent: the server returns the plain HTML page in this case.
r = requests.get("https://twitter.com/ThePSF", headers={"User-Agent": ""})
if r.status_code == 200:
    s = BeautifulSoup(r.content, "html.parser")
    # extract tweets
    l_tw = []
    for p in s.find_all("p", attrs={"class": "tweet-text"}):
        l_tw.append(p.text.strip())
    print(l_tw)
| 23.117647
| 74
| 0.62341
|
32c80a80f478110db9183291633d248502cd65ad
| 590
|
py
|
Python
|
warehouse_labeling_machines/libs/utils.py
|
sdg97/warehouse_labeling_machines
|
3650b9fb2d3fef85ee01925acf0a9266dafe746a
|
[
"Apache-2.0"
] | null | null | null |
warehouse_labeling_machines/libs/utils.py
|
sdg97/warehouse_labeling_machines
|
3650b9fb2d3fef85ee01925acf0a9266dafe746a
|
[
"Apache-2.0"
] | null | null | null |
warehouse_labeling_machines/libs/utils.py
|
sdg97/warehouse_labeling_machines
|
3650b9fb2d3fef85ee01925acf0a9266dafe746a
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/python
# -*- coding: utf-8 -*-
import decimal
import multiprocessing
import random
def roundDecimal(v):
    """Round a Decimal to two places using half-up (commercial) rounding.

    Decimal values need an explicit quantize step; the default rounding
    mode is not half-up, hence the explicit ROUND_HALF_UP here.
    """
    two_places = decimal.Decimal('0.01')
    return v.quantize(two_places, rounding=decimal.ROUND_HALF_UP)
def maybeStart(startCb, debug):
    """Occasionally execute *startCb*.

    On each worker restart the callback fires with probability
    1/(2*cpu_count + 1); in debug mode it never fires.
    """
    if debug:
        return
    worker_count = multiprocessing.cpu_count() * 2 + 1
    if random.randrange(worker_count) == 0:
        startCb()
| 21.851852
| 78
| 0.666102
|
32c8f25c548f019704dfb22f0db7ab07f62d2dd9
| 504
|
py
|
Python
|
projeto/main/migrations/0017_alter_user_room.py
|
neilom18/g5-chess
|
8998199b3432f0b83aa27e5c2126173ecc87f311
|
[
"MIT"
] | null | null | null |
projeto/main/migrations/0017_alter_user_room.py
|
neilom18/g5-chess
|
8998199b3432f0b83aa27e5c2126173ecc87f311
|
[
"MIT"
] | 1
|
2021-10-03T22:26:45.000Z
|
2021-10-03T22:26:45.000Z
|
projeto/main/migrations/0017_alter_user_room.py
|
neilom18/g5-chess
|
8998199b3432f0b83aa27e5c2126173ecc87f311
|
[
"MIT"
] | null | null | null |
# Generated by Django 3.2.4 on 2021-09-24 15:29
from django.db import migrations, models
import django.db.models.deletion
| 25.2
| 141
| 0.640873
|
08613adf55222eb81cf9aea8d6ff94d2cf2ab660
| 105
|
py
|
Python
|
groups/views.py
|
AliAkberAakash/learn-in-groups
|
850601ddd5520c850ebec12003c8337670762948
|
[
"MIT"
] | null | null | null |
groups/views.py
|
AliAkberAakash/learn-in-groups
|
850601ddd5520c850ebec12003c8337670762948
|
[
"MIT"
] | null | null | null |
groups/views.py
|
AliAkberAakash/learn-in-groups
|
850601ddd5520c850ebec12003c8337670762948
|
[
"MIT"
] | null | null | null |
from django.shortcuts import render
| 21
| 42
| 0.8
|
086749fe086bfe8b53982e2dc76e87c1e91b6cc7
| 1,596
|
py
|
Python
|
code/p3.py
|
OscarFlores-IFi/CDINP19
|
7fb0cb6ff36b9a10bcfa0772b172c5e49996df48
|
[
"MIT"
] | null | null | null |
code/p3.py
|
OscarFlores-IFi/CDINP19
|
7fb0cb6ff36b9a10bcfa0772b172c5e49996df48
|
[
"MIT"
] | null | null | null |
code/p3.py
|
OscarFlores-IFi/CDINP19
|
7fb0cb6ff36b9a10bcfa0772b172c5e49996df48
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
Created on Mon Feb 11 09:18:37 2019
@author: if715029
"""
# Movie-recommendation exercise: binarize user ratings, compute Jaccard
# similarity between users, and recommend movies liked by similar users.
import pandas as pd
import numpy as np
import sklearn.metrics as skm  # NOTE(review): imported but unused below
import scipy.spatial.distance as sc
#%% Read data
data = pd.read_excel('../data/Test de pelculas(1-16).xlsx', encoding='latin_1')
#%% Select rating columns (loop style)
pel = pd.DataFrame()
# Every third column starting at index 6 holds a movie rating.
for i in range((len(data.T)-5)//3):
    pel = pel.append(data.iloc[:,6+i*3])
pel = pel.T
print(pel)
#%% Select rating columns (vectorized style — same result as above)
csel = np.arange(6,243,3)
cnames = list(data.columns.values[csel])
datan = data[cnames]
#%% Averages per movie and per user
movie_prom = datan.mean(axis=0)
user_prom = datan.mean(axis=1)
#%% Binarize ratings (liked = rating >= 3)
datan = datan.copy()
datan[datan<3] = 0
datan[datan>=3] = 1
#%% Compute similarity-index distance matrix
#D1 = sc.pdist(datan,'hamming') # hamming == matching
D1 = sc.pdist(datan,'jaccard')
D1 = sc.squareform(D1)
#D2 = sc.pdist(data_b,'jaccard') # hamming == matching
#D2 = sc.squareform(D2)
# Similarity = 1 - Jaccard distance.
Isim1 = 1-D1
#%% Pick a user and rank the others by similarity
user = 1
Isim_user = Isim1[user]
Isim_user_sort = np.sort(Isim_user)
# indx_user[-1] is the user itself (similarity 1), so neighbors start at -2.
indx_user = np.argsort(Isim_user)
#%% Movie recommendation, variant 1: single most similar user
USER = datan.loc[user]
USER_sim = datan.loc[indx_user[-2]]
# Recommend what the neighbor liked and this user has not.
indx_recomend1 = (USER_sim==1)&(USER==0)
recomend1 = list(USER.index[indx_recomend1])
#%% Movie recommendation, variant 2: majority vote of 5 nearest users
USER = datan.loc[user]
USER_sim = np.mean(datan.loc[indx_user[-6:-1]],axis = 0)
USER_sim[USER_sim<=.5]=0
USER_sim[USER_sim>.5]=1
indx_recomend2 = (USER_sim==1)&(USER==0)
recomend2 = list(USER.index[indx_recomend2])
| 21.863014
| 80
| 0.697368
|
0867a27f2b0a9d65b0fbacf348d77dfbc3427264
| 1,187
|
py
|
Python
|
itao/utils/qt_logger.py
|
MaxChangInnodisk/itao
|
b0745eb48bf67718ef00db566c4cc19896d903a7
|
[
"MIT"
] | null | null | null |
itao/utils/qt_logger.py
|
MaxChangInnodisk/itao
|
b0745eb48bf67718ef00db566c4cc19896d903a7
|
[
"MIT"
] | null | null | null |
itao/utils/qt_logger.py
|
MaxChangInnodisk/itao
|
b0745eb48bf67718ef00db566c4cc19896d903a7
|
[
"MIT"
] | null | null | null |
import logging
| 34.911765
| 97
| 0.615838
|
08687783aacc944c351fc37618c9c87ef69b3d6b
| 2,296
|
py
|
Python
|
scripts/ndvi_diff.py
|
hkfrei/pythonRemoteSensing
|
c8681d859313ee5ad01e5b9753f8c43462268624
|
[
"MIT"
] | 1
|
2019-12-18T21:54:22.000Z
|
2019-12-18T21:54:22.000Z
|
scripts/ndvi_diff.py
|
hkfrei/pythonRemoteSensing
|
c8681d859313ee5ad01e5b9753f8c43462268624
|
[
"MIT"
] | null | null | null |
scripts/ndvi_diff.py
|
hkfrei/pythonRemoteSensing
|
c8681d859313ee5ad01e5b9753f8c43462268624
|
[
"MIT"
] | 1
|
2020-07-01T16:44:21.000Z
|
2020-07-01T16:44:21.000Z
|
# Compute the year-over-year NDVI difference (2018 - 2017), reclassify
# significant decreases, and clip the result to forest areas.
import numpy
import rasterio
import gdal
print('all modules imported')
# path to the folder with the ndvi rasters
base_path = "/Users/hk/Downloads/gaga/"
# shapefile with forest mask
forest_mask = base_path + "waldmaske_wgs84.shp"
# initialize the necessary rasters for the ndvi calculation.
ndvi_2017 = rasterio.open(base_path + "ndvi_17.tiff", driver="GTiff")
ndvi_2018 = rasterio.open(base_path + "ndvi_18.tiff", driver="GTiff")
# print out metadata about the ndvi's
print(ndvi_2018.count) # number of raster bands
print(ndvi_2017.count) # number of raster bands
print(ndvi_2018.height) # column count
print(ndvi_2018.dtypes) # data type of the raster e.g. ('float64',)
print(ndvi_2018.crs) # projection of the raster e.g. EPSG:32632
print("calculate ndvi difference")
# this is will give us an array of values, not an actual raster image.
ndvi_diff_array = numpy.subtract(ndvi_2018.read(1), ndvi_2017.read(1))
print("reclassify")
# reclassify: NDVI drops of 0.05 or more become class 1 (potential change),
# everything else becomes the 9999.0 sentinel / NoData value.
ndvi_diff_reclass_array = numpy.where(
    ndvi_diff_array <= -0.05, 1, 9999.0
)
# create a new (empty) raster for the "original" diff
ndvi_diff_image = rasterio.open(base_path + "ndvi_diff.tif", "w", driver="Gtiff", width=ndvi_2018.width,
                                height=ndvi_2018.height, count=1, crs=ndvi_2018.crs, transform=ndvi_2018.transform,
                                dtype='float64')
# create a new (empty) raster for the reclassified diff
ndvi_diff_reclass_image = rasterio.open(base_path + "ndvi_reclass_diff.tif", "w", driver="Gtiff", width=ndvi_2018.width,
                                        height=ndvi_2018.height, count=1, crs=ndvi_2018.crs,
                                        transform=ndvi_2018.transform, dtype='float64')
# write the ndvi's to raster
ndvi_diff_image.write(ndvi_diff_array.astype("float64"), 1)
ndvi_diff_reclass_image.write(ndvi_diff_reclass_array.astype("float64"), 1)
ndvi_diff_image.close()
ndvi_diff_reclass_image.close()
# extract forest areas
# Make sure to add correct Nodata and Alpha values. They have to match the reclassified values.
warp_options = gdal.WarpOptions(cutlineDSName=forest_mask, cropToCutline=True, dstNodata=9999, dstAlpha=9999)
gdal.Warp(base_path + "change_masked.tif", base_path + "ndvi_reclass_diff.tif", options=warp_options)
print("finished")
| 41.745455
| 120
| 0.726916
|
08691612fc229c4b74017cbf49ecddb0965a12ea
| 462
|
py
|
Python
|
helga_umb/signals/util.py
|
ktdreyer/helga-umb
|
f0c6858745d90205e74eec0eb5ebaafa655b2336
|
[
"MIT"
] | null | null | null |
helga_umb/signals/util.py
|
ktdreyer/helga-umb
|
f0c6858745d90205e74eec0eb5ebaafa655b2336
|
[
"MIT"
] | 2
|
2018-04-27T15:37:10.000Z
|
2018-08-22T21:00:40.000Z
|
helga_umb/signals/util.py
|
ktdreyer/helga-umb
|
f0c6858745d90205e74eec0eb5ebaafa655b2336
|
[
"MIT"
] | null | null | null |
def product_from_branch(branch):
    """
    Return a product name from this branch name.
    :param branch: eg. "ceph-3.0-rhel-7"
    :returns: eg. "ceph"
    """
    prefix = 'private-'
    if branch.startswith(prefix):
        # Best effort for private branches: strip the prefix and hope the
        # remainder matches a product string somewhere.
        return branch[len(prefix):]
    # probably not gonna work for "stream" branches :(
    return branch.partition('-')[0]
| 30.8
| 73
| 0.621212
|
08698150dd4c0d31ae984574dc2eb2d108201474
| 752
|
py
|
Python
|
work/2021/ne201076/src/cpu_notify.py
|
tora01/SkillLab
|
61ebfaf45c503b9e6f4a3d05a7edd4de2fcad93e
|
[
"CC0-1.0"
] | 2
|
2020-09-09T02:40:23.000Z
|
2021-09-12T18:08:15.000Z
|
work/2021/ne201076/src/cpu_notify.py
|
tora01/SkillLab
|
61ebfaf45c503b9e6f4a3d05a7edd4de2fcad93e
|
[
"CC0-1.0"
] | 1
|
2021-09-14T09:36:38.000Z
|
2021-09-14T09:36:38.000Z
|
work/2021/ne201076/src/cpu_notify.py
|
tora01/SkillLab
|
61ebfaf45c503b9e6f4a3d05a7edd4de2fcad93e
|
[
"CC0-1.0"
] | 19
|
2021-09-07T06:11:29.000Z
|
2021-09-07T07:45:08.000Z
|
# NOTE(review): this script is broken as written — `now=dt.('cpu_temps')`
# is a syntax error, `dt`, `getCpuTempFromFile`, `data_file`, `cpu_temps`,
# `cpu_temp`, `line`, `message`, `postdate` and `time` are undefined or
# unimported, and the `if print(...) == ...` comparison is always False.
# The apparent intent: poll the CPU temperature once per second and send a
# LINE Notify message when it reaches 80°C — confirm with the author.
# NOTE(review): the LINE Notify token is hardcoded below; it should be
# moved to an environment variable and the committed value revoked.
import requests
url = 'https://notify-api.line.me/api/notify'# LINE Notify API URL
token = '2RNdAKwlaj69HK0KlEdMX1y575gDWNKrPpggFcLnh82' #
ms = ""#
while True:
    now=dt.('cpu_temps')
    dt = getCpuTempFromFile(data_file) # CPU temperature reading
    print(cpu_temps)
    if print(cpu_temp) == "print >= 80":# threshold check, CPU >= 80
        line(postdate=message, date=postdate, palams=postdate)# send via LINE
        break
    time.sleep(1)
| 31.333333
| 73
| 0.670213
|
0869ba6e18dfa77decb88cf8144acde0c451215e
| 49
|
py
|
Python
|
src/titiler/application/titiler/application/__init__.py
|
kalxas/titiler
|
5e4e497f1033eb64b65315068c094abe8259cd8c
|
[
"MIT"
] | null | null | null |
src/titiler/application/titiler/application/__init__.py
|
kalxas/titiler
|
5e4e497f1033eb64b65315068c094abe8259cd8c
|
[
"MIT"
] | null | null | null |
src/titiler/application/titiler/application/__init__.py
|
kalxas/titiler
|
5e4e497f1033eb64b65315068c094abe8259cd8c
|
[
"MIT"
] | null | null | null |
"""titiler.application"""
__version__ = "0.6.0"
| 12.25
| 25
| 0.653061
|
0869cc3c4f8fe0eb7c864da5eb1b5caf6b676944
| 550
|
py
|
Python
|
testScripts/getAllFiles.py
|
ryanemerson/JGroups-HiTab
|
8fd8c6c45219e4c04618630be7e2449ebb0578dc
|
[
"Apache-2.0"
] | null | null | null |
testScripts/getAllFiles.py
|
ryanemerson/JGroups-HiTab
|
8fd8c6c45219e4c04618630be7e2449ebb0578dc
|
[
"Apache-2.0"
] | null | null | null |
testScripts/getAllFiles.py
|
ryanemerson/JGroups-HiTab
|
8fd8c6c45219e4c04618630be7e2449ebb0578dc
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
# Collect result CSV files from a set of remote hosts via scp into the
# current directory, deleting any local CSVs first.
# NOTE(review): this is Python 2 code (print statement below).
import os
from collections import defaultdict  # NOTE(review): imported but unused
hosts = {'mill001', 'mill004', 'mill005'}
user = 'a7109534'
file_location = '/work/a7109534/'
#file_location = '/home/ryan/workspace/JGroups'
#file_location = '/home/pg/p11/a7109534/'
file_wildcard = '*'
extension = ".csv"
get_file = file_location + file_wildcard + extension
destination = '.'
number_of_rounds = 18  # NOTE(review): defined but unused in this script
# Clear previously fetched CSVs so stale results don't mix with new ones.
os.system("rm *" + extension)
for hostname in hosts:
    cmd = "scp " + user + "@" + hostname + ":" + get_file + " " + destination
    print cmd
    os.system(cmd)
| 26.190476
| 77
| 0.681818
|
0869fc3b1af3273cc468fc0da2d162910f894bff
| 3,610
|
py
|
Python
|
studio/model.py
|
NunoEdgarGFlowHub/studio
|
42b221892a81535842ff25cbbcc434d6422a19e5
|
[
"Apache-2.0"
] | null | null | null |
studio/model.py
|
NunoEdgarGFlowHub/studio
|
42b221892a81535842ff25cbbcc434d6422a19e5
|
[
"Apache-2.0"
] | null | null | null |
studio/model.py
|
NunoEdgarGFlowHub/studio
|
42b221892a81535842ff25cbbcc434d6422a19e5
|
[
"Apache-2.0"
] | null | null | null |
"""Data providers."""
import os
try:
# try-except statement needed because
# pip module is not available in google app engine
import pip
except ImportError:
pip = None
import yaml
import six
from .artifact_store import get_artifact_store
from .http_provider import HTTPProvider
from .firebase_provider import FirebaseProvider
from .s3_provider import S3Provider
from .gs_provider import GSProvider
from . import logs
| 28.88
| 77
| 0.591967
|
086a788b83deae56a16772a629310d3b84a228a0
| 570
|
py
|
Python
|
Server/server/model_inference/predictor.py
|
thaiminhpv/Doctor-Cyclop-Hackathon-2021
|
afb943f7d00ceccb408c895077517ddd06d87fd7
|
[
"MIT"
] | 6
|
2021-04-30T05:28:04.000Z
|
2022-03-21T14:50:43.000Z
|
Server/server/model_inference/predictor.py
|
thaiminhpv/Doctor-Cyclop-Hackathon-2021
|
afb943f7d00ceccb408c895077517ddd06d87fd7
|
[
"MIT"
] | null | null | null |
Server/server/model_inference/predictor.py
|
thaiminhpv/Doctor-Cyclop-Hackathon-2021
|
afb943f7d00ceccb408c895077517ddd06d87fd7
|
[
"MIT"
] | 1
|
2022-01-10T14:58:02.000Z
|
2022-01-10T14:58:02.000Z
|
import numpy as np
import pandas as pd
from server.model_inference.config import labels
from server.model_inference.core_model import get_model_prediction
from server.util.prediction_to_json import pandas_to_json
| 31.666667
| 90
| 0.764912
|
086b6939a15a14e2ba2c7a9bf78818444b385782
| 7,310
|
py
|
Python
|
extendPlugins/minecraft.py
|
f88af65a/XyzB0ts
|
21a557288877b24f337f16002d8bb72b155f2551
|
[
"MIT"
] | 4
|
2021-10-17T11:54:07.000Z
|
2022-03-18T13:10:11.000Z
|
extendPlugins/minecraft.py
|
f88af65a/XyzB0ts
|
21a557288877b24f337f16002d8bb72b155f2551
|
[
"MIT"
] | null | null | null |
extendPlugins/minecraft.py
|
f88af65a/XyzB0ts
|
21a557288877b24f337f16002d8bb72b155f2551
|
[
"MIT"
] | 1
|
2021-10-16T09:51:25.000Z
|
2021-10-16T09:51:25.000Z
|
import asyncio
import json
import socket
import time
from botsdk.util.BotPlugin import BotPlugin
from botsdk.util.Error import printTraceBack
| 36.733668
| 79
| 0.477291
|
086cc04c9a62e2ff1bedaaac23c04ca27ca1b7b4
| 2,264
|
py
|
Python
|
schedule/tests/scheduler_latest_test.py
|
conzty01/RA_Scheduler
|
6bf4931871aef4058d93917e62ceb31766e06b3a
|
[
"MIT"
] | 1
|
2021-03-31T05:26:17.000Z
|
2021-03-31T05:26:17.000Z
|
schedule/tests/scheduler_latest_test.py
|
conzty01/RA_Scheduler
|
6bf4931871aef4058d93917e62ceb31766e06b3a
|
[
"MIT"
] | 83
|
2018-03-19T18:32:34.000Z
|
2022-02-01T02:15:01.000Z
|
schedule/tests/scheduler_latest_test.py
|
conzty01/RA_Scheduler
|
6bf4931871aef4058d93917e62ceb31766e06b3a
|
[
"MIT"
] | 2
|
2021-01-15T22:16:00.000Z
|
2021-02-10T01:03:32.000Z
|
from schedule.scheduler4_0 import schedule
from schedule.ra_sched import Schedule, RA
from unittest.mock import MagicMock, patch
from datetime import date
import unittest
import random
# Run the test suite when this module is executed directly.
if __name__ == "__main__":
    unittest.main()
| 31.444444
| 79
| 0.651502
|
086ccdd01316fbb3c32c9928ed64ba2001cd4f5d
| 2,583
|
py
|
Python
|
main.py
|
brpaz/ulauncher-dockerhub
|
22e646bda40328373a4d90fa0aece2cac0187a42
|
[
"MIT"
] | 3
|
2020-09-04T07:56:47.000Z
|
2022-01-05T13:19:25.000Z
|
main.py
|
brpaz/ulauncher-dockerhub
|
22e646bda40328373a4d90fa0aece2cac0187a42
|
[
"MIT"
] | null | null | null |
main.py
|
brpaz/ulauncher-dockerhub
|
22e646bda40328373a4d90fa0aece2cac0187a42
|
[
"MIT"
] | null | null | null |
""" Main Module """
import logging
from ulauncher.api.client.Extension import Extension
from ulauncher.api.client.EventListener import EventListener
from ulauncher.api.shared.event import KeywordQueryEvent
from ulauncher.api.shared.item.ExtensionResultItem import ExtensionResultItem
from ulauncher.api.shared.action.RenderResultListAction import RenderResultListAction
from ulauncher.api.shared.action.DoNothingAction import DoNothingAction
from ulauncher.api.shared.action.HideWindowAction import HideWindowAction
from ulauncher.api.shared.action.OpenUrlAction import OpenUrlAction
from dockerhub.client import Client
logger = logging.getLogger(__name__)
# Entry point for the ulauncher extension.
# NOTE(review): DockerHubExtension is not defined in the visible portion of
# this file — confirm its class definition exists between the imports and
# this guard.
if __name__ == '__main__':
    DockerHubExtension().run()
| 34.905405
| 85
| 0.622145
|
086fc0967062337eeb0ecb19108dc1ab0e1d65e3
| 957
|
py
|
Python
|
geeklist_examples.py
|
juliengrenier/python-geeklist
|
52528b099e94e539c3451bfd2e741e563f0924e9
|
[
"MIT"
] | 1
|
2015-02-28T10:22:02.000Z
|
2015-02-28T10:22:02.000Z
|
geeklist_examples.py
|
juliengrenier/python-geeklist
|
52528b099e94e539c3451bfd2e741e563f0924e9
|
[
"MIT"
] | null | null | null |
geeklist_examples.py
|
juliengrenier/python-geeklist
|
52528b099e94e539c3451bfd2e741e563f0924e9
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# Interactive walkthrough of the geeklist OAuth flow against the sandbox
# API: obtain a request token, authorize it in the browser, exchange the
# verifier for an access token, then exercise the user API.
# NOTE(review): this is Python 2 code (raw_input and print statement).
from geeklist.api import BaseGeeklistApi, GeekListOauthApi, GeekListUserApi
from access import consumer_info #please access.py which contains consumer_info = { 'key': YOUR_KEY, 'secret': secret}
BaseGeeklistApi.BASE_URL ='http://sandbox-api.geekli.st/v1'
oauth_api = GeekListOauthApi(consumer_info=consumer_info)
# 'oob' (out-of-band): the user copies the verifier code manually.
request_token = oauth_api.request_token(type='oob')
import webbrowser
webbrowser.open('http://sandbox.geekli.st/oauth/authorize?oauth_token=%s' % request_token['oauth_token'])
#read verifier
verifier = raw_input('Please enter verifier code>')
oauth_access_token = oauth_api.access_token(request_token=request_token, verifier=verifier)
access_token = {
    'key':oauth_access_token['oauth_token'],
    'secret':oauth_access_token['oauth_token_secret']
}
user_api = GeekListUserApi(consumer_info, access_token)
print user_api.user_info()
user_api.create_card(headline='First card created with the python wrapper API')
| 41.608696
| 118
| 0.797283
|
0871266d4d435da659b3d90a1e0729b53c28c39c
| 2,448
|
py
|
Python
|
game/gamesrc/objects/character.py
|
ranka47/battle-of-hogwarts
|
e7b2265ebe5661249dd28e472c49b74c1bbcdf23
|
[
"BSD-3-Clause"
] | 2
|
2019-02-24T00:20:47.000Z
|
2020-04-24T15:50:31.000Z
|
game/gamesrc/objects/character.py
|
ranka47/battle-of-hogwarts
|
e7b2265ebe5661249dd28e472c49b74c1bbcdf23
|
[
"BSD-3-Clause"
] | null | null | null |
game/gamesrc/objects/character.py
|
ranka47/battle-of-hogwarts
|
e7b2265ebe5661249dd28e472c49b74c1bbcdf23
|
[
"BSD-3-Clause"
] | 1
|
2019-01-05T15:51:37.000Z
|
2019-01-05T15:51:37.000Z
|
"""
Template for Characters
Copy this module up one level and name it as you like, then
use it as a template to create your own Character class.
To make new logins default to creating characters
of your new type, change settings.BASE_CHARACTER_TYPECLASS to point to
your new class, e.g.
settings.BASE_CHARACTER_TYPECLASS = "game.gamesrc.objects.mychar.MyChar"
Note that objects already created in the database will not notice
this change, you have to convert them manually e.g. with the
@typeclass command.
"""
from ev import Character as DefaultCharacter
from ev import Script
import random
| 38.25
| 79
| 0.663807
|
0873053669c5a9be614101baec79eda2eb276cb9
| 3,170
|
py
|
Python
|
lesson5/lesson5_task4.py
|
nekdfl/GB-python-developer
|
ca3f34bac2a92a930779f89357941bfa9634b3d4
|
[
"MIT"
] | null | null | null |
lesson5/lesson5_task4.py
|
nekdfl/GB-python-developer
|
ca3f34bac2a92a930779f89357941bfa9634b3d4
|
[
"MIT"
] | null | null | null |
lesson5/lesson5_task4.py
|
nekdfl/GB-python-developer
|
ca3f34bac2a92a930779f89357941bfa9634b3d4
|
[
"MIT"
] | null | null | null |
"""
( ) :
One 1
Two 2
Three 3
Four 4
, .
.
.
"""
if __name__ == "__main__":
    # main()
    # NOTE(review): short_variant() is not defined in this excerpt —
    # presumably declared earlier in the file; confirm.
    short_variant()
| 26.864407
| 111
| 0.582334
|
087388739eebaad50c966ec02ed0312b37726c72
| 1,203
|
py
|
Python
|
w_cutscenes_test.py
|
wholetonegames/panda3d-ness-rpg
|
8d81e8418c1bc20706b5b3f4c0631fe9bd76a65e
|
[
"MIT"
] | 1
|
2021-02-01T03:09:28.000Z
|
2021-02-01T03:09:28.000Z
|
w_cutscenes_test.py
|
wholetonegames/panda3d-ness-rpg
|
8d81e8418c1bc20706b5b3f4c0631fe9bd76a65e
|
[
"MIT"
] | null | null | null |
w_cutscenes_test.py
|
wholetonegames/panda3d-ness-rpg
|
8d81e8418c1bc20706b5b3f4c0631fe9bd76a65e
|
[
"MIT"
] | null | null | null |
from w_i_stage import IStage
from direct.interval.IntervalGlobal import Sequence, Func, Wait
| 25.0625
| 63
| 0.596841
|
0874abf4b1ea7884b6edfbac6a754d49e5cc5678
| 154
|
py
|
Python
|
lichee/utils/__init__.py
|
Tencent/Lichee
|
7653becd6fbf8b0715f788af3c0507c012be08b4
|
[
"Apache-2.0"
] | 91
|
2021-10-30T02:25:05.000Z
|
2022-03-28T06:51:52.000Z
|
lichee/utils/__init__.py
|
zhaijunyu/Lichee
|
7653becd6fbf8b0715f788af3c0507c012be08b4
|
[
"Apache-2.0"
] | 1
|
2021-12-17T09:30:25.000Z
|
2022-03-05T12:30:13.000Z
|
lichee/utils/__init__.py
|
zhaijunyu/Lichee
|
7653becd6fbf8b0715f788af3c0507c012be08b4
|
[
"Apache-2.0"
] | 17
|
2021-11-04T07:50:23.000Z
|
2022-03-24T14:24:11.000Z
|
# -*- coding: utf-8 -*-
"""
"""
from . import convertor
from . import model_loader
from . import storage
from . import parallel
from . import logging
| 15.4
| 26
| 0.688312
|
08757365d19fb16259355c3f4a0bc7a45ccc8fde
| 1,808
|
py
|
Python
|
networkunit/models/backends/network_model.py
|
russelljjarvis/NetworkUnit
|
32179371d3a0ba354e6637cf4f97ba70522d4054
|
[
"BSD-3-Clause"
] | null | null | null |
networkunit/models/backends/network_model.py
|
russelljjarvis/NetworkUnit
|
32179371d3a0ba354e6637cf4f97ba70522d4054
|
[
"BSD-3-Clause"
] | 1
|
2019-11-15T22:56:20.000Z
|
2019-11-15T22:56:20.000Z
|
networkunit/models/backends/network_model.py
|
russelljjarvis/NetworkUnit
|
32179371d3a0ba354e6637cf4f97ba70522d4054
|
[
"BSD-3-Clause"
] | null | null | null |
"""NeuronUnit model class for reduced neuron models"""
import numpy as np
from neo.core import AnalogSignal
import quantities as pq
import neuronunit.capabilities as cap
import neuronunit.models as mod
import neuronunit.capabilities.spike_functions as sf
from neuronunit.models import backends
from generic_network import net_sim_runner, get_dummy_synapses
| 35.45098
| 131
| 0.68031
|
0876136eb46ef1d30f09dbd0eff572dd1e4a0144
| 28,812
|
py
|
Python
|
generator.py
|
jimstorch/DGGen
|
cdecbc4bfa491a634aac370de05b21bb6f6cf8e1
|
[
"Apache-2.0"
] | 19
|
2016-12-04T12:43:43.000Z
|
2022-01-25T01:00:24.000Z
|
generator.py
|
jimstorch/DGGen
|
cdecbc4bfa491a634aac370de05b21bb6f6cf8e1
|
[
"Apache-2.0"
] | 9
|
2017-01-04T16:33:00.000Z
|
2021-11-16T06:02:16.000Z
|
generator.py
|
jimstorch/DGGen
|
cdecbc4bfa491a634aac370de05b21bb6f6cf8e1
|
[
"Apache-2.0"
] | 7
|
2016-12-04T12:43:47.000Z
|
2022-02-04T13:10:58.000Z
|
#!/usr/bin/env python3
import argparse
import csv
import datetime
import json
import logging
import os
import sys
import warnings
from collections import defaultdict
from copy import copy
from dataclasses import dataclass
from itertools import islice, cycle, chain
from random import randint, shuffle, choice, sample
from textwrap import shorten, wrap
from typing import List, Any, Dict, Tuple
from reportlab.pdfbase import pdfmetrics
from reportlab.pdfbase.ttfonts import TTFont
from reportlab.pdfgen import canvas
# Name of the script as invoked; used for the logger name and CLI help.
script_name = os.path.basename(sys.argv[0])
description = """
Generate characters for the Delta Green pen-and-paper roleplaying game from Arc Dream Publishing.
"""
__version__ = "1.4"
logger = logging.getLogger(script_name)
# Colour for text drawn on the character sheet; presumably an RGB triple in
# the 0-1 range expected by reportlab — confirm against the drawing code.
TEXT_COLOR = (0, 0.1, 0.5)
DEFAULT_FONT = "Special Elite"
# Three-letter month abbreviations for rendering dates on the sheet.
MONTHS = ("JAN", "FEB", "MAR", "APR", "MAY", "JUN", "JUL", "AUG", "SEP", "OCT", "NOV", "DEC")
# NOTE(review): not referenced in this excerpt — presumably the percent
# chance of taking a suggested bonus skill; confirm against generation code.
SUGGESTED_BONUS_CHANCE = 75
def generate_label(profession):
    """Build a comma-separated label from a profession's descriptive fields.

    Joins the ``label``, ``employer``, and ``division`` entries of the
    *profession* mapping, in that order, skipping any that are missing
    or empty.
    """
    parts = []
    for key in ("label", "employer", "division"):
        value = profession.get(key, "")
        if value:
            parts.append(value)
    return ", ".join(parts)
def get_options():
    """Parse command-line options and return the ``argparse.Namespace``.

    Builds the argument parser for the character generator.  Relies on the
    module-level ``description`` (parser description) and ``__version__``
    (for ``--version``) values.
    """
    parser = argparse.ArgumentParser(description=description)
    parser.add_argument(
        "-v",
        "--verbosity",
        action="count",
        default=0,
        help="specify up to three times to increase verbosity, "
        "i.e. -v to see warnings, -vv for information messages, or -vvv for debug messages.",
    )
    parser.add_argument("-V", "--version", action="version", version=__version__)
    parser.add_argument(
        "-o",
        "--output",
        action="store",
        default=f"DeltaGreenPregen-{datetime.datetime.now() :%Y-%m-%d-%H-%M}.pdf",
        help="Output PDF file. Defaults to %(default)s.",
    )
    parser.add_argument(
        # was a pointless f-string (no placeholders); plain literal is correct
        "-t", "--type", action="store", help="Select single profession to generate."
    )
    parser.add_argument("-l", "--label", action="store", help="Override profession label.")
    parser.add_argument(
        "-c",
        "--count",
        type=int,
        action="store",
        help="Generate this many characters of each profession.",
    )
    parser.add_argument(
        "-e", "--employer", action="store", help="Set employer for all generated characters."
    )
    parser.add_argument(
        "-u",
        "--unequipped",
        action="store_false",
        dest="equip",
        help="Don't generate equipment.",
        default=True,
    )
    data = parser.add_argument_group(title="Data", description="Data file locations")
    data.add_argument(
        "--professions",
        action="store",
        default="data/professions.json",
        help="Data file for professions - defaults to %(default)s",
    )
    return parser.parse_args()
def init_logger(verbosity, stream=sys.stdout):
"""Initialize logger and warnings according to verbosity argument.
Verbosity levels of 0-3 supported."""
is_not_debug = verbosity <= 2
level = (
[logging.ERROR, logging.WARNING, logging.INFO][verbosity] if is_not_debug else logging.DEBUG
)
log_format = (
"%(message)s"
if is_not_debug
else "%(asctime)s %(levelname)-8s %(name)s %(module)s.py:%(funcName)s():%(lineno)d %(message)s"
)
logging.basicConfig(level=level, format=log_format, stream=stream)
if is_not_debug:
warnings.filterwarnings("ignore")
if __name__ == "__main__":
    # Script entry point. NOTE(review): main() is not visible in this
    # excerpt — presumably defined elsewhere in the file; confirm.
    sys.exit(main())
| 34.797101
| 103
| 0.530508
|
087670710e46b9499b04f22d8a01fa0767bf4b47
| 9,093
|
py
|
Python
|
tests/test_remote.py
|
bcyran/philipstv
|
6037724d5fab0b72265c2de2c0441a64f6e00c00
|
[
"MIT"
] | null | null | null |
tests/test_remote.py
|
bcyran/philipstv
|
6037724d5fab0b72265c2de2c0441a64f6e00c00
|
[
"MIT"
] | null | null | null |
tests/test_remote.py
|
bcyran/philipstv
|
6037724d5fab0b72265c2de2c0441a64f6e00c00
|
[
"MIT"
] | null | null | null |
from typing import Union
from unittest.mock import Mock, create_autospec
import pytest
from pytest import MonkeyPatch
from philipstv import PhilipsTVAPI, PhilipsTVPairer, PhilipsTVRemote, PhilipsTVRemoteError
from philipstv.model import (
AllChannels,
AmbilightColor,
AmbilightColors,
AmbilightLayer,
AmbilightPower,
AmbilightPowerValue,
AmbilightTopology,
Application,
ApplicationComponent,
ApplicationIntent,
Applications,
Channel,
ChannelID,
ChannelList,
ChannelShort,
CurrentChannel,
CurrentVolume,
DeviceInfo,
InputKey,
InputKeyValue,
PowerState,
PowerStateValue,
SetChannel,
Volume,
)
# Fixture: a channel listing with two DVB-C channels (Polsat HD, TVN HD),
# used as canned data for the remote-control tests below.
CHANNELS = AllChannels(
    version=1,
    id="all",
    list_type="MixedSources",
    medium="mixed",
    operator="OPER",
    install_country="Poland",
    channel=[
        Channel(
            ccid=35,
            preset="1",
            name="Polsat HD",
            onid=1537,
            tsid=24,
            sid=2403,
            service_type="audio_video",
            type="DVB_C",
            logo_version=33,
        ),
        Channel(
            ccid=40,
            preset="3",
            name="TVN HD",
            onid=666,
            tsid=24,
            sid=2403,
            service_type="audio_video",
            type="DVB_C",
            logo_version=33,
        ),
    ],
)
# Fixture: the Spotify TV application entry with its Android launch intent.
APPLICATION_SPOTIFY = Application(
    intent=ApplicationIntent(
        component=ApplicationComponent(
            package_name="com.spotify.tv.android",
            class_name="com.spotify.tv.android.SpotifyTVActivity",
        ),
        action="android.intent.action.MAIN",
    ),
    label="Spotify",
    order=0,
    id="com.spotify.tv.android.SpotifyTVActivity-com.spotify.tv.android",
    type="app",
)
# Fixture: the Netflix TV application entry with its Android launch intent.
APPLICATION_NETFLIX = Application(
    intent=ApplicationIntent(
        component=ApplicationComponent(
            package_name="com.netflix.ninja",
            class_name="com.netflix.ninja.MainActivity",
        ),
        action="android.intent.action.MAIN",
    ),
    label="Netflix",
    order=0,
    id="com.netflix.ninja.MainActivity-com.netflix.ninja",
    type="app",
)
# Fixture: application list combining the two entries above.
APPLICATIONS = Applications(
    version=0,
    applications=[APPLICATION_SPOTIFY, APPLICATION_NETFLIX],
)
| 29.144231
| 94
| 0.698119
|
0879ba08e89fa5f242f50ddb01acf847e7896d29
| 9,612
|
py
|
Python
|
a2t/src/test_runner.py
|
syeda-khurrath/fabric8-analytics-common
|
421f7e27869c5695ed73b51e6422e097aba00108
|
[
"Apache-2.0"
] | null | null | null |
a2t/src/test_runner.py
|
syeda-khurrath/fabric8-analytics-common
|
421f7e27869c5695ed73b51e6422e097aba00108
|
[
"Apache-2.0"
] | 4
|
2019-05-20T08:27:47.000Z
|
2019-05-20T08:29:57.000Z
|
a2t/src/test_runner.py
|
codeready-analytics/fabric8-analytics-common
|
a763c5534d601f2f40a0f02c02914c49ea23669d
|
[
"Apache-2.0"
] | 1
|
2020-10-05T21:12:44.000Z
|
2020-10-05T21:12:44.000Z
|
"""Implementation of benchmarks.
Copyright (c) 2019 Red Hat Inc.
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
import sys
from random import randint
from fastlog import log
from time import time
from queue import Queue
from threading import Thread
from report_generator import generate_csv_report
from component_generator import ComponentGenerator
from setup import parse_tags
# directory containing test results
RESULT_DIRECTORY = "test_results"
def check_number_of_results(queue_size, component_analysis_count, stack_analysis_count):
    """Warn when fewer results arrived than analyses were launched.

    Every stack analysis is expected to contribute two results and every
    component analysis one.  Analyses that failed with an HTTP error
    (4xx/5xx) never push a result onto the queue, so a shortfall here
    means that many analyses ended with an error or exception.
    """
    log.info("queue size: {size}".format(size=queue_size))
    expected_results = component_analysis_count + stack_analysis_count * 2
    missing = expected_results - queue_size
    if missing != 0:
        log.warning("Warning: {expected} results expected, but only {got} is presented".format(
            expected=expected_results, got=queue_size))
        log.warning("This means that {n} analysis ends with error or exception".format(
            n=missing))
def prepare_component_generators(python_payload, maven_payload, npm_payload):
    """Return component generators for each enabled payload ecosystem.

    A generator is always created for pypi, maven, and npm (mirroring the
    original behaviour), but only those whose flag is truthy are returned,
    in pypi, maven, npm order.
    """
    factory = ComponentGenerator()
    flagged_generators = [
        (python_payload, factory.generator_for_ecosystem("pypi")),
        (maven_payload, factory.generator_for_ecosystem("maven")),
        (npm_payload, factory.generator_for_ecosystem("npm")),
    ]
    return [generator for enabled, generator in flagged_generators if enabled]
def initialize_generators(generators):
    """Advance every generator by a random number of steps.

    Skips between 10 and 100 items (inclusive) from each generator so a
    run does not always start from the very first item.
    """
    skip_count = randint(10, 100)
    for _ in range(skip_count):
        for generator in generators:
            next(generator)
def component_analysis_benchmark(queue, threads, component_analysis, thread_count,
                                 python_payload, maven_payload, npm_payload):
    """Component analysis benchmark.

    Launches ``thread_count`` worker threads, each running one component
    analysis for a randomly chosen ecosystem/package/version triple.

    :param queue: queue that workers push their results onto
    :param threads: list every started ``Thread`` is appended to (caller joins them)
    :param component_analysis: object whose ``start`` method performs one analysis
    :param thread_count: number of worker threads to launch
    :param python_payload: include PyPI packages when truthy
    :param maven_payload: include Maven packages when truthy
    :param npm_payload: include NPM packages when truthy
    """
    generators = prepare_component_generators(python_payload, maven_payload, npm_payload)
    initialize_generators(generators)
    # fix: the original reused `t` as both the loop index and the Thread
    # object; use distinct names so the analysis id stays unambiguous
    for thread_id in range(thread_count):
        g = generators[randint(0, len(generators) - 1)]
        ecosystem, component, version = next(g)
        with log.indent():
            log.info("Component analysis for E/P/V {} {} {}".format(ecosystem, component, version))
            worker = Thread(target=component_analysis.start,
                            args=(thread_id, ecosystem, component, version, queue))
            worker.start()
            threads.append(worker)
        # skip some items so consecutive workers don't analyse neighbouring items
        for _ in range(randint(5, 25)):
            next(g)
def stack_analysis_benchmark(queue, threads, stack_analysis, thread_count,
                             python_payload, maven_payload, npm_payload):
    """Stack analysis benchmark.

    Launches ``thread_count`` worker threads, each running one stack
    analysis for a randomly selected manifest file.

    :param queue: queue that workers push their results onto
    :param threads: list every started ``Thread`` is appended to (caller joins them)
    :param stack_analysis: object whose ``start`` method performs one analysis
    :param thread_count: number of worker threads to launch
    :param python_payload: accepted for signature symmetry with
        ``component_analysis_benchmark``; currently unused here
    :param maven_payload: currently unused, see above
    :param npm_payload: currently unused, see above
    """
    # TODO: read automagically from the filelist
    manifests = (
        ("maven", "clojure_1_6_0.xml"),
        ("maven", "clojure_1_7_0.xml"),
        ("maven", "clojure_1_8_0.xml"),
        ("maven", "clojure_junit.xml"),
        ("pypi", "click_6_star.txt"),
        ("pypi", "array_split.txt"),
        ("pypi", "fastlog_urllib_requests.txt"),
        ("pypi", "requests_latest.txt"),
        ("pypi", "numpy_latest.txt"),
        ("pypi", "flask_latest.txt"),
        ("pypi", "scipy_latest.txt"),
        ("pypi", "pygame_latest.txt"),
        ("pypi", "pyglet_latest.txt"),
        ("pypi", "dash_latest.txt"),
        ("pypi", "pudb_latest.txt"),
        ("pypi", "pytest_latest.txt"),
        ("pypi", "numpy_1_11_0.txt"),
        ("pypi", "numpy_1_12_0.txt"),
        ("pypi", "numpy_1_16_2.txt"),
        ("pypi", "numpy_1_16_3.txt"),
        ("pypi", "numpy_scipy.txt"),
        ("pypi", "pytest_2_0_0.txt"),
        ("pypi", "pytest_2_0_1.txt"),
        ("pypi", "pytest_3_2_2.txt"),
        ("pypi", "requests_2_20_0.txt"),
        ("pypi", "requests_2_20_1.txt"),
        ("pypi", "requests_2_21_0.txt"),
        ("pypi", "scipy_1_1_0.txt"),
        ("pypi", "scipy_1_2_0.txt"),
        ("pypi", "scipy_1_2_1.txt"),
        ("npm", "array.json"),
        ("npm", "dependency_array.json"),
        ("npm", "dependency_emitter_component.json"),
        ("npm", "dependency_jquery.json"),
        ("npm", "dependency_jquery_react.json"),
        ("npm", "dependency_lodash.json"),
        ("npm", "dependency_lodash_react_jquery.json"),
        ("npm", "dependency_react.json"),
        ("npm", "dependency_to_function.json"),
        ("npm", "dependency_to_function_vue_array.json"),
        ("npm", "dependency_underscore.json"),
        ("npm", "dependency_underscore_react_jquery.json"),
        ("npm", "dependency_vue.json"),
        ("npm", "dependency_vue_to_function.json"),
        ("npm", "empty.json"),
        ("npm", "jquery.json"),
        ("npm", "lodash.json"),
        ("npm", "mocha.json"),
        ("npm", "no_requirements.json"),
        ("npm", "underscore.json"),
        ("npm", "wisp.json"),
    )
    # fix: the original reused `t` as both the loop index and the Thread
    # object; use distinct names so the analysis id stays unambiguous
    for thread_id in range(thread_count):
        manifest_idx = randint(0, len(manifests) - 1)
        manifest = manifests[manifest_idx]
        with log.indent():
            log.info("Stack analysis")
            ecosystem = manifest[0]
            manifest_file = manifest[1]
            worker = Thread(target=stack_analysis.start,
                            args=(thread_id, ecosystem, manifest_file, queue))
            worker.start()
            threads.append(worker)
def wait_for_all_threads(threads):
    """Block until every thread in *threads* has finished, then log success."""
    log.info("Waiting for all threads to finish")
    for worker in threads:
        worker.join()
    log.success("Done")
def run_test(cfg, test, i, component_analysis, stack_analysis):
    """Run one selected test and generate its CSV report.

    :param cfg: global configuration (not used here, kept for API symmetry)
    :param test: one test specification row read from the CSV file
    :param i: 1-based sequence number of the test, used in log messages
    :param component_analysis: benchmark runner for component analyses
    :param stack_analysis: benchmark runner for stack analyses
    """
    test_name = test["Name"]
    log.info("Starting test #{n} with name '{desc}'".format(n=i, desc=test_name))
    with log.indent():
        start = time()

        threads = []
        queue = Queue()

        with log.indent():
            component_analysis_count = int(test["Component analysis"])
            stack_analysis_count = int(test["Stack analysis"])
            python_payload = test["Python payload"] in ("Yes", "yes")
            maven_payload = test["Maven payload"] in ("Yes", "yes")
            npm_payload = test["NPM payload"] in ("Yes", "yes")

            component_analysis_benchmark(queue, threads, component_analysis,
                                         component_analysis_count,
                                         python_payload, maven_payload, npm_payload)
            stack_analysis_benchmark(queue, threads, stack_analysis,
                                     stack_analysis_count,
                                     python_payload, maven_payload, npm_payload)

        wait_for_all_threads(threads)
        queue_size = queue.qsize()
        check_number_of_results(queue_size, component_analysis_count, stack_analysis_count)
        end = time()
        # TODO: use better approach to join paths
        filename = RESULT_DIRECTORY + "/" + test_name.replace(" ", "_") + ".csv"
        # fix: the format string had no '{filename}' placeholder, so the
        # actual report file name was never shown in the log
        log.info("Generating test report into file '{filename}'".format(filename=filename))
        generate_csv_report(queue, test, start, end, end - start, filename)
def run_all_loaded_tests(cfg, tests, component_analysis, stack_analysis):
    """Run every test read from the CSV file, numbering them from 1."""
    for sequence_number, test in enumerate(tests, start=1):
        run_test(cfg, test, sequence_number, component_analysis, stack_analysis)
def run_tests_with_tags(cfg, tests, tags, component_analysis, stack_analysis):
    """Run only the CSV-loaded tests whose tag set includes all requested tags.

    Non-matching tests are skipped with an informational message.  The
    sequence number passed to ``run_test`` counts executed tests only.
    """
    executed = 1
    for test in tests:
        test_name = test["Name"]
        if tags <= parse_tags(test["Tags"]):
            run_test(cfg, test, executed, component_analysis, stack_analysis)
            executed += 1
        else:
            log.info("Skipping test #{n} with name '{desc}'".format(n=executed, desc=test_name))
def no_tests(tests):
    """Return True when no tests were loaded (None or an empty sequence).

    The original also checked ``len(tests) == 0``, which is redundant:
    an empty sequence is already falsy, so ``not tests`` covers both cases.
    """
    return not tests
def start_tests(cfg, tests, tags, component_analysis, stack_analysis):
    """Entry point: run the already-loaded tests, optionally filtered by tags.

    Exits the process with status -1 when no tests were loaded.
    """
    log.info("Run tests")
    with log.indent():
        if no_tests(tests):
            log.error("No tests loaded!")
            sys.exit(-1)
        test_count = len(tests)
        if test_count == 1:
            log.success("Loaded 1 test")
        else:
            log.success("Loaded {n} tests".format(n=test_count))
        if tags:
            run_tests_with_tags(cfg, tests, tags, component_analysis, stack_analysis)
        else:
            run_all_loaded_tests(cfg, tests, component_analysis, stack_analysis)
| 37.546875
| 99
| 0.634415
|
087dd6bb53dc8a5a49168182e90d10a64dea2f64
| 159
|
py
|
Python
|
bus_system/apps/bus/admin.py
|
pygabo/bus_system
|
ffb76d3414e058286799f3df1cb551b26286e7c3
|
[
"MIT"
] | null | null | null |
bus_system/apps/bus/admin.py
|
pygabo/bus_system
|
ffb76d3414e058286799f3df1cb551b26286e7c3
|
[
"MIT"
] | null | null | null |
bus_system/apps/bus/admin.py
|
pygabo/bus_system
|
ffb76d3414e058286799f3df1cb551b26286e7c3
|
[
"MIT"
] | null | null | null |
# Core Django imports
from django.contrib import admin
# Imports from my apps
from bus_system.apps.bus.models import BusModel
# Expose BusModel in the Django admin with the default ModelAdmin options.
admin.site.register(BusModel)
| 19.875
| 47
| 0.805031
|
087e3e81767ebb79be98cf41ccb71262d3691e12
| 2,454
|
py
|
Python
|
jocular/calcs.py
|
MartinCooke/jocular
|
635816d4ef6aa6ea75187137e25386dad2d551e9
|
[
"MIT"
] | 6
|
2021-03-21T16:46:44.000Z
|
2021-11-27T14:07:06.000Z
|
jocular/calcs.py
|
MartinCooke/jocular
|
635816d4ef6aa6ea75187137e25386dad2d551e9
|
[
"MIT"
] | null | null | null |
jocular/calcs.py
|
MartinCooke/jocular
|
635816d4ef6aa6ea75187137e25386dad2d551e9
|
[
"MIT"
] | null | null | null |
'''Various astro calcs mainly based on Meuss.
'''
import numpy as np
import math
import time
from datetime import datetime
| 26.106383
| 104
| 0.600652
|
087e86827c6cc73f03d6554fcf8f36b2777a11b4
| 1,221
|
py
|
Python
|
win/python/CAO/calcClient.py
|
kioto/ORiN2Sample
|
a7a9007b696fdd3ab29f1ec5cededc59b232fae2
|
[
"MIT"
] | null | null | null |
win/python/CAO/calcClient.py
|
kioto/ORiN2Sample
|
a7a9007b696fdd3ab29f1ec5cededc59b232fae2
|
[
"MIT"
] | null | null | null |
win/python/CAO/calcClient.py
|
kioto/ORiN2Sample
|
a7a9007b696fdd3ab29f1ec5cededc59b232fae2
|
[
"MIT"
] | null | null | null |
import win32com.client
import time
if __name__ == '__main__':
    # Exercise the four basic operations once each.
    # NOTE(review): CalcClient is not defined in this excerpt — presumably
    # declared earlier in the file; confirm.
    cc = CalcClient()
    cc.calc('ADD', 123, 567)
    cc.calc('SUB', 123, 567)
    cc.calc('MUL', 123, 567)
    cc.calc('DIV', 123, 567)
| 29.780488
| 73
| 0.564292
|