hexsha stringlengths 40 40 | size int64 5 2.06M | ext stringclasses 11 values | lang stringclasses 1 value | max_stars_repo_path stringlengths 3 251 | max_stars_repo_name stringlengths 4 130 | max_stars_repo_head_hexsha stringlengths 40 78 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 3 251 | max_issues_repo_name stringlengths 4 130 | max_issues_repo_head_hexsha stringlengths 40 78 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 116k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 3 251 | max_forks_repo_name stringlengths 4 130 | max_forks_repo_head_hexsha stringlengths 40 78 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 1 1.05M | avg_line_length float64 1 1.02M | max_line_length int64 3 1.04M | alphanum_fraction float64 0 1 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
f9de9cbd50628088d488c6c5b4c6b194981f73ef | 3,043 | py | Python | imap.py | cynsky/voyage_data_emails | 3cc3b1364248dd3fed56e4c9f4c3ad619c6650ae | [
"MIT"
] | 1 | 2020-09-22T16:24:31.000Z | 2020-09-22T16:24:31.000Z | imap.py | cynsky/voyage_data_emails | 3cc3b1364248dd3fed56e4c9f4c3ad619c6650ae | [
"MIT"
] | null | null | null | imap.py | cynsky/voyage_data_emails | 3cc3b1364248dd3fed56e4c9f4c3ad619c6650ae | [
"MIT"
] | null | null | null | """
Read voyage data emails.
"""
import email
from imaplib import IMAP4_SSL
import logging
OK = 'OK'
logger = logging.getLogger(__name__)
| 20.019737 | 73 | 0.692737 |
f9de9f006fc9afa79a63265eef2873fd5e7b5f5b | 1,991 | py | Python | 2021/python/day3.py | majormunky/advent_of_code | 4cccd7f3879e28e465bbc39176659bdd52bd70d6 | [
"MIT"
] | null | null | null | 2021/python/day3.py | majormunky/advent_of_code | 4cccd7f3879e28e465bbc39176659bdd52bd70d6 | [
"MIT"
] | null | null | null | 2021/python/day3.py | majormunky/advent_of_code | 4cccd7f3879e28e465bbc39176659bdd52bd70d6 | [
"MIT"
] | 1 | 2020-12-04T06:12:01.000Z | 2020-12-04T06:12:01.000Z | from common import get_file_contents
# Example diagnostic report from the Advent of Code 2021, day 3 puzzle
# statement, kept as fixture data for manual testing.
test_data = [
    "00100",
    "11110",
    "10110",
    "10111",
    "10101",
    "01111",
    "00111",
    "11100",
    "10000",
    "11001",
    "00010",
    "01010",
    ]
if __name__ == '__main__':
    # NOTE(review): p1 and p2 are expected to be defined elsewhere in this
    # file (not visible in this copy) — confirm before running.
    print("Part 1: ", p1())
    print("Part 2: ", p2())
| 20.316327 | 72 | 0.658463 |
f9e18afe3ddb7b565b697f03187cc311b80b604e | 670 | py | Python | dallinger/redis_utils.py | Dallinger/Dallinger | c3acf1375391ef8cb702641638bf5a5008aa9be3 | [
"MIT"
] | 100 | 2016-09-07T03:55:36.000Z | 2022-02-28T02:20:10.000Z | dallinger/redis_utils.py | Dallinger/Dallinger | c3acf1375391ef8cb702641638bf5a5008aa9be3 | [
"MIT"
] | 3,457 | 2016-09-05T23:21:31.000Z | 2022-03-31T19:11:31.000Z | dallinger/redis_utils.py | Dallinger/Dallinger | c3acf1375391ef8cb702641638bf5a5008aa9be3 | [
"MIT"
] | 53 | 2016-10-03T07:24:34.000Z | 2021-10-20T20:42:38.000Z | import os
import redis
from urllib.parse import urlparse
def connect_to_redis(url=None):
    """Return a connection to Redis.

    If a URL is supplied, it will be used, otherwise an environment variable
    is checked before falling back to a default.

    Since we are generally running on Heroku, and configuring SSL certificates
    is challenging, we disable cert requirements on secure connections.
    """
    target = url or os.getenv("REDIS_URL", "redis://localhost:6379")
    if urlparse(target).scheme == "rediss":
        # "rediss" is the TLS-secured scheme; skip certificate verification.
        return redis.from_url(url=target, ssl_cert_reqs=None)
    return redis.from_url(url=target)
| 30.454545 | 78 | 0.723881 |
f9e1ea0fa4cb837c6d62e27ef66230461e2beb01 | 139 | py | Python | libkludge/generate/this_access.py | zhangxiao6776/kludge | 17a561f4b15399edd8175c883f8410a1b23c4d90 | [
"BSD-3-Clause"
] | null | null | null | libkludge/generate/this_access.py | zhangxiao6776/kludge | 17a561f4b15399edd8175c883f8410a1b23c4d90 | [
"BSD-3-Clause"
] | null | null | null | libkludge/generate/this_access.py | zhangxiao6776/kludge | 17a561f4b15399edd8175c883f8410a1b23c4d90 | [
"BSD-3-Clause"
] | 2 | 2017-12-01T20:44:14.000Z | 2021-08-21T21:47:04.000Z | #
# Copyright (c) 2010-2016, Fabric Software Inc. All rights reserved.
#
| 15.444444 | 68 | 0.676259 |
f9e2a6cf566ceeaab2c7f9874c63accbd13dbe53 | 88 | py | Python | project/server/main/__init__.py | ardikabs/dnsmanager | 4d2f302ea9f54fd4d5416328dc46a1c47b573e5b | [
"MIT"
] | 1 | 2019-01-15T10:33:04.000Z | 2019-01-15T10:33:04.000Z | project/server/main/__init__.py | ardikabs/dnsmanager | 4d2f302ea9f54fd4d5416328dc46a1c47b573e5b | [
"MIT"
] | null | null | null | project/server/main/__init__.py | ardikabs/dnsmanager | 4d2f302ea9f54fd4d5416328dc46a1c47b573e5b | [
"MIT"
] | null | null | null |
from . import modules | 17.6 | 35 | 0.693182 |
f9e3c2c814e617eb1250bbe52b5026a15b1d2778 | 366 | py | Python | Python by Harish/Class 1/Loops.py | kai92a/Learning_Python | 5195aeb950e21150838c44d7c6af87cd86d31301 | [
"MIT"
] | null | null | null | Python by Harish/Class 1/Loops.py | kai92a/Learning_Python | 5195aeb950e21150838c44d7c6af87cd86d31301 | [
"MIT"
] | null | null | null | Python by Harish/Class 1/Loops.py | kai92a/Learning_Python | 5195aeb950e21150838c44d7c6af87cd86d31301 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
"""
Created on Thu Sep 2 03:02:57 2021
@author: sgaa_
"""
# Basic counted loop: prints the numbers 1 through 19.
print ("for loop")
for i in range (1,20):
    print (i)
# Same idea with a while loop: prints 1 through 11.
print ("while loop")
i=1
while i<12:
    print (i)
    i+=1
# Membership tests against ranges: 1 is not in range(2, 5), but 3 is in
# range(3, 5), so the elif branch runs and "Yes 1 is in range 2" is printed.
if 1 in range(2,5):
    print ("Yes 1 is in the range")
elif 3 in range (3,5):
    print ("Yes 1 is in range 2")
else:
    print ("1 is not in both the ranges")
f9e4232dbd5470195e751b3cfb7348b26305a4d1 | 12,684 | py | Python | src/utils/datasets.py | gorjanradevski/siamese_multi_head_attention | fcbfe21f284bf98a1d0e725a9e6f2df19363b4a5 | [
"MIT"
] | 2 | 2020-06-11T03:03:35.000Z | 2022-01-08T07:15:46.000Z | src/utils/datasets.py | gorjanradevski/multimodal_representations_deep_learning | fcbfe21f284bf98a1d0e725a9e6f2df19363b4a5 | [
"MIT"
] | null | null | null | src/utils/datasets.py | gorjanradevski/multimodal_representations_deep_learning | fcbfe21f284bf98a1d0e725a9e6f2df19363b4a5 | [
"MIT"
] | null | null | null | import json
import re
import os
import logging
from abc import ABC
from typing import Dict, Any, List, Tuple
from utils.constants import pascal_train_size, pascal_val_size
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
def preprocess_caption(caption: str) -> str:
    """Basic method used around all classes.

    Performs pre-processing of the caption in the following way:

    1. Converts the whole caption to lower case.
    2. Removes all characters which are not lowercase letters, apostrophes
       or spaces.
    3. Collapses runs of whitespace into single spaces and strips the ends.

    Args:
        caption: The raw caption text.

    Returns:
        The cleaned caption string.
    """
    caption = caption.lower()
    # Raw strings avoid the invalid-escape warnings the old patterns needed
    # a NOQA marker for.
    caption = re.sub(r"[^a-z' ]+", "", caption)
    # Collapse internal whitespace and trim the ends in a single pass
    # (the original stripped twice in a row, which was redundant).
    caption = re.sub(r"\s+", " ", caption).strip()
    return caption
class TrainCocoDataset(BaseCocoDataset):
    # Adapted for working with the Microsoft COCO dataset.
    # NOTE(review): BaseCocoDataset is defined elsewhere in this module; the
    # actual json parsing presumably happens in its __init__ — confirm there.
    def __init__(self, images_path: str, json_path: str):
        """Creates a dataset object.

        Args:
            images_path: Path where the images are located.
            json_path: Path to the json file where the mappings are indicated
                as well as the captions.
        """
        super().__init__(images_path, json_path)
        logger.info("Class variables set...")
class ValCocoDataset(BaseCocoDataset):
    # Adapted for working with the Microsoft COCO dataset.
    def __init__(self, images_path: str, json_path: str, val_size: int = None):
        """Creates a dataset object.

        Args:
            images_path: Path where the images are located.
            json_path: Path to the json file where the mappings are indicated
                as well as the captions.
            val_size: The size of the validation set.
        """
        super().__init__(images_path, json_path)
        # Number of validation samples to use; the None default presumably
        # means "use everything" — confirm where val_size is consumed.
        self.val_size = val_size
class FlickrDataset:
    # Adapted for working with the Flickr8k and Flickr30k dataset.
    def get_data(self, images_file_path: str):
        """Return image paths and captions for the split listed in the file.

        Args:
            images_file_path: Path to a split file listing image names.

        Returns:
            A tuple of (image paths, captions).
        """
        # NOTE(review): self.get_data_wrapper, self.img_path_caption and
        # self.images_path are set up elsewhere in this class (not visible
        # in this copy) — confirm their contracts there.
        image_paths, captions = self.get_data_wrapper(
            images_file_path, self.img_path_caption, self.images_path
        )
        return image_paths, captions
class PascalSentencesDataset:
    # Adapted for working with the Pascal sentences dataset.

    def __init__(self, images_path, texts_path):
        """Creates a dataset object.

        Args:
            images_path: Path where the images are located.
            texts_path: Path where the caption files are located.
        """
        self.category_image_path_captions = self.parse_captions_filenames(
            texts_path, images_path
        )

    def get_train_data(self):
        """Return (image paths, captions) for the training split."""
        paths, captions = self.get_data_wrapper(
            self.category_image_path_captions, "train"
        )
        return paths, captions

    def get_val_data(self):
        """Return (image paths, captions) for the validation split."""
        paths, captions = self.get_data_wrapper(
            self.category_image_path_captions, "val"
        )
        return paths, captions

    def get_test_data(self):
        """Return (image paths, captions) for the test split."""
        paths, captions = self.get_data_wrapper(
            self.category_image_path_captions, "test"
        )
        return paths, captions
| 34.467391 | 88 | 0.601861 |
dda43962342edb0739a7efdbd01b8d80c87c5e19 | 26,474 | py | Python | libica/openapi/libgds/api/volumes_api.py | umccr-illumina/libica | 916d27eea499f29bee590268b84208effb0cc576 | [
"MIT"
] | null | null | null | libica/openapi/libgds/api/volumes_api.py | umccr-illumina/libica | 916d27eea499f29bee590268b84208effb0cc576 | [
"MIT"
] | 4 | 2021-11-15T10:47:51.000Z | 2022-02-22T04:43:20.000Z | libica/openapi/libgds/api/volumes_api.py | umccr-illumina/libica | 916d27eea499f29bee590268b84208effb0cc576 | [
"MIT"
] | null | null | null | # coding: utf-8
"""
Genomic Data Store Service
No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501
The version of the OpenAPI document: v1
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from libica.openapi.libgds.api_client import ApiClient
from libica.openapi.libgds.exceptions import ( # noqa: F401
ApiTypeError,
ApiValueError
)
| 49.116883 | 261 | 0.619136 |
dda4affe6b2847c17389112e2763a725bc4f7b5b | 5,545 | py | Python | jaxtorch/image.py | GallagherCommaJack/jaxtorch | 3bc6785d781f12fabf3a436d9cfc0b839ebf5aec | [
"MIT"
] | null | null | null | jaxtorch/image.py | GallagherCommaJack/jaxtorch | 3bc6785d781f12fabf3a436d9cfc0b839ebf5aec | [
"MIT"
] | null | null | null | jaxtorch/image.py | GallagherCommaJack/jaxtorch | 3bc6785d781f12fabf3a436d9cfc0b839ebf5aec | [
"MIT"
] | null | null | null | import math
from typing import Tuple
import jax
import jax.numpy as jnp
import numpy as np
from einops import repeat
upsample_arrays = dict(
lanczos3=np.array(
[
0.0073782638646662235,
0.030112292617559433,
-0.06799723953008652,
-0.13327467441558838,
0.2710106074810028,
0.8927707076072693,
0.8927707672119141,
0.2710106074810028,
-0.13327467441558838,
-0.06799724698066711,
0.03011229634284973,
0.007378263399004936,
],
),
cubic=np.array(
[
-0.0234375,
-0.0703125,
0.2265625,
0.8671875,
0.8671875,
0.2265625,
-0.0703125,
-0.0234375,
],
),
linear=np.array([0.25, 0.75, 0.75, 0.25]),
)
downsample_arrays = dict(
lanczos3=np.array(
[
0.003689131001010537,
0.015056144446134567,
-0.03399861603975296,
-0.066637322306633,
0.13550527393817902,
0.44638532400131226,
0.44638532400131226,
0.13550527393817902,
-0.066637322306633,
-0.03399861603975296,
0.015056144446134567,
0.003689131001010537,
]
),
cubic=np.array(
[
-0.01171875,
-0.03515625,
0.11328125,
0.43359375,
0.43359375,
0.11328125,
-0.03515625,
-0.01171875,
]
),
linear=np.array([0.125, 0.375, 0.375, 0.125]),
)
| 25.671296 | 85 | 0.544274 |
dda604bdbe931306a411dfabae424401c18dc54e | 1,210 | py | Python | 1-image2ascii/image2ascii.py | dourgey/Python_Exercise | f41d69033b76d2fea3671f751e936cb804742b57 | [
"MIT"
] | null | null | null | 1-image2ascii/image2ascii.py | dourgey/Python_Exercise | f41d69033b76d2fea3671f751e936cb804742b57 | [
"MIT"
] | null | null | null | 1-image2ascii/image2ascii.py | dourgey/Python_Exercise | f41d69033b76d2fea3671f751e936cb804742b57 | [
"MIT"
] | null | null | null | # Author: @dourgey
# Create Time: 2019/12/27: 18:06
#
# argparse
#
# PILLOW
#
import argparse
import os
import sys
from PIL import Image
parser = argparse.ArgumentParser()
parser.add_argument("-i", "--image", help="~")
parser.add_argument("-f", "--file", help="~")
args = parser.parse_args()
if not os.path.exists(args.image): #
print("~")
sys.exit(0)
img_path = args.image
im = Image.open(img_path)
width, height = im.size
t_height = int(height / width * 100 / 2.5)
im = im.resize((100, t_height), Image.ANTIALIAS)
f = open(args.file, "w") #
#
for i in range(t_height):
for j in range(100):
r, g, b = im.getpixel((j, i))
f.write(
get_char(r, g, b)
)
f.write("\n")
f.close()
print("~")
| 22 | 107 | 0.629752 |
dda62a60e83b2ac0fa35757329d616e26ea6b265 | 6,536 | py | Python | python/ray/serialization.py | delding/ray | 8532ba4272556aa24b5e0c7d275c7b383815c022 | [
"Apache-2.0"
] | null | null | null | python/ray/serialization.py | delding/ray | 8532ba4272556aa24b5e0c7d275c7b383815c022 | [
"Apache-2.0"
] | null | null | null | python/ray/serialization.py | delding/ray | 8532ba4272556aa24b5e0c7d275c7b383815c022 | [
"Apache-2.0"
] | null | null | null | from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
import ray.numbuf
import ray.pickling as pickling
def check_serializable(cls):
  """Throws an exception if Ray cannot serialize this class efficiently.

  Args:
    cls (type): The class to be serialized.

  Raises:
    Exception: An exception is raised if Ray cannot serialize this class
      efficiently.
  """
  if is_named_tuple(cls):
    # Named tuples are serialized via __getnewargs__, so this case works.
    return
  if not hasattr(cls, "__new__"):
    raise Exception("The class {} does not have a '__new__' attribute, and is "
                    "probably an old-style class. We do not support this. "
                    "Please either make it a new-style class by inheriting "
                    "from 'object', or use "
                    "'ray.register_class(cls, pickle=True)'. However, note "
                    "that pickle is inefficient.".format(cls))
  try:
    obj = cls.__new__(cls)
  except Exception:
    # Previously a bare "except:", which would also swallow SystemExit and
    # KeyboardInterrupt; catch only ordinary errors from a custom __new__.
    raise Exception("The class {} has overridden '__new__', so Ray may not be "
                    "able to serialize it efficiently. Try using "
                    "'ray.register_class(cls, pickle=True)'. However, note "
                    "that pickle is inefficient.".format(cls))
  if not hasattr(obj, "__dict__"):
    raise Exception("Objects of the class {} do not have a `__dict__` "
                    "attribute, so Ray cannot serialize it efficiently. Try "
                    "using 'ray.register_class(cls, pickle=True)'. However, "
                    "note that pickle is inefficient.".format(cls))
  if hasattr(obj, "__slots__"):
    raise Exception("The class {} uses '__slots__', so Ray may not be able to "
                    "serialize it efficiently. Try using "
                    "'ray.register_class(cls, pickle=True)'. However, note "
                    "that pickle is inefficient.".format(cls))
# This field keeps track of a whitelisted set of classes that Ray will
# serialize.
whitelisted_classes = {}
classes_to_pickle = set()
custom_serializers = {}
custom_deserializers = {}
def class_identifier(typ):
  """Return a string that identifies this type."""
  return typ.__module__ + "." + typ.__name__
def is_named_tuple(cls):
  """Return True if cls is a namedtuple and False otherwise."""
  bases = cls.__bases__
  # A namedtuple subclasses exactly tuple and nothing else.
  if len(bases) != 1 or bases[0] is not tuple:
    return False
  fields = getattr(cls, "_fields", None)
  # Its _fields attribute is a tuple of field-name strings.
  if not isinstance(fields, tuple):
    return False
  return all(type(name) is str for name in fields)
def add_class_to_whitelist(cls, pickle=False, custom_serializer=None,
                           custom_deserializer=None):
  """Add cls to the list of classes that we can serialize.

  Args:
    cls (type): The class that we can serialize.
    pickle (bool): True if the serialization should be done with pickle. False
      if it should be done efficiently with Ray.
    custom_serializer: Optional callable used to serialize instances of cls.
    custom_deserializer: Optional callable used to deserialize instances of
      cls.
  """
  type_key = class_identifier(cls)
  whitelisted_classes[type_key] = cls
  if pickle:
    classes_to_pickle.add(type_key)
  # Serializer and deserializer are registered as a pair.
  if custom_serializer is not None:
    custom_serializers[type_key] = custom_serializer
    custom_deserializers[type_key] = custom_deserializer
# Here we define a custom serializer and deserializer for handling numpy
# arrays that contain objects.
add_class_to_whitelist(np.ndarray, pickle=False,
custom_serializer=array_custom_serializer,
custom_deserializer=array_custom_deserializer)
def serialize(obj):
  """This is the callback that will be used by numbuf.

  If numbuf does not know how to serialize an object, it will call this method.

  Args:
    obj (object): A Python object.

  Returns:
    A dictionary that has the key "_pytype_" to identify the class, and
    contains all information needed to reconstruct the object.
  """
  type_id = class_identifier(type(obj))
  if type_id not in whitelisted_classes:
    raise Exception("Ray does not know how to serialize objects of type {}. "
                    "To fix this, call 'ray.register_class' with this class."
                    .format(type(obj)))
  if type_id in classes_to_pickle:
    payload = {"data": pickling.dumps(obj)}
  elif type_id in custom_serializers.keys():
    payload = {"data": custom_serializers[type_id](obj)}
  elif is_named_tuple(type(obj)):
    # Named tuples are reconstructed from their __getnewargs__ tuple.
    payload = {"_ray_getnewargs_": obj.__getnewargs__()}
  elif hasattr(obj, "__dict__"):
    payload = obj.__dict__
  else:
    raise Exception("We do not know how to serialize the object '{}'"
                    .format(obj))
  # Tag the payload with the class identifier for deserialization.
  return dict(payload, **{"_pytype_": type_id})
def deserialize(serialized_obj):
  """This is the callback that will be used by numbuf.

  If numbuf encounters a dictionary that contains the key "_pytype_" during
  deserialization, it will ask this callback to deserialize the object.

  Args:
    serialized_obj (object): A dictionary that contains the key "_pytype_".

  Returns:
    A Python object.
  """
  type_id = serialized_obj["_pytype_"]
  cls = whitelisted_classes[type_id]
  if type_id in classes_to_pickle:
    return pickling.loads(serialized_obj["data"])
  if type_id in custom_deserializers.keys():
    return custom_deserializers[type_id](serialized_obj["data"])
  # Rebuild the instance, passing __getnewargs__ data for namedtuples.
  if "_ray_getnewargs_" in serialized_obj:
    obj = cls.__new__(cls, *serialized_obj["_ray_getnewargs_"])
  else:
    obj = cls.__new__(cls)
  # Everything left in the payload (minus the type tag) becomes attributes.
  serialized_obj.pop("_pytype_")
  obj.__dict__.update(serialized_obj)
  return obj
def set_callbacks():
  """Register the custom callbacks with numbuf.

  The serialize callback is used to serialize objects that numbuf does not know
  how to serialize (for example custom Python classes). The deserialize
  callback is used to deserialize objects that were serialized by the serialize
  callback.
  """
  # numbuf invokes these whenever it hits a value it cannot handle natively.
  ray.numbuf.register_callbacks(serialize, deserialize)
| 35.521739 | 79 | 0.684823 |
dda91173fa6aa6ba29a55f8ecc21898b460a57e2 | 3,729 | py | Python | wlcsim/FrankElastic/stonefence.py | SpakowitzLab/BasicWLC | 13edbbc8e8cd36a3586571ff4d80880fc89d30e6 | [
"MIT"
] | 1 | 2021-03-16T01:39:18.000Z | 2021-03-16T01:39:18.000Z | wlcsim/FrankElastic/stonefence.py | riscalab/wlcsim | e34877ef6c5dc83c6444380dbe624b371d70faf2 | [
"MIT"
] | 17 | 2016-07-08T21:17:40.000Z | 2017-01-24T09:05:25.000Z | wlcsim/FrankElastic/stonefence.py | riscalab/wlcsim | e34877ef6c5dc83c6444380dbe624b371d70faf2 | [
"MIT"
] | 9 | 2016-06-21T22:03:53.000Z | 2016-11-10T00:55:01.000Z | from numpy import sqrt
import numpy as np
#from util import sphinx_compat_jit as jit
from numba import jit
ORDER_L=50
def precalculate_data(p, gamma, m_values=None):
    """Precalculate W_plus, W_minus, W_pm, and G_m_ll

    Args:
        p (complex): laplace conjugate of path length
        gamma (real): aligning l=2 (Maier-Saupe) field strength
        m_values (list): list of integer m values to precalculate for.
            Defaults to [0].

    Returns:
        dict: "Wplus", "Wminus" and "Gmll" each map m -> the corresponding
        precalculated value; "ms", "p" and "gamma" echo the inputs.
    """
    # Avoid a mutable default argument; the previous default list object
    # would have been shared between all calls.
    if m_values is None:
        m_values = [0]
    Wps = {}
    Wms = {}
    Gmlls = {}
    for m in m_values:
        Am = Alm_vec(m)
        PgammaB = PgammaB_vec(m, p, gamma)
        Wplus = Wplus_vec(m, gamma, p, Am, PgammaB)
        Wminus = Wminus_vec(m, gamma, p, Am, PgammaB)
        Gmll = Gmll_matrix(Wplus, Wminus, Am, PgammaB, gamma, m)
        Wps[m] = Wplus
        Wms[m] = Wminus
        Gmlls[m] = Gmll
    return {"Wplus": Wps, "Wminus": Wms, "Gmll": Gmlls, "ms": m_values, "p": p,
            "gamma": gamma}
| 30.565574 | 79 | 0.592116 |
dda9ad1f88ac589e37e334d17add8aea6f4a5cd4 | 2,694 | py | Python | main.py | vojtsek/twitter-sentiment | 8f3fbb7be8ac9c0e786b5a58253a24ee8b3f5dae | [
"MIT"
] | null | null | null | main.py | vojtsek/twitter-sentiment | 8f3fbb7be8ac9c0e786b5a58253a24ee8b3f5dae | [
"MIT"
] | null | null | null | main.py | vojtsek/twitter-sentiment | 8f3fbb7be8ac9c0e786b5a58253a24ee8b3f5dae | [
"MIT"
] | null | null | null | #!/usr/bin/env python
import logging
import json
import os
import os.path as path
from collections import OrderedDict
import argparse
import tweepy
from tweepy import Stream
import twitter_config
from tweet_writer_listener import TweetWriterListener
CITIES = ['San Francisco', 'New York', 'Boston', 'Los Angeles', 'Dallas', 'Miami']
OUT_DIR = 'out'
BBOX_FILE = path.join(OUT_DIR, 'bboxes.json')
if __name__ == '__main__':
    # Root logger at INFO; silence chatty third-party loggers.
    logging.basicConfig(format='%(asctime)s : %(levelname)s : %(message)s', level=logging.INFO)
    logging.getLogger("tweetpy").setLevel(logging.WARNING)
    logging.getLogger("requests").setLevel(logging.WARNING)
    logging.getLogger("urllib3").setLevel(logging.WARNING)
    # --limit caps how many tweets are collected per city.
    parser = argparse.ArgumentParser('Twitter sentiment analysis')
    parser.add_argument('--limit', type=int, required=True, help='Tweet limit per city')
    args = parser.parse_args()
    # NOTE(review): mkdir_if_not_exists and place2coords are expected to be
    # defined elsewhere in this file (not visible in this copy) — confirm.
    mkdir_if_not_exists(OUT_DIR)
    logging.info('API authentization')
    # Authenticate with the app + user credentials from twitter_config.
    auth = tweepy.OAuthHandler(twitter_config.CONSUMER_KEY, twitter_config.CONSUMER_SECRET)
    auth.set_access_token(twitter_config.ACCESS_TOKEN, twitter_config.ACCESS_TOKEN_SECRET)
    api = tweepy.API(auth)
    # for country in countries:
    # City bounding boxes are cached on disk to avoid repeated lookups.
    if path.isfile(BBOX_FILE):
        logging.info('Using the cached bounding boxes from file %s', BBOX_FILE)
        bboxes = json.load(open(BBOX_FILE, 'r'))
    else:
        logging.info('Caching the bounding boxes into file %s', BBOX_FILE)
        bboxes = OrderedDict()
        for city in CITIES:
            try:
                place_bb = place2coords(city)
                bboxes[city] = place_bb
            except:
                # NOTE(review): bare "except:" swallows every error here,
                # including KeyboardInterrupt — consider narrowing it.
                print('Coords error')
        json.dump(bboxes, open(BBOX_FILE, 'w'))
    logging.info('Creating stream')
    # Stream tweets per city, appending output to out/<City>_tweets.txt.
    for city, locations in bboxes.items():
        logging.info('Getting tweets from %s (%s)', city, locations)
        with open(path.join(OUT_DIR, "{}_tweets.txt".format(city.replace(' ', ''))), 'a') as f_out:
            stream = Stream(auth, TweetWriterListener(f_out, args.limit))
            # NOTE(review): "async" became a reserved keyword in Python 3.7,
            # so this line is a SyntaxError there; newer tweepy renamed the
            # parameter to is_async — confirm interpreter/tweepy versions.
            stream.filter(locations=locations, languages=["en"], async=False)
ddaa279788a0d07cc55b56c6e5a215a8e2e118cc | 9,283 | py | Python | controllers/default.py | npfe/pursuit | edd2d66ec0770251041b748c4b9f967a15c138b5 | [
"Unlicense"
] | null | null | null | controllers/default.py | npfe/pursuit | edd2d66ec0770251041b748c4b9f967a15c138b5 | [
"Unlicense"
] | 16 | 2020-03-30T13:00:10.000Z | 2020-05-16T16:42:52.000Z | controllers/default.py | npfe/pursuit | edd2d66ec0770251041b748c4b9f967a15c138b5 | [
"Unlicense"
] | null | null | null | # -*- coding: utf-8 -*-
# -------------------------------------------------------------------------
# This is a sample controller
# this file is released under public domain and you can use without limitations
# -------------------------------------------------------------------------
import json
from datetime import datetime
from pprint import pprint
status = {1:'not_started', 2:'hold', 3:'track', 4:'done'}
# ---- index page ----
# ---- action to server uploaded static content (required) ---
| 39.004202 | 129 | 0.596251 |
ddaaf774a1abba687897e41a33028b028a3ed7fd | 2,200 | py | Python | convertgf.py | Wivik/gundam-france | 65d84098eec431e7e27b6a6c0f1e6eadea1c2bc8 | [
"MIT"
] | null | null | null | convertgf.py | Wivik/gundam-france | 65d84098eec431e7e27b6a6c0f1e6eadea1c2bc8 | [
"MIT"
] | null | null | null | convertgf.py | Wivik/gundam-france | 65d84098eec431e7e27b6a6c0f1e6eadea1c2bc8 | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
from markdownify import markdownify as md
import argparse
import re
import os
import sys
# Convert a legacy PHP page into a markdown file with YAML front matter.
# Workflow: on first run the source file is renamed to *.dist.php and the
# job must be rerun; on the second run the markdown is generated.
parser = argparse.ArgumentParser()
parser.add_argument('input_file', help='file to convert')
args = parser.parse_args()
input_file = args.input_file
print(input_file)
if not re.search('.dist.php', input_file):
    test_file = re.sub('\.php', '.dist.php', input_file)
    print(test_file)
    try:
        os.stat(test_file)
    except OSError:
        # No .dist.php copy yet: rename the original and ask for a rerun.
        # (This used to be a bare "except:" around the whole branch, which
        # also caught the SystemExit raised by sys.exit below, so the file
        # was renamed even when the .dist.php copy already existed.)
        os.rename(input_file, test_file)
        print('file renamed, rerun job')
        sys.exit(0)
    else:
        print('file dist exists, ignoring')
        sys.exit(0)
# Derive the markdown output path: strip both extensions (".dist.php")
# and append ".md".
output_file = os.path.splitext(input_file)[0]
output_file = os.path.splitext(output_file)[0]
output_file = output_file + '.md'
print(output_file)
with open(input_file, 'r') as file:
    content = file.read()
html = md(content)
# The first line of the converted markdown becomes the page title.
firstline = html.split('\n', 1)[0]
if re.search('php include', firstline):
    # Pure include wrappers carry no content of their own.
    print('ignore file')
    sys.exit(0)
## fix images path
html = html.replace('](images/', '](/images/')
# Collapse the "{lien:db:<id>:...:...html:lien}" internal-link markup down
# to its fourth captured field, preserving the surrounding separators.
html = re.sub("(\s)({lien:db:)(\d+):(.*):(.*)(\.html:lien})(\s)", " \\4 ", html)
html = re.sub("()({lien:db:)(\d+):(.*):(.*)(\.html:lien})(,)", "'\\4 ", html)
html = re.sub("(\s)({lien:db:)(\d+):(.*):(.*)(\.html:lien})(,)", " \\4,", html)
html = re.sub("(\s)({lien:db:)(\d+):(.*):(.*)(\.html:lien})(\.)", " \\4.", html)
# Strip leftover PHP fragments that survived the HTML->markdown pass.
html = re.sub("(<\?php echo \$_SERVER\[\'REQUEST_URI\'\]; \?>)", "", html)
html = re.sub("(php include\(\"modules/flag\\_spoiler\.php\"\); \?)", "", html)
result = '---\ntitle: "' + firstline + '"\n---\n\n' + html
# The with-statements close both files; the old explicit close() calls
# inside the blocks were redundant.
with open(output_file, 'w') as output:
    output.write(result)
| 32.352941 | 86 | 0.537727 |
ddab10387c063d1c5dd03502020dc60340b9c9c1 | 1,957 | py | Python | scripts/disktest.py | CloudAdvocacy/ObjectDetection | ba823455a43684dea8f0bc1eab6f669a818895bb | [
"MIT"
] | null | null | null | scripts/disktest.py | CloudAdvocacy/ObjectDetection | ba823455a43684dea8f0bc1eab6f669a818895bb | [
"MIT"
] | null | null | null | scripts/disktest.py | CloudAdvocacy/ObjectDetection | ba823455a43684dea8f0bc1eab6f669a818895bb | [
"MIT"
] | null | null | null | import io, os
import argparse
from timeit import default_timer as timer
# Command-line entry point: run a fixed battery of file I/O benchmarks.
# NOTE(review): time, read_test and write_test are expected to be defined
# elsewhere in this file (not visible in this copy) — confirm. The second
# argument to time() appears to be the total byte count (count * size).
parser = argparse.ArgumentParser(description="File Performance Testing Util")
parser.add_argument("command",help="Test to perform",choices=['read','write','readany'])
parser.add_argument("dir",help="Directory to use")
args = parser.parse_args()
if args.command == "read":
    # Read benchmarks: many small files down to one large file.
    time("1000 1k files",1024*1000,lambda: read_test(1000,1024))
    time("100 1M files",1024*1024*100,lambda: read_test(100,1024*1024))
    time("10 10M files",10*1024*1024*10,lambda: read_test(10,1024*1024*10))
    time("1 100M files",1*1024*1024*100,lambda: read_test(1,1024*1024*100))
elif args.command == "write":
    # Write benchmarks with the same size ladder.
    time("1000 1k files",1024*1000,lambda: write_test(1000,1024))
    time("100 1M files",1024*1024*100,lambda: write_test(100,1024*1024))
    time("10 10M files",10*1024*1024*10,lambda: write_test(10,1024*1024*10))
    time("1 100M files",1*1024*1024*100,lambda: write_test(1,1024*1024*100))
elif args.command == "readany":
    # Untimed read with default arguments.
    read_test()
| 33.169492 | 94 | 0.616249 |
ddab703f80463a12929dce437920fcb4f1403fd4 | 151 | py | Python | assignment2/scripts/exploit15/exploit15.py | vhazali/cs5331 | 3b3618aaa17199ebcd3c01bc6c25ddbdbe4f3d0f | [
"MIT"
] | 8 | 2020-02-22T12:47:12.000Z | 2021-12-03T11:39:19.000Z | assignment2/scripts/exploit15/exploit15.py | vhazali/cs5331 | 3b3618aaa17199ebcd3c01bc6c25ddbdbe4f3d0f | [
"MIT"
] | null | null | null | assignment2/scripts/exploit15/exploit15.py | vhazali/cs5331 | 3b3618aaa17199ebcd3c01bc6c25ddbdbe4f3d0f | [
"MIT"
] | 4 | 2018-08-15T12:58:36.000Z | 2021-12-29T07:06:29.000Z | import webbrowser
url = 'http://www.wsb.com/Assignment2/case15.php?videourl=" onerror="alert(document.cookie)'
new = 2
webbrowser.open(url, new=new)
| 21.571429 | 92 | 0.748344 |
ddabee57641e5f2212bdb1af4233c76d2dc9db3e | 2,238 | py | Python | introspection/call_stack.py | Aran-Fey/introspection | 0ce3a16688b51bdcb72c7b070d571a1004f5151b | [
"MIT"
] | 1 | 2022-03-02T23:13:06.000Z | 2022-03-02T23:13:06.000Z | introspection/call_stack.py | Aran-Fey/introspection | 0ce3a16688b51bdcb72c7b070d571a1004f5151b | [
"MIT"
] | null | null | null | introspection/call_stack.py | Aran-Fey/introspection | 0ce3a16688b51bdcb72c7b070d571a1004f5151b | [
"MIT"
] | null | null | null |
import types
from typing import Iterable, Union
from .call_frame import CallFrame
__all__ = ['CallStack']
| 26.963855 | 156 | 0.626005 |
ddaf61fd3b67b0ad82d3ff5a5a750292ac61bd57 | 2,728 | py | Python | products/views.py | deepindo/DoPython | d80b85b3b24566de6ece9f452564e4827e705959 | [
"MIT"
] | 4 | 2022-01-04T09:56:19.000Z | 2022-01-20T12:14:16.000Z | products/views.py | deepindo/DoPython | d80b85b3b24566de6ece9f452564e4827e705959 | [
"MIT"
] | null | null | null | products/views.py | deepindo/DoPython | d80b85b3b24566de6ece9f452564e4827e705959 | [
"MIT"
] | 1 | 2022-01-20T09:40:16.000Z | 2022-01-20T09:40:16.000Z | from django.shortcuts import render, get_object_or_404
from django.core.paginator import Paginator, PageNotAnInteger, EmptyPage
from products.models import Product
def productList(request, productName):
    """List products of the given type with a windowed pagination bar.

    Args:
        request: The incoming HttpRequest; ?page=N selects the page.
        productName: Product type slug from the URL ('robot', 'monitor', ...),
            mapped below to the product_type value stored on Product.

    Returns:
        The rendered 'products/productList.html' response.
    """
    submenu = productName
    # Map the URL slug to the product_type value stored in the database.
    # NOTE(review): the mapped names were non-ASCII and have been stripped
    # to '' in this copy of the file — restore them from version control.
    if productName == 'robot':
        productName = ''
    elif productName == 'monitor':
        productName = ''
    else:
        productName = ''
    product_list = Product.objects.filter(product_type=productName).order_by('-publish_date')
    # Paginate two products per page.
    p = Paginator(product_list, 2)
    if p.num_pages <= 1:
        page_data = ''
    else:
        # Requested page number, defaulting to the first page.
        page = int(request.GET.get('page', 1))
        product_list = p.page(page)
        # Window of up to two page numbers on each side of the current page,
        # plus flags for ellipses and first/last-page shortcuts.
        left = []
        right = []
        left_has_more = False
        right_has_more = False
        first = False
        last = False
        total_pages = p.num_pages
        page_range = p.page_range
        if page == 1:
            right = page_range[page:page + 2]
            # (Removed a stray debug print(total_pages) that was here.)
            if right[-1] < total_pages - 1:
                right_has_more = True
            if right[-1] < total_pages:
                last = True
        elif page == total_pages:
            left = page_range[(page - 3) if (page - 3) > 0 else 0:page - 1]
            if left[0] > 2:
                left_has_more = True
            if left[0] > 1:
                first = True
        else:
            left = page_range[(page - 3) if (page - 3) > 0 else 0:page - 1]
            right = page_range[page:page + 2]
            if left[0] > 2:
                left_has_more = True
            if left[0] > 1:
                first = True
            if right[-1] < total_pages - 1:
                right_has_more = True
            if right[-1] < total_pages:
                last = True
        page_data = {
            'left': left,
            'right': right,
            'left_has_more': left_has_more,
            'right_has_more': right_has_more,
            'first': first,
            'last': last,
            'total_pages': total_pages,
            'page': page,
        }
    context = {
        'active_menu': 'products',
        'sub_menu': submenu,
        'productName': productName,
        'productList': product_list,
        'pageData': page_data
    }
    return render(request, 'products/productList.html', context)
def productDetail(request, id):
    """Render the detail page for a single product.

    Looks the product up by primary key (404 if missing), increments its
    view counter, and renders the detail template.
    (Also removed dataset-extraction junk that was fused onto the closing
    line and made it a SyntaxError.)
    """
    product = get_object_or_404(Product, id=id)
    # Count this page load as one more view.
    product.product_views += 1
    product.save()
    context = {
        'active_menu': 'products',
        'product': product,
    }
    return render(request, 'products/productDetail.html', context)
ddb0240924a8101cddcbf80261a52d4f5843c4bf | 1,545 | py | Python | misc/CharacterMotion.py | qwewqa/dl-datamine | a8e050731f67e4cf49123947eadf66ac0fd948ca | [
"MIT"
] | 2 | 2020-03-31T00:07:54.000Z | 2020-04-01T23:39:23.000Z | misc/CharacterMotion.py | qwewqa/dl-datamine | a8e050731f67e4cf49123947eadf66ac0fd948ca | [
"MIT"
] | null | null | null | misc/CharacterMotion.py | qwewqa/dl-datamine | a8e050731f67e4cf49123947eadf66ac0fd948ca | [
"MIT"
] | 1 | 2021-08-02T04:21:31.000Z | 2021-08-02T04:21:31.000Z | import json
import os
from dataclasses import dataclass, field
from typing import Dict, Optional
| 28.611111 | 100 | 0.618123 |
ddb050b82209d0997ed09ca448c8c2752e16f7c5 | 14,431 | py | Python | kube/config.py | nearmap/kubefs | e2f6c019f04e436d031874e40c59ba0ee61d8c58 | [
"MIT"
] | 3 | 2021-05-31T06:45:37.000Z | 2021-10-05T22:36:37.000Z | kube/config.py | nearmap/kubefs | e2f6c019f04e436d031874e40c59ba0ee61d8c58 | [
"MIT"
] | 8 | 2021-09-06T00:43:13.000Z | 2021-10-01T00:22:53.000Z | kube/config.py | nearmap/kubefs | e2f6c019f04e436d031874e40c59ba0ee61d8c58 | [
"MIT"
] | null | null | null | import base64
import fnmatch
import logging
import os
import tempfile
from ssl import SSLContext, create_default_context
from typing import Dict, List, Optional, Sequence
import yaml
from kube.tools.repr import disp_secret_blob, disp_secret_string
def get_selector() -> KubeConfigSelector:
loader = KubeConfigLoader()
collection = loader.create_collection()
selector = KubeConfigSelector(collection=collection)
return selector
| 30.901499 | 86 | 0.56538 |
ddb11949c25d2f8ec4e231606475f6d7c71dff61 | 1,256 | py | Python | other/application/windowApp/test6.py | Ethan7102/FYP | c6560a0b95ad78d5e1a341ab2d93c063e10c6631 | [
"MIT"
] | null | null | null | other/application/windowApp/test6.py | Ethan7102/FYP | c6560a0b95ad78d5e1a341ab2d93c063e10c6631 | [
"MIT"
] | null | null | null | other/application/windowApp/test6.py | Ethan7102/FYP | c6560a0b95ad78d5e1a341ab2d93c063e10c6631 | [
"MIT"
] | 1 | 2021-01-23T07:59:57.000Z | 2021-01-23T07:59:57.000Z | from PyQt5.QtCore import QThread, pyqtSignal, QDateTime, QObject
from PyQt5.QtWidgets import QApplication, QDialog, QLineEdit
import time
import sys
if __name__ == '__main__':
app = QApplication(sys.argv)
win = Window()
win.show()
sys.exit(app.exec_()) | 25.632653 | 64 | 0.627389 |
ddb1e5dab629942b29ba8fb6aab9cb866f52c858 | 13,098 | py | Python | model/meter.py | meiwei92/meter-alignment | dc92e4aca0ca80ed1c9418027b050e9631b5fb7a | [
"MIT"
] | null | null | null | model/meter.py | meiwei92/meter-alignment | dc92e4aca0ca80ed1c9418027b050e9631b5fb7a | [
"MIT"
] | null | null | null | model/meter.py | meiwei92/meter-alignment | dc92e4aca0ca80ed1c9418027b050e9631b5fb7a | [
"MIT"
] | null | null | null | from __future__ import annotations
from typing import List, Tuple, OrderedDict as OrderedDictType, DefaultDict, Optional
from collections import OrderedDict, defaultdict
from metric import MusicNote, TimePointSequence
from model.base import MidiModel, MidiModelState
from model.beat import TatumTrackingModelState, TatumTrackingGrammarModelState
from model.voice import VoiceSplittingModelState, VoiceSplittingGrammarModelState
from model.hierarchy import HierarchyModelState, HierarchyGrammarModelState
| 42.803922 | 134 | 0.64506 |
ddb3b3248298a56481c0e14a355de5998e1c7be4 | 1,029 | py | Python | hw2skeleton/find_features.py | hasuni-max/hw2-skeleton | 498f5d250ec18042c1e21fac177a92f3c7d3da7c | [
"Apache-2.0"
] | null | null | null | hw2skeleton/find_features.py | hasuni-max/hw2-skeleton | 498f5d250ec18042c1e21fac177a92f3c7d3da7c | [
"Apache-2.0"
] | null | null | null | hw2skeleton/find_features.py | hasuni-max/hw2-skeleton | 498f5d250ec18042c1e21fac177a92f3c7d3da7c | [
"Apache-2.0"
] | null | null | null |
global plus
global minus
minus = ["ASP","GLU"]
plus = ["ARG","HIS","LYS"]
def find_charge(residues):
"""
Takes a list of residues and returns the number of plus and
minus charged residues.
This function uses the global plus and minus variables
"""
global plus
global minus
plus_charge = sum([res in plus for res in residues])
minus_charge = sum([res in minus for res in residues])
return plus_charge, minus_charge
| 25.097561 | 73 | 0.737609 |
ddb4c04468f28635f43140d7b43e540cbcb4c57d | 632 | py | Python | main.py | jalexray/CSV-Empty-Finder | 69d545ec9a757d331dbd2b1b298842de2c079129 | [
"MIT"
] | null | null | null | main.py | jalexray/CSV-Empty-Finder | 69d545ec9a757d331dbd2b1b298842de2c079129 | [
"MIT"
] | null | null | null | main.py | jalexray/CSV-Empty-Finder | 69d545ec9a757d331dbd2b1b298842de2c079129 | [
"MIT"
] | null | null | null | # imports
import csv
# open the file
with open("example.csv") as file:
reader = csv.reader(file)
# prep to store names of columns
titleRow = reader.next()
#rest = [row for row in reader]
columnList = {}
for row in reader:
iterator = 0
cellList = []
for cell in row:
if cell == "":
cellList.append(titleRow[iterator])
#print("Within " + row[0] + ", " + titleRow[iterator] + " has an empty")
iterator += 1
#print cellList
columnList[row[0]] = cellList
for item in sorted(columnList):
itemString = (str(columnList[item]))
if itemString != "[]":
print(item + ":" + str(columnList[item]))
| 20.387097 | 76 | 0.629747 |
ddb85f6c9f54c6a26a73cc1b1e07e1f705ce4e40 | 124 | py | Python | test_suite/suite/test09/other_mod.py | joncatanio/cannoli | 410f6bea362bf9e33eecc0e01fb080dadd14ef23 | [
"MIT"
] | 755 | 2017-12-09T05:34:43.000Z | 2022-03-26T09:15:56.000Z | test_suite/suite/test09/other_mod.py | joncatanio/cannoli | 410f6bea362bf9e33eecc0e01fb080dadd14ef23 | [
"MIT"
] | 8 | 2017-12-12T01:03:18.000Z | 2020-06-29T01:41:03.000Z | test_suite/suite/test09/other_mod.py | joncatanio/cannoli | 410f6bea362bf9e33eecc0e01fb080dadd14ef23 | [
"MIT"
] | 23 | 2018-05-17T17:48:23.000Z | 2022-03-26T09:15:57.000Z | import some_mod
| 17.714286 | 47 | 0.669355 |
ddb88819c796db53b08989fe1a656955b84d1760 | 140 | py | Python | application/blueprints/user/__init__.py | demetrius-mp/flask-template | 2dbab372bf2d7d5ff60af430c4b69c95a41cd681 | [
"MIT"
] | null | null | null | application/blueprints/user/__init__.py | demetrius-mp/flask-template | 2dbab372bf2d7d5ff60af430c4b69c95a41cd681 | [
"MIT"
] | 2 | 2021-10-14T02:00:15.000Z | 2021-10-14T02:19:44.000Z | application/blueprints/user/__init__.py | demetrius-mp/flask-template | 2dbab372bf2d7d5ff60af430c4b69c95a41cd681 | [
"MIT"
] | null | null | null | from flask import Flask
from application.blueprints.user.routes import users
| 17.5 | 52 | 0.792857 |
ddb942c43951bea6ec9329f93418d8030cd886cd | 249 | py | Python | src/methods/defs.py | karlotness/nn-benchmark | 65ddb2f3d9934db5718417fd800278a97f627ba2 | [
"MIT"
] | 13 | 2021-08-04T21:07:50.000Z | 2022-02-17T20:16:41.000Z | src/methods/defs.py | karlotness/nn-benchmark | 65ddb2f3d9934db5718417fd800278a97f627ba2 | [
"MIT"
] | null | null | null | src/methods/defs.py | karlotness/nn-benchmark | 65ddb2f3d9934db5718417fd800278a97f627ba2 | [
"MIT"
] | 1 | 2021-10-03T00:37:05.000Z | 2021-10-03T00:37:05.000Z | from collections import namedtuple
import torch
NONLINEARITIES = {
"tanh": torch.nn.Tanh,
"relu": torch.nn.ReLU,
}
TimeDerivative = namedtuple("TimeDerivative", ["dq_dt", "dp_dt"])
StepPrediction = namedtuple("StepPrediction", ["q", "p"])
| 22.636364 | 65 | 0.698795 |
ddba665d13fe8e2f5bc1b2bf2549c77f6e609bdd | 1,213 | py | Python | caosmedicallab.py | erichilarysmithsr/CAOSMedicalLabDb | 9a7acee44f9f3680c0a01332797ce94a0895c2d1 | [
"Apache-2.0"
] | null | null | null | caosmedicallab.py | erichilarysmithsr/CAOSMedicalLabDb | 9a7acee44f9f3680c0a01332797ce94a0895c2d1 | [
"Apache-2.0"
] | 10 | 2021-04-03T14:00:28.000Z | 2022-01-28T23:44:11.000Z | caosmedicallab.py | erichilarysmithsr/CAOSMedicalLabDb | 9a7acee44f9f3680c0a01332797ce94a0895c2d1 | [
"Apache-2.0"
] | null | null | null | >>> S = 'Susceptible'
>>> print(S)
>>> E = 'Exposed'
>>> print(E)
>>> I = 'Infectious'
>>> print(I)
>>> R = 'Removed'
>>> print(R)
>>> N = 'Total Population'
>>> print(N)
>>> C = 'Living with COVID19'
>>> print(C)
>>> D = 'Living with Diabetes'
>>> print(D)
>>> Susceptible = input('Enter number of Susceptible Individuals')
>>> print(Susceptible + 1)
>>> print(int(Susceptible)+ 1)
>>> Exposed = input('Enter number of Exposed Individuals')
>>> print(Exposed + 1)
>>> print(int(Exposed)+ 1)
>>> Infectious = input('Enter number of Infectious Individuals')
>>> print(Infectious + 1)
>>> print(int(Infectious)+ 1)
>>> Removed = input('Enter number of Removed Individuals')
>>> print(Removed + 1)
>>> print(int(Removed) + 1)
>>> Total Population = input('Enter number of Total Population')
>>> print(Total Population + 1)
>>> print(int(Total Population)+ 1)
>>> Living with COVID19 = input('Enter number of Individuals Living with COVID19')
>>> print(Living with COVID19 + 1)
>>> print(int(Living with COVID19)+ 1)
>>> Living with Diabetes = input('Enter number of Individuals Living with Diabetes')
>>> print(Living with Diabetes + 1)
>>> print(int(Living with Diabetes)+ 1)
>>> S = C + D - E - I - R
>>> print(S)
| 31.921053 | 84 | 0.645507 |
ddbaaba267f11c03c921ef7b0388970b8db8a6b9 | 2,396 | py | Python | src/quacks/__init__.py | ariebovenberg/quacks | 839d307b24f3f37d9a5318c16acb631b9a1153f0 | [
"MIT"
] | 11 | 2021-12-12T20:51:15.000Z | 2022-02-02T12:08:32.000Z | src/quacks/__init__.py | ariebovenberg/quacks | 839d307b24f3f37d9a5318c16acb631b9a1153f0 | [
"MIT"
] | 8 | 2021-12-14T12:53:51.000Z | 2022-03-15T04:29:44.000Z | src/quacks/__init__.py | ariebovenberg/quacks | 839d307b24f3f37d9a5318c16acb631b9a1153f0 | [
"MIT"
] | 1 | 2021-12-15T16:50:34.000Z | 2021-12-15T16:50:34.000Z | from typing import _GenericAlias # type: ignore
from typing import ClassVar
from typing_extensions import Protocol
# Single-sourcing the version number with poetry:
# https://github.com/python-poetry/poetry/pull/2366#issuecomment-652418094
try:
__version__ = __import__("importlib.metadata").metadata.version(__name__)
except ModuleNotFoundError: # pragma: no cover
__version__ = __import__("importlib_metadata").version(__name__)
__all__ = ["readonly"]
def readonly(cls: type) -> type:
"""Decorate a :class:`~typing.Protocol` to make it read-only.
Unlike default protocol attributes, read-only protocols will match
frozen dataclasses and other immutable types.
Read-only attributes are already supported in protocols with
``@property``, but this is cumbersome to do for many attributes.
The ``@readonly`` decorator effectively transforms all mutable attributes
into read-only properties.
Example
-------
.. code-block:: python
from quacks import readonly
@readonly
class User(Protocol):
id: int
name: str
is_premium: bool
# equivalent to:
class User(Protocol):
@property
def id(self) -> int: ...
@property
def name(self) -> str: ...
@property
def is_premium(self) -> bool: ...
Warning
-------
Subprotocols and inherited attributes are not supported yet.
"""
if not _is_a_protocol(cls):
raise TypeError("Readonly decorator can only be applied to Protocols.")
elif any(b is not Protocol and _is_a_protocol(b) for b in cls.__bases__):
raise NotImplementedError("Subprotocols not yet supported.")
for name, typ in getattr(cls, "__annotations__", {}).items():
if not _is_classvar(typ):
prop.fget.__name__ = name # type: ignore
prop.fget.__annotations__ = {"return": typ} # type: ignore
setattr(cls, name, prop)
return cls
| 29.95 | 79 | 0.647329 |
ddbb30413bba7d94f4e08a1b8b5b0f62d116a712 | 13,818 | py | Python | gobbli/inspect/evaluate.py | RTIInternational/gobbli | d9ec8132f74ce49dc4bead2fad25b661bcef6e76 | [
"Apache-2.0"
] | 276 | 2019-09-13T08:25:51.000Z | 2022-03-05T13:07:55.000Z | gobbli/inspect/evaluate.py | RTIInternational/gobbli | d9ec8132f74ce49dc4bead2fad25b661bcef6e76 | [
"Apache-2.0"
] | 15 | 2019-09-06T14:05:30.000Z | 2022-01-01T20:15:06.000Z | gobbli/inspect/evaluate.py | RTIInternational/gobbli | d9ec8132f74ce49dc4bead2fad25b661bcef6e76 | [
"Apache-2.0"
] | 24 | 2019-09-18T15:11:42.000Z | 2021-12-23T18:59:55.000Z | from dataclasses import dataclass
from typing import Callable, Dict, List, Optional, Sequence, Tuple, Union
import altair as alt
import pandas as pd
from sklearn.metrics import (
accuracy_score,
classification_report,
f1_score,
precision_score,
recall_score,
)
from gobbli.util import (
as_multiclass,
as_multilabel,
escape_line_delimited_text,
is_multilabel,
multilabel_to_indicator_df,
pred_prob_to_pred_label,
pred_prob_to_pred_multilabel,
truncate_text,
)
MetricFunc = Callable[[Sequence[str], pd.DataFrame], float]
"""
A function used to calculate some metric. It should accept a sequence of true labels (y_true)
and a dataframe of shape (n_samples, n_classes) containing predicted probabilities; it should
output a real number.
"""
DEFAULT_METRICS: Dict[str, MetricFunc] = {
"Weighted F1 Score": lambda y_true, y_pred: f1_score(
y_true, y_pred, average="weighted"
),
"Weighted Precision Score": lambda y_true, y_pred: precision_score(
y_true, y_pred, average="weighted"
),
"Weighted Recall Score": lambda y_true, y_pred: recall_score(
y_true, y_pred, average="weighted"
),
"Accuracy": lambda y_true, y_pred: accuracy_score(y_true, y_pred),
}
"""
The default set of metrics to evaluate classification models with. Users may want to extend
this.
"""
| 35.25 | 102 | 0.56506 |
ddbc20a9147b17ccfb31328be56cce367423b65a | 791 | py | Python | victor_fake_hardware_interface/scripts/fake_grippers_node.py | MMintLab/kuka_iiwa_interface | 0dd258641377263e7275bc63f37cf32eb12f3e56 | [
"BSD-2-Clause"
] | 5 | 2021-01-11T09:00:26.000Z | 2021-12-13T15:59:01.000Z | victor_fake_hardware_interface/scripts/fake_grippers_node.py | MMintLab/kuka_iiwa_interface | 0dd258641377263e7275bc63f37cf32eb12f3e56 | [
"BSD-2-Clause"
] | 35 | 2020-07-01T14:48:40.000Z | 2021-07-13T18:38:53.000Z | victor_fake_hardware_interface/scripts/fake_grippers_node.py | MMintLab/kuka_iiwa_interface | 0dd258641377263e7275bc63f37cf32eb12f3e56 | [
"BSD-2-Clause"
] | 1 | 2021-01-08T23:39:17.000Z | 2021-01-08T23:39:17.000Z | #!/usr/bin/env python
import rospy
from victor_fake_hardware_interface.minimal_fake_arm_interface import MinimalFakeGripperInterface
if __name__ == '__main__':
main()
| 27.275862 | 114 | 0.652339 |
ddbc90c6ac864e7ce62505e078e39b8bc44056dd | 11,647 | py | Python | python/bgraph.py | brunodferrari/bdp | d320add1e451c85b6777ae34901bbd6fd3797114 | [
"Unlicense"
] | null | null | null | python/bgraph.py | brunodferrari/bdp | d320add1e451c85b6777ae34901bbd6fd3797114 | [
"Unlicense"
] | null | null | null | python/bgraph.py | brunodferrari/bdp | d320add1e451c85b6777ae34901bbd6fd3797114 | [
"Unlicense"
] | null | null | null | # -*- coding: utf-8 -*-
import numpy as np
import matplotlib.pyplot as plt
import networkx as nx
import pandas as pd
import copy
from numba import njit
from numba.typed import Dict, List
from numba.core import types
from concurrent.futures import ThreadPoolExecutor
np.seterr(over='ignore')
#plot utilizando o lyt adequado
#encontra baricentro do vertice | 28.06506 | 145 | 0.479608 |
ddbf5e2f65d38e783f4768e0ca9abc2a32d54029 | 3,403 | py | Python | src/odin/http/server.py | stfc-aeg/odin-control | 71ab2e6e6e1a7c7ce322ea0df31a9d675f7b92bf | [
"Apache-2.0"
] | 4 | 2018-05-24T13:38:23.000Z | 2021-08-18T08:32:54.000Z | src/odin/http/server.py | stfc-aeg/odin-control | 71ab2e6e6e1a7c7ce322ea0df31a9d675f7b92bf | [
"Apache-2.0"
] | 20 | 2018-04-10T09:28:01.000Z | 2022-03-17T11:43:59.000Z | src/odin/http/server.py | stfc-aeg/odin-control | 71ab2e6e6e1a7c7ce322ea0df31a9d675f7b92bf | [
"Apache-2.0"
] | 3 | 2017-06-07T13:28:38.000Z | 2019-07-16T10:02:21.000Z | """odin.http.server - ODIN HTTP Server class.
This module provides the core HTTP server class used in ODIN, which handles all client requests,
handing off API requests to the appropriate API route and adapter plugins, and defining the
default route used to serve static content.
Tim Nicholls, STFC Application Engineering
"""
import logging
import tornado.gen
import tornado.web
import tornado.ioloop
from tornado.log import access_log
from odin.http.routes.api import ApiRoute
from odin.http.routes.default import DefaultRoute
| 35.447917 | 96 | 0.658537 |
ddc0751188e1d1856d4d69064affd55e5821f001 | 1,382 | py | Python | leetcode/ds_stack_valid_parentheses.py | ngovindaraj/Python | edbcd302533bef81aa0c01e902e6081df58f383c | [
"MIT"
] | null | null | null | leetcode/ds_stack_valid_parentheses.py | ngovindaraj/Python | edbcd302533bef81aa0c01e902e6081df58f383c | [
"MIT"
] | null | null | null | leetcode/ds_stack_valid_parentheses.py | ngovindaraj/Python | edbcd302533bef81aa0c01e902e6081df58f383c | [
"MIT"
] | null | null | null | # @file Valid Parentheses
# @brief Given a string containing just the characters '(', ')', '{', '}',
# '[' and ']', determine if the input string is valid.
# https://leetcode.com/problems/valid-parentheses/
import collections
'''
Given a string containing just the characters '(', ')', '{', '}', '[' and ']',
determine if the input string is valid.
The brackets must close in the correct order, "()" and "()[]{}" are all valid
but "(]" and "([)]" are not.
'''
# time complexity : O(n)
# space complexity: O(n)
| 40.647059 | 79 | 0.552822 |
ddc0a159fdea30685a3e6c6f67386c3bf2f75073 | 3,746 | py | Python | 2016/round_1b/technobabble.py | laichunpongben/CodeJam | a048229bce1bc680dc85c8a69ef395a2f049732a | [
"Apache-2.0"
] | null | null | null | 2016/round_1b/technobabble.py | laichunpongben/CodeJam | a048229bce1bc680dc85c8a69ef395a2f049732a | [
"Apache-2.0"
] | null | null | null | 2016/round_1b/technobabble.py | laichunpongben/CodeJam | a048229bce1bc680dc85c8a69ef395a2f049732a | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python
from __future__ import print_function
from collections import deque
if __name__ == '__main__':
import os
samples = [
['HYDROCARBON COMBUSTION',
'QUAIL BEHAVIOR',
'QUAIL COMBUSTION'],
['CODE JAM',
'SPACE JAM',
'PEARL JAM'],
['INTERGALACTIC PLANETARY',
'PLANETARY INTERGALACTIC'],
['BOUNDARY GRAVITY',
'BOUNDARY HERMENEUTICS',
'BOUNDARY TRANSGRESSION',
'QUANTUM GRAVITY',
'QUANTUM HERMENEUTICS',
'QUANTUM TRANSGRESSION',
'TRANSFORMATIVE GRAVITY',
'TRANSFORMATIVE HERMENEUTICS',
'TRANSFORMATIVE TRANSGRESSION'],
['GF CH',
'RO GI',
'YB GI',
'TD HI',
'YG HI',
'IZ NB',
'BQ TA',
'GF TP',
'GR WG',
'IZ ZD']
]
for sample in samples:
print(count_fake(sample))
data_files = ['C-small-practice']
for f in data_files:
with open(os.path.join(os.path.dirname(os.path.realpath(__file__)),
'{0}.in'.format(f)), 'r') as input_file:
lines = input_file.readlines()
test_case_count = int(lines[0].replace('\n' ,''))
test_cases = []
inputs = [line.replace('\n', '') for line in lines[1:]]
i = 0
while i < len(inputs):
n = int(inputs[i])
topics = inputs[i+1:i+n+1]
test_cases.append(topics)
i += n+1
i = 1
with open(os.path.join(os.path.dirname(os.path.realpath(__file__)),
'{0}.out'.format(f)), 'w') as output_file:
for test_case in test_cases:
print(i)
output_file.write('Case #{0}: {1}\n'.format(i, count_fake(test_case)))
i += 1
| 30.455285 | 95 | 0.509343 |
ddc2ab09f2aabd1f5e1c2e16308c14b4efc96586 | 101 | py | Python | dask_xgboost/__init__.py | ksangeek/dask-xgboost | 8ca1d69f21a6c666eaf581fb88b20cff2b6b05ef | [
"BSD-3-Clause"
] | null | null | null | dask_xgboost/__init__.py | ksangeek/dask-xgboost | 8ca1d69f21a6c666eaf581fb88b20cff2b6b05ef | [
"BSD-3-Clause"
] | null | null | null | dask_xgboost/__init__.py | ksangeek/dask-xgboost | 8ca1d69f21a6c666eaf581fb88b20cff2b6b05ef | [
"BSD-3-Clause"
] | null | null | null | from .core import _train, train, predict, XGBClassifier, XGBRegressor # noqa
__version__ = '0.1.7'
| 25.25 | 77 | 0.742574 |
ddc39e71f4d5f6b6a53e16b07decfbb4b7887488 | 12,963 | py | Python | life360.indigoPlugin/Contents/Server Plugin/plugin.py | ryanbuckner/life360-plugin | 3e64108b91c4ee0f4f85f6e7aa31fa7bd1b1d6fe | [
"MIT"
] | 1 | 2021-09-25T15:43:00.000Z | 2021-09-25T15:43:00.000Z | life360.indigoPlugin/Contents/Server Plugin/plugin.py | ryanbuckner/life360-plugin | 3e64108b91c4ee0f4f85f6e7aa31fa7bd1b1d6fe | [
"MIT"
] | null | null | null | life360.indigoPlugin/Contents/Server Plugin/plugin.py | ryanbuckner/life360-plugin | 3e64108b91c4ee0f4f85f6e7aa31fa7bd1b1d6fe | [
"MIT"
] | null | null | null | #! /usr/bin/env python
# -*- coding: utf-8 -*-
####################
# Copyright (c) 2021 ryanbuckner
# https://github.com/ryanbuckner/life360-plugin/wiki
#
# Based on neilk's Solcast plugin
################################################################################
# Imports
################################################################################
import indigo
import sys
from life360 import life360
import datetime
try:
from geopy.geocoders import Nominatim
except:
self.logger.debug("Geopy python library is not found. Try reinstalling the Plugin")
pass
################################################################################
# Globals
################################################################################
################################################################################
| 36.931624 | 174 | 0.66011 |
ddc3d50c63fd6d2bc041dad2539a4d22872461ff | 699 | py | Python | accounts/views.py | airmoor/learnweb | 5867eadaca45b847f6fba8fd0be8a8ccdaeceea0 | [
"MIT"
] | null | null | null | accounts/views.py | airmoor/learnweb | 5867eadaca45b847f6fba8fd0be8a8ccdaeceea0 | [
"MIT"
] | null | null | null | accounts/views.py | airmoor/learnweb | 5867eadaca45b847f6fba8fd0be8a8ccdaeceea0 | [
"MIT"
] | null | null | null | from django.shortcuts import render
from django.contrib.auth.forms import UserCreationForm
from django.contrib.auth.decorators import login_required
from django.urls import reverse_lazy
from django.views import generic, View
| 27.96 | 64 | 0.726753 |
ddc4e2961bdd997e8ed912766a3c871b4f8b1cc7 | 3,306 | py | Python | openmapi/globales.py | IgnacioPardo/mAPI-Provincias | 812fc12bcc72d6aa28ab2e39af2d64d0aa68c86b | [
"MIT"
] | 4 | 2020-08-02T06:51:04.000Z | 2022-03-22T21:31:44.000Z | openmapi/globales.py | Creativity-Hub/Open-mAPI | b7e0ee9acda424aec0e84513d8e968aa6ff5d7c5 | [
"MIT"
] | null | null | null | openmapi/globales.py | Creativity-Hub/Open-mAPI | b7e0ee9acda424aec0e84513d8e968aa6ff5d7c5 | [
"MIT"
] | 1 | 2022-03-24T22:20:47.000Z | 2022-03-24T22:20:47.000Z | import requests
from bs4 import BeautifulSoup | 23.614286 | 119 | 0.650333 |
ddc738c6ed27c814c11c63a6fb453a793040af60 | 947 | py | Python | openpype/hosts/tvpaint/plugins/publish/validate_start_frame.py | jonclothcat/OpenPype | d1208cbebc0a7f378de0062ccd653295c6399195 | [
"MIT"
] | 1 | 2022-02-08T15:40:41.000Z | 2022-02-08T15:40:41.000Z | openpype/hosts/tvpaint/plugins/publish/validate_start_frame.py | zafrs/OpenPype | 4b8e7e1ed002fc55b31307efdea70b0feaed474f | [
"MIT"
] | 2 | 2022-03-18T01:46:03.000Z | 2022-03-18T01:46:16.000Z | openpype/hosts/tvpaint/plugins/publish/validate_start_frame.py | zafrs/OpenPype | 4b8e7e1ed002fc55b31307efdea70b0feaed474f | [
"MIT"
] | null | null | null | import pyblish.api
from openpype.pipeline import PublishXmlValidationError
from openpype.hosts.tvpaint.api import lib
| 24.921053 | 57 | 0.636748 |
ddc806857072eff9c83a07e28e06781742f5341f | 1,304 | py | Python | leds/rgbLed.py | JDRyder/stardust | a1b1de99e7d10c18f7243217a72aa5e7a3566e74 | [
"MIT"
] | null | null | null | leds/rgbLed.py | JDRyder/stardust | a1b1de99e7d10c18f7243217a72aa5e7a3566e74 | [
"MIT"
] | null | null | null | leds/rgbLed.py | JDRyder/stardust | a1b1de99e7d10c18f7243217a72aa5e7a3566e74 | [
"MIT"
] | null | null | null | import board
import neopixel
import time
pixels = neopixel.NeoPixel(board.D21, 1)
GREEN = (255, 0, 0) #
RED = (0,255,0) #
BLUE = (0,0,255) #
YELLOW = (255,255,0) #
CYAN = (255,0,255) #
VIOLET = (0,127,255) #
WHITE = (255,255,255) #
OFF = (0,0,0) #
| 15.162791 | 40 | 0.624233 |
ddc862cde96df508b37a55b7bb12e12b0c12e813 | 3,548 | py | Python | utils/Model_builder.py | Devwalkar/General_codebase | d52eee09248caa715d7e5e8b87f145d1989e278d | [
"MIT"
] | null | null | null | utils/Model_builder.py | Devwalkar/General_codebase | d52eee09248caa715d7e5e8b87f145d1989e278d | [
"MIT"
] | null | null | null | utils/Model_builder.py | Devwalkar/General_codebase | d52eee09248caa715d7e5e8b87f145d1989e278d | [
"MIT"
] | null | null | null | import torch
import pretrainedmodels as PM
import torch.nn as nn
from .Mobilenet import MobileNetV2
device = 'cuda' if torch.cuda.is_available() else 'cpu'
| 37.744681 | 94 | 0.669391 |
ddc91781c017fdef90c8f25f225a0256fda47415 | 828 | py | Python | examples/main.py | marcoaaguiar/erised | 26a304afb2058f532b07ecde6c6fc85d8864696c | [
"MIT"
] | null | null | null | examples/main.py | marcoaaguiar/erised | 26a304afb2058f532b07ecde6c6fc85d8864696c | [
"MIT"
] | 3 | 2021-03-15T00:51:37.000Z | 2021-03-15T01:01:30.000Z | examples/main.py | marcoaaguiar/erised | 26a304afb2058f532b07ecde6c6fc85d8864696c | [
"MIT"
] | null | null | null | from erised.proxy import Proxy
if __name__ == "__main__":
person = Person()
person.dog = Dog()
proxy = Proxy(obj=person)
# call method remotely
call_future = proxy.dog.bark(loud=True)
print(call_future.result())
# set attributes into remote object, even if they didn't exist originally
proxy.dog.age = 3 # it generates a future that can't be retrieved
# get attributes from remote object
get_future = proxy.dog.age.retrieve()
print(get_future.result())
# if running multiprocessing mode (local=False), terminates child process
proxy.terminate()
| 23.657143 | 77 | 0.649758 |
ddcc7c8aaeb73f494f7fe3439f603884d9bf5226 | 480 | py | Python | ontask/migrations/0056_auto_20190323_1122.py | pinheiroo27/ontask_b | 23fee8caf4e1c5694a710a77f3004ca5d9effeac | [
"MIT"
] | 33 | 2017-12-02T04:09:24.000Z | 2021-11-07T08:41:57.000Z | ontask/migrations/0056_auto_20190323_1122.py | pinheiroo27/ontask_b | 23fee8caf4e1c5694a710a77f3004ca5d9effeac | [
"MIT"
] | 189 | 2017-11-16T04:06:29.000Z | 2022-03-11T23:35:59.000Z | ontask/migrations/0056_auto_20190323_1122.py | pinheiroo27/ontask_b | 23fee8caf4e1c5694a710a77f3004ca5d9effeac | [
"MIT"
] | 30 | 2017-11-30T03:35:44.000Z | 2022-01-31T03:08:08.000Z | # Generated by Django 2.1.7 on 2019-03-23 00:52
from django.db import migrations, models
| 25.263158 | 132 | 0.641667 |
ddcde07c3cbd2e093fb249312865d2348a9e3b73 | 6,863 | py | Python | proteus/MeshAdaptPUMI/Checkpoint.py | acatwithacomputer/proteus | 80dfad95da6ab4d18a88a035f55c26b03540a864 | [
"MIT"
] | null | null | null | proteus/MeshAdaptPUMI/Checkpoint.py | acatwithacomputer/proteus | 80dfad95da6ab4d18a88a035f55c26b03540a864 | [
"MIT"
] | 13 | 2018-02-08T23:22:59.000Z | 2020-12-06T19:40:32.000Z | proteus/MeshAdaptPUMI/Checkpoint.py | acatwithacomputer/proteus | 80dfad95da6ab4d18a88a035f55c26b03540a864 | [
"MIT"
] | 1 | 2020-02-17T03:25:34.000Z | 2020-02-17T03:25:34.000Z | from __future__ import division
from builtins import str
from builtins import range
import proteus
import sys
import numpy
from proteus import Profiling
#it should probably be associated with the PUMI domain somehow
#The current implementation assumes we're using NS, VOF, LS, RD, MCorr setup with lagging and Backwards Euler.
#Future work on this module should include creating an abstract class from which variations based on the models and numerical accuracy can be created
#Have the dictionary submodels be labeled by physical model names like "twp_navier_stokes"
| 48.673759 | 208 | 0.722862 |
ddd0869ac7a679df101a618ba69ece08f889a431 | 596 | py | Python | rlunch/lunch/migrations/0002_talk_how_was_it.py | acdh-oeaw/django-generic-apps | 4af63a8a95826ede787347fc86951e933ccdbc6c | [
"MIT"
] | null | null | null | rlunch/lunch/migrations/0002_talk_how_was_it.py | acdh-oeaw/django-generic-apps | 4af63a8a95826ede787347fc86951e933ccdbc6c | [
"MIT"
] | null | null | null | rlunch/lunch/migrations/0002_talk_how_was_it.py | acdh-oeaw/django-generic-apps | 4af63a8a95826ede787347fc86951e933ccdbc6c | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
# Generated by Django 1.9.5 on 2017-02-02 14:22
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
| 25.913043 | 129 | 0.644295 |
ddd0b7f89eb5fdc6f55d6efae895022ea00e5fd2 | 2,634 | py | Python | ream/decode.py | chmlee/ream-python | 13f46596f59bb411308d1c9070b8d6f8a0afeb31 | [
"MIT"
] | null | null | null | ream/decode.py | chmlee/ream-python | 13f46596f59bb411308d1c9070b8d6f8a0afeb31 | [
"MIT"
] | null | null | null | ream/decode.py | chmlee/ream-python | 13f46596f59bb411308d1c9070b8d6f8a0afeb31 | [
"MIT"
] | null | null | null | """
REAM: REAM Ain't Markdown
~~~~~~~~~~~~~~~~~~~~~~~~~
This file is part of the ream package
:copyright: Copyright 2020 by Chih-Ming Louis Lee
:license: MIT, see LICENSE for details
"""
import sys
import os
import re
import json
import pandas as pd
from ream.transformer import Ream2Dict
from ream.grammar import REAM_RULE
def ream2dict(input_raw, output_file=None, debug=False, no_comment=False):
"""ream to json"""
if no_comment:
Ream2Dict.no_comment = True
else:
Ream2Dict.no_comment = False
input_tree = REAM_RULE.parse(input_raw)
output_raw = Ream2Dict().transform(input_tree)
if debug:
print(input_tree)
print("====================")
print(input_tree.pretty())
print("====================")
print(output_raw)
print("====================")
if output_file is None:
return output_raw
else:
with open(output_file, 'w') as file:
json.dump(output_raw, file)
print(json.dumps(output_raw, indent=4))
return None
def main(input_raw, output_file, debug, no_comment):
"""
main function for decoding ream file
"""
output_ext = output_file.split('.')[-1]
# choose conversion function
if output_ext in ['json']:
ream2dict(input_raw, output_file, debug, no_comment)
elif output_ext in ['csv']:
ream2csv(input_raw, output_file)
else:
print("Output file formet not supported")
print("Complete")
| 24.849057 | 74 | 0.566059 |
ddd0baa5f55beee804fd811c66a9f9297112106b | 444 | py | Python | snippets/3DEM/useful_bits/scratch_hdf5_2_nii.py | michielkleinnijenhuis/EM | f46a9b11298919b359e80d9f23a7e824df1356cb | [
"Apache-2.0"
] | null | null | null | snippets/3DEM/useful_bits/scratch_hdf5_2_nii.py | michielkleinnijenhuis/EM | f46a9b11298919b359e80d9f23a7e824df1356cb | [
"Apache-2.0"
] | null | null | null | snippets/3DEM/useful_bits/scratch_hdf5_2_nii.py | michielkleinnijenhuis/EM | f46a9b11298919b359e80d9f23a7e824df1356cb | [
"Apache-2.0"
] | null | null | null | ### get all the blocked raw datafiles from ARC and convert to nifti's ###
#rsync -avz ndcn0180@arcus.arc.ox.ac.uk:/data/ndcn-fmrib-water-brain/ndcn0180/EM/M3/M3_S1_GNU/testblock/m000_?????-?????_?????-?????_?????-?????.h5 /Users/michielk/oxdata/P01/EM/M3/M3_S1_GNU/
for f in `ls m000_?????-?????_?????-?????_?????-?????.h5`; do
python $scriptdir/convert/EM_stack2stack.py ${f} ${f/.h5/.nii.gz} -i 'zyx' -l 'xyz' -e -0.0073 -0.0073 0.05 -u
done
| 74 | 191 | 0.614865 |
ddd0c824e4d2d1eee0fd05d787911e01b80ce07a | 53 | py | Python | Week1/sumofdigits_teacher.py | CrazyDi/Python1 | 016dd77ace04fccfec61edf37ec5a990ead6c4ef | [
"Unlicense"
] | null | null | null | Week1/sumofdigits_teacher.py | CrazyDi/Python1 | 016dd77ace04fccfec61edf37ec5a990ead6c4ef | [
"Unlicense"
] | null | null | null | Week1/sumofdigits_teacher.py | CrazyDi/Python1 | 016dd77ace04fccfec61edf37ec5a990ead6c4ef | [
"Unlicense"
] | null | null | null | import sys
print(sum([int(x) for x in sys.argv[1]])) | 17.666667 | 41 | 0.660377 |
ddd11496dad00035846edb605bc410d5d8e9ecb5 | 1,549 | py | Python | cyborg/accelerator/drivers/nic/base.py | NeCTAR-RC/cyborg | e0fab29181467c0c72667ea26a8b04c53b238ddb | [
"Apache-2.0"
] | 37 | 2017-03-23T02:10:35.000Z | 2021-11-25T07:57:36.000Z | cyborg/accelerator/drivers/nic/base.py | openstack/nomad | 8cd846a16c2da04c2f204b02b90db814e32acd63 | [
"Apache-2.0"
] | null | null | null | cyborg/accelerator/drivers/nic/base.py | openstack/nomad | 8cd846a16c2da04c2f204b02b90db814e32acd63 | [
"Apache-2.0"
] | 27 | 2017-07-14T02:26:24.000Z | 2022-01-19T07:55:10.000Z | # Copyright 2020 Intel, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Cyborg NIC driver implementation.
"""
VENDOR_MAPS = {"0x8086": "intel"}
| 29.788462 | 75 | 0.672692 |
ddd29b6ed93109a393ac788b23f262d228d22a8b | 621 | py | Python | reviews/migrations/0016_auto_20180908_1733.py | UrbanBogger/horrorexplosion | 3698e00a6899a5e8b224cd3d1259c3deb3a2ca80 | [
"MIT"
] | null | null | null | reviews/migrations/0016_auto_20180908_1733.py | UrbanBogger/horrorexplosion | 3698e00a6899a5e8b224cd3d1259c3deb3a2ca80 | [
"MIT"
] | 4 | 2020-06-05T18:21:18.000Z | 2021-06-10T20:17:31.000Z | reviews/migrations/0016_auto_20180908_1733.py | UrbanBogger/horrorexplosion | 3698e00a6899a5e8b224cd3d1259c3deb3a2ca80 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
# Generated by Django 1.11.6 on 2018-09-08 16:33
from __future__ import unicode_literals
from django.db import migrations
| 22.178571 | 48 | 0.574879 |
ddd324b790a9312a2fcc8cac11385ac3c12a277d | 2,796 | py | Python | src/markup.py | alex-panda/PDFCompiler | 3454ee01a6e5ebb2d2bccdcdc32678bf1def895d | [
"MIT"
] | null | null | null | src/markup.py | alex-panda/PDFCompiler | 3454ee01a6e5ebb2d2bccdcdc32678bf1def895d | [
"MIT"
] | null | null | null | src/markup.py | alex-panda/PDFCompiler | 3454ee01a6e5ebb2d2bccdcdc32678bf1def895d | [
"MIT"
] | null | null | null | from constants import ALIGNMENT, STRIKE_THROUGH, UNDERLINE
| 29.125 | 114 | 0.65093 |
ddd3332668c74ceeb6666a897a79187f953f120f | 10,476 | py | Python | svirl/vars/params.py | microsoft/svirl | 8d0da6a03ad20315a690a3c65bb8b60c196c3f3d | [
"MIT"
] | 6 | 2020-12-21T20:11:13.000Z | 2022-03-21T07:55:33.000Z | svirl/vars/params.py | ivan-sadovsky/svirl | 523abe9fcf2a5e9d192782d7aeb7093c86ef4036 | [
"MIT"
] | 4 | 2021-07-15T20:12:55.000Z | 2021-08-07T22:11:18.000Z | svirl/vars/params.py | ivan-sadovsky/svirl | 523abe9fcf2a5e9d192782d7aeb7093c86ef4036 | [
"MIT"
] | 9 | 2020-12-22T06:06:16.000Z | 2022-03-25T17:26:55.000Z | # Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
import numpy as np
import svirl.config as cfg
from svirl.storage import GArray
from . import FixedVortices
def gl_parameter_squared_h(self):
if self.solveA:
return cfg.dtype(self.gl_parameter**2)
return cfg.dtype(-1.0)
def _update_vector_potential(self, homogeneous_external_field, reset):
assert isinstance(homogeneous_external_field, (np.floating, float, np.integer, int))
if reset:
self._H = cfg.dtype(homogeneous_external_field)
# TODO: need a fill method in GArray
# self.a.fill(0.0)
# self.b.fill(0.0)
a, b = self.vars._vp.get_vec_h()
a.fill(0.0)
b.fill(0.0)
self.vars._vp.need_htod_sync()
self.vars._vp.sync()
delta_H = self._H
else:
delta_H = - self._H
self._H = cfg.dtype(homogeneous_external_field)
delta_H += self._H
self.vars._vp.sync()
# TODO: implement GPU version of ab initialization
# Possible set of gauges, A = [g*y*H, (1-g)*x*H, 0] with any g, 0 <= g <= 1
g = 0.5
_, yg = self.mesh.xy_a_grid
xg, _ = self.mesh.xy_b_grid
a, b = self.vars._vp.get_vec_h()
a -= g * (yg - 0.5*cfg.Ly) * delta_H
b += (1.0 - g) * (xg - 0.5*cfg.Lx) * delta_H
self.vars._vp.need_htod_sync()
self.vars._vp.sync()
def _homogeneous_external_field_delta(self, homogeneous_external_field):
self._update_vector_potential(homogeneous_external_field, reset=False)
homogeneous_external_field_delta = property(
fset = _homogeneous_external_field_delta,
doc = """Sets homogeneous external field, H, and adds to the vector
potential deltaA, satisfying curl(deltaA) = deltaH, where
deltaH = H - Hold and Hold is homogeneous external field
before update.""")
homogeneous_external_field_reset = property(
fset = _homogeneous_external_field_reset,
doc = """Sets homogeneous external field, H, and sets vector
potential, A, satisfying curl(A) = H.""")
def _update_gvpei(self):
"""Sets self.gvpei = (self.ae, self.be) + (ai, bi).
To be executed in self.external_vector_potential and self.fixed_vortices setters."""
assert (self.ae is None) == (self.be is None)
ai, bi = None, None
if self.fixed_vortices is not None and self.fixed_vortices._vpi is not None:
ai, bi = self.fixed_vortices._vpi.get_vec_h()
assert (ai is None) == (bi is None)
vpei = None
if self.ae is not None:
if ai is not None:
vpei = (self.ae + ai, self.be + bi)
else:
vpei = (self.ae, self.be)
else:
vpei = (ai, bi)
if self._vpei is not None and vpei is None:
self._vpei.free()
self._vpei = None
else:
#TODO: easier if GArray supports like for vector storage
shapes = [vpei[0].shape, vpei[1].shape]
self._vpei = GArray(shape = shapes, dtype = cfg.dtype)
self._vpei.set_vec_h(vpei[0], vpei[1])
self._vpei.sync()
def external_irregular_vector_potential_h(self):
if self._vpei is not None:
return self._vpei.get_d_obj()
return np.uintp(0)
| 31.939024 | 132 | 0.616648 |
ddd3cf7e6c6e22a81fc4f44dcb742ce19a9d4e7a | 1,570 | py | Python | src/2_save_images.py | Irio/photoshopped-or-not | 70ae1a2e7e54003d916b501f8d9e020c13ca6c98 | [
"MIT"
] | 77 | 2016-07-13T13:36:55.000Z | 2022-02-25T07:49:38.000Z | src/2_save_images.py | goldservice2017/FakeImageDetection | e7f618989d004e24444854df63d9f1c408d0463f | [
"MIT"
] | 1 | 2017-07-11T10:28:36.000Z | 2017-07-11T10:28:36.000Z | src/2_save_images.py | goldservice2017/FakeImageDetection | e7f618989d004e24444854df63d9f1c408d0463f | [
"MIT"
] | 20 | 2016-10-23T14:57:19.000Z | 2022-03-21T13:32:45.000Z | from hashlib import sha256
from helpers import load_dataset
import numpy as np
import os
import pandas as pd
import requests
import sys
import time
import urllib.request
CSV_PATH = sys.argv[1]
URL_COLUMN = sys.argv[2]
PATH = sys.argv[3]
if not os.path.exists(PATH):
os.mkdir(PATH)
dataset = load_dataset(CSV_PATH)
dataset[URL_COLUMN] = dataset[URL_COLUMN].astype(np.str).replace({'nan': None})
dataset['file_names'] = dataset[URL_COLUMN].map(url_to_file_name)
already_downloaded = dataset['file_names'].isin(os.listdir(PATH))
without_url = dataset[URL_COLUMN].isnull()
remaining_images = dataset[~(already_downloaded | without_url)]
print('Remaining: %i' % len(remaining_images))
for index, values in remaining_images.iterrows():
url = dict(values)[URL_COLUMN]
file_path = '%s/%s' % (PATH, url_to_file_name(url))
time.sleep(1)
download_image(url, file_path)
| 30.784314 | 83 | 0.66051 |
ddd71b2e4c6346f49e71518346e30e3f595d4613 | 1,169 | py | Python | lf3py/task/data.py | rog-works/lf3py | e89937f7aa133ed54d85764f06101ab9abf6b960 | [
"CNRI-Python"
] | null | null | null | lf3py/task/data.py | rog-works/lf3py | e89937f7aa133ed54d85764f06101ab9abf6b960 | [
"CNRI-Python"
] | 48 | 2020-12-19T13:47:26.000Z | 2021-01-07T22:27:56.000Z | lf3py/task/data.py | rog-works/lf3py | e89937f7aa133ed54d85764f06101ab9abf6b960 | [
"CNRI-Python"
] | null | null | null | from abc import ABCMeta, abstractmethod
from dataclasses import dataclass
from typing import Any, List, Type, TypeVar
from lf3py.lang.dsn import DSN
from lf3py.serialization.serializer import DictSerializer, Serializer
T_OBJ = TypeVar('T_OBJ')
Ok = Result()
| 21.648148 | 69 | 0.662104 |
ddd7b262ec09a987c21172c82cd032e817c1ba5b | 801 | py | Python | quapy/method/__init__.py | valgur/QuaPy | 6b1ba4886a1d64b086829306cbba689cdcfd60e8 | [
"BSD-3-Clause"
] | 34 | 2021-01-06T14:01:06.000Z | 2022-03-08T06:59:04.000Z | quapy/method/__init__.py | valgur/QuaPy | 6b1ba4886a1d64b086829306cbba689cdcfd60e8 | [
"BSD-3-Clause"
] | 4 | 2021-06-07T07:45:57.000Z | 2021-06-21T11:16:10.000Z | quapy/method/__init__.py | valgur/QuaPy | 6b1ba4886a1d64b086829306cbba689cdcfd60e8 | [
"BSD-3-Clause"
] | 6 | 2021-06-07T10:08:17.000Z | 2022-03-07T13:42:15.000Z | from . import aggregative
from . import base
from . import meta
from . import non_aggregative
EXPLICIT_LOSS_MINIMIZATION_METHODS = {
aggregative.ELM,
aggregative.SVMQ,
aggregative.SVMAE,
aggregative.SVMKLD,
aggregative.SVMRAE,
aggregative.SVMNKLD
}
AGGREGATIVE_METHODS = {
aggregative.CC,
aggregative.ACC,
aggregative.PCC,
aggregative.PACC,
aggregative.EMQ,
aggregative.HDy,
aggregative.X,
aggregative.T50,
aggregative.MAX,
aggregative.MS,
aggregative.MS2,
} | EXPLICIT_LOSS_MINIMIZATION_METHODS
NON_AGGREGATIVE_METHODS = {
non_aggregative.MaximumLikelihoodPrevalenceEstimation
}
META_METHODS = {
meta.Ensemble,
meta.QuaNet
}
QUANTIFICATION_METHODS = AGGREGATIVE_METHODS | NON_AGGREGATIVE_METHODS | META_METHODS
| 18.627907 | 85 | 0.741573 |
ddd886a1ce049e2677d31fc3b30fe240938605cc | 208 | py | Python | day 5 2nd.py | shalini-22/Letsupgrade-Python-essentials | fda98097be08dbb9bfbba6e0622954c6eba0f7f3 | [
"MIT"
] | null | null | null | day 5 2nd.py | shalini-22/Letsupgrade-Python-essentials | fda98097be08dbb9bfbba6e0622954c6eba0f7f3 | [
"MIT"
] | null | null | null | day 5 2nd.py | shalini-22/Letsupgrade-Python-essentials | fda98097be08dbb9bfbba6e0622954c6eba0f7f3 | [
"MIT"
] | null | null | null | lst_prime=list(filter(prime_num,range(1,2500)))
print(len(lst_prime)) | 26 | 48 | 0.533654 |
ddd9ee0a17827daaf5df8b02b71f681e46b3a8a2 | 916 | py | Python | OT/test_subplots.py | pine2104/Python_for_Lab | 571398c2422711d8a74f9c95a746537859458557 | [
"MIT"
] | 5 | 2022-02-03T20:10:21.000Z | 2022-03-30T08:05:10.000Z | OT/test_subplots.py | pine2104/Python_for_Lab | 571398c2422711d8a74f9c95a746537859458557 | [
"MIT"
] | null | null | null | OT/test_subplots.py | pine2104/Python_for_Lab | 571398c2422711d8a74f9c95a746537859458557 | [
"MIT"
] | null | null | null | from EM_Algorithm.gen_gauss import gen_gauss
from EM_Algorithm.gen_poisson import gen_poisson
import numpy as np
import matplotlib.pyplot as plt
x = gen_gauss([8],[2],[1000])
y = gen_poisson([1],[1000])
fig = plt.figure(figsize=(8, 8))
# Add a gridspec with two rows and two columns and a ratio of 2 to 7 between
# the size of the marginal axes and the main axes in both directions.
# Also adjust the subplot parameters for a square plot.
gs = fig.add_gridspec(2, 2, width_ratios=(7, 2), height_ratios=(2, 7),
left=0.1, right=0.9, bottom=0.1, top=0.9,
wspace=0.05, hspace=0.05)
ax = fig.add_subplot(gs[1, 0])
ax.scatter(x, y)
ax_histx = fig.add_subplot(gs[0, 0], sharex=ax)
ax_histy = fig.add_subplot(gs[1, 1], sharey=ax)
ax_histx.hist(x, bins=10, color='grey', edgecolor="white")
ax_histy.hist(y, bins=10, orientation='horizontal', color='grey', edgecolor="white")
| 35.230769 | 84 | 0.689956 |
ddda6ce0c1f2ddd975f7aba52a0da244fa436a75 | 2,114 | py | Python | code/examples/VsevolodTymofyeyev2/example.py | TrackerSB/MasterThesis | 2792203d28d6c7b62f54545344ee6772d2ec5b64 | [
"MIT"
] | null | null | null | code/examples/VsevolodTymofyeyev2/example.py | TrackerSB/MasterThesis | 2792203d28d6c7b62f54545344ee6772d2ec5b64 | [
"MIT"
] | null | null | null | code/examples/VsevolodTymofyeyev2/example.py | TrackerSB/MasterThesis | 2792203d28d6c7b62f54545344ee6772d2ec5b64 | [
"MIT"
] | null | null | null | import os
from threading import Thread
from typing import List
from aiExchangeMessages_pb2 import SimulationID, TestResult
if __name__ == "__main__":
from AIExchangeService import get_service
from aiExchangeMessages_pb2 import SimStateResponse, Control, SimulationID, VehicleID, DataRequest
service = get_service()
# Send tests
sids = service.run_tests("test", "test", "xmls/criteriaA.dbc.xml", "xmls/environmentA.dbe.xml")
# -> Response status: 500
print("Tests sent")
# Interact with a simulation
if not sids:
exit(1)
sid = SimulationID()
sid.sid = sids.sids[0]
ego_requests = ["egoSpeed"]
ego_vehicle = Thread(target=_handle_vehicle, args=(sid, "ego", ego_requests))
ego_vehicle.start()
ego_vehicle.join()
| 33.555556 | 102 | 0.638127 |
dddd2ef86d68662ac04401af3b7b61b4ab5cb9ed | 1,916 | py | Python | rfap.py | anabanami/RFAP | 09b434d115ae1872810d65126bcbc9d7af510e89 | [
"MIT"
] | null | null | null | rfap.py | anabanami/RFAP | 09b434d115ae1872810d65126bcbc9d7af510e89 | [
"MIT"
] | null | null | null | rfap.py | anabanami/RFAP | 09b434d115ae1872810d65126bcbc9d7af510e89 | [
"MIT"
] | null | null | null | # PHS3350
# Week 2 - wave packet and RFAP -
# "what I cannot create I cannot understand" - R. Feynman.
# Ana Fabela Hinojosa, 13/03/2021
import os
from pathlib import Path
import numpy as np
import matplotlib
import matplotlib.pyplot as plt
import physunits
from scipy.fft import fft, ifft
plt.rcParams['figure.dpi'] = 200
folder = Path('wavepacket_time_evolution')
os.makedirs(folder, exist_ok=True)
os.system(f'rm {folder}/*.png')
# hbar = 1.0545718e-34 # [Js]
hbar = 1
m = 1
= 1
x_max = 10
x = np.linspace(-x_max, x_max, 1024, endpoint=False)
n = x.size
x_step = x[1] - x[0]
# oscillations per unit of space
k0 = 2 * np.pi / x_max * 5
# For Fourier space
k = 2 * np.pi * np.fft.fftfreq(n, x_step)
wave = np.exp(- x**2 / (2***2)) * np.exp(1j*k0*x)
# Square well potential
sw = np.zeros_like(x)
# depth
sw[0] = sw[-1] = 1000*k0**2
# Schrodinger equation (or first order time derivarive)
def Schrodinger_eqn(t, ):
r = np.linspace(0, x_max, 1024, endpoint=False)
K = -hbar**2/(2 * m) * ifft(-(k**2) * fft())
V = sw *
# I dunno #+ (-1j / hbar) * 1j*x**3 *
return (-1j / hbar) * (K + V)
def Runge_Kutta(t, delta_t, ):
k1 = Schrodinger_eqn(t, )
k2 = Schrodinger_eqn(t + delta_t / 2, + k1 * delta_t / 2)
k3 = Schrodinger_eqn(t + delta_t / 2, + k2 * delta_t / 2)
k4 = Schrodinger_eqn(t + delta_t, + k3 * delta_t)
return + (delta_t / 6) * (k1 + 2 * k2 + 2 * k3 + k4)
i = 0
t = 0
t_final = 5
delta_t = 0.0001
while t < t_final:
if not i % 400:
plt.plot(x, np.real(wave), label="real part")
plt.plot(x, np.imag(wave), label="imaginary part")
plt.xlim(-x_max, x_max)
plt.legend()
plt.xlabel("x")
plt.title(f"wave packet t = {i}")
plt.savefig(folder/f'{i:04d}.png')
# plt.show()
plt.clf()
wave = Runge_Kutta(t, delta_t, wave)
i += 1
t += delta_t
| 23.95 | 64 | 0.598643 |
dddf4b825918a14f6e6549a6e8c4a604f9609cd9 | 295 | py | Python | tests/test_pass.py | zaber-paul/base | 9c4d4e40db7a5059dcaa32d44be0146b6bb829c4 | [
"Apache-2.0"
] | null | null | null | tests/test_pass.py | zaber-paul/base | 9c4d4e40db7a5059dcaa32d44be0146b6bb829c4 | [
"Apache-2.0"
] | null | null | null | tests/test_pass.py | zaber-paul/base | 9c4d4e40db7a5059dcaa32d44be0146b6bb829c4 | [
"Apache-2.0"
] | null | null | null | """ run with
nosetests -v --nocapture
or
nosetests -v
"""
from builtins import object
from cloudmesh_base.util import HEADING
| 11.8 | 39 | 0.630508 |
dde27c4c382b986590140f153b007830bdfd2e36 | 3,725 | py | Python | tests/api/test_record_permissions.py | equadon/invenio-app-ils | 42ba282968d0aa28fb1bfc71d0709685165aaec4 | [
"MIT"
] | null | null | null | tests/api/test_record_permissions.py | equadon/invenio-app-ils | 42ba282968d0aa28fb1bfc71d0709685165aaec4 | [
"MIT"
] | null | null | null | tests/api/test_record_permissions.py | equadon/invenio-app-ils | 42ba282968d0aa28fb1bfc71d0709685165aaec4 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
#
# Copyright (C) 2018-2019 CERN.
#
# invenio-app-ils is free software; you can redistribute it and/or modify it
# under the terms of the MIT License; see LICENSE file for more details.
"""Test record permissions."""
from __future__ import unicode_literals
import uuid
import pytest
from flask_principal import RoleNeed, identity_loaded
from flask_security import login_user
from invenio_access.models import ActionRoles
from invenio_accounts.models import Role, User
from invenio_records.api import Record
from invenio_app_ils.records.permissions import RecordPermission, \
create_records_action
| 33.863636 | 77 | 0.633289 |
dde28d401374fdc16a1d1b838ec6fd235235e1cc | 945 | py | Python | src/Index.py | bhed01/bhed01.github.io | 132cf8e4afa05a00d71555afa2002a2d50c304c8 | [
"MIT"
] | 3 | 2020-10-16T12:26:31.000Z | 2022-02-03T18:06:35.000Z | src/Index.py | bhed01/bhed01.github.io | 132cf8e4afa05a00d71555afa2002a2d50c304c8 | [
"MIT"
] | null | null | null | src/Index.py | bhed01/bhed01.github.io | 132cf8e4afa05a00d71555afa2002a2d50c304c8 | [
"MIT"
] | null | null | null | from .components.Head import Head
from .components.NavIcons import Hamburger
from .components.Screens import HomeScreen, AboutScreen, ProjectsScreen
from .components.Footer import Footer
from .utils import JSON_DIR
from json import load
import os
| 32.586207 | 72 | 0.649735 |
dde2faa4056b42852281bc2be32673929adfef2b | 5,190 | py | Python | bisk/features/base.py | facebookresearch/bipedal-skills | edd424a8779e3a0121fb995cad00839d8226cf46 | [
"MIT"
] | 6 | 2021-11-05T16:57:58.000Z | 2022-03-16T10:34:46.000Z | bisk/features/base.py | facebookresearch/bipedal-skills | edd424a8779e3a0121fb995cad00839d8226cf46 | [
"MIT"
] | null | null | null | bisk/features/base.py | facebookresearch/bipedal-skills | edd424a8779e3a0121fb995cad00839d8226cf46 | [
"MIT"
] | 1 | 2021-11-05T16:57:47.000Z | 2021-11-05T16:57:47.000Z | # Copyright (c) 2021-present, Facebook, Inc.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
#
import logging
from typing import Dict, List
import gym
import numpy as np
from dm_control import mujoco
from dm_control.mujoco.wrapper.mjbindings import enums as mjenums
from dm_control.mujoco.wrapper.mjbindings import mjlib
log = logging.getLogger(__name__)
| 36.293706 | 76 | 0.511753 |
dde38c8eda97903a41ce2ff61801ff8773e4a599 | 1,124 | py | Python | dns.py | bernd-wechner/OpenWRT-Tools | 4ca2eb0d0774e0d97b48a485fa18a4d4bbc3f108 | [
"Unlicense"
] | 3 | 2017-06-12T11:03:56.000Z | 2021-04-11T20:09:47.000Z | dns.py | bernd-wechner/OpenWRT-Tools | 4ca2eb0d0774e0d97b48a485fa18a4d4bbc3f108 | [
"Unlicense"
] | null | null | null | dns.py | bernd-wechner/OpenWRT-Tools | 4ca2eb0d0774e0d97b48a485fa18a4d4bbc3f108 | [
"Unlicense"
] | 1 | 2021-08-25T02:39:40.000Z | 2021-08-25T02:39:40.000Z | #!/usr/bin/python
#
# identify the DNS servers used on the WAN interface.
#
# Tries to find the owner of the DNS as well.
#
# DNS spoofing is one of the entry points for malware. I haven't seen it since I dumped
# Windows at home but in past have seen malware that would change the DNS config on
# the router. Kind of hand to see a name attached to the IP addresses then and most of
# us wouldn't recognize an IP address, will recognize a name.
import json, subprocess, os
devnull = open(os.devnull, 'w')
wan_status = get_wan_status()
if wan_status:
dns_servers = wan_status["dns-server"]
print "DNS Servers on WAN interface:"
n = 1
for dns in dns_servers:
owner = get_owner(dns)
print "\tDNS %d: %s\t%s" % (n, dns, owner)
n += 1 | 30.378378 | 109 | 0.669929 |
dde5915014c5c7fff2dcda09f7e0ecc75334cecc | 398 | py | Python | downloadHSfiles.py | McDowellLab/downloadNEON | b43d47d40cbf0e168dfa307969687025e3f5fa34 | [
"MIT"
] | null | null | null | downloadHSfiles.py | McDowellLab/downloadNEON | b43d47d40cbf0e168dfa307969687025e3f5fa34 | [
"MIT"
] | null | null | null | downloadHSfiles.py | McDowellLab/downloadNEON | b43d47d40cbf0e168dfa307969687025e3f5fa34 | [
"MIT"
] | null | null | null | from hs_restclient import HydroShare, HydroShareAuthBasic
# Download LCZO sesnor database from Hydroshare
# link to the Hydroshare resource https://www.hydroshare.org/resource/b38bc00887ec45ac9499f9dea45eb8d5/
auth = HydroShareAuthBasic(username="miguelcleon", password = "x")
hs = HydroShare(auth = auth)
hs.getResource('b38bc00887ec45ac9499f9dea45eb8d5', destination='./lczodata', unzip=True)
| 39.8 | 103 | 0.81407 |
dde641d979074c8c01c9f5c1fbef8f55228ae8fe | 339 | py | Python | protonfixes/gamefixes/287260.py | bmaupin/protonfixes | 9fc87a9a487d7dfbd0c602a079f3b026f8a84638 | [
"BSD-2-Clause"
] | 213 | 2018-10-06T01:40:26.000Z | 2022-03-16T16:17:37.000Z | protonfixes/gamefixes/287260.py | bmaupin/protonfixes | 9fc87a9a487d7dfbd0c602a079f3b026f8a84638 | [
"BSD-2-Clause"
] | 88 | 2018-10-06T17:38:56.000Z | 2022-02-19T13:27:26.000Z | protonfixes/gamefixes/287260.py | bmaupin/protonfixes | 9fc87a9a487d7dfbd0c602a079f3b026f8a84638 | [
"BSD-2-Clause"
] | 67 | 2018-10-09T16:57:16.000Z | 2022-03-14T13:06:25.000Z | """ Game fix for Toybox Turbos
"""
#pylint: disable=C0103
from protonfixes import util
from protonfixes.logger import log
def main():
    """Apply the protonfixes tweak for Toybox Turbos before launch."""
    log('Applying fixes for Toybox Turbos')
    # Fix infinite startup screen; setting PROTON_NO_ESYNC=1 presumably
    # disables Proton's esync — the variable name suggests so, TODO confirm
    util.set_environment('PROTON_NO_ESYNC', '1')
| 21.1875 | 65 | 0.707965 |
dde74c584ce2a956ca4842502658de5be6e68e74 | 4,035 | py | Python | systrade/models/strategies.py | pdghawk/systrade | 2200b950a3172f22a424c9e547aa6fa982f54c46 | [
"BSD-3-Clause"
] | 1 | 2022-02-09T20:16:51.000Z | 2022-02-09T20:16:51.000Z | systrade/models/strategies.py | pdghawk/systrade | 2200b950a3172f22a424c9e547aa6fa982f54c46 | [
"BSD-3-Clause"
] | null | null | null | systrade/models/strategies.py | pdghawk/systrade | 2200b950a3172f22a424c9e547aa6fa982f54c46 | [
"BSD-3-Clause"
] | null | null | null | """ The strategies module provides utilities for designing trading strategies
Notes
------
All strategies should inherit from BaseStrategy, and provide a get_order_list
method. For details of the requirements of this method, see its docstring in
base/BaseStrategy, or in the method within SimpleStrategy in this module.
"""
import copy
import pandas as pd
import numpy as np
import time as timer
import matplotlib.pyplot as plt
from .base import BaseStrategy
# TODO: extend strategy types
# examples - how the signals are combined - could be done in many ways
# should check whether indivdual positions should be exited (been held at a loss too long)
# eg buy and simultaneously do a sell_limit for same_quantity or something
# portfolio re-optimization movements (modern portfolio theory) n.b maximise
# expected returns whilst minimising the portoflio variance
| 44.340659 | 91 | 0.601983 |
dde864bb1233daa956ab699eaa628d9606c4448f | 144 | py | Python | ParkFinder/Parks/migrations/__init__.py | Krause2023/CS224-ParkFinder-Web_App | ccfa2faf61d6adb300f319ae11dd983483451410 | [
"MIT"
] | 1 | 2021-12-22T16:00:36.000Z | 2021-12-22T16:00:36.000Z | ParkFinder/Parks/migrations/__init__.py | Krause2023/CS224-ParkFinder-Web_App | ccfa2faf61d6adb300f319ae11dd983483451410 | [
"MIT"
] | null | null | null | ParkFinder/Parks/migrations/__init__.py | Krause2023/CS224-ParkFinder-Web_App | ccfa2faf61d6adb300f319ae11dd983483451410 | [
"MIT"
] | null | null | null | # Create your migrations here.
# WILL USE THIS LATER IF/WHEN YOU CREATE A DATABASE AND USER ACCOUNTS - THIS MAY BE IN A DIFFERENT APP AS WELL!!! | 72 | 113 | 0.756944 |
ddea0dbcc4c809d7b5a35e5b2781bf028ff2f764 | 140 | py | Python | tests/framework/Optimizers/Infinite/infinite.py | milljm/raven | 5f29fe81b75e2ffbeb54a55aa63647e7b2f6457b | [
"Apache-2.0"
] | 2 | 2019-10-11T15:59:10.000Z | 2021-04-08T18:23:57.000Z | tests/framework/Optimizers/Infinite/infinite.py | milljm/raven | 5f29fe81b75e2ffbeb54a55aa63647e7b2f6457b | [
"Apache-2.0"
] | 1 | 2018-03-27T13:06:00.000Z | 2018-03-27T13:06:00.000Z | tests/framework/Optimizers/Infinite/infinite.py | milljm/raven | 5f29fe81b75e2ffbeb54a55aa63647e7b2f6457b | [
"Apache-2.0"
] | 1 | 2017-08-29T16:09:13.000Z | 2017-08-29T16:09:13.000Z | import numpy as np
| 15.555556 | 39 | 0.592857 |
ddea413a66b41dee24d47cda24474b0ccba4f292 | 1,955 | py | Python | pyinterfaces/enum/ENUM_USAGE.py | OaklandPeters/pyinterfaces | c60efaad92e8d2e1ec25df718dfb43f034a083bb | [
"MIT"
] | null | null | null | pyinterfaces/enum/ENUM_USAGE.py | OaklandPeters/pyinterfaces | c60efaad92e8d2e1ec25df718dfb43f034a083bb | [
"MIT"
] | null | null | null | pyinterfaces/enum/ENUM_USAGE.py | OaklandPeters/pyinterfaces | c60efaad92e8d2e1ec25df718dfb43f034a083bb | [
"MIT"
] | null | null | null | """
Provides example of how I would like `enum` to be used.
Implementation details:
(1) Uses Metaclass for two reasons:
(1.1) So that the subclasses can be iterable (we want class objects, not instance objects)
(1.2) To automatically collect Enumeratee
enum.Enumerator
EnumSet, Partition, Basis
Product of all possibilities...
An alternative term, mathematically correct, but misleading, would be 'Partition'
Another alternative term: Basis (as of, group of independent vectors). Note, a basis is a 'linearly independent spanning set'.
enum.Enumeratee
EnumCase, Dimension
@todo: Handle issue: How are the cases ordered?
In Python 3, there is a mechanism for preserving the order of metaclasses.
(see https://www.python.org/dev/peps/pep-3115/)
"""
import ENUM_STUBS as enum
# Cases in Enumerator: Directly referencable via attribute name
str(Beatles.John) == "John Lennon"
# Cases: equality based on descriptor's return
Beatles.John == "John Lennon"
Beatles.Paul == "Paul McCartney"
Beatles.George == "George Harrison"
Beatles.Ringo == "Ringo Starr"
# Iterable: returns cases
iterator = iter(Beatles)
iterator.next() == Beatles.John
iterator.next() == Beatles.George
iterator.next() == Beatles.Paul
iterator.next() == Beatles.Ringo
# Cases: returns case methods with names
# ... no particular order imposed here
("John", Beatles.John)
Beatles.cases == (("John", "John Lennon"))
| 29.621212 | 130 | 0.707417 |
ddebffcf3d40adc0208ac8b35c967b6d0551178a | 38,079 | py | Python | port/platform/common/automation/u_utils.py | stephanboner/ubxlib | 64025c5760771ac2accd09f9f176693c7add2919 | [
"Apache-2.0"
] | null | null | null | port/platform/common/automation/u_utils.py | stephanboner/ubxlib | 64025c5760771ac2accd09f9f176693c7add2919 | [
"Apache-2.0"
] | null | null | null | port/platform/common/automation/u_utils.py | stephanboner/ubxlib | 64025c5760771ac2accd09f9f176693c7add2919 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python
'''Generally useful bits and bobs.'''
import queue # For PrintThread and exe_run
from time import sleep, time, gmtime, strftime # For lock timeout, exe_run timeout and logging
import threading # For PrintThread
import os # For ChangeDir, has_admin
import stat # To help deltree out
from telnetlib import Telnet # For talking to JLink server
import socket
import shutil # To delete a directory tree
import signal # For CTRL_C_EVENT
import subprocess
import platform # Figure out current OS
import serial # Pyserial (make sure to do pip install pyserial)
import psutil # For killing things (make sure to do pip install psutil)
import u_settings
# Configuration constants: each value is taken from u_settings so it can
# be overridden locally; the trailing comment on each line records the
# usual default value.
# How long to wait for an install lock in seconds
INSTALL_LOCK_WAIT_SECONDS = u_settings.INSTALL_LOCK_WAIT_SECONDS #(60 * 60)

# The URL for Unity, the unit test framework
UNITY_URL = u_settings.UNITY_URL #"https://github.com/ThrowTheSwitch/Unity"

# The sub-directory that Unity is usually put in
# (off the working directory)
UNITY_SUBDIR = u_settings.UNITY_SUBDIR #"Unity"

# The path to DevCon, a Windows tool that allows
# USB devices to be reset, amongst other things
DEVCON_PATH = u_settings.DEVCON_PATH #"devcon.exe"

# The path to jlink.exe (or just the name 'cos it's on the path)
JLINK_PATH = u_settings.JLINK_PATH #"jlink.exe"

# The port number for SWO trace capture out of JLink
JLINK_SWO_PORT = u_settings.JLINK_SWO_PORT #19021

# The port number for GDB control of ST-LINK GDB server
STLINK_GDB_PORT = u_settings.STLINK_GDB_PORT #61200

# The port number for SWO trace capture out of ST-LINK GDB server
STLINK_SWO_PORT = u_settings.STLINK_SWO_PORT #61300

# The format string passed to strftime()
# for logging prints
TIME_FORMAT = u_settings.TIME_FORMAT #"%Y-%m-%d_%H:%M:%S"

# The default guard time waiting for a platform lock in seconds
PLATFORM_LOCK_GUARD_TIME_SECONDS = u_settings.PLATFORM_LOCK_GUARD_TIME_SECONDS #60 * 60

# The default guard time for downloading to a target in seconds
DOWNLOAD_GUARD_TIME_SECONDS = u_settings.DOWNLOAD_GUARD_TIME_SECONDS #60

# The default guard time for running tests in seconds
RUN_GUARD_TIME_SECONDS = u_settings.RUN_GUARD_TIME_SECONDS #60 * 60

# The default inactivity timer for running tests in seconds
RUN_INACTIVITY_TIME_SECONDS = u_settings.RUN_INACTIVITY_TIME_SECONDS #60 * 5

# The name of the #define that forms the filter string
# for which tests to run
FILTER_MACRO_NAME = u_settings.FILTER_MACRO_NAME #"U_CFG_APP_FILTER"

# The time for which to wait for something from the
# queue in exe_run(). If this is too short, in a
# multiprocessing world or on a slow machine, it is
# possible to miss things as the task putting things
# on the queue may be blocked from doing so until
# we've decided the queue has been completely emptied
# and moved on
EXE_RUN_QUEUE_WAIT_SECONDS = u_settings.EXE_RUN_QUEUE_WAIT_SECONDS #1
def subprocess_osify(cmd):
    '''Adapt a [command, param, ...] list for the current OS.

    On Linux the parts are collapsed into a single space-joined
    string (wrapped in a one-element list, as needed when running
    through a shell); on other platforms the list is returned
    unchanged.
    '''
    if platform.system() != "Linux":
        return cmd
    return [" ".join(cmd)]
def get_actual_path(path):
    '''Given a drive number return real path if it is a subst'''
    # The Windows "subst" command lists drive substitutions, one
    # per line, in the form:
    #
    # Z:\: => C:\projects\ubxlib_priv
    #
    # If the drive letter at the start of path matches one of
    # those entries, map the remainder of path onto the real
    # location, e.g. z:\blah -> C:\projects\ubxlib_priv\blah
    output = subprocess.check_output("subst",
                                     stderr=subprocess.STDOUT,
                                     shell=True) # Jenkins hangs without this
    for raw_line in output.splitlines():
        entry = raw_line.decode()
        parts = entry.rsplit(": => ")
        if (len(parts) > 1) and (len(path) > 1) and \
           (parts[0].lower()[0:2] == path[0:2].lower()):
            return parts[1] + path[2:]
    return path
def get_instance_text(instance):
    '''Return the instance as a text string, e.g. [13, 0, 1] -> "13.0.1".

    Each item is converted with str() (so numeric entries are fine)
    and the items are joined with "."; an empty instance yields an
    empty string.
    '''
    # str.join() replaces the original manual first/rest loop
    return ".".join(str(item) for item in instance)
def remove_readonly(func, path, exec_info):
    '''shutil.rmtree() "onerror" helper: make "path" writeable and
    then retry the operation "func" that failed on it.'''
    del exec_info  # the exception details are not needed for the retry
    os.chmod(path, stat.S_IWRITE)
    func(path)
def deltree(directory, printer, prompt):
    '''Remove an entire directory tree, returning True on success.

    A non-existent directory counts as success.  Errors are
    reported through printer.string(), prefixed with prompt.
    '''
    if not os.path.isdir(directory):
        return True
    # Retry this as sometimes Windows complains that the directory
    # is not empty when it really should be, some sort of internal
    # Windows race condition
    attempts_left = 2
    while attempts_left > 0:
        try:
            # Need the onerror bit on Winders, seek
            # this Stack Overflow post:
            # https://stackoverflow.com/questions/1889597/deleting-directory-in-python
            shutil.rmtree(directory, onerror=remove_readonly)
            return True
        except OSError as ex:
            printer.string("{}ERROR unable to delete \"{}\" {}: \"{}\"".
                           format(prompt, directory,
                                  ex.errno, ex.strerror))
            attempts_left -= 1
    return False
def has_admin():
    '''Return True if this process has administrator/root privileges.

    Technique from:
    https://stackoverflow.com/questions/2946746/python-checking-if-a-user-has-administrator-privileges
    '''
    if os.name == 'nt':
        # Only Windows users with admin privileges can read
        # the C:\windows\temp directory
        temp_dir = os.sep.join([os.environ.get("SystemRoot", "C:\\windows"),
                                "temp"])
        try:
            return bool(os.listdir(temp_dir))
        except PermissionError:
            return False
    # Pylint will complain about the following line but
    # that's OK, it is only executed if we're NOT on Windows
    # and there the geteuid() method will exist
    return "SUDO_USER" in os.environ and os.geteuid() == 0
# Reset a USB port with the given Device Description
def usb_reset(device_description, printer, prompt):
    ''' Reset the USB device whose devcon "Name" matches
    device_description, returning True on success.

    Runs the Windows DevCon tool twice: first to list the hardware
    IDs of all ports and locate the instance ID of the named device,
    then to restart that instance.  Progress is reported through
    printer.string(), with each line prefixed by prompt.
    '''
    instance_id = None
    found = False
    success = False
    try:
        # Run devcon and parse the output to find the given device
        printer.string("{}running {} to look for \"{}\"...". \
                       format(prompt, DEVCON_PATH, device_description))
        cmd = [DEVCON_PATH, "hwids", "=ports"]
        text = subprocess.check_output(subprocess_osify(cmd),
                                       stderr=subprocess.STDOUT,
                                       shell=True) # Jenkins hangs without this
        for line in text.splitlines():
            # The format of a devcon entry is this:
            #
            # USB\VID_1366&PID_1015&MI_00\6&38E81674&0&0000
            # Name: JLink CDC UART Port (COM45)
            # Hardware IDs:
            # USB\VID_1366&PID_1015&REV_0100&MI_00
            # USB\VID_1366&PID_1015&MI_00
            # Compatible IDs:
            # USB\Class_02&SubClass_02&Prot_00
            # USB\Class_02&SubClass_02
            # USB\Class_02
            #
            # Grab what we hope is the instance ID
            line = line.decode()
            if line.startswith("USB"):
                instance_id = line
            else:
                # If the next line is the Name we want then we're done
                if instance_id and ("Name: " + device_description in line):
                    found = True
                    printer.string("{}\"{}\" found with instance ID \"{}\"". \
                                   format(prompt, device_description,
                                          instance_id))
                    break
                # Not the name we wanted: forget the candidate
                # instance ID so that, e.g., a later "Name:" line
                # cannot be paired with the wrong device
                instance_id = None
        if found:
            # Now run devcon to reset the device
            printer.string("{}running {} to reset device \"{}\"...". \
                           format(prompt, DEVCON_PATH, instance_id))
            cmd = [DEVCON_PATH, "restart", "@" + instance_id]
            text = subprocess.check_output(subprocess_osify(cmd),
                                           stderr=subprocess.STDOUT,
                                           shell=False) # Has to be False or devcon won't work
            # Echo devcon's output for the log
            for line in text.splitlines():
                printer.string("{}{}".format(prompt, line.decode()))
            success = True
        else:
            printer.string("{}device with description \"{}\" not found.". \
                           format(prompt, device_description))
    except subprocess.CalledProcessError:
        # Either devcon invocation failing lands here
        printer.string("{} unable to find and reset device.".format(prompt))
    return success
# Open the required serial port.
def open_serial(serial_name, speed, printer, prompt):
    '''Open the serial port serial_name at the given speed (baud),
    returning the port handle or None on failure.

    A short (50 ms) read timeout is set so that callers can poll
    the port without blocking; the outcome is reported through
    printer.string(), prefixed with prompt.
    '''
    serial_handle = None
    text = "{}: trying to open \"{}\" as a serial port...". \
           format(prompt, serial_name)
    try:
        return_value = serial.Serial(serial_name, speed, timeout=0.05)
        serial_handle = return_value
        printer.string("{} opened.".format(text))
    except (ValueError, serial.SerialException) as ex:
        # Bug fix: report serial_name here rather than
        # serial_handle.name: serial_handle is still None when
        # serial.Serial() raises, so accessing its .name attribute
        # threw AttributeError and masked the real error
        printer.string("{}{} while accessing port {}: {}.".
                       format(prompt, type(ex).__name__,
                              serial_name, str(ex)))
    return serial_handle
def open_telnet(port_number, printer, prompt):
    '''Open a telnet connection to localhost on the given port.

    Returns the Telnet handle, or None if the connection could not
    be made within the five-second timeout.
    '''
    handle = None
    intro = "{}trying to open \"{}\" as a telnet port on localhost...".format(
        prompt, port_number)
    try:
        handle = Telnet("localhost", int(port_number), timeout=5)
        suffix = " opened." if handle is not None else " failed."
        printer.string(intro + suffix)
    except (socket.error, socket.timeout, ValueError) as ex:
        printer.string("{}{} failed to open telnet {}: {}.".format(
            prompt, type(ex).__name__, port_number, str(ex)))
    return handle
def install_lock_acquire(install_lock, printer, prompt):
    '''Attempt to acquire the install lock.

    Polls the lock once per second for up to
    INSTALL_LOCK_WAIT_SECONDS; returns True if the lock was taken,
    else False.  A missing (falsy) lock only produces a warning.
    '''
    seconds_left = INSTALL_LOCK_WAIT_SECONDS
    if not install_lock:
        printer.string("{}warning, there is no install lock.".format(prompt))
        return False
    printer.string("{}waiting for install lock...".format(prompt))
    # Try the lock without blocking, once a second, until it is
    # ours or time runs out
    while not install_lock.acquire(False) and seconds_left > 0:
        sleep(1)
        seconds_left -= 1
    if seconds_left > 0:
        printer.string("{}got install lock.".format(prompt))
        return True
    # "aquire" typo kept deliberately: the exact text may be grepped
    # for in existing logs
    printer.string("{}failed to aquire install lock.".format(prompt))
    return False
def install_lock_release(install_lock, printer, prompt):
    '''Give the install lock back, if there is one.'''
    if not install_lock:
        return
    install_lock.release()
    printer.string("{}install lock released.".format(prompt))
def fetch_repo(url, directory, branch, printer, prompt):
    '''Fetch a repo: directory can be relative or absolute

    If directory already exists the code in it is updated with
    "git pull", otherwise the repo at url is cloned into it.  On
    success origin/<branch> (default "master") is checked out and
    submodules are updated recursively.

    Returns True only if every step that was needed succeeded.
    '''
    got_code = False
    checked_out = False
    success = False
    printer.string("{}in directory {}, fetching"
                   " {} to directory {}".format(prompt, os.getcwd(),
                                                url, directory))
    if not branch:
        branch = "master"
    if os.path.isdir(directory):
        # Update existing code
        with ChangeDir(directory):
            printer.string("{}updating code in {}...".
                           format(prompt, directory))
            try:
                text = subprocess.check_output(subprocess_osify(["git", "pull",
                                                                 "origin", branch]),
                                               stderr=subprocess.STDOUT,
                                               shell=True) # Jenkins hangs without this
                for line in text.splitlines():
                    printer.string("{}{}".format(prompt, line.decode()))
                got_code = True
            except subprocess.CalledProcessError as error:
                printer.string("{}git returned error {}: \"{}\"".
                               format(prompt, error.returncode,
                                      error.output))
    else:
        # Clone the repo
        printer.string("{}cloning from {} into {}...".
                       format(prompt, url, directory))
        try:
            text = subprocess.check_output(subprocess_osify(["git", "clone", url, directory]),
                                           stderr=subprocess.STDOUT,
                                           shell=True) # Jenkins hangs without this
            for line in text.splitlines():
                printer.string("{}{}".format(prompt, line.decode()))
            got_code = True
        except subprocess.CalledProcessError as error:
            printer.string("{}git returned error {}: \"{}\"".
                           format(prompt, error.returncode,
                                  error.output))
    if got_code and os.path.isdir(directory):
        # Check out the correct branch and recurse submodules
        with ChangeDir(directory):
            printer.string("{}checking out branch {}...".
                           format(prompt, branch))
            try:
                # advice.detachedHead=false silences git's detached
                # HEAD warning when checking out origin/<branch>
                text = subprocess.check_output(subprocess_osify(["git", "-c",
                                                                 "advice.detachedHead=false",
                                                                 "checkout",
                                                                 "origin/" + branch]),
                                               stderr=subprocess.STDOUT,
                                               shell=True) # Jenkins hangs without this
                for line in text.splitlines():
                    printer.string("{}{}".format(prompt, line.decode()))
                checked_out = True
            except subprocess.CalledProcessError as error:
                printer.string("{}git returned error {}: \"{}\"".
                               format(prompt, error.returncode,
                                      error.output))
            if checked_out:
                printer.string("{}recursing sub-modules (can take some time" \
                               " and gives no feedback).".format(prompt))
                try:
                    text = subprocess.check_output(subprocess_osify(["git", "submodule",
                                                                     "update", "--init",
                                                                     "--recursive"]),
                                                   stderr=subprocess.STDOUT,
                                                   shell=True) # Jenkins hangs without this
                    for line in text.splitlines():
                        printer.string("{}{}".format(prompt, line.decode()))
                    success = True
                except subprocess.CalledProcessError as error:
                    printer.string("{}git returned error {}: \"{}\"".
                                   format(prompt, error.returncode,
                                          error.output))
    return success
def exe_where(exe_name, help_text, printer, prompt):
    '''Find an executable using where.exe or which on linux'''
    found = False
    printer.string("{}looking for \"{}\"...".format(prompt, exe_name))
    # The "".join() construction keeps names containing spaces intact
    # when handed to the shell; see
    # https://stackoverflow.com/questions/14928860/passing-double-quote-shell-commands-in-python-to-subprocess-popen
    if platform.system() == "Linux":
        cmd = ["which {}".format(exe_name)]
        printer.string("{}detected linux, calling \"{}\"...".format(prompt, cmd))
    else:
        cmd = ["where", "".join(exe_name)]
        printer.string("{}detected nonlinux, calling \"{}\"...".format(prompt, cmd))
    try:
        output = subprocess.check_output(cmd,
                                         stderr=subprocess.STDOUT,
                                         shell=True) # Jenkins hangs without this
        for raw_line in output.splitlines():
            printer.string("{}{} found in {}".format(prompt, exe_name,
                                                     raw_line.decode()))
        found = True
    except subprocess.CalledProcessError:
        if help_text:
            message = "{}ERROR {} not found: {}".format(prompt, exe_name,
                                                        help_text)
        else:
            message = "{}ERROR {} not found".format(prompt, exe_name)
        printer.string(message)
    return found
def exe_version(exe_name, version_switch, printer, prompt):
    '''Print the version of a given executable

    version_switch defaults to "--version" if falsy.  Returns True
    if the executable ran and reported its version, else False.
    '''
    worked = False
    switch = version_switch if version_switch else "--version"
    try:
        output = subprocess.check_output(subprocess_osify(["".join(exe_name), switch]),
                                         stderr=subprocess.STDOUT,
                                         shell=True) # Jenkins hangs without this
        for raw_line in output.splitlines():
            printer.string("{}{}".format(prompt, raw_line.decode()))
        worked = True
    except subprocess.CalledProcessError:
        printer.string("{}ERROR {} either not found or didn't like {}". \
                       format(prompt, exe_name, switch))
    return worked
def exe_terminate(process_pid):
    '''Terminate the process with the given pid and all its children.'''
    parent = psutil.Process(process_pid)
    # Take the children down first so nothing gets re-spawned,
    # then terminate the parent itself
    for child in parent.children(recursive=True):
        child.terminate()
    parent.terminate()
def read_from_process_and_queue(process, read_queue):
    '''Pump lines of stdout from a running process into a queue.

    Loops until the process terminates (poll() returns a value),
    decoding each line and queueing only the non-empty ones.
    '''
    while process.poll() is None:
        line = process.stdout.readline().decode()
        if not line:
            continue
        read_queue.put(line)
def queue_get_no_exception(the_queue, block=True, timeout=None):
    '''Fetch one item from the_queue, returning None instead of
    raising queue.Empty when nothing arrives in time.'''
    try:
        return the_queue.get(block=block, timeout=timeout)
    except queue.Empty:
        return None
def capture_env_var(line, env, printer, prompt):
    '''A bit of exe_run that needs to be called from two places'''
    # Split on the first "=" only, so that values which themselves
    # contain "=" are kept intact; a line with no "=" at all is not
    # an environment variable and is just reported
    key, separator, value = line.partition('=')
    if separator:
        env[key] = value.rstrip()
    else:
        printer.string("{}WARNING: not an environment variable: \"{}\"".
                       format(prompt, line))
# Note: if returned_env is given then "set"
# will be executed after the exe and the environment
# variables will be returned in it. The down-side
# of this is that the return value of the exe is,
# of course, lost.
def exe_run(call_list, guard_time_seconds, printer, prompt,
            shell_cmd=False, set_env=None, returned_env=None):
    '''Call an executable, printing out what it does

    call_list:          the executable plus its arguments as a list.
                        NOTE: modified in-place when returned_env is
                        given (an "&& echo flibble && set && sleep 2"
                        tail is appended).
    guard_time_seconds: if non-zero, the process is terminated once it
                        has either run for this long overall or has
                        produced no output for this long.
    printer:            object on which .string() is called for output.
    prompt:             prefix string for all printed messages.
    shell_cmd:          passed to subprocess.Popen() as shell=.
    set_env:            optional environment for the child process.
    returned_env:       optional dictionary which, if not None, is
                        filled with the child's environment variables
                        after the command has run.

    Returns True if the process ended by itself (was not killed by the
    guard timer) with return value 0, else False.
    '''
    success = False
    start_time = time()
    flibbling = False
    kill_time = None
    read_time = start_time
    if returned_env is not None:
        # The caller wants the environment after the
        # command has run, so, from this post:
        # https://stackoverflow.com/questions/1214496/how-to-get-environment-from-a-subprocess
        # append a tag that we can detect
        # to the command and then call set,
        # from which we can parse the environment
        call_list.append("&&")
        call_list.append("echo")
        call_list.append("flibble")
        call_list.append("&&")
        call_list.append("set")
        # I've seen output from set get lost,
        # possibly because the process ending
        # is asynchronous with stdout,
        # so add a delay here as well
        call_list.append("&&")
        call_list.append("sleep")
        call_list.append("2")
    try:
        # Call the thang
        # Note: used to have bufsize=1 here but it turns out
        # that is ignored 'cos the output is considered
        # binary. Seems to work in any case, I guess
        # Winders, at least, is in any case line-buffered.
        process = subprocess.Popen(call_list,
                                   stdout=subprocess.PIPE,
                                   stderr=subprocess.STDOUT,
                                   shell=shell_cmd,
                                   env=set_env)
        printer.string("{}{}, pid {} started with guard time {} second(s)". \
                       format(prompt, call_list[0], process.pid,
                              guard_time_seconds))
        # This is over complex but, unfortunately, necessary.
        # At least one thing that we try to run, nrfjprog, can
        # crash silently: just hangs and sends no output. However
        # it also doesn't flush and close stdout and so read(1)
        # will hang, meaning we can't read its output as a means
        # to check that it has hung.
        # So, here we poll for the return value, which is normally
        # how things will end, and we start another thread which
        # reads from the process's stdout. If the thread sees
        # nothing for guard_time_seconds then we terminate the
        # process.
        read_queue = queue.Queue()
        read_thread = threading.Thread(target=read_from_process_and_queue,
                                       args=(process, read_queue))
        read_thread.start()
        while process.poll() is None:
            # Fire the guard timer at most once (kill_time records it)
            if guard_time_seconds and (kill_time is None) and \
               ((time() - start_time > guard_time_seconds) or
                (time() - read_time > guard_time_seconds)):
                kill_time = time()
                printer.string("{}guard time of {} second(s)." \
                               " expired, stopping {}...".
                               format(prompt, guard_time_seconds,
                                      call_list[0]))
                exe_terminate(process.pid)
            line = queue_get_no_exception(read_queue, True, EXE_RUN_QUEUE_WAIT_SECONDS)
            read_time = time()
            while line is not None:
                line = line.rstrip()
                # Once the "flibble" marker has gone by, everything that
                # follows is "set" output, i.e. environment variables
                if flibbling:
                    capture_env_var(line, returned_env, printer, prompt)
                else:
                    if returned_env is not None and "flibble" in line:
                        flibbling = True
                    else:
                        printer.string("{}{}".format(prompt, line))
                line = queue_get_no_exception(read_queue, True, EXE_RUN_QUEUE_WAIT_SECONDS)
                read_time = time()
        # Can't join() read_thread here as it might have
        # blocked on a read() (if nrfjprog has anything to
        # do with it). It will be tidied up when this process
        # exits.
        # There may still be stuff on the queue, read it out here
        line = queue_get_no_exception(read_queue, True, EXE_RUN_QUEUE_WAIT_SECONDS)
        while line is not None:
            line = line.rstrip()
            if flibbling:
                capture_env_var(line, returned_env, printer, prompt)
            else:
                if returned_env is not None and "flibble" in line:
                    flibbling = True
                else:
                    printer.string("{}{}".format(prompt, line))
            line = queue_get_no_exception(read_queue, True, EXE_RUN_QUEUE_WAIT_SECONDS)
        # There may still be stuff in the buffer after
        # the application has finished running so flush that
        # out here
        line = process.stdout.readline().decode()
        while line:
            line = line.rstrip()
            if flibbling:
                capture_env_var(line, returned_env, printer, prompt)
            else:
                if returned_env is not None and "flibble" in line:
                    flibbling = True
                else:
                    printer.string("{}{}".format(prompt, line))
            line = process.stdout.readline().decode()
        # Only a success if the process ended by itself with 0
        if (process.poll() == 0) and kill_time is None:
            success = True
        printer.string("{}{}, pid {} ended with return value {}.". \
                       format(prompt, call_list[0],
                              process.pid, process.poll()))
    except ValueError as ex:
        # NOTE(review): this message interpolates the exception text
        # where "execute {}" reads as though the command name was
        # intended; left as-is since the text is emitted at runtime.
        printer.string("{}failed: {} while trying to execute {}.". \
                       format(prompt, type(ex).__name__, str(ex)))
    return success
# NOTE(review): leftover comment block — the "simple SWO decoder" it
# described is not defined here, and the stackoverflow link (about
# changing the working directory in Python) belongs with a ChangeDir
# helper, not with wait_for_completion() below.
def wait_for_completion(list, purpose, guard_time_seconds,
                        printer, prompt):
    '''Wait for a completion list to empty

    Polls once a second until list is empty or guard_time_seconds
    have passed (a guard time of zero means wait forever), printing
    a progress message every 30 seconds.  Returns True if the list
    emptied, else False.
    '''
    if list:
        remaining = guard_time_seconds
        printer.string("{}waiting up to {} second(s)"
                       " for {} completion...".
                       format(prompt, guard_time_seconds, purpose))
        ticks = 0
        while list and ((guard_time_seconds == 0) or (remaining > 0)):
            sleep(1)
            remaining -= 1
            ticks += 1
            if ticks == 30:
                # Report what we are still waiting for every 30 seconds
                outstanding = ", ".join(str(item) for item in list)
                printer.string("{}still waiting {} second(s)"
                               " for {} to complete (waiting"
                               " for {}).".
                               format(prompt, remaining,
                                      purpose, outstanding))
                ticks = 0
    if not list:
        printer.string("{}{} completed.".format(prompt, purpose))
        return True
    return False
def reset_nrf_target(connection, printer, prompt):
    '''Reset a Nordic NRFxxx target

    If connection carries a truthy "debugger" entry it is passed to
    nrfjprog with -s to select that debugger.  Returns whatever
    exe_run() returns for the nrfjprog invocation.
    '''
    printer.string("{}resetting target...".format(prompt))
    # Assemble the nrfjprog command line
    call_list = ["nrfjprog", "--reset"]
    if connection and "debugger" in connection and connection["debugger"]:
        call_list += ["-s", connection["debugger"]]
    # Log the exact command before running it
    command_text = "".join(" " + part for part in call_list)
    printer.string("{}in directory {} calling{}". \
                   format(prompt, os.getcwd(), command_text))
    # Call it
    return exe_run(call_list, 60, printer, prompt)
| 42.930101 | 120 | 0.536437 |
ddee87075899569423a1b5f9ff3b0f5185a1f91c | 3,677 | py | Python | noname.py | schellenberg/lyric-grabber-for-plex | f449b480d4c234ff9d358fc7d9a0e729e9ed45e5 | [
"MIT"
] | 26 | 2019-09-30T02:29:05.000Z | 2022-01-17T20:10:54.000Z | noname.py | schellenberg/lyric-grabber-for-plex | f449b480d4c234ff9d358fc7d9a0e729e9ed45e5 | [
"MIT"
] | 5 | 2019-10-02T18:53:48.000Z | 2020-05-06T14:07:25.000Z | noname.py | schellenberg/lyric-grabber-for-plex | f449b480d4c234ff9d358fc7d9a0e729e9ed45e5 | [
"MIT"
] | 6 | 2019-09-30T13:04:22.000Z | 2022-03-24T17:47:16.000Z | # -*- coding: utf-8 -*-
###########################################################################
## Python code generated with wxFormBuilder (version Oct 26 2018)
## http://www.wxformbuilder.org/
##
## PLEASE DO *NOT* EDIT THIS FILE!
###########################################################################
import wx
import wx.xrc
###########################################################################
## Class frameMain
###########################################################################
| 39.537634 | 213 | 0.689421 |
ddefc795048cf6d437ec89f16bc748675c74df14 | 1,371 | py | Python | feature_extraction/cli.py | widoptimization-willett/feature-extraction | 25e963e3383673aad6aedfd504e69a1df7f47f9a | [
"Apache-2.0"
] | null | null | null | feature_extraction/cli.py | widoptimization-willett/feature-extraction | 25e963e3383673aad6aedfd504e69a1df7f47f9a | [
"Apache-2.0"
] | null | null | null | feature_extraction/cli.py | widoptimization-willett/feature-extraction | 25e963e3383673aad6aedfd504e69a1df7f47f9a | [
"Apache-2.0"
] | null | null | null | import json
import click
from tqdm import tqdm
import numpy as np
from PIL import Image
from skimage.exposure import rescale_intensity
from . import extraction, pipeline
np.seterr(all='raise')
| 32.642857 | 106 | 0.745441 |
ddeff9d5eb6e649b509f4b345051a62872b3798f | 186 | py | Python | homepage/templatetags/infos_tags.py | phodal/phodaldev | b5a48339a21b5674a70d284a85ef8c45e010fe43 | [
"MIT"
] | 94 | 2015-01-28T15:46:02.000Z | 2020-11-02T12:56:15.000Z | homepage/templatetags/infos_tags.py | phodal/phodaldev | b5a48339a21b5674a70d284a85ef8c45e010fe43 | [
"MIT"
] | 56 | 2015-04-05T03:18:41.000Z | 2021-08-29T00:50:57.000Z | homepage/templatetags/infos_tags.py | phodal/phodaldev | b5a48339a21b5674a70d284a85ef8c45e010fe43 | [
"MIT"
] | 38 | 2015-08-26T08:10:12.000Z | 2021-06-11T19:36:31.000Z | from mezzanine import template
from homepage.models import Info
register = template.Library()
| 18.6 | 42 | 0.747312 |
ddf10fa882b2377b78f180954bd012323f534965 | 514 | py | Python | test/unit/test_main.py | CMPUT291PROJECT1F18/Mini-Project-1 | b58144dd80c40466de755877b7c3996f4aa67af9 | [
"MIT"
] | 1 | 2018-11-06T01:04:13.000Z | 2018-11-06T01:04:13.000Z | test/unit/test_main.py | CMPUT291PROJECT1F18/Mini-Project-1 | b58144dd80c40466de755877b7c3996f4aa67af9 | [
"MIT"
] | 39 | 2018-10-23T00:28:13.000Z | 2018-11-06T16:14:56.000Z | test/unit/test_main.py | CMPUT291PROJECT1F18/Mini-Project-1 | b58144dd80c40466de755877b7c3996f4aa67af9 | [
"MIT"
] | null | null | null | #!/usr/bin/python
# -*- coding: utf-8 -*-
"""pytests for :mod:`.__main__`"""
from mini_project_1.__main__ import get_parser, main
import mock
| 23.363636 | 93 | 0.680934 |
ddf357f8f64530a6e0a779ab33c258cb0322ac3e | 189 | py | Python | kafka/1/consumer/kaktatest.py | adriancarriger/experiments | 7e4248592dc8fbb08522c9b5f0393c80dc7e2699 | [
"MIT"
] | 1 | 2021-06-22T13:38:36.000Z | 2021-06-22T13:38:36.000Z | kafka/1/consumer/kaktatest.py | adriancarriger/experiments | 7e4248592dc8fbb08522c9b5f0393c80dc7e2699 | [
"MIT"
] | 108 | 2019-05-23T16:12:32.000Z | 2020-09-04T15:47:33.000Z | kafka/1/consumer/kaktatest.py | adriancarriger/experiments | 7e4248592dc8fbb08522c9b5f0393c80dc7e2699 | [
"MIT"
] | null | null | null | from kafka import KafkaConsumer
consumer = KafkaConsumer('myTestTopic', bootstrap_servers='localhost:9092')
for item in consumer:
print("The Message is :", item)
# https://kafka-1:9092
| 31.5 | 75 | 0.756614 |
ddf3aa22469fb9543ce65c54187ad310deed6e44 | 67,701 | py | Python | src/genie/libs/parser/iosxe/tests/ShowIpBgpDetail/cli/equal/golden_output2_expected.py | balmasea/genieparser | d1e71a96dfb081e0a8591707b9d4872decd5d9d3 | [
"Apache-2.0"
] | 204 | 2018-06-27T00:55:27.000Z | 2022-03-06T21:12:18.000Z | src/genie/libs/parser/iosxe/tests/ShowIpBgpDetail/cli/equal/golden_output2_expected.py | balmasea/genieparser | d1e71a96dfb081e0a8591707b9d4872decd5d9d3 | [
"Apache-2.0"
] | 468 | 2018-06-19T00:33:18.000Z | 2022-03-31T23:23:35.000Z | src/genie/libs/parser/iosxe/tests/ShowIpBgpDetail/cli/equal/golden_output2_expected.py | balmasea/genieparser | d1e71a96dfb081e0a8591707b9d4872decd5d9d3 | [
"Apache-2.0"
] | 309 | 2019-01-16T20:21:07.000Z | 2022-03-30T12:56:41.000Z | expected_output = {
"instance": {
"default": {
"vrf": {
"L3VPN-0050": {
"address_family": {
"vpnv4": {
"default_vrf": "L3VPN-0050",
"prefixes": {
"10.4.1.0/24": {
"available_path": "1",
"best_path": "1",
"index": {
1: {
"ext_community": "RT:5918:50",
"gateway": "192.168.10.253",
"localpref": 100,
"metric": 0,
"mpls_labels": {
"in": "2467",
"out": "nolabel",
},
"next_hop": "192.168.10.253",
"next_hop_via": "vrf L3VPN-0050",
"origin_codes": "?",
"originator": "192.168.10.253",
"recipient_pathid": "0",
"refresh_epoch": 1,
"route_info": "62000",
"status_codes": "*>",
"transfer_pathid": "0x0",
"update_group": 724,
}
},
"paths": "1 available, best #1, table L3VPN-0050",
"table_version": "16933597",
},
"10.44.105.0/24": {
"available_path": "1",
"best_path": "1",
"index": {
1: {
"ext_community": "RT:5918:50",
"gateway": "192.168.10.253",
"localpref": 100,
"metric": 0,
"mpls_labels": {
"in": "6620",
"out": "nolabel",
},
"next_hop": "192.168.10.253",
"next_hop_via": "vrf L3VPN-0050",
"origin_codes": "?",
"originator": "192.168.10.253",
"recipient_pathid": "0",
"refresh_epoch": 1,
"route_info": "62000",
"status_codes": "*>",
"transfer_pathid": "0x0",
"update_group": 724,
}
},
"paths": "1 available, best #1, table L3VPN-0050",
"table_version": "16933498",
},
"172.16.100.10/32": {
"available_path": "1",
"best_path": "1",
"index": {
1: {
"ext_community": "RT:5918:50",
"gateway": "192.168.10.253",
"localpref": 100,
"metric": 0,
"mpls_labels": {
"in": "2904",
"out": "nolabel",
},
"next_hop": "192.168.10.253",
"next_hop_via": "vrf L3VPN-0050",
"origin_codes": "?",
"originator": "192.168.10.253",
"recipient_pathid": "0",
"refresh_epoch": 1,
"route_info": "62000",
"status_codes": "*>",
"transfer_pathid": "0x0",
"update_group": 724,
}
},
"paths": "1 available, best #1, table L3VPN-0050",
"table_version": "16933541",
},
"172.16.100.11/32": {
"available_path": "1",
"best_path": "1",
"index": {
1: {
"ext_community": "RT:5918:50",
"gateway": "192.168.10.253",
"localpref": 100,
"metric": 0,
"mpls_labels": {
"in": "2903",
"out": "nolabel",
},
"next_hop": "192.168.10.253",
"next_hop_via": "vrf L3VPN-0050",
"origin_codes": "?",
"originator": "192.168.10.253",
"recipient_pathid": "0",
"refresh_epoch": 1,
"route_info": "62000",
"status_codes": "*>",
"transfer_pathid": "0x0",
"update_group": 724,
}
},
"paths": "1 available, best #1, table L3VPN-0050",
"table_version": "16933540",
},
"172.16.100.12/32": {
"available_path": "1",
"best_path": "1",
"index": {
1: {
"ext_community": "RT:5918:50",
"gateway": "192.168.10.253",
"localpref": 100,
"metric": 0,
"mpls_labels": {
"in": "2901",
"out": "nolabel",
},
"next_hop": "192.168.10.253",
"next_hop_via": "vrf L3VPN-0050",
"origin_codes": "?",
"originator": "192.168.10.253",
"recipient_pathid": "0",
"refresh_epoch": 1,
"route_info": "62000",
"status_codes": "*>",
"transfer_pathid": "0x0",
"update_group": 724,
}
},
"paths": "1 available, best #1, table L3VPN-0050",
"table_version": "16933539",
},
"172.16.100.13/32": {
"available_path": "1",
"best_path": "1",
"index": {
1: {
"ext_community": "RT:5918:50",
"gateway": "192.168.10.253",
"localpref": 100,
"metric": 0,
"mpls_labels": {
"in": "5466",
"out": "nolabel",
},
"next_hop": "192.168.10.253",
"next_hop_via": "vrf L3VPN-0050",
"origin_codes": "?",
"originator": "192.168.10.253",
"recipient_pathid": "0",
"refresh_epoch": 1,
"route_info": "62000",
"status_codes": "*>",
"transfer_pathid": "0x0",
"update_group": 724,
}
},
"paths": "1 available, best #1, table L3VPN-0050",
"table_version": "16933538",
},
"172.16.100.14/32": {
"available_path": "1",
"best_path": "1",
"index": {
1: {
"ext_community": "RT:5918:50",
"gateway": "192.168.10.253",
"localpref": 100,
"metric": 0,
"mpls_labels": {
"in": "5465",
"out": "nolabel",
},
"next_hop": "192.168.10.253",
"next_hop_via": "vrf L3VPN-0050",
"origin_codes": "?",
"originator": "192.168.10.253",
"recipient_pathid": "0",
"refresh_epoch": 1,
"route_info": "62000",
"status_codes": "*>",
"transfer_pathid": "0x0",
"update_group": 724,
}
},
"paths": "1 available, best #1, table L3VPN-0050",
"table_version": "16933537",
},
"172.16.100.15/32": {
"available_path": "1",
"best_path": "1",
"index": {
1: {
"ext_community": "RT:5918:50",
"gateway": "192.168.10.253",
"localpref": 100,
"metric": 0,
"mpls_labels": {
"in": "5464",
"out": "nolabel",
},
"next_hop": "192.168.10.253",
"next_hop_via": "vrf L3VPN-0050",
"origin_codes": "?",
"originator": "192.168.10.253",
"recipient_pathid": "0",
"refresh_epoch": 1,
"route_info": "62000",
"status_codes": "*>",
"transfer_pathid": "0x0",
"update_group": 724,
}
},
"paths": "1 available, best #1, table L3VPN-0050",
"table_version": "16933536",
},
"172.16.100.16/32": {
"available_path": "1",
"best_path": "1",
"index": {
1: {
"ext_community": "RT:5918:50",
"gateway": "192.168.10.253",
"localpref": 100,
"metric": 0,
"mpls_labels": {
"in": "2914",
"out": "nolabel",
},
"next_hop": "192.168.10.253",
"next_hop_via": "vrf L3VPN-0050",
"origin_codes": "?",
"originator": "192.168.10.253",
"recipient_pathid": "0",
"refresh_epoch": 1,
"route_info": "62000",
"status_codes": "*>",
"transfer_pathid": "0x0",
"update_group": 724,
}
},
"paths": "1 available, best #1, table L3VPN-0050",
"table_version": "16933551",
},
"172.16.100.17/32": {
"available_path": "1",
"best_path": "1",
"index": {
1: {
"ext_community": "RT:5918:50",
"gateway": "192.168.10.253",
"localpref": 100,
"metric": 0,
"mpls_labels": {
"in": "2913",
"out": "nolabel",
},
"next_hop": "192.168.10.253",
"next_hop_via": "vrf L3VPN-0050",
"origin_codes": "?",
"originator": "192.168.10.253",
"recipient_pathid": "0",
"refresh_epoch": 1,
"route_info": "62000",
"status_codes": "*>",
"transfer_pathid": "0x0",
"update_group": 724,
}
},
"paths": "1 available, best #1, table L3VPN-0050",
"table_version": "16933550",
},
"172.16.100.18/32": {
"available_path": "1",
"best_path": "1",
"index": {
1: {
"ext_community": "RT:5918:50",
"gateway": "192.168.10.253",
"localpref": 100,
"metric": 0,
"mpls_labels": {
"in": "2912",
"out": "nolabel",
},
"next_hop": "192.168.10.253",
"next_hop_via": "vrf L3VPN-0050",
"origin_codes": "?",
"originator": "192.168.10.253",
"recipient_pathid": "0",
"refresh_epoch": 1,
"route_info": "62000",
"status_codes": "*>",
"transfer_pathid": "0x0",
"update_group": 724,
}
},
"paths": "1 available, best #1, table L3VPN-0050",
"table_version": "16933549",
},
"172.16.100.19/32": {
"available_path": "1",
"best_path": "1",
"index": {
1: {
"ext_community": "RT:5918:50",
"gateway": "192.168.10.253",
"localpref": 100,
"metric": 0,
"mpls_labels": {
"in": "2911",
"out": "nolabel",
},
"next_hop": "192.168.10.253",
"next_hop_via": "vrf L3VPN-0050",
"origin_codes": "?",
"originator": "192.168.10.253",
"recipient_pathid": "0",
"refresh_epoch": 1,
"route_info": "62000",
"status_codes": "*>",
"transfer_pathid": "0x0",
"update_group": 724,
}
},
"paths": "1 available, best #1, table L3VPN-0050",
"table_version": "16933548",
},
"172.16.100.2/32": {
"available_path": "1",
"best_path": "1",
"index": {
1: {
"community": "62000:2",
"ext_community": "RT:5918:50",
"gateway": "192.168.10.253",
"localpref": 100,
"metric": 0,
"mpls_labels": {
"in": "2468",
"out": "nolabel",
},
"next_hop": "192.168.10.253",
"next_hop_via": "vrf L3VPN-0050",
"origin_codes": "?",
"originator": "192.168.10.253",
"recipient_pathid": "0",
"refresh_epoch": 1,
"route_info": "62000",
"status_codes": "*>",
"transfer_pathid": "0x0",
}
},
"paths": "1 available, best #1, table L3VPN-0050",
"table_version": "16933598",
},
"172.16.100.20/32": {
"available_path": "1",
"best_path": "1",
"index": {
1: {
"ext_community": "RT:5918:50",
"gateway": "192.168.10.253",
"localpref": 100,
"metric": 0,
"mpls_labels": {
"in": "2910",
"out": "nolabel",
},
"next_hop": "192.168.10.253",
"next_hop_via": "vrf L3VPN-0050",
"origin_codes": "?",
"originator": "192.168.10.253",
"recipient_pathid": "0",
"refresh_epoch": 1,
"route_info": "62000",
"status_codes": "*>",
"transfer_pathid": "0x0",
"update_group": 724,
}
},
"paths": "1 available, best #1, table L3VPN-0050",
"table_version": "16933547",
},
"172.16.100.21/32": {
"available_path": "1",
"best_path": "1",
"index": {
1: {
"ext_community": "RT:5918:50",
"gateway": "192.168.10.253",
"localpref": 100,
"metric": 0,
"mpls_labels": {
"in": "2909",
"out": "nolabel",
},
"next_hop": "192.168.10.253",
"next_hop_via": "vrf L3VPN-0050",
"origin_codes": "?",
"originator": "192.168.10.253",
"recipient_pathid": "0",
"refresh_epoch": 1,
"route_info": "62000",
"status_codes": "*>",
"transfer_pathid": "0x0",
"update_group": 724,
}
},
"paths": "1 available, best #1, table L3VPN-0050",
"table_version": "16933546",
},
"172.16.100.22/32": {
"available_path": "1",
"best_path": "1",
"index": {
1: {
"ext_community": "RT:5918:50",
"gateway": "192.168.10.253",
"localpref": 100,
"metric": 0,
"mpls_labels": {
"in": "2908",
"out": "nolabel",
},
"next_hop": "192.168.10.253",
"next_hop_via": "vrf L3VPN-0050",
"origin_codes": "?",
"originator": "192.168.10.253",
"recipient_pathid": "0",
"refresh_epoch": 1,
"route_info": "62000",
"status_codes": "*>",
"transfer_pathid": "0x0",
"update_group": 724,
}
},
"paths": "1 available, best #1, table L3VPN-0050",
"table_version": "16933545",
},
"172.16.100.23/32": {
"available_path": "1",
"best_path": "1",
"index": {
1: {
"ext_community": "RT:5918:50",
"gateway": "192.168.10.253",
"localpref": 100,
"metric": 0,
"mpls_labels": {
"in": "2907",
"out": "nolabel",
},
"next_hop": "192.168.10.253",
"next_hop_via": "vrf L3VPN-0050",
"origin_codes": "?",
"originator": "192.168.10.253",
"recipient_pathid": "0",
"refresh_epoch": 1,
"route_info": "62000",
"status_codes": "*>",
"transfer_pathid": "0x0",
"update_group": 724,
}
},
"paths": "1 available, best #1, table L3VPN-0050",
"table_version": "16933544",
},
"172.16.100.24/32": {
"available_path": "1",
"best_path": "1",
"index": {
1: {
"ext_community": "RT:5918:50",
"gateway": "192.168.10.253",
"localpref": 100,
"metric": 0,
"mpls_labels": {
"in": "2923",
"out": "nolabel",
},
"next_hop": "192.168.10.253",
"next_hop_via": "vrf L3VPN-0050",
"origin_codes": "?",
"originator": "192.168.10.253",
"recipient_pathid": "0",
"refresh_epoch": 1,
"route_info": "62000",
"status_codes": "*>",
"transfer_pathid": "0x0",
"update_group": 724,
}
},
"paths": "1 available, best #1, table L3VPN-0050",
"table_version": "16933559",
},
"172.16.100.25/32": {
"available_path": "1",
"best_path": "1",
"index": {
1: {
"ext_community": "RT:5918:50",
"gateway": "192.168.10.253",
"localpref": 100,
"metric": 0,
"mpls_labels": {
"in": "2922",
"out": "nolabel",
},
"next_hop": "192.168.10.253",
"next_hop_via": "vrf L3VPN-0050",
"origin_codes": "?",
"originator": "192.168.10.253",
"recipient_pathid": "0",
"refresh_epoch": 1,
"route_info": "62000",
"status_codes": "*>",
"transfer_pathid": "0x0",
"update_group": 724,
}
},
"paths": "1 available, best #1, table L3VPN-0050",
"table_version": "16933558",
},
"172.16.100.26/32": {
"available_path": "1",
"best_path": "1",
"index": {
1: {
"ext_community": "RT:5918:50",
"gateway": "192.168.10.253",
"localpref": 100,
"metric": 0,
"mpls_labels": {
"in": "2920",
"out": "nolabel",
},
"next_hop": "192.168.10.253",
"next_hop_via": "vrf L3VPN-0050",
"origin_codes": "?",
"originator": "192.168.10.253",
"recipient_pathid": "0",
"refresh_epoch": 1,
"route_info": "62000",
"status_codes": "*>",
"transfer_pathid": "0x0",
"update_group": 724,
}
},
"paths": "1 available, best #1, table L3VPN-0050",
"table_version": "16933557",
},
"172.16.100.27/32": {
"available_path": "1",
"best_path": "1",
"index": {
1: {
"ext_community": "RT:5918:50",
"gateway": "192.168.10.253",
"localpref": 100,
"metric": 0,
"mpls_labels": {
"in": "2919",
"out": "nolabel",
},
"next_hop": "192.168.10.253",
"next_hop_via": "vrf L3VPN-0050",
"origin_codes": "?",
"originator": "192.168.10.253",
"recipient_pathid": "0",
"refresh_epoch": 1,
"route_info": "62000",
"status_codes": "*>",
"transfer_pathid": "0x0",
"update_group": 724,
}
},
"paths": "1 available, best #1, table L3VPN-0050",
"table_version": "16933556",
},
"172.16.100.28/32": {
"available_path": "1",
"best_path": "1",
"index": {
1: {
"ext_community": "RT:5918:50",
"gateway": "192.168.10.253",
"localpref": 100,
"metric": 0,
"mpls_labels": {
"in": "2918",
"out": "nolabel",
},
"next_hop": "192.168.10.253",
"next_hop_via": "vrf L3VPN-0050",
"origin_codes": "?",
"originator": "192.168.10.253",
"recipient_pathid": "0",
"refresh_epoch": 1,
"route_info": "62000",
"status_codes": "*>",
"transfer_pathid": "0x0",
"update_group": 724,
}
},
"paths": "1 available, best #1, table L3VPN-0050",
"table_version": "16933555",
},
"172.16.100.29/32": {
"available_path": "1",
"best_path": "1",
"index": {
1: {
"ext_community": "RT:5918:50",
"gateway": "192.168.10.253",
"localpref": 100,
"metric": 0,
"mpls_labels": {
"in": "2917",
"out": "nolabel",
},
"next_hop": "192.168.10.253",
"next_hop_via": "vrf L3VPN-0050",
"origin_codes": "?",
"originator": "192.168.10.253",
"recipient_pathid": "0",
"refresh_epoch": 1,
"route_info": "62000",
"status_codes": "*>",
"transfer_pathid": "0x0",
"update_group": 724,
}
},
"paths": "1 available, best #1, table L3VPN-0050",
"table_version": "16933554",
},
"172.16.100.3/32": {
"available_path": "1",
"best_path": "1",
"index": {
1: {
"ext_community": "RT:5918:50",
"gateway": "192.168.10.253",
"localpref": 100,
"metric": 0,
"mpls_labels": {
"in": "5463",
"out": "nolabel",
},
"next_hop": "192.168.10.253",
"next_hop_via": "vrf L3VPN-0050",
"origin_codes": "?",
"originator": "192.168.10.253",
"recipient_pathid": "0",
"refresh_epoch": 1,
"route_info": "62000",
"status_codes": "*>",
"transfer_pathid": "0x0",
"update_group": 724,
}
},
"paths": "1 available, best #1, table L3VPN-0050",
"table_version": "16933535",
},
"172.16.100.30/32": {
"available_path": "1",
"best_path": "1",
"index": {
1: {
"ext_community": "RT:5918:50",
"gateway": "192.168.10.253",
"localpref": 100,
"metric": 0,
"mpls_labels": {
"in": "2916",
"out": "nolabel",
},
"next_hop": "192.168.10.253",
"next_hop_via": "vrf L3VPN-0050",
"origin_codes": "?",
"originator": "192.168.10.253",
"recipient_pathid": "0",
"refresh_epoch": 1,
"route_info": "62000",
"status_codes": "*>",
"transfer_pathid": "0x0",
"update_group": 724,
}
},
"paths": "1 available, best #1, table L3VPN-0050",
"table_version": "16933553",
},
"172.16.100.31/32": {
"available_path": "1",
"best_path": "1",
"index": {
1: {
"ext_community": "RT:5918:50",
"gateway": "192.168.10.253",
"localpref": 100,
"metric": 0,
"mpls_labels": {
"in": "2915",
"out": "nolabel",
},
"next_hop": "192.168.10.253",
"next_hop_via": "vrf L3VPN-0050",
"origin_codes": "?",
"originator": "192.168.10.253",
"recipient_pathid": "0",
"refresh_epoch": 1,
"route_info": "62000",
"status_codes": "*>",
"transfer_pathid": "0x0",
"update_group": 724,
}
},
"paths": "1 available, best #1, table L3VPN-0050",
"table_version": "16933552",
},
"172.16.100.32/32": {
"available_path": "1",
"best_path": "1",
"index": {
1: {
"ext_community": "RT:5918:50",
"gateway": "192.168.10.253",
"localpref": 100,
"metric": 0,
"mpls_labels": {
"in": "6630",
"out": "nolabel",
},
"next_hop": "192.168.10.253",
"next_hop_via": "vrf L3VPN-0050",
"origin_codes": "?",
"originator": "192.168.10.253",
"recipient_pathid": "0",
"refresh_epoch": 1,
"route_info": "62000",
"status_codes": "*>",
"transfer_pathid": "0x0",
"update_group": 724,
}
},
"paths": "1 available, best #1, table L3VPN-0050",
"table_version": "16933506",
},
"172.16.100.33/32": {
"available_path": "1",
"best_path": "1",
"index": {
1: {
"ext_community": "RT:5918:50",
"gateway": "192.168.10.253",
"localpref": 100,
"metric": 0,
"mpls_labels": {
"in": "6629",
"out": "nolabel",
},
"next_hop": "192.168.10.253",
"next_hop_via": "vrf L3VPN-0050",
"origin_codes": "?",
"originator": "192.168.10.253",
"recipient_pathid": "0",
"refresh_epoch": 1,
"route_info": "62000",
"status_codes": "*>",
"transfer_pathid": "0x0",
"update_group": 724,
}
},
"paths": "1 available, best #1, table L3VPN-0050",
"table_version": "16933505",
},
"172.16.100.34/32": {
"available_path": "1",
"best_path": "1",
"index": {
1: {
"ext_community": "RT:5918:50",
"gateway": "192.168.10.253",
"localpref": 100,
"metric": 0,
"mpls_labels": {
"in": "6627",
"out": "nolabel",
},
"next_hop": "192.168.10.253",
"next_hop_via": "vrf L3VPN-0050",
"origin_codes": "?",
"originator": "192.168.10.253",
"recipient_pathid": "0",
"refresh_epoch": 1,
"route_info": "62000",
"status_codes": "*>",
"transfer_pathid": "0x0",
"update_group": 724,
}
},
"paths": "1 available, best #1, table L3VPN-0050",
"table_version": "16933504",
},
"172.16.100.35/32": {
"available_path": "1",
"best_path": "1",
"index": {
1: {
"ext_community": "RT:5918:50",
"gateway": "192.168.10.253",
"localpref": 100,
"metric": 0,
"mpls_labels": {
"in": "6626",
"out": "nolabel",
},
"next_hop": "192.168.10.253",
"next_hop_via": "vrf L3VPN-0050",
"origin_codes": "?",
"originator": "192.168.10.253",
"recipient_pathid": "0",
"refresh_epoch": 1,
"route_info": "62000",
"status_codes": "*>",
"transfer_pathid": "0x0",
"update_group": 724,
}
},
"paths": "1 available, best #1, table L3VPN-0050",
"table_version": "16933503",
},
"172.16.100.36/32": {
"available_path": "1",
"best_path": "1",
"index": {
1: {
"ext_community": "RT:5918:50",
"gateway": "192.168.10.253",
"localpref": 100,
"metric": 0,
"mpls_labels": {
"in": "6625",
"out": "nolabel",
},
"next_hop": "192.168.10.253",
"next_hop_via": "vrf L3VPN-0050",
"origin_codes": "?",
"originator": "192.168.10.253",
"recipient_pathid": "0",
"refresh_epoch": 1,
"route_info": "62000",
"status_codes": "*>",
"transfer_pathid": "0x0",
"update_group": 724,
}
},
"paths": "1 available, best #1, table L3VPN-0050",
"table_version": "16933502",
},
"172.16.100.37/32": {
"available_path": "1",
"best_path": "1",
"index": {
1: {
"ext_community": "RT:5918:50",
"gateway": "192.168.10.253",
"localpref": 100,
"metric": 0,
"mpls_labels": {
"in": "6624",
"out": "nolabel",
},
"next_hop": "192.168.10.253",
"next_hop_via": "vrf L3VPN-0050",
"origin_codes": "?",
"originator": "192.168.10.253",
"recipient_pathid": "0",
"refresh_epoch": 1,
"route_info": "62000",
"status_codes": "*>",
"transfer_pathid": "0x0",
"update_group": 724,
}
},
"paths": "1 available, best #1, table L3VPN-0050",
"table_version": "16933501",
},
"172.16.100.38/32": {
"available_path": "1",
"best_path": "1",
"index": {
1: {
"ext_community": "RT:5918:50",
"gateway": "192.168.10.253",
"localpref": 100,
"metric": 0,
"mpls_labels": {
"in": "6623",
"out": "nolabel",
},
"next_hop": "192.168.10.253",
"next_hop_via": "vrf L3VPN-0050",
"origin_codes": "?",
"originator": "192.168.10.253",
"recipient_pathid": "0",
"refresh_epoch": 1,
"route_info": "62000",
"status_codes": "*>",
"transfer_pathid": "0x0",
"update_group": 724,
}
},
"paths": "1 available, best #1, table L3VPN-0050",
"table_version": "16933500",
},
"172.16.100.4/32": {
"available_path": "1",
"best_path": "1",
"index": {
1: {
"ext_community": "RT:5918:50",
"gateway": "192.168.10.253",
"localpref": 100,
"metric": 0,
"mpls_labels": {
"in": "5462",
"out": "nolabel",
},
"next_hop": "192.168.10.253",
"next_hop_via": "vrf L3VPN-0050",
"origin_codes": "?",
"originator": "192.168.10.253",
"recipient_pathid": "0",
"refresh_epoch": 1,
"route_info": "62000",
"status_codes": "*>",
"transfer_pathid": "0x0",
"update_group": 724,
}
},
"paths": "1 available, best #1, table L3VPN-0050",
"table_version": "16933534",
},
"172.16.100.5/32": {
"available_path": "1",
"best_path": "1",
"index": {
1: {
"ext_community": "RT:5918:50",
"gateway": "192.168.10.253",
"localpref": 100,
"metric": 0,
"mpls_labels": {
"in": "5461",
"out": "nolabel",
},
"next_hop": "192.168.10.253",
"next_hop_via": "vrf L3VPN-0050",
"origin_codes": "?",
"originator": "192.168.10.253",
"recipient_pathid": "0",
"refresh_epoch": 1,
"route_info": "62000",
"status_codes": "*>",
"transfer_pathid": "0x0",
"update_group": 724,
}
},
"paths": "1 available, best #1, table L3VPN-0050",
"table_version": "16933533",
},
"172.16.100.6/32": {
"available_path": "1",
"best_path": "1",
"index": {
1: {
"ext_community": "RT:5918:50",
"gateway": "192.168.10.253",
"localpref": 100,
"metric": 0,
"mpls_labels": {
"in": "5460",
"out": "nolabel",
},
"next_hop": "192.168.10.253",
"next_hop_via": "vrf L3VPN-0050",
"origin_codes": "?",
"originator": "192.168.10.253",
"recipient_pathid": "0",
"refresh_epoch": 1,
"route_info": "62000",
"status_codes": "*>",
"transfer_pathid": "0x0",
"update_group": 724,
}
},
"paths": "1 available, best #1, table L3VPN-0050",
"table_version": "16933532",
},
"172.16.100.7/32": {
"available_path": "1",
"best_path": "1",
"index": {
1: {
"ext_community": "RT:5918:50",
"gateway": "192.168.10.253",
"localpref": 100,
"metric": 0,
"mpls_labels": {
"in": "5459",
"out": "nolabel",
},
"next_hop": "192.168.10.253",
"next_hop_via": "vrf L3VPN-0050",
"origin_codes": "?",
"originator": "192.168.10.253",
"recipient_pathid": "0",
"refresh_epoch": 1,
"route_info": "62000",
"status_codes": "*>",
"transfer_pathid": "0x0",
"update_group": 724,
}
},
"paths": "1 available, best #1, table L3VPN-0050",
"table_version": "16933531",
},
"172.16.100.8/32": {
"available_path": "1",
"best_path": "1",
"index": {
1: {
"ext_community": "RT:5918:50",
"gateway": "192.168.10.253",
"localpref": 100,
"metric": 0,
"mpls_labels": {
"in": "2906",
"out": "nolabel",
},
"next_hop": "192.168.10.253",
"next_hop_via": "vrf L3VPN-0050",
"origin_codes": "?",
"originator": "192.168.10.253",
"recipient_pathid": "0",
"refresh_epoch": 1,
"route_info": "62000",
"status_codes": "*>",
"transfer_pathid": "0x0",
"update_group": 724,
}
},
"paths": "1 available, best #1, table L3VPN-0050",
"table_version": "16933543",
},
"172.16.100.9/32": {
"available_path": "1",
"best_path": "1",
"index": {
1: {
"ext_community": "RT:5918:50",
"gateway": "192.168.10.253",
"localpref": 100,
"metric": 0,
"mpls_labels": {
"in": "2905",
"out": "nolabel",
},
"next_hop": "192.168.10.253",
"next_hop_via": "vrf L3VPN-0050",
"origin_codes": "?",
"originator": "192.168.10.253",
"recipient_pathid": "0",
"refresh_epoch": 1,
"route_info": "62000",
"status_codes": "*>",
"transfer_pathid": "0x0",
"update_group": 724,
}
},
"paths": "1 available, best #1, table L3VPN-0050",
"table_version": "16933542",
},
},
"route_distinguisher": "5918:50",
}
}
}
}
}
}
}
| 60.936994 | 86 | 0.203084 |
ddf4a85d16651173e5cecae5af1d1cdea3c48b47 | 154 | py | Python | pages/apps.py | slideclimb/django-template | c489c118c47fe3510f7d5a36781d2d8835ef5ba6 | [
"MIT"
] | 2 | 2019-07-11T08:24:20.000Z | 2021-09-16T22:08:42.000Z | pages/apps.py | slideclimb/django-template | c489c118c47fe3510f7d5a36781d2d8835ef5ba6 | [
"MIT"
] | 3 | 2021-06-09T17:17:16.000Z | 2021-06-10T17:25:04.000Z | pages/apps.py | slideclimb/django-template | c489c118c47fe3510f7d5a36781d2d8835ef5ba6 | [
"MIT"
] | 1 | 2018-10-07T15:32:27.000Z | 2018-10-07T15:32:27.000Z | """ This file is here to register apps. """
from django.apps import AppConfig
| 19.25 | 43 | 0.655844 |
ddf5245a022beb682381f18774e07b7d784a9065 | 1,150 | py | Python | Tests/test_protected_views.py | ShavImigne/PlayChess | 1eb0dcaf1d58b02bdc880f8e51857a87696ee461 | [
"MIT"
] | 12 | 2018-05-02T13:13:41.000Z | 2022-03-13T15:37:29.000Z | Tests/test_protected_views.py | ShavImigne/PlayChess | 1eb0dcaf1d58b02bdc880f8e51857a87696ee461 | [
"MIT"
] | 24 | 2018-05-03T13:06:56.000Z | 2019-08-13T14:49:03.000Z | Tests/test_protected_views.py | ShavImigne/PlayChess | 1eb0dcaf1d58b02bdc880f8e51857a87696ee461 | [
"MIT"
] | 17 | 2018-05-03T13:46:40.000Z | 2021-06-26T13:57:05.000Z | from .client import client
from .config import db_pass, db_user
import string
import random
| 41.071429 | 112 | 0.736522 |
ddf773cd5e3b923f5c8398a0b49c16c0818baa89 | 1,575 | py | Python | Tiny-Blockchain/Blockchain.py | gusjasponde/Tiny-Blockchain | e1abe11f70b845cc461afd329c54d5d7d79fd9c8 | [
"MIT"
] | 2 | 2021-04-17T17:36:45.000Z | 2021-06-22T21:48:54.000Z | Tiny-Blockchain/Blockchain.py | gusjasponde/Tiny-Blockchain | e1abe11f70b845cc461afd329c54d5d7d79fd9c8 | [
"MIT"
] | null | null | null | Tiny-Blockchain/Blockchain.py | gusjasponde/Tiny-Blockchain | e1abe11f70b845cc461afd329c54d5d7d79fd9c8 | [
"MIT"
] | 1 | 2017-10-24T00:53:49.000Z | 2017-10-24T00:53:49.000Z | import hashlib
import json
import datetime
import Util
#Defining the block into our blockchain
#Genesis block creator
def create_genesis_block():
    """Create block #0 of the chain.

    The genesis block has index 0, the fixed payload "Genesis block",
    and the placeholder previous-hash "0" since nothing precedes it.
    """
    genesis_timestamp = datetime.datetime.now()
    return Block(0, genesis_timestamp, "Genesis block", "0")
def next_block(last_block):
    """Build the successor of *last_block*.

    The new block's index is one past the previous block's, its payload
    is a JSON document carrying a toy proof-of-work value, and it links
    back to the previous block through that block's hash.
    """
    new_index = last_block.index + 1
    payload = json.dumps({
        "proof-of-work": new_index,
        "transactions": "Initial block",
    })
    return Block(new_index, datetime.datetime.now(), payload, last_block.hash)
# Driver: build a small chain starting from the genesis block.
# NOTE: uses Python 2 print statements, so this script targets Python 2.
blockchain = [create_genesis_block()]
previous_block = blockchain[0]
# Number of blocks to append after the genesis block.
genesis_blocks_qty = 10
# Grow the chain one block at a time, always linking each new block to
# the previous one via next_block().
for i in range(0, genesis_blocks_qty):
    block_to_add = next_block(previous_block)
    blockchain.append(block_to_add)
    previous_block = block_to_add
    print "Block #",block_to_add.index," added"
    print "Hash: ",block_to_add.hash
| 30.288462 | 129 | 0.675556 |
ddf791c1dd4726087d87f8647d381ae32e01c53c | 437 | py | Python | 6. Heap exploitation/exploit_3.py | MBWlodarczyk/bso_project | a4620fb18d7f789d917627232dc85ef9bcad7e3d | [
"MIT"
] | null | null | null | 6. Heap exploitation/exploit_3.py | MBWlodarczyk/bso_project | a4620fb18d7f789d917627232dc85ef9bcad7e3d | [
"MIT"
] | null | null | null | 6. Heap exploitation/exploit_3.py | MBWlodarczyk/bso_project | a4620fb18d7f789d917627232dc85ef9bcad7e3d | [
"MIT"
] | 1 | 2021-05-27T22:04:35.000Z | 2021-05-27T22:04:35.000Z | from pwn import *
# Heap-exploitation proof-of-concept driven through pwntools.
# `malloc`/`free` are presumably menu wrappers for the target binary,
# defined in a part of the script not shown here -- TODO confirm.
p = process("./vuln_3.o")
gdb.attach(p)
# Allocate a batch of small chunks to set up the heap layout.
for i in range(8):
    malloc('a')
malloc('a')
malloc('a')
# Free the chunks; note the loop starts at index -1 (i - 1 with i = 0).
for i in range(9):
    free(i-1)
# free(8); free(9); free(8) double-frees chunk 8 -- looks like the
# classic fastbin/tcache dup pattern (free A, free B, free A);
# confirm against the target's allocator/glibc version.
free(8)
free(9)
free(8)
# Drain the freed chunks again so the duplicated chunk comes back.
for i in range(8):
    malloc('a')
# Write a little-endian 32-bit value (0x0804c048) into the duplicated
# chunk -- presumably overwriting a free-list forward pointer to
# redirect a later allocation; verify the address against the binary.
malloc('\x48\xc0\x04\x08')
malloc('1')
malloc('1')
# Interact with the program's prompt to trigger the hijacked path.
p.sendlineafter('>> ','1 ' +'admin')
p.interactive()
| 11.5 | 40 | 0.567506 |
ddf7f46a03e2f875d36d3fadd6c70b90528b78f0 | 1,202 | py | Python | setup.py | originell/sentry-twilio | 716c444649b38b68f9d6a02986de090bb7e580b9 | [
"MIT"
] | null | null | null | setup.py | originell/sentry-twilio | 716c444649b38b68f9d6a02986de090bb7e580b9 | [
"MIT"
] | 1 | 2017-02-01T16:49:25.000Z | 2017-02-01T16:49:25.000Z | setup.py | originell/sentry-twilio | 716c444649b38b68f9d6a02986de090bb7e580b9 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
"""
sentry-twilio
=============
Sentry Notification plugin for Twilio Programmable SMS.
:copyright: 2017 Luis Nell.
:license: MIT, see LICENSE for more details.
"""
from __future__ import absolute_import
from setuptools import setup, find_packages
VERSION = '1.0'
install_requires = [
'twilio==6.0.0rc10',
]
# Collect all distribution metadata in one mapping first, then hand it
# to setuptools in a single call.  Every value is identical to a plain
# keyword-argument invocation of setup(); only the call shape differs.
_setup_kwargs = {
    'name': 'sentry-twilio',
    'version': VERSION,
    'author': 'Luis Nell',
    'author_email': 'luis.nell@simpleloop.com',
    'url': 'https://github.com/originell/sentry-twilio',
    'description': 'Sentry Notification plugin for Twilio Programmable SMS.',
    # The module docstring doubles as the long description on PyPI.
    'long_description': __doc__,
    'license': 'MIT',
    'package_dir': {'': 'src'},
    'packages': find_packages('src'),
    'zip_safe': False,
    'install_requires': install_requires,
    'include_package_data': True,
    # Sentry discovers the app and the plugin through these entry points.
    'entry_points': {
        'sentry.apps': [
            'twilio = sentry_twilio',
        ],
        'sentry.plugins': [
            'twilio = sentry_twilio.plugin:TwilioPlugin',
        ],
    },
    'classifiers': [
        'Intended Audience :: Developers',
        'Intended Audience :: System Administrators',
        'Operating System :: OS Independent',
        'Topic :: Software Development'
    ],
}
setup(**_setup_kwargs)
| 24.04 | 74 | 0.645591 |
ddf8e32544c5c3cd5719527188568b3163339c08 | 3,146 | py | Python | tests/test_refresh_subscription.py | Fak3/websubsub | f7ca8953197104483b152367c716028d841facbb | [
"MIT"
] | 4 | 2018-03-18T03:44:24.000Z | 2019-08-10T00:40:54.000Z | tests/test_refresh_subscription.py | Fak3/websubsub | f7ca8953197104483b152367c716028d841facbb | [
"MIT"
] | 9 | 2020-02-12T13:38:38.000Z | 2021-06-02T01:03:43.000Z | tests/test_refresh_subscription.py | Fak3/websubsub | f7ca8953197104483b152367c716028d841facbb | [
"MIT"
] | 2 | 2019-08-10T00:41:00.000Z | 2020-02-09T10:09:59.000Z | import re
from datetime import timedelta
import responses
from django.test import override_settings
from django.utils.timezone import now
from model_mommy.mommy import make
from websubsub.models import Subscription
from websubsub.tasks import refresh_subscriptions, retry_failed
from .base import BaseTestCase, method_url_body
| 35.75 | 92 | 0.61602 |
ddf9fcd921f244664cbca84e5f6bac067c77d492 | 281 | py | Python | ares/physics/__init__.py | astrojhgu/ares | 42008c8e4bf79f0b000cc833e02a86510bce7611 | [
"MIT"
] | 1 | 2019-01-04T15:13:18.000Z | 2019-01-04T15:13:18.000Z | ares/physics/__init__.py | astrojhgu/ares | 42008c8e4bf79f0b000cc833e02a86510bce7611 | [
"MIT"
] | null | null | null | ares/physics/__init__.py | astrojhgu/ares | 42008c8e4bf79f0b000cc833e02a86510bce7611 | [
"MIT"
] | null | null | null | import Constants
from .Hydrogen import Hydrogen
from .Cosmology import Cosmology
from .HaloMassFunction import HaloMassFunction
from .RateCoefficients import RateCoefficients
from .SecondaryElectrons import SecondaryElectrons
from .CrossSections import PhotoIonizationCrossSection
| 35.125 | 54 | 0.886121 |
ddfc5813cf287a659f99142896404836acf1a2ad | 9,452 | py | Python | src/agents/base.py | anindex/drqn-study | ab357178bbe6a1e09eda0f19583e8e8444bf4a54 | [
"MIT"
] | null | null | null | src/agents/base.py | anindex/drqn-study | ab357178bbe6a1e09eda0f19583e8e8444bf4a54 | [
"MIT"
] | null | null | null | src/agents/base.py | anindex/drqn-study | ab357178bbe6a1e09eda0f19583e8e8444bf4a54 | [
"MIT"
] | null | null | null | from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import logging
import numpy as np
import random
from collections import deque
import torch
from os.path import exists
from torch.nn.functional import smooth_l1_loss, mse_loss # noqa
from torch.optim import Adam, Adagrad, RMSprop # noqa
from tensorboardX import SummaryWriter
| 44.375587 | 167 | 0.643779 |
ddfd26bd635a43dc9642fa41b92d7f6a9fd3de78 | 1,449 | py | Python | mongo_commander/widgets.py | thieman/mongo_commander | 407cccb1abdf16b3eb07f813f5dd5d9152930f4d | [
"MIT"
] | 2 | 2015-02-22T04:52:22.000Z | 2018-12-14T10:33:25.000Z | mongo_commander/widgets.py | thieman/mongo_commander | 407cccb1abdf16b3eb07f813f5dd5d9152930f4d | [
"MIT"
] | null | null | null | mongo_commander/widgets.py | thieman/mongo_commander | 407cccb1abdf16b3eb07f813f5dd5d9152930f4d | [
"MIT"
] | null | null | null | """Widgets abstract out common View rendering patterns like displaying
a list of logging messages or a bar chart. They typically take the ClusterData
object, a window, and a list of keys they should care about from ClusterData.
They then draw directly onto the window."""
from operator import itemgetter
from .curses_util import movedown, movex
| 39.162162 | 99 | 0.655625 |
ddfe5b6a2bd63f44708eacd4d1f196837c88804e | 958 | py | Python | tests/feature_propagation_test.py | emalgorithm/feature-propagation | de9ec54d5c035abe8d52d6ac4079156cc537e489 | [
"Apache-2.0"
] | 20 | 2022-03-09T00:06:23.000Z | 2022-03-18T09:59:36.000Z | tests/feature_propagation_test.py | emalgorithm/feature-propagation | de9ec54d5c035abe8d52d6ac4079156cc537e489 | [
"Apache-2.0"
] | 2 | 2022-03-14T22:00:58.000Z | 2022-03-21T02:11:50.000Z | tests/feature_propagation_test.py | twitter-research/feature-propagation | af2733589eab4023fca67f7e71a3b46ddbbea8cd | [
"Apache-2.0"
] | 3 | 2022-03-09T05:36:53.000Z | 2022-03-11T13:53:45.000Z | """
Copyright 2020 Twitter, Inc.
SPDX-License-Identifier: Apache-2.0
"""
import unittest
import math
import torch
from feature_propagation import FeaturePropagation
if __name__ == "__main__":
unittest.main()
| 29.9375 | 114 | 0.59499 |
ddfeb380f562b06f02d54e1879c575812aad04dd | 1,282 | py | Python | publication_backbone/views/promotion.py | Excentrics/publication-backbone | 65c9820308b09a6ae1086c265f8d49e36f3724b9 | [
"BSD-3-Clause"
] | 6 | 2016-05-19T14:59:51.000Z | 2020-03-19T10:08:29.000Z | publication_backbone/views/promotion.py | Excentrics/publication-backbone | 65c9820308b09a6ae1086c265f8d49e36f3724b9 | [
"BSD-3-Clause"
] | null | null | null | publication_backbone/views/promotion.py | Excentrics/publication-backbone | 65c9820308b09a6ae1086c265f8d49e36f3724b9 | [
"BSD-3-Clause"
] | null | null | null | # -*- coding: utf-8 -*-
from publication_backbone.views.publication import PublicationListHybridView
from publication_backbone import conf as config
#==============================================================================
# PromotionListHybridView
#============================================================================== | 42.733333 | 132 | 0.620125 |
ddff02eefab9048a47d0d1a8a7f90b6f135ea01d | 2,427 | py | Python | tests/test_file.py | gaiadhi/traDSSAT | 1d5615dbd4965bab5c2740134c706c1748ff5fae | [
"MIT"
] | 6 | 2020-10-05T11:50:37.000Z | 2022-02-24T08:36:22.000Z | tests/test_file.py | gaiadhi/traDSSAT | 1d5615dbd4965bab5c2740134c706c1748ff5fae | [
"MIT"
] | 23 | 2018-11-08T19:16:36.000Z | 2021-07-20T23:34:18.000Z | tests/test_file.py | gaiadhi/traDSSAT | 1d5615dbd4965bab5c2740134c706c1748ff5fae | [
"MIT"
] | 9 | 2018-11-06T21:04:07.000Z | 2021-06-19T05:43:24.000Z | import os
import unittest
import numpy.testing as npt
from tradssat import SoilFile, WTHFile, MTHFile, ExpFile, CULFile, ECOFile, DSSATResults
from tradssat.out import SoilTempOut, SoilNiOut, SummaryOut, PlantGroOut, ETOut, SoilWatOut, MulchOut
from tests.utils import _test_read, _test_write, rsrcs, read_json, get_ref_var
input_classes = [SoilFile, WTHFile, MTHFile, ExpFile, CULFile, ECOFile]
rsrcs_out = os.path.join(rsrcs, 'Out')
output_classes = [PlantGroOut, SoilNiOut, SoilTempOut, SoilWatOut, MulchOut, ETOut]
final_out_classes = [SummaryOut]
# Inputs must be read and written
# Outputs are only read, not written
| 35.173913 | 101 | 0.689328 |
fb00206f76f0396ffc60257de95610a6a4ddebea | 2,840 | py | Python | airflow_spark_k8s/hooks/kubernetes.py | roitvt/airflow-spark-k8s | cd2a0ec63e1fb9ad43beb725a65e4d65a4d85206 | [
"Apache-2.0"
] | 2 | 2020-04-26T11:12:11.000Z | 2020-09-14T16:36:42.000Z | airflow_spark_k8s/hooks/kubernetes.py | roitvt/airflow-spark-k8s | cd2a0ec63e1fb9ad43beb725a65e4d65a4d85206 | [
"Apache-2.0"
] | 1 | 2020-04-14T18:20:20.000Z | 2020-04-14T18:26:27.000Z | airflow_spark_k8s/hooks/kubernetes.py | roitvt/airflow-spark-k8s | cd2a0ec63e1fb9ad43beb725a65e4d65a4d85206 | [
"Apache-2.0"
] | null | null | null | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import tempfile
from kubernetes import client, config
from airflow.hooks.base_hook import BaseHook
| 37.866667 | 109 | 0.679225 |
fb00a41d3f7f3756cedf6911bccc4f0b80b7ea08 | 325 | py | Python | tests/test_main.py | david-kirby/gh-action-docs | b231d14b0b629b4f308eba6cff50a35a98c9f521 | [
"MIT"
] | 1 | 2021-03-21T14:31:46.000Z | 2021-03-21T14:31:46.000Z | tests/test_main.py | david-kirby/gh-action-docs | b231d14b0b629b4f308eba6cff50a35a98c9f521 | [
"MIT"
] | null | null | null | tests/test_main.py | david-kirby/gh-action-docs | b231d14b0b629b4f308eba6cff50a35a98c9f521 | [
"MIT"
] | null | null | null | import logging
import unittest
from src.gh_action_docs import app
logging.disable(logging.CRITICAL)
if __name__ == "__main__":
unittest.main()
| 20.3125 | 57 | 0.747692 |
fb00e00f9183ef8f33d3c9279268db8384609198 | 2,785 | py | Python | astwro/tools/pickstat.py | majkelx/astwro | 4a9bbe3e4757c4076ad7c0d90cf08e38dab4e794 | [
"MIT"
] | 6 | 2017-06-15T20:34:51.000Z | 2020-04-15T14:21:43.000Z | astwro/tools/pickstat.py | majkelx/astwro | 4a9bbe3e4757c4076ad7c0d90cf08e38dab4e794 | [
"MIT"
] | 18 | 2017-08-15T20:53:55.000Z | 2020-10-05T23:40:34.000Z | astwro/tools/pickstat.py | majkelx/astwro | 4a9bbe3e4757c4076ad7c0d90cf08e38dab4e794 | [
"MIT"
] | 2 | 2017-11-06T15:33:53.000Z | 2020-10-02T21:06:05.000Z | #! /usr/bin/env python
# coding=utf-8
from __future__ import print_function, division
from scipy.stats import sigmaclip
from astwro.pydaophot import daophot
from astwro.pydaophot import fname
from astwro.pydaophot import allstar
from astwro.starlist import read_dao_file
from astwro.starlist import write_ds9_regions
import __commons as commons
# TODO: expand this script to (optionally) leave result files - make it allstar runner
# TODO: implement creating ds9 region (why? or remove that option)
# Below: standard skeleton for astwro.tools
if __name__ == '__main__':
__args = __arg_parser().parse_args()
print(__do(__args))
| 34.8125 | 99 | 0.65386 |
fb0391e58115974e49fe694882ae6ffe7af0d172 | 2,330 | py | Python | 5.4/IPheaderChecksum.py | Fecer/Computer-Networks | fe4fc5bd1af7f2957aee407675cb018fa83c6735 | [
"MIT"
] | null | null | null | 5.4/IPheaderChecksum.py | Fecer/Computer-Networks | fe4fc5bd1af7f2957aee407675cb018fa83c6735 | [
"MIT"
] | null | null | null | 5.4/IPheaderChecksum.py | Fecer/Computer-Networks | fe4fc5bd1af7f2957aee407675cb018fa83c6735 | [
"MIT"
] | null | null | null | from xml.dom.minidom import parse
if __name__ == '__main__':
    # Read a 40-hex-character IPv4 header from config.xml, print each
    # header field, then recompute the header checksum (RFC 1071) and
    # print it next to the value stored in the header itself.

    def _dotted_quad(octets):
        # 8 hex chars -> dotted-decimal IPv4 address, e.g. "c0a80a01" -> "192.168.10.1".
        return '.'.join(str(int(octets[i:i + 2], 16)) for i in range(0, 8, 2))

    DOMTree = parse("config.xml")
    collection = DOMTree.documentElement
    header = collection.getElementsByTagName('header')[0].childNodes[0].data
    printLine()
    print('Header:\n', header)
    printLine()
    version = header[:1]
    print('Version:\n', version)
    printLine()
    headerLen = header[1:2]
    print('Header Length:\n', headerLen)
    printLine()
    service = header[2:4]
    print('Differentiated Services Field:\n', service)
    printLine()
    totalLen = int(header[4:8], 16)
    print('Total Length:\n', totalLen)
    printLine()
    ident = int(header[8:12], 16)  # renamed from `id`, which shadowed the builtin
    print('Identification:\n', ident)
    printLine()
    flags = int(header[12:16], 16)
    print('Flags and Offset:\n', flags)
    printLine()
    ttl = int(header[16:18], 16)
    print('Time to live:\n', ttl)
    printLine()
    protocol = int(header[18:20], 16)
    print('Protocol:\n', protocol)
    printLine()
    checksum = int(header[20:24], 16)
    print('Checksum in header:\n', checksum, '(' + header[20:24] + ')')
    printLine()
    print('Source IP:\n', _dotted_quad(header[24:32]))
    printLine()
    print('Destination IP:\n', _dotted_quad(header[32:40]))
    printLine()
    # RFC 1071 checksum: sum every 16-bit word of the header except the
    # checksum field itself (hex offset 20..24), fold the carries back
    # into the low 16 bits, and take the ones' complement.  The previous
    # implementation sliced the hex() string of the sum and assumed it
    # always carried exactly one extra hex digit, which breaks when the
    # sum is below 0x10000 or would need repeated folding.
    total = sum(int(header[i:i + 4], 16) for i in range(0, 40, 4) if i != 20)
    while total > 0xFFFF:
        total = (total >> 16) + (total & 0xFFFF)
    checksum2 = 0xFFFF - total
    print('Checksum by calculated:')
    print('0x%04x' % checksum2)
| 24.526316 | 76 | 0.546781 |
fb04b4d690c1875517b3d05188b4d5a597035143 | 865 | py | Python | server/src/dixit/game/test/player.py | rcjsuen/dixit-online | e7a1c9b5b1a2a92160c6d90397adaa81ddcae91a | [
"MIT"
] | 75 | 2016-08-05T10:37:32.000Z | 2022-01-27T17:05:50.000Z | server/src/dixit/game/test/player.py | rcjsuen/dixit-online | e7a1c9b5b1a2a92160c6d90397adaa81ddcae91a | [
"MIT"
] | 15 | 2020-03-19T16:27:59.000Z | 2022-02-13T08:41:54.000Z | server/src/dixit/game/test/player.py | rcjsuen/dixit-online | e7a1c9b5b1a2a92160c6d90397adaa81ddcae91a | [
"MIT"
] | 27 | 2018-02-27T13:32:46.000Z | 2021-12-26T06:42:08.000Z |
from django.test import TestCase
from django.contrib.auth.models import User
from dixit import settings
from dixit.game.models.game import Game, GameStatus
from dixit.game.models.player import Player
| 36.041667 | 101 | 0.719075 |
fb065d69e3fc3e1cb562662179076e51434cc538 | 7,933 | py | Python | portality/settings.py | genonfire/portality | 1d94382fa0e6685f106ae18d4a44f4f4b5946771 | [
"MIT"
] | 2 | 2018-01-16T04:01:57.000Z | 2020-04-06T14:07:11.000Z | portality/settings.py | genonfire/portality | 1d94382fa0e6685f106ae18d4a44f4f4b5946771 | [
"MIT"
] | null | null | null | portality/settings.py | genonfire/portality | 1d94382fa0e6685f106ae18d4a44f4f4b5946771 | [
"MIT"
] | 1 | 2017-12-31T02:45:38.000Z | 2017-12-31T02:45:38.000Z | # -*- coding: utf-8 -*-
"""
Django settings for portality project.
Generated by 'django-admin startproject' using Django 1.8.17.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.8/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
from collections import namedtuple
import json
import os
# Repository root: two directory levels up from this settings module.
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Active template theme; templates are looked up under templates/<THEME>/.
THEME = 'haru'
TEMPLATES_DIR = os.path.join(BASE_DIR, 'templates')
THEME_DIR = os.path.join(BASE_DIR, 'templates', THEME)
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.8/howto/deployment/checklist/
# SECURITY WARNING: don't run with debug turned on in production!
# DEBUG is off by default and only enabled when the DJANGO_DEBUG
# environment variable is set to the exact string 'Debug'.
DEBUG = False
if 'DJANGO_DEBUG' in os.environ:
    if (os.environ['DJANGO_DEBUG'] == 'Debug'):
        DEBUG = True
ALLOWED_HOSTS = ['nolooknews.com', 'gencode.me', 'localhost']
# Application definition
DJANGO_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
)
THIRD_PARTY_APPS = (
'rest_framework',
'graphos',
# 'allauth',
# 'allauth.account',
# 'allauth.socialaccount',
# 'allauth.socialaccount.providers.naver',
)
EDITOR_APPS = (
)
LOCAL_APPS = (
'core',
'giza',
'issue',
'accounts',
)
INSTALLED_APPS = DJANGO_APPS + THIRD_PARTY_APPS + EDITOR_APPS + LOCAL_APPS
# SITE_ID = 1
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'django.middleware.security.SecurityMiddleware',
)
ROOT_URLCONF = 'portality.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [
os.path.join(THEME_DIR),
os.path.join(TEMPLATES_DIR),
],
'OPTIONS': {
'debug': DEBUG,
'loaders': [
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
],
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
'core.context_processors.global_settings',
# 'django.template.context_processors.request',
],
},
},
]
WSGI_APPLICATION = 'portality.wsgi.application'
# Deployment secrets (signing key, DB and e-mail credentials) live in an
# untracked secrets.json at the repository root.  When the file is
# missing (e.g. a fresh development checkout) fall back to placeholder
# values so the project can still start.
try:
    with open(os.path.join(BASE_DIR, "secrets.json")) as f:
        data = json.loads(f.read())
    # NOTE: the previous namedtuple(..., verbose=False) call breaks on
    # Python >= 3.7, where the `verbose` parameter was removed; the
    # default has always been False, so dropping it is equivalent.
    SecretsNamedTuple = namedtuple('SecretsNamedTuple', data.keys())
    secrets = SecretsNamedTuple(*[data[x] for x in data.keys()])
    SECRET_KEY = getattr(secrets, "SECRET_KEY")
    DB_NAME = getattr(secrets, "DB_NAME")
    DB_USER = getattr(secrets, "DB_USER")
    DB_PASSWORD = getattr(secrets, "DB_PASSWORD")
    EMAIL_HOST = getattr(secrets, "EMAIL_HOST")
    EMAIL_HOST_USER = getattr(secrets, "EMAIL_HOST_USER")
    EMAIL_HOST_PASSWORD = getattr(secrets, "EMAIL_HOST_PASSWORD")
    DEFAUL_FROM_EMAIL = getattr(secrets, "DEFAUL_FROM_EMAIL")
    SERVER_EMAIL = getattr(secrets, "SERVER_EMAIL")
except IOError:
    # Development fallback: keep every setting defined so later code
    # (and Django itself) never hits a NameError.
    SECRET_KEY = 'k8n13h0y@$=v$uxg*^brlv9$#hm8w7nye6km!shc*&bkgkcd*p'
    DB_NAME = ''
    DB_USER = ''
    DB_PASSWORD = ''
    EMAIL_HOST = ''  # was missing from the fallback branch; now defined
    EMAIL_HOST_USER = ''
    EMAIL_HOST_PASSWORD = ''
    DEFAUL_FROM_EMAIL = ''
    SERVER_EMAIL = ''
# Django reads DEFAULT_FROM_EMAIL (note the T); the secrets key is
# spelled DEFAUL_FROM_EMAIL, so mirror it under the name Django expects
# while keeping the old name for backward compatibility.
DEFAULT_FROM_EMAIL = DEFAUL_FROM_EMAIL
EMAIL_PORT = 587
EMAIL_USE_TLS = True
EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend'
# Database
# https://docs.djangoproject.com/en/1.8/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': DB_NAME,
'USER': DB_USER,
'PASSWORD': DB_PASSWORD,
'HOST': 'localhost',
'PORT': '',
}
}
REST_FRAMEWORK = {
# Use Django's standard `django.contrib.auth` permissions,
# or allow read-only access for unauthenticated users.
'DEFAULT_PERMISSION_CLASSES': [
'rest_framework.permissions.DjangoModelPermissionsOrAnonReadOnly'
]
}
AUTHENTICATION_BACKENDS = (
'django.contrib.auth.backends.ModelBackend',
# 'allauth.account.auth_backends.AuthenticationBackend'
)
# Internationalization
# https://docs.djangoproject.com/en/1.8/topics/i18n/
LANGUAGE_CODE = 'ko-KR'
TIME_ZONE = 'Asia/Seoul'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.8/howto/static-files/
STATIC_URL = '/assets/'
STATICFILES_DIRS = (
os.path.join(BASE_DIR, 'static'),
)
MEDIA_URL = '/upload/'
MEDIA_ROOT = os.path.join(BASE_DIR, 'upload')
LOGIN_REDIRECT_URL = 'login'
# Setting for ranking
RANKING_START_YEAR = 2017
RANKING_START_MONTH = 5
RANKING_DATE_DELTA = 7 # ranking window length -- presumably days; confirm in the ranking view
RANKING_LIST_LIMIT = 10 # maximum number of entries shown on the ranking list
# Setting for today
HOTISSUE_LIMIT = 20 # maximum number of hot issues shown
HOTISSUE_DATE_DELTA = 7 # hot-issue window length -- presumably days; confirm
# Setting for Burst Call
POINT_MAX = 50 # point max
BURST_CALL_MIN_POINT = 10 # minimum points required for a burst call -- TODO confirm
BURST_CALL_ACOM = 1 # acom for burster call
# Setting for best
BEST_LIST_LIMIT = 20 # maximum number of entries on the "best" list
BEST_THRESHOLD = 20 # score required to qualify as "best" -- TODO confirm
# Setting for issue
FILTER_DATE_DELTA = 7 # issue filter window length -- presumably days; confirm
MEDIA_CHOICE = ( #
('', ''),
('', ''),
('', ''),
('', ''),
('', ''),
('', ''),
('', ''),
('KBS', 'KBS'),
('MBC', 'MBC'),
('SBS', 'SBS'),
('TV', 'TV'),
('A', 'A'),
('JTBC', 'JTBC'),
('MBN', 'MBN'),
('YTN', 'YTN'),
('', ''),
('', ''),
('1', '1'),
('', ''),
('', ''),
('CBS', 'CBS'),
('NewBC', 'NewBC'),
('', ''),
('', ''),
('', ''),
('', ''),
('', ''),
('', ''),
('', ''),
('', ''),
('', ''),
('', ''),
('', ''),
('', ''),
('21', '21'),
('', ''),
('', ''),
('', ''),
('', ''),
('', ''),
('iN', 'iN'),
('', ''),
('', ''),
('', ''),
('', ''),
('', ''),
('', ''),
('', ''),
('', ''),
('', ''),
('', ''),
('ZDNet', 'ZDNet'),
('', ''),
('', ''),
('', ''),
('', ''),
('', ''),
('', ''),
('', ''),
('', ''),
)
GIZA_IMAGE_SIZE_LIMIT = 100 * 1024 #
ABOUT_LINK = "/assets/html/howto_newissue.html"
LOGO_NAME = "/assets/images/nolooknews.png"
NEWBC_LINK = "http://newbc.kr/bbs/board.php?bo_table=nolook"
NEWBC_IMG = "/assets/images/newbc.png"
NEWBC_IMG_SMALL = "/assets/images/newbc-small.png"
# Admin information
ADMIN_EMAIL = 'gencode.me@gmail.com'
ADMIN_TWITTER = 'https://twitter.com/nolooknews'
FOOTER_TAGS = '<li><a href="%s"> </a></li>\
<li> : %s</li>\
<li>, </li>\
<li><a href="mailto:%s"><img src="/assets/icons/email24.png"></a></li>\
<li><a href="%s"><img src="/assets/icons/twitter24.png" target="_blank"></a></li>'\
% (ABOUT_LINK, THEME, ADMIN_EMAIL, ADMIN_TWITTER)
| 27.167808 | 83 | 0.63343 |
fb07a87646537aa7d24a8747c95b7298fca053cd | 98 | py | Python | backup_codes/apps.py | biligunb/portal-web-app | cd10cda3d8bfc8f055a268e14f9665f142051c90 | [
"MIT"
] | null | null | null | backup_codes/apps.py | biligunb/portal-web-app | cd10cda3d8bfc8f055a268e14f9665f142051c90 | [
"MIT"
] | null | null | null | backup_codes/apps.py | biligunb/portal-web-app | cd10cda3d8bfc8f055a268e14f9665f142051c90 | [
"MIT"
] | null | null | null | from django.apps import AppConfig
| 16.333333 | 35 | 0.77551 |
fb07d1f256a2f6d7a6cc9dbdf801ef7f4558d52a | 323 | py | Python | TP_ALGO_3/convert.py | PierreLeGuen/ALGO_S5 | 9067e887d14fe997c6944292a0cff23ceda47b6e | [
"MIT"
] | null | null | null | TP_ALGO_3/convert.py | PierreLeGuen/ALGO_S5 | 9067e887d14fe997c6944292a0cff23ceda47b6e | [
"MIT"
] | null | null | null | TP_ALGO_3/convert.py | PierreLeGuen/ALGO_S5 | 9067e887d14fe997c6944292a0cff23ceda47b6e | [
"MIT"
] | null | null | null |
print(convert(10,2))
print(convert_inv(10,2))
| 16.15 | 49 | 0.544892 |
fb083e9034d5ab0e5ac6315a7e5ffb3f614cc66e | 1,029 | py | Python | tests/motors/spikebase_tank1.py | cschlack/pybricks-micropython | 0abfd2918267a4e6e7a04062976ac1bb3da1f4b1 | [
"MIT"
] | 1 | 2021-12-27T00:09:37.000Z | 2021-12-27T00:09:37.000Z | tests/motors/spikebase_tank1.py | cschlack/pybricks-micropython | 0abfd2918267a4e6e7a04062976ac1bb3da1f4b1 | [
"MIT"
] | null | null | null | tests/motors/spikebase_tank1.py | cschlack/pybricks-micropython | 0abfd2918267a4e6e7a04062976ac1bb3da1f4b1 | [
"MIT"
] | null | null | null | from pybricks.pupdevices import Motor
from pybricks.tools import wait
from pybricks.parameters import Port, Direction
from pybricks.robotics import SpikeBase
from pybricks import version
# Hardware-in-the-loop test script for a Pybricks SpikeBase drive base:
# performs one tank turn while logging motor and controller signals,
# then dumps the logs to text files for offline analysis.
print(version)
# Initialize base.
left_motor = Motor(Port.C)
right_motor = Motor(Port.D)
spike_base = SpikeBase(left_motor, right_motor)
# Allocate logs for motors and controller signals.
DURATION = 6000  # log capacity -- presumably samples or ms; confirm against pybricks docs
left_motor.log.start(DURATION)
right_motor.log.start(DURATION)
spike_base.distance_control.log.start(DURATION)
spike_base.heading_control.log.start(DURATION)
# Turn in place, almost (speeds are deliberately not exact mirrors: 250 vs -247).
spike_base.tank_move_for_degrees(speed_left=250, speed_right=-247, angle=182)
# Wait so we can also log hold capability, then turn off the motor completely.
wait(100)
spike_base.stop()
# Transfer data logs.
print("Transferring data...")
left_motor.log.save("servo_left.txt")
right_motor.log.save("servo_right.txt")
spike_base.distance_control.log.save("control_distance.txt")
spike_base.heading_control.log.save("control_heading.txt")
print("Done")
| 29.4 | 78 | 0.808552 |