hexsha stringlengths 40 40 | size int64 5 2.06M | ext stringclasses 11 values | lang stringclasses 1 value | max_stars_repo_path stringlengths 3 251 | max_stars_repo_name stringlengths 4 130 | max_stars_repo_head_hexsha stringlengths 40 78 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 3 251 | max_issues_repo_name stringlengths 4 130 | max_issues_repo_head_hexsha stringlengths 40 78 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 116k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 3 251 | max_forks_repo_name stringlengths 4 130 | max_forks_repo_head_hexsha stringlengths 40 78 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 1 1.05M | avg_line_length float64 1 1.02M | max_line_length int64 3 1.04M | alphanum_fraction float64 0 1 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
2f7b986c2b053cb63e12ef06cb1f0c6623d1ab5a | 4,043 | py | Python | Generator.py | pawelmakarov/ORM | 1a17599b31ce6d73b08c8fa424e0a4201abfb3d3 | [
"MIT"
] | null | null | null | Generator.py | pawelmakarov/ORM | 1a17599b31ce6d73b08c8fa424e0a4201abfb3d3 | [
"MIT"
] | null | null | null | Generator.py | pawelmakarov/ORM | 1a17599b31ce6d73b08c8fa424e0a4201abfb3d3 | [
"MIT"
] | null | null | null |
if __name__ == '__main__':
Generator().create_statements('many_to_many.yaml', 'schema.sql')
| 43.010638 | 108 | 0.594855 |
2f7d4b66c546d9cc934e15934759dad2ba2d7078 | 1,967 | py | Python | src/zope/error/interfaces.py | zopefoundation/zope.error | 8dc7b77a60388bdfb5b2a4606ed8b400db06d3ef | [
"ZPL-2.1"
] | null | null | null | src/zope/error/interfaces.py | zopefoundation/zope.error | 8dc7b77a60388bdfb5b2a4606ed8b400db06d3ef | [
"ZPL-2.1"
] | 9 | 2016-03-24T07:48:35.000Z | 2018-10-19T13:28:04.000Z | src/zope/error/interfaces.py | zopefoundation/zope.error | 8dc7b77a60388bdfb5b2a4606ed8b400db06d3ef | [
"ZPL-2.1"
] | 1 | 2015-04-03T08:42:41.000Z | 2015-04-03T08:42:41.000Z | ##############################################################################
#
# Copyright (c) 2001, 2002 Zope Foundation and Contributors.
# All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
##############################################################################
"""Error Reporting Utility interfaces
"""
__docformat__ = 'restructuredtext'
from zope.interface import Interface
| 32.783333 | 79 | 0.627351 |
2f7fed0d5af3b96a037248f18b9e9cf73c47b30c | 7,847 | py | Python | python/sacconfig.py | Cadair/VivaTalk | d9bd5b0c0fca7756c07cbe1adb2f809cd00e9dae | [
"MIT"
] | null | null | null | python/sacconfig.py | Cadair/VivaTalk | d9bd5b0c0fca7756c07cbe1adb2f809cd00e9dae | [
"MIT"
] | null | null | null | python/sacconfig.py | Cadair/VivaTalk | d9bd5b0c0fca7756c07cbe1adb2f809cd00e9dae | [
"MIT"
] | null | null | null | import os
import ConfigParser
mu0 = 1.25663706e-6
| 29.836502 | 79 | 0.521601 |
2f80a6cd8804248e492bd75be4cdf855bd46b3e3 | 1,606 | py | Python | location/models.py | swallville/driverBackEnd | 3599e5a2e58304e08502b10a3856b77a05c7fd16 | [
"MIT"
] | null | null | null | location/models.py | swallville/driverBackEnd | 3599e5a2e58304e08502b10a3856b77a05c7fd16 | [
"MIT"
] | 3 | 2021-03-30T12:53:49.000Z | 2021-09-22T18:44:52.000Z | location/models.py | swallville/driverBackEnd | 3599e5a2e58304e08502b10a3856b77a05c7fd16 | [
"MIT"
] | null | null | null | from django.contrib.gis.db import models
from django.db.models import Q
from django.core.exceptions import ValidationError
from django.utils.translation import gettext_lazy as _
# Create your models here.
| 30.301887 | 105 | 0.619552 |
2f80bcfa95cb3fbe5e797d073a38bc12a4dae7f4 | 842 | py | Python | sitemap_urls_auditor/logger/main_logger.py | alena-kono/sitemap-urls-auditor | b9f1651c48fd8e4131eca8ee44122ffa54a4576e | [
"MIT"
] | null | null | null | sitemap_urls_auditor/logger/main_logger.py | alena-kono/sitemap-urls-auditor | b9f1651c48fd8e4131eca8ee44122ffa54a4576e | [
"MIT"
] | null | null | null | sitemap_urls_auditor/logger/main_logger.py | alena-kono/sitemap-urls-auditor | b9f1651c48fd8e4131eca8ee44122ffa54a4576e | [
"MIT"
] | null | null | null | """Module configures project's main logger."""
import logging
from loguru import logger
def disable_usp_logging() -> None:
    """Silence every logger registered by ultimate-sitemap-parser (usp).

    Importing anything from usp's core submodules makes the package
    register default ``logging.Logger`` instances, so one such import
    is triggered first and then every registered usp logger is disabled.
    """
    from usp.tree import sitemap_tree_for_homepage  # noqa: F401, WPS433

    registered = logging.root.manager.loggerDict
    for name, candidate in registered.items():
        is_usp_logger = name.startswith('usp') and isinstance(candidate, logging.Logger)
        if is_usp_logger:
            candidate.disabled = True
def get_loguru_logger():
    """Return the shared loguru logger used as the project's main logger.

    Returns:
        The module-wide loguru ``Logger`` instance.
    """
    return logger
main_logger = get_loguru_logger()
| 24.764706 | 78 | 0.709026 |
2f8142cd627ecd115f6acdab00511ac3d94dfb10 | 14,213 | py | Python | matroska_cache/dep/scopes.py | kolypto/py-matroska-cache | b40030f97d463aac8e3a6f4b0e0e9f081dfc92b1 | [
"MIT"
] | null | null | null | matroska_cache/dep/scopes.py | kolypto/py-matroska-cache | b40030f97d463aac8e3a6f4b0e0e9f081dfc92b1 | [
"MIT"
] | null | null | null | matroska_cache/dep/scopes.py | kolypto/py-matroska-cache | b40030f97d463aac8e3a6f4b0e0e9f081dfc92b1 | [
"MIT"
] | null | null | null | from __future__ import annotations
import warnings
from typing import Any, List, Callable, Tuple, Union, Collection, FrozenSet, Optional, Iterable, Set
from .base import DependencyBase, dataclass
from .tag import Tag
ExtractorFunc = Callable[[Any], Optional[dict]]
class InvalidateAll(Tag):
""" A custom tag, used in production, to invalidate all scopes in cases when Scopes is misconfigured """
# Use the same prefix. Not important; just looks nice
# There will be no clashes because all `ConditionalDependency` have "&" in their names
PREFIX = ConditionalDependency.PREFIX
| 42.810241 | 139 | 0.642651 |
2f8296613ef32d75696e51924c20cb22faf6bba2 | 812 | py | Python | dataporten/middleware.py | frafra/django-dataporten | 4236017611e08d08bd810be0beae1b994cb5fc67 | [
"MIT"
] | 4 | 2019-01-06T17:56:07.000Z | 2021-03-21T19:16:35.000Z | dataporten/middleware.py | frafra/django-dataporten | 4236017611e08d08bd810be0beae1b994cb5fc67 | [
"MIT"
] | 9 | 2019-10-21T17:23:53.000Z | 2021-06-10T21:06:25.000Z | dataporten/middleware.py | frafra/django-dataporten | 4236017611e08d08bd810be0beae1b994cb5fc67 | [
"MIT"
] | 2 | 2019-04-29T11:48:59.000Z | 2020-01-06T09:54:55.000Z | import logging
import requests
import requests_cache
from django.conf import settings
from django.http import HttpResponse, HttpRequest
try:
from django.utils.deprecation import MiddlewareMixin
except ImportError:
MiddlewareMixin = object
from .api import usergroups
from .models import DataportenUser
# Cache requests for 15 minutes
if settings.DATAPORTEN_CACHE_REQUESTS:
requests_cache.install_cache(
settings.DATAPORTEN_CACHE_PATH + 'dataporten_cache',
backend='sqlite',
expire_after=900,
allowable_codes=(200,),
include_get_headers=True,
)
| 26.193548 | 60 | 0.758621 |
2f853129c44d31a1158c0bd481a49cd736cdcaa4 | 7,326 | py | Python | sm4.py | cclauss/Pythonista-sm | ef5c6527f36334a2b4dc3f0a92f957161aa3bdd3 | [
"Apache-2.0"
] | 3 | 2021-08-23T02:49:09.000Z | 2021-08-24T01:48:14.000Z | sm4.py | cclauss/Pythonista-sm | ef5c6527f36334a2b4dc3f0a92f957161aa3bdd3 | [
"Apache-2.0"
] | null | null | null | sm4.py | cclauss/Pythonista-sm | ef5c6527f36334a2b4dc3f0a92f957161aa3bdd3 | [
"Apache-2.0"
] | 1 | 2021-08-23T03:02:39.000Z | 2021-08-23T03:02:39.000Z | # -*-coding:utf-8-*-
import base64
import copy
from .func import xor, rotl, get_uint32_be, put_uint32_be, bytes_to_list, list_to_bytes, padding, un_padding
BOXES_TABLE = [
0xd6,
0x90,
0xe9,
0xfe,
0xcc,
0xe1,
0x3d,
0xb7,
0x16,
0xb6,
0x14,
0xc2,
0x28,
0xfb,
0x2c,
0x05,
0x2b,
0x67,
0x9a,
0x76,
0x2a,
0xbe,
0x04,
0xc3,
0xaa,
0x44,
0x13,
0x26,
0x49,
0x86,
0x06,
0x99,
0x9c,
0x42,
0x50,
0xf4,
0x91,
0xef,
0x98,
0x7a,
0x33,
0x54,
0x0b,
0x43,
0xed,
0xcf,
0xac,
0x62,
0xe4,
0xb3,
0x1c,
0xa9,
0xc9,
0x08,
0xe8,
0x95,
0x80,
0xdf,
0x94,
0xfa,
0x75,
0x8f,
0x3f,
0xa6,
0x47,
0x07,
0xa7,
0xfc,
0xf3,
0x73,
0x17,
0xba,
0x83,
0x59,
0x3c,
0x19,
0xe6,
0x85,
0x4f,
0xa8,
0x68,
0x6b,
0x81,
0xb2,
0x71,
0x64,
0xda,
0x8b,
0xf8,
0xeb,
0x0f,
0x4b,
0x70,
0x56,
0x9d,
0x35,
0x1e,
0x24,
0x0e,
0x5e,
0x63,
0x58,
0xd1,
0xa2,
0x25,
0x22,
0x7c,
0x3b,
0x01,
0x21,
0x78,
0x87,
0xd4,
0x00,
0x46,
0x57,
0x9f,
0xd3,
0x27,
0x52,
0x4c,
0x36,
0x02,
0xe7,
0xa0,
0xc4,
0xc8,
0x9e,
0xea,
0xbf,
0x8a,
0xd2,
0x40,
0xc7,
0x38,
0xb5,
0xa3,
0xf7,
0xf2,
0xce,
0xf9,
0x61,
0x15,
0xa1,
0xe0,
0xae,
0x5d,
0xa4,
0x9b,
0x34,
0x1a,
0x55,
0xad,
0x93,
0x32,
0x30,
0xf5,
0x8c,
0xb1,
0xe3,
0x1d,
0xf6,
0xe2,
0x2e,
0x82,
0x66,
0xca,
0x60,
0xc0,
0x29,
0x23,
0xab,
0x0d,
0x53,
0x4e,
0x6f,
0xd5,
0xdb,
0x37,
0x45,
0xde,
0xfd,
0x8e,
0x2f,
0x03,
0xff,
0x6a,
0x72,
0x6d,
0x6c,
0x5b,
0x51,
0x8d,
0x1b,
0xaf,
0x92,
0xbb,
0xdd,
0xbc,
0x7f,
0x11,
0xd9,
0x5c,
0x41,
0x1f,
0x10,
0x5a,
0xd8,
0x0a,
0xc1,
0x31,
0x88,
0xa5,
0xcd,
0x7b,
0xbd,
0x2d,
0x74,
0xd0,
0x12,
0xb8,
0xe5,
0xb4,
0xb0,
0x89,
0x69,
0x97,
0x4a,
0x0c,
0x96,
0x77,
0x7e,
0x65,
0xb9,
0xf1,
0x09,
0xc5,
0x6e,
0xc6,
0x84,
0x18,
0xf0,
0x7d,
0xec,
0x3a,
0xdc,
0x4d,
0x20,
0x79,
0xee,
0x5f,
0x3e,
0xd7,
0xcb,
0x39,
0x48,
]
#
FK = [0xa3b1bac6, 0x56aa3350, 0x677d9197, 0xb27022dc]
#
CK = [
0x00070e15, 0x1c232a31, 0x383f464d, 0x545b6269, 0x70777e85, 0x8c939aa1,
0xa8afb6bd, 0xc4cbd2d9, 0xe0e7eef5, 0xfc030a11, 0x181f262d, 0x343b4249,
0x50575e65, 0x6c737a81, 0x888f969d, 0xa4abb2b9, 0xc0c7ced5, 0xdce3eaf1,
0xf8ff060d, 0x141b2229, 0x30373e45, 0x4c535a61, 0x686f767d, 0x848b9299,
0xa0a7aeb5, 0xbcc3cad1, 0xd8dfe6ed, 0xf4fb0209, 0x10171e25, 0x2c333a41,
0x484f565d, 0x646b7279
]
ENCRYPT = 0
DECRYPT = 1
SM4_KEY = b'ED0Z2TCK2JN9SGV2'
SM4_IV = b'GM6PR0EL5TT4YUT6'
#
def sm4_encrypt(value: str) -> str:
    """Encrypt *value* with SM4 in CBC mode and return base64 text."""
    cipher = Crypt()
    cipher.set_key(SM4_KEY, ENCRYPT)
    plaintext = bytearray(value.encode('utf-8', 'ignore'))
    ciphertext = cipher.crypt_cbc(SM4_IV, plaintext)
    return base64.b64encode(ciphertext).decode('utf-8', 'ignore')
def sm4_decrypt(value: str) -> str:
    """Decrypt a base64-encoded SM4-CBC ciphertext back to text."""
    cipher = Crypt()
    cipher.set_key(SM4_KEY, DECRYPT)
    ciphertext = base64.b64decode(value)
    plaintext = cipher.crypt_cbc(SM4_IV, ciphertext)
    return plaintext.decode('utf-8', 'ignore')
#
| 15.822894 | 108 | 0.632678 |
2f86b378e72ad44c8909918ac3d29f4b3f63ef71 | 617 | py | Python | question_bank/unique-paths/unique-paths.py | yatengLG/leetcode-python | 5d48aecb578c86d69835368fad3d9cc21961c226 | [
"Apache-2.0"
] | 9 | 2020-08-12T10:01:00.000Z | 2022-01-05T04:37:48.000Z | question_bank/unique-paths/unique-paths.py | yatengLG/leetcode-python | 5d48aecb578c86d69835368fad3d9cc21961c226 | [
"Apache-2.0"
] | 1 | 2021-02-16T10:19:31.000Z | 2021-02-16T10:19:31.000Z | question_bank/unique-paths/unique-paths.py | yatengLG/leetcode-python | 5d48aecb578c86d69835368fad3d9cc21961c226 | [
"Apache-2.0"
] | 4 | 2020-08-12T10:13:31.000Z | 2021-11-05T01:26:58.000Z | # -*- coding: utf-8 -*-
# @Author : LG
"""
40 ms, Python3 74.47%
13.8 MB, Python3 7.95%
+
dp[i][j] = dp[i-1][j] + dp[i][j-1]
1 1 1 1 1 1
1 2 3 4 5 6
1 3 6 10 15 21
1 4 10 20 35 56
"""
| 22.851852 | 54 | 0.458671 |
2f8851b9c216915fb1f4051cf734644949f0036e | 1,207 | py | Python | crusoe_observe/ansible/roles/mlData/files/build-ml.py | CSIRT-MU/CRUSOE | 73e4ac0ced6c3ac46d24ac5c3feb01a1e88bd36b | [
"MIT"
] | 3 | 2021-11-09T09:55:17.000Z | 2022-02-19T02:58:27.000Z | crusoe_observe/ansible/roles/mlData/files/build-ml.py | CSIRT-MU/CRUSOE | 73e4ac0ced6c3ac46d24ac5c3feb01a1e88bd36b | [
"MIT"
] | null | null | null | crusoe_observe/ansible/roles/mlData/files/build-ml.py | CSIRT-MU/CRUSOE | 73e4ac0ced6c3ac46d24ac5c3feb01a1e88bd36b | [
"MIT"
] | null | null | null | import sys
import structlog
from osrest import Tcpml
import services_component
if __name__ == "__main__":
main()
| 30.948718 | 85 | 0.697597 |
2f88bf2c5f6df294804820f97f5c583aad4fe844 | 129 | py | Python | ted_lm/to_run/evaluate.py | mvdwerve/NLP-Classifier | 5494a789c9b87476ec4253e00e3349462f08d3d2 | [
"MIT"
] | null | null | null | ted_lm/to_run/evaluate.py | mvdwerve/NLP-Classifier | 5494a789c9b87476ec4253e00e3349462f08d3d2 | [
"MIT"
] | null | null | null | ted_lm/to_run/evaluate.py | mvdwerve/NLP-Classifier | 5494a789c9b87476ec4253e00e3349462f08d3d2 | [
"MIT"
] | null | null | null | version https://git-lfs.github.com/spec/v1
oid sha256:b28da294230f24c172729139b1988b8008f6fb2c259b1c3425772b2c80cfb9dd
size 2688
| 32.25 | 75 | 0.883721 |
2f8911a3ffc8a10cc46f6545eeb625b8d7a7c1f6 | 4,049 | py | Python | converter.py | TheSpiritXIII/Qt-Creator-TmTheme | 3eba37c3712da9964e775a750732b6fda7cb6536 | [
"Apache-2.0"
] | 1 | 2022-01-02T19:55:18.000Z | 2022-01-02T19:55:18.000Z | converter.py | TheSpiritXIII/Qt-Creator-TmTheme | 3eba37c3712da9964e775a750732b6fda7cb6536 | [
"Apache-2.0"
] | null | null | null | converter.py | TheSpiritXIII/Qt-Creator-TmTheme | 3eba37c3712da9964e775a750732b6fda7cb6536 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python3
import sys
import xml.etree.ElementTree as ET
main()
| 29.992593 | 97 | 0.667078 |
2f8acf77c5feae7c80644d1d8292864bc245ea00 | 1,768 | py | Python | panamsquad/urls.py | the-mandarine/mypanamsquad | b34c1c6169a3b7496e171b9536472a1ede0bdc84 | [
"Beerware"
] | null | null | null | panamsquad/urls.py | the-mandarine/mypanamsquad | b34c1c6169a3b7496e171b9536472a1ede0bdc84 | [
"Beerware"
] | null | null | null | panamsquad/urls.py | the-mandarine/mypanamsquad | b34c1c6169a3b7496e171b9536472a1ede0bdc84 | [
"Beerware"
] | null | null | null | """panamsquad URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.11/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url, include
from django.contrib import admin
from django.contrib.auth import views as auth_views
from core import views as core_views
from votes import urls as votes_urls
from opinions import urls as opinions_urls
from docs import urls as docs_urls
from infos import urls as infos_urls
from derby import urls as derby_urls
from events import urls as events_urls
# Title shown at the top of the Django admin site.
admin.site.site_header = 'Panam Squad Administration'
# Project-level routing table: core pages, built-in auth, OAuth via
# social_django, the admin, and one include per feature app.
urlpatterns = [
    url(r'^$', core_views.home, name='home'),
    url(r'^privacy/$', core_views.privacy, name='privacy'),
    url(r'^accounts/', include('django.contrib.auth.urls')),
    # url(r'^login/$', auth_views.LoginView, name='login'),
    # url(r'^logout/$', auth_views.LogoutView, name='logout'),
    url(r'^oauth/', include('social_django.urls', namespace='social')),
    url(r'^admin/', admin.site.urls),
    url(r'^votes/', include(votes_urls)),
    url(r'^opinions/', include(opinions_urls)),
    url(r'^docs/', include(docs_urls)),
    url(r'^profile/', include(infos_urls)),
    url(r'^derby/', include(derby_urls)),
    url(r'^events/', include(events_urls)),
]
| 40.181818 | 79 | 0.70871 |
2f8c9b6939590e2c0b5c1ffc1236673c73cfa389 | 1,015 | py | Python | setup.py | yamahigashi/sphinx-git-lowdown | 3df8ba2bb44c987f9510d45cd31198cfc5249f14 | [
"Apache-2.0"
] | null | null | null | setup.py | yamahigashi/sphinx-git-lowdown | 3df8ba2bb44c987f9510d45cd31198cfc5249f14 | [
"Apache-2.0"
] | null | null | null | setup.py | yamahigashi/sphinx-git-lowdown | 3df8ba2bb44c987f9510d45cd31198cfc5249f14 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
from setuptools import setup
# Packaging metadata for the sphinx-git-lowdown Sphinx extension.
setup(
    name='sphinx-git-lowdown',
    version='0.0.1',
    url='https://github.com/yamahigashi/sphinx-git-lowdown',
    # download_url='http://pypi.python.org/pypi/sphinx-git-lowdown',
    license='Apache',
    author='yamahigashi',
    author_email='yamahigashi@gmail.com',
    description='Sphinx extension to wrap git changelog',
    long_description="",
    zip_safe=True,
    classifiers=[
        'Development Status :: 3 - Alpha',
        'License :: OSI Approved :: Apache Software License',
        'Operating System :: OS Independent',
        'Programming Language :: Python',
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 3',
        'Topic :: Documentation',
        'Topic :: Utilities',
    ],
    platforms='any',
    include_package_data=True,
    install_requires=['Sphinx>=1.1', 'GitPython', 'lowdown'],
    # namespace_packages=['sphinx_git_lowdown'],
    packages=['sphinx_git_lowdown'],
)
| 30.757576 | 68 | 0.634483 |
2f8e310bf9e77d36d1ba6cf080e2e966d1ebdb66 | 63 | py | Python | gira_homeserver_api/devices/value_device.py | leoyn/gira-homeserver-api | 7d642413a56078f694518d9189b4b7cc9776482d | [
"MIT"
] | 5 | 2020-03-17T12:45:50.000Z | 2022-03-07T10:55:50.000Z | gira_homeserver_api/devices/value_device.py | leoyn/gira-homeserver-api | 7d642413a56078f694518d9189b4b7cc9776482d | [
"MIT"
] | 3 | 2020-04-17T09:53:45.000Z | 2021-01-25T22:14:14.000Z | gira_homeserver_api/devices/value_device.py | leoyn/gira-homeserver-api | 7d642413a56078f694518d9189b4b7cc9776482d | [
"MIT"
] | 1 | 2020-04-17T06:51:50.000Z | 2020-04-17T06:51:50.000Z | from .device import Device | 15.75 | 26 | 0.761905 |
2f8eec6be049e9fe4a729f243ebe752e635be903 | 1,756 | py | Python | rfim2d/tests/test_param_dict.py | lxh3/rfim2d | 5283d0df492ad20ecef30b17803437ca9155f8b3 | [
"MIT"
] | null | null | null | rfim2d/tests/test_param_dict.py | lxh3/rfim2d | 5283d0df492ad20ecef30b17803437ca9155f8b3 | [
"MIT"
] | null | null | null | rfim2d/tests/test_param_dict.py | lxh3/rfim2d | 5283d0df492ad20ecef30b17803437ca9155f8b3 | [
"MIT"
] | null | null | null | from rfim2d import param_dict
key_dict = {
'A': ['Sigma', 'a', 'b'],
'dMdh': ['hMax', 'eta', 'a', 'b', 'c'],
'joint': ['rScale', 'rc', 'sScale', 'etaScale', 'df',
'lambdaH', 'B', 'C', 'F'],
'Sigma': ['rScale', 'rc', 'sScale', 'df', 'B', 'C'],
'eta': ['rScale', 'rc', 'etaScale', 'lambdaH', 'B', 'F']
}
powerlaw_key_dict = {
'joint': ['rScale', 'rc', 'sScale', 'etaScale', 'sigma', 'betaDelta'],
'Sigma': ['rScale', 'rc', 'sScale', 'sigma'],
'eta': ['rScale', 'rc', 'etaScale', 'betaDelta']
}
| 30.807018 | 83 | 0.600228 |
2f8f9cde054908beafa4eaad551bd52319f17a78 | 80 | py | Python | main.py | Zerex1/Test | eb1030a90961c18d7aac5f3ca63e3a540756349b | [
"MIT"
] | null | null | null | main.py | Zerex1/Test | eb1030a90961c18d7aac5f3ca63e3a540756349b | [
"MIT"
] | 7 | 2022-02-19T16:54:51.000Z | 2022-02-20T21:52:54.000Z | main.py | Scarloran/Error | 07a1d70fb69b0c8ae45f500c92ad04eb59ed26f5 | [
"MIT"
] | null | null | null | print('hi all')
print('hii')
print('hello world')
print('hi')
print('hello')
| 8.888889 | 20 | 0.625 |
2f8fc48275f4387b3e97bc8ce0893dd23b2af531 | 2,250 | py | Python | core.py | sure-fire/derbypi | 681e266d40fa238effe5de54bd4bbff963db028e | [
"MIT"
] | 1 | 2016-10-31T17:34:40.000Z | 2016-10-31T17:34:40.000Z | core.py | sure-fire/derbypi | 681e266d40fa238effe5de54bd4bbff963db028e | [
"MIT"
] | null | null | null | core.py | sure-fire/derbypi | 681e266d40fa238effe5de54bd4bbff963db028e | [
"MIT"
] | null | null | null | #!/usr/bin/python
# Copyright (c) 2015 Aaron Soto
# Released under the MIT license
# Incorporates libraries from AdaFruit, also released under the MIT license
# TODO functions:
# display_progress(percentage,[title]) - display a progress bar (0-1,1-100) / check for float
# display_error(message,[timeout],[title]) - change backlight color, display error
import glob # module_scan(): to find files
import imp # module_run(): to load and run plugin modules
import traceback # module_run(): to gather a traceback during exceptions
import lcd # module_run(): to print fatal errors to the screen
import time # log(): to print the epoch time in log messages
import os # main(): to check UID for root
import sys # main(): to exit with status code
import subprocess # shutdown(): to call the 'halt' command
if __name__=="__main__":
if "wipe" in module_scan("."):
module_run("modules/wipe2/main.py") | 33.088235 | 107 | 0.651556 |
2f90a5c7e193988dc43d8564c22a87b2b8ba9258 | 753 | py | Python | populator/exercise_splitter.py | Calvibert/workout-generator | 0c905a2132be4e0f440d8ecbfaba71592c0fe9e2 | [
"MIT"
] | null | null | null | populator/exercise_splitter.py | Calvibert/workout-generator | 0c905a2132be4e0f440d8ecbfaba71592c0fe9e2 | [
"MIT"
] | null | null | null | populator/exercise_splitter.py | Calvibert/workout-generator | 0c905a2132be4e0f440d8ecbfaba71592c0fe9e2 | [
"MIT"
] | null | null | null | # Upper-lower splitter for the exercise list
import sys
import exercise_populator_config as conf
# Upper-lower splitter for the exercise list: reads "<name>, <muscle>"
# lines and writes each line to upper.txt or lower.txt depending on
# which muscle group the muscle belongs to.
print('Enter the file name: ')
# rstrip('\n') instead of blind slicing: a missing trailing newline no
# longer chops off the last character of the file name.
filename = sys.stdin.readline().rstrip('\n')

upper = conf.CONST_MUSCLES['upper']
lower = conf.CONST_MUSCLES['lower']

uex = []
lex = []
# Context manager closes the input file (the original leaked the handle
# by re-binding `f` to the output file without closing the reader).
with open(filename, 'r') as source:
    for ex in source:
        ex = ex.rstrip()
        # The muscle name follows the first ", " on the line.
        muscle = ex[ex.find(',') + 2:]
        if muscle in upper:
            uex.append(ex)
        else:
            lex.append(ex)

# Write each group directly instead of re-binding sys.stdout to the
# file and print()-ing through it.
with open('upper.txt', 'w+') as out:
    for line in uex:
        out.write(line + '\n')

with open('lower.txt', 'w+') as out:
    for line in lex:
        out.write(line + '\n')
2f9197c39f4c2b4b9b35a18f55ab839142699e80 | 4,893 | py | Python | fbpmp/pcf/mpc/emp.py | benliugithub/fbpcs | 7af984264428058645847135026d474d7e28144e | [
"MIT"
] | null | null | null | fbpmp/pcf/mpc/emp.py | benliugithub/fbpcs | 7af984264428058645847135026d474d7e28144e | [
"MIT"
] | null | null | null | fbpmp/pcf/mpc/emp.py | benliugithub/fbpcs | 7af984264428058645847135026d474d7e28144e | [
"MIT"
] | null | null | null | #!/usr/bin/env/python3
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import asyncio
import logging
import os
import pathlib
import shutil
from typing import Dict, List
from fbpmp.pcf import call_process
from fbpmp.pcf.errors import MPCRuntimeError, MPCStartupError
from fbpmp.pcf.games import (
ConversionLift,
ConverterLift,
SecretShareConversionLift,
SecretShareConverterLift,
)
from fbpmp.pcf.mpc.base import ServerClientMPCFramework
from fbpmp.pcf.structs import Game, Metric, Status
EMP_GAME_DIR = pathlib.Path(os.environ.get("EMP_GAME_DIR", os.getcwd()))
MAX_ROWS_PER_PARTITION = 1000000 # 1 million
| 35.977941 | 95 | 0.625179 |
2f939a72fbb64e7dc423500b36e371b897a8fc9b | 2,168 | py | Python | 01_Plots/plot_time_differences.py | awareseven/Reproducibility-and-Replicability-of-Web-Measurement-Studies | 38953c70a9ab03e1d29e4f9c6da13ffcaaeac84b | [
"Apache-2.0"
] | 3 | 2022-01-27T07:36:24.000Z | 2022-02-22T09:32:53.000Z | 01_Plots/plot_time_differences.py | awareseven/Reproducibility-and-Replicability-of-Web-Measurement-Studies | 38953c70a9ab03e1d29e4f9c6da13ffcaaeac84b | [
"Apache-2.0"
] | null | null | null | 01_Plots/plot_time_differences.py | awareseven/Reproducibility-and-Replicability-of-Web-Measurement-Studies | 38953c70a9ab03e1d29e4f9c6da13ffcaaeac84b | [
"Apache-2.0"
] | 1 | 2022-02-02T08:21:39.000Z | 2022-02-02T08:21:39.000Z | import matplotlib.font_manager as font_manager
import matplotlib.pyplot as plt
import pandas as pd
import os
# Read the data
path = os.path.join(os.getcwd(), "results")
df = pd.read_csv(os.path.join(path, "tracker_AND_cookies.csv"))
x = df["day"]
y1 = df["total_tracker"]
y2 = df["tracker_distinct"]
y3 = df["is_session"]  # NOTE(review): read but never plotted below
# Some styling stuff
fig, ax = plt.subplots(1, figsize=(7, 4))
legend_properties = {'weight': 'bold', 'size': 9}
font = font_manager.FontProperties(family='sans-serif',
                                   weight='bold',
                                   style='normal',
                                   size=14)
# NOTE(review): called before any labelled artist exists, so this draws
# nothing; the effective legends are ax.legend()/ax2.legend() below.
plt.legend(loc='best', frameon=False, prop=font)
plt.xticks(weight='bold', fontname='sans-serif', size=14)
plt.yticks(weight='bold', fontname='sans-serif', size=14)
plt.xlabel("Measurement point", weight='bold', fontname='sans-serif', size=14)
# Add first y-axis (Number of tracking requests)
ax.plot(x, y1, color="#999999", label="Number of tracking requests", marker='o', linestyle='dashed')
ax.set_ylabel('Number of tracking requests')
ax.legend(loc=2, prop=legend_properties)
plt.ylabel("Number of tracking requests", weight='bold', fontname='sans-serif', size=14)
# Add second y-axis
ax2 = ax.twinx()  # instantiate a second axes that shares the same x-axis
ax2.plot(x, y2, color="#555555", label="Number of distinct trackers", marker='x', linestyle='solid')
ax2.set_ylabel('Number of distinct trackers')
ax2.set_ylim(3500, 4200)
ax2.legend(loc=1, prop=legend_properties)
# NOTE(review): plt.ylabel targets the current axes (ax2 here), so this
# overrides the set_ylabel call just above with bold styling.
plt.ylabel("Number of distinct trackers", weight='bold', fontname='sans-serif', size=14)
plt.yticks(weight='bold', fontname='sans-serif')
# Save plot to disc
plt.grid(False)
#plt.show()
plt.savefig(path + "/04_long_term_tracker_cookies.pdf", dpi=600,
            transparent=False, bbox_inches='tight', format="pdf")
# Simple min / max calculations
max_value = y1.max()
min_value = y1.min()
# Index labels of the measurement point(s) hitting the extreme values.
max_day = y1.index[df['total_tracker'] == max_value].tolist()
min_day = y1.index[df['total_tracker'] == min_value].tolist()
print("Max at: ", max_day, "max value: ", max_value)
print("Min at: ", min_day, "min value: ", min_value)
print("std:", y1.std())
2f9b8862aa5b57db0e4c23d664291957a9fbe6a4 | 379 | py | Python | task_function.py | feihong/asyncio-tasks-talk | a4ef4e7246906d89aab81db69b7cba0c76258288 | [
"CC-BY-4.0"
] | 1 | 2016-09-10T02:47:26.000Z | 2016-09-10T02:47:26.000Z | task_function.py | feihong/asyncio-tasks-talk | a4ef4e7246906d89aab81db69b7cba0c76258288 | [
"CC-BY-4.0"
] | null | null | null | task_function.py | feihong/asyncio-tasks-talk | a4ef4e7246906d89aab81db69b7cba0c76258288 | [
"CC-BY-4.0"
] | null | null | null | import asyncio
coroutine = long_task(Writer())
asyncio.ensure_future(coroutine)
asyncio.get_event_loop()run_forever()
| 18.95 | 59 | 0.664908 |
2f9feffcaa4a8285a2abe800ba2837e256eb6e2b | 2,636 | py | Python | nebula_utils/nebula_utils/persist_compute/utils.py | threathunterX/python_lib | e2d4052de04c82cb7bccd08042f28db824cab442 | [
"Apache-2.0"
] | 2 | 2019-03-17T04:03:08.000Z | 2019-05-01T09:42:23.000Z | nebula_utils/nebula_utils/persist_compute/utils.py | threathunterX/python_lib | e2d4052de04c82cb7bccd08042f28db824cab442 | [
"Apache-2.0"
] | null | null | null | nebula_utils/nebula_utils/persist_compute/utils.py | threathunterX/python_lib | e2d4052de04c82cb7bccd08042f28db824cab442 | [
"Apache-2.0"
] | 4 | 2019-06-24T05:47:24.000Z | 2020-09-29T05:00:31.000Z | # -*- coding: utf-8 -*-
# Mapping from event group-by key names to canonical dimension names.
Group_Key_To_Dimension = {
    'c_ip': 'ip',
    'uid': 'user',
    'page': 'page',
    'did': 'did',
    # 'c_ipc': 'ipc',
}
Avail_Dimensions = tuple(Group_Key_To_Dimension.values())
# dimension name -> click-count variable name
Click_Variable_Names = {
    'ip': 'ip__visit__dynamic_count__1h__slot',
    'did': 'did__visit__dynamic_count__1h__slot',
    'user': 'user__visit__dynamic_count__1h__slot',
    'page': 'page__visit__dynamic_count__1h__slot',
}
# Numeric prefixes used when persisting per-dimension statistics.
IP_Stat_Type = 2
IPC_Stat_Type = 3
DID_Stat_Type = 4
UID_Stat_Type = 5
PAGE_Stat_Type = 6
Dimension_Stat_Prefix = {
    'ip': IP_Stat_Type,
    'ipc': IPC_Stat_Type,
    'did': DID_Stat_Type,
    'user': UID_Stat_Type,
    'page': PAGE_Stat_Type,
}
# Incident scene categories and their counter variable names.
Category = [
    'VISITOR', 'ACCOUNT', 'ORDER',
    'TRANSACTION', 'MARKETING', 'OTHER',
]
Scene_Variable_Names = {
    'VISITOR': 'total__visit__visitor_incident_count__1h__slot',
    'ACCOUNT': 'total__visit__account_incident_count__1h__slot',
    'ORDER': 'total__visit__order_incident_count__1h__slot',
    'TRANSACTION': 'total__visit__transaction_incident_count__1h__slot',
    'MARKETING': 'total__visit__marketing_incident_count__1h__slot',
    'OTHER': 'total__visit__other_incident_count__1h__slot',
}


def get_dimension(group_key_name):
    """Translate a group-by key name into its dimension name.

    Returns the dimension (e.g. 'ip', 'user') or None when the key
    is not a known group-by key.
    """
    return Group_Key_To_Dimension.get(group_key_name)
def dict_merge(src_dict, dst_dict):
    """Recursively merge ``dst_dict`` into ``src_dict`` in place.

    Keys present only in ``dst_dict`` are copied over.  When a key
    exists in both dicts the values are combined by type:

    * numbers / numeric strings are summed (coerced with ``int``),
    * sets are unioned,
    * dicts are merged recursively,
    * any other type is left untouched (matching the original code).

    Examples:
    >>> s = dict(a=1,b='2')
    >>> d = {'b': 3, 'c': 4}
    >>> dict_merge(s,d)
    >>> t = {'a': 1, 'b': 5, 'c': 4}
    >>> s == t
    True
    >>> s = dict(a=set([1,2]), )
    >>> d = dict(a=set([2, 3]),)
    >>> dict_merge(s,d)
    >>> t = {'a':set([1,2,3])}
    >>> s == t
    True
    >>> s = dict(a={'a':1, 'b':2})
    >>> d = dict(a={'a':1, 'b':2})
    >>> dict_merge(s, d)
    >>> t = dict(a={'a':2, 'b':4})
    >>> s == t
    True
    """
    # Fix: the original used Python-2-only idioms (iteritems, has_key,
    # basestring) and crashed under Python 3; this version runs on both.
    try:
        string_types = basestring  # noqa: F821  (Python 2)
    except NameError:
        string_types = str  # Python 3
    for k, v in dst_dict.items():
        if k not in src_dict:
            src_dict[k] = v
        elif isinstance(v, (string_types, int, float)):
            src_dict[k] = int(v) + int(src_dict[k])
        elif isinstance(v, set):
            assert type(v) == type(src_dict[k]), \
                'key %s,dst_dict value: %s type: %s, src_dict value: %s type:%s' \
                % (k, v, type(v), src_dict[k], type(src_dict[k]))
            src_dict[k].update(v)
        elif isinstance(v, dict):
            assert type(v) == type(src_dict[k]), \
                'key %s,dst_dict value: %s type: %s, src_dict value: %s type:%s' \
                % (k, v, type(v), src_dict[k], type(src_dict[k]))
            dict_merge(src_dict[k], v)
85ca2de785e536315d8103867a6f6f3babb8d84b | 346 | py | Python | a301/scripts/check_nearir.py | Pearl-Ayem/ATSC_Notebook_Data | c075d166c235ac4e68a4b77750e02b2a5e77abd0 | [
"MIT"
] | null | null | null | a301/scripts/check_nearir.py | Pearl-Ayem/ATSC_Notebook_Data | c075d166c235ac4e68a4b77750e02b2a5e77abd0 | [
"MIT"
] | null | null | null | a301/scripts/check_nearir.py | Pearl-Ayem/ATSC_Notebook_Data | c075d166c235ac4e68a4b77750e02b2a5e77abd0 | [
"MIT"
] | null | null | null | from pyhdf.SD import SD, SDC
from pathlib import Path
import numpy as np
import a301
# Sanity-check the MODIS MYD05 near-IR water-vapor product: count how
# many retrieved pixels are strictly positive.
m5_file = a301.data_dir / Path('myd05_l2_10_7.hdf')
the_file = SD(str(m5_file), SDC.READ)
wv_nearir_data = the_file.select('Water_Vapor_Near_Infrared').get()
# Bug fix: `the_file.end` only looked the method up without calling it,
# so the HDF4 file handle was never released.
the_file.end()
positive = wv_nearir_data > 0.
print(f'found {np.sum(positive.flat)} positive pixels')
85cb5db8536dff080788a2b44e8c7498ab0bd3f3 | 2,649 | py | Python | course_grader/dao/message.py | uw-it-aca/gradepage | 7059d715cc112ad0ecb0e5012f716e525ee7b3bc | [
"Apache-2.0"
] | 1 | 2017-01-29T09:52:06.000Z | 2017-01-29T09:52:06.000Z | course_grader/dao/message.py | uw-it-aca/gradepage | 7059d715cc112ad0ecb0e5012f716e525ee7b3bc | [
"Apache-2.0"
] | 287 | 2017-03-09T00:17:20.000Z | 2022-01-08T00:36:34.000Z | course_grader/dao/message.py | uw-it-aca/gradepage | 7059d715cc112ad0ecb0e5012f716e525ee7b3bc | [
"Apache-2.0"
] | null | null | null | # Copyright 2021 UW-IT, University of Washington
# SPDX-License-Identifier: Apache-2.0
from course_grader.dao import current_datetime, display_datetime
from course_grader.dao.term import (
next_gradable_term, previous_gradable_term, submission_deadline_warning,
is_grading_period_open)
from persistent_message.models import Message
| 35.797297 | 76 | 0.710834 |
85cb84708ec1159fcbafba9f83ab692e7fdf9668 | 4,541 | py | Python | swn/file.py | wkitlasten/surface-water-network | fd36ad5ee3fbd7a1107f0c4c376c4af1295b5b1b | [
"BSD-3-Clause"
] | 18 | 2019-12-04T14:59:47.000Z | 2021-12-21T12:34:28.000Z | swn/file.py | jonathanqv/surface-water-network | 362217c897345042464564440be08b34f6f0915d | [
"BSD-3-Clause"
] | 17 | 2020-04-15T04:49:49.000Z | 2022-03-04T05:22:17.000Z | swn/file.py | jonathanqv/surface-water-network | 362217c897345042464564440be08b34f6f0915d | [
"BSD-3-Clause"
] | 6 | 2020-05-07T23:56:12.000Z | 2022-01-08T16:56:32.000Z | """File reading/writing helpers."""
__all__ = ["topnet2ts", "gdf_to_shapefile"]
import geopandas
import pandas as pd
from swn.logger import get_logger, logging
def topnet2ts(nc_path, varname, mult=None, log_level=logging.INFO):
"""Read TopNet data from a netCDF file into a pandas.DataFrame timeseries.
User may need to multiply DataFrame to convert units.
Parameters
----------
nc_path : str
File path to netCDF file
varname : str
Variable name in netCDF file to read
mult : float, optional
Multiplier applied to dataset, which preserves dtype. For example,
to convert from "meters3 second-1" to "meters3 day-1", use 86400.
verbose : int, optional
Level used by logging module; default is 20 (logging.INFO)
Returns
-------
pandas.DataFrame
Where columns is rchid and index is DatetimeIndex.
"""
try:
from netCDF4 import Dataset
except ImportError:
raise ImportError('function requires netCDF4')
try:
from cftime import num2pydate as n2d
except ImportError:
from cftime import num2date as n2d
logger = get_logger("topnet2ts", log_level)
logger.info("reading file: %s", nc_path)
with Dataset(nc_path, "r") as nc:
nc.set_auto_mask(False)
var = nc.variables[varname]
logger.info("variable %s:\n%s", varname, var)
# Evaluate dimensions
dim_has_time = False
dim_has_nrch = False
dim_ignore = []
varslice = [Ellipsis] # take first dimensions
for name, size in zip(var.dimensions, var.shape):
if name == "time":
dim_has_time = True
elif name == "nrch":
dim_has_nrch = True
elif size == 1:
dim_ignore.append(name)
varslice.append(0)
if not dim_has_time:
logger.error("no 'time' dimension found")
if not dim_has_nrch:
logger.error("no 'nrch' dimension found")
if dim_ignore:
logger.info("ignoring size 1 dimensions: %s", dim_ignore)
dat = var[tuple(varslice)]
if len(dat.shape) != 2:
logger.error("expected 2 dimensions, found shape %s", dat.shape)
if dim_has_time and var.dimensions.index("time") == 1:
dat = dat.T
if mult is not None and mult != 1.0:
dat *= mult
df = pd.DataFrame(dat)
df.columns = nc.variables["rchid"]
time_v = nc.variables["time"]
df.index = pd.DatetimeIndex(n2d(time_v[:], time_v.units))
logger.info("data successfully read")
return df
def gdf_to_shapefile(gdf, shp_fname, **kwargs):
"""Write any GeoDataFrame to a shapefile.
This is a workaround to the to_file method, which cannot save
GeoDataFrame objects with other data types, such as set.
Parameters
----------
gdf : geopandas.GeoDataFrame
GeoDataFrame to export
shp_fname : str
File path for output shapefile
kwargs : mapping
Keyword arguments passed to to_file and to fiona.open
Returns
-------
None
"""
if not isinstance(gdf, geopandas.GeoDataFrame):
raise ValueError("expected gdf to be a GeoDataFrame")
gdf = gdf.copy()
geom_name = gdf.geometry.name
for col, dtype in gdf.dtypes.iteritems():
if col == geom_name:
continue
if dtype == object:
is_none = gdf[col].map(lambda x: x is None)
gdf[col] = gdf[col].astype(str)
gdf.loc[is_none, col] = ""
elif dtype == bool:
gdf[col] = gdf[col].astype(int)
# potential names that need to be shortened to <= 10 characters for DBF
colname10 = {
"to_segnum": "to_seg",
"from_segnums": "from_seg",
"num_to_outlet": "num_to_out",
"dist_to_outlet": "dst_to_out",
"stream_order": "strm_order",
"upstream_length": "upstr_len",
"upstream_area": "upstr_area",
"inflow_segnums": "inflow_seg",
"zcoord_count": "zcoord_num",
"zcoord_first": "zcoordfrst",
"zcoord_last": "zcoordlast",
"strtop_incopt": "stpincopt",
"prev_ibound": "previbound",
"prev_idomain": "prevdomain",
}
for k, v in list(colname10.items()):
assert len(v) <= 10, v
if k == v or k not in gdf.columns:
del colname10[k]
gdf.rename(columns=colname10).reset_index(drop=False)\
.to_file(str(shp_fname), **kwargs)
| 32.435714 | 78 | 0.602731 |
85cc4f7ba3e6215d40e3cc9668b7b4fc514ab919 | 5,752 | py | Python | assignment4/src/clean_documents.py | jschmidtnj/cs584 | d1d4d485d1fac8743cdbbc2996792db249dcf389 | [
"MIT"
] | null | null | null | assignment4/src/clean_documents.py | jschmidtnj/cs584 | d1d4d485d1fac8743cdbbc2996792db249dcf389 | [
"MIT"
] | null | null | null | assignment4/src/clean_documents.py | jschmidtnj/cs584 | d1d4d485d1fac8743cdbbc2996792db249dcf389 | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
"""
data clean for books (clean_documents.py)
note - this is the same as in assignment 1 for the most part
"""
import re
from ast import literal_eval
from os.path import basename, splitext, exists
from typing import Optional, List
from utils import get_glob, file_path_relative
from variables import part_1_data_folder, clean_data_folder, class_key, label_key, paragraph_key
from loguru import logger
from books import BookType, start_end_map, class_map
import pandas as pd
from typing import Tuple
import yaml
title_split: str = 'title: '
author_split: str = 'author: '
start_book: str = 'start of this project gutenberg ebook'
the_end: str = 'the end'
end_book: str = 'end of this project gutenberg ebook'
chapter: str = 'Chapter '
adventure: str = 'ADVENTURE '
multi_quote_identifier: str = '"'
min_line_len: int = 6 # line discarded if less than this number of characters
default_file_name: str = f'{clean_data_folder}/documents.csv'
classes_file_name: str = f'{clean_data_folder}/doc_classes.txt'
whitespace_regex = re.compile(r"\s+")
def normalize_sentence(sentence: str) -> str:
"""
remove punctuation, return list of words
"""
sentence = whitespace_regex.sub(' ', sentence).strip()
return sentence
def clean(clean_data_basename: Optional[str] = default_file_name) -> Tuple[pd.DataFrame, List[BookType]]:
"""
data cleaning
"""
class_count: int = 0
label_list: List[BookType] = []
get_from_disk = clean_data_basename is not None
if not get_from_disk:
clean_data_basename = default_file_name
clean_data_path = file_path_relative(clean_data_basename)
classes_path = file_path_relative(classes_file_name)
if get_from_disk and exists(clean_data_path) and exists(classes_path):
logger.info(f'reading data from {clean_data_path}')
data = pd.read_csv(clean_data_path, converters={
paragraph_key: literal_eval})
label_list_enum: Optional[List[BookType]] = None
with open(classes_path) as classes_file:
label_list = yaml.load(classes_file, Loader=yaml.FullLoader)
label_list_enum = [BookType(elem) for elem in label_list]
return data, label_list_enum
data: pd.DataFrame = pd.DataFrame()
# preprocess data and construct examples
found_files: bool = False
for file_path in get_glob(f'{part_1_data_folder}/*.txt'):
found_files = True
file_name: str = basename(splitext(file_path)[0])
logger.info(f'processing {file_name}')
title: Optional[str] = None
book_key: Optional[BookType] = None
book_started: bool = False
paragraphs: List[List[str]] = []
num_newline_count: int = 0
line_number: int = 0
with open(file_path, 'r') as current_file:
while True:
line = current_file.readline()
line_number += 1
line_trim: Optional[str] = None
if line:
line_trim = line.strip()
if not book_started and \
((line_trim is not None and line_trim.startswith(start_book))
or (book_key is not None and line_number >= start_end_map[book_key].start)):
book_started = True
if line_trim is None or line_trim.startswith(end_book) \
or line_trim == the_end or \
(book_key is not None and line_number >= start_end_map[book_key].end):
# done with reading the file
break
if not book_started:
if title is None and line_trim.startswith(title_split):
title = line_trim.split(title_split)[1]
logger.info(f'title: {title}')
if book_key is None and line_trim.startswith(author_split):
author: str = line_trim.split(author_split)[1]
logger.info(f'author: {author}')
book_key = BookType(author.split(' ')[-1])
else:
if len(line_trim) < min_line_len or \
line.startswith(chapter) or line.startswith(chapter):
num_newline_count += 1
else:
multi_line_quotes = line_trim.startswith(multi_quote_identifier) \
and paragraphs[-1][0].startswith(multi_quote_identifier)
if len(paragraphs) == 0 or \
(num_newline_count > 0 and not multi_line_quotes):
paragraphs.append([])
num_newline_count = 0
paragraphs[-1].append(line_trim)
if not found_files:
raise RuntimeError('no files found')
if book_key is None:
raise RuntimeError('no book key found')
class_name = class_map[book_key]
logger.info(
f'number of paragraphs in class "{class_name}": {len(paragraphs)}')
paragraphs = [[normalize_sentence(sentence) for sentence in paragraph] for paragraph in paragraphs]
data = pd.concat([data, pd.DataFrame({
paragraph_key: paragraphs,
label_key: [class_name] * len(paragraphs),
class_key: class_count
})], ignore_index=True)
label_list.append(book_key)
class_count += 1
data.to_csv(clean_data_path, index=False)
with open(classes_path, 'w') as classes_file:
label_list_str = [elem.name for elem in label_list]
yaml.dump(label_list_str, classes_file)
return data, label_list
if __name__ == '__main__':
clean()
| 39.129252 | 107 | 0.619784 |
85ccd6d8d9bc17b243d312e04343cd6c75bdd27f | 6,041 | py | Python | miniproject/api/organization/views.py | dandy7373/HR_web | 65dd80159c7e3113961d55ef126b7df75c7bda13 | [
"MIT"
] | null | null | null | miniproject/api/organization/views.py | dandy7373/HR_web | 65dd80159c7e3113961d55ef126b7df75c7bda13 | [
"MIT"
] | null | null | null | miniproject/api/organization/views.py | dandy7373/HR_web | 65dd80159c7e3113961d55ef126b7df75c7bda13 | [
"MIT"
] | null | null | null | from django.shortcuts import render
from rest_framework.generics import RetrieveAPIView,CreateAPIView
from rest_framework.permissions import AllowAny
from rest_framework.response import Response
from rest_framework.status import HTTP_200_OK, HTTP_400_BAD_REQUEST,HTTP_201_CREATED
from rest_framework.views import APIView
from .models import UserOrganization
from api.individual.models import Userprofile
from .serializers import UserOrganizationRegistrationSerializer,OrganizationLoginSerializer, OrganizationSerializer
from bson import ObjectId | 37.290123 | 140 | 0.608343 |
85ccf00c2aab76068a1c4fc3ab1b4c929b9cff1a | 9,378 | py | Python | nutils/cli.py | JochenHinz/nutils | ac18dd6825b107e2e4c186ebb1598dbf0fff0f77 | [
"MIT"
] | null | null | null | nutils/cli.py | JochenHinz/nutils | ac18dd6825b107e2e4c186ebb1598dbf0fff0f77 | [
"MIT"
] | null | null | null | nutils/cli.py | JochenHinz/nutils | ac18dd6825b107e2e4c186ebb1598dbf0fff0f77 | [
"MIT"
] | null | null | null | # Copyright (c) 2014 Evalf
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
"""
The cli (command line interface) module provides the `cli.run` function that
can be used set up properties, initiate an output environment, and execute a
python function based arguments specified on the command line.
"""
from . import util, config, long_version, warnings, matrix, cache
import sys, inspect, os, io, time, pdb, signal, subprocess, contextlib, traceback, pathlib, html, treelog as log, stickybar
def run(func, *, skip=1, loaduserconfig=True):
'''parse command line arguments and call function'''
configs = []
if loaduserconfig:
home = os.path.expanduser('~')
configs.append(dict(richoutput=sys.stdout.isatty()))
configs.extend(path for path in (os.path.join(home, '.config', 'nutils', 'config'), os.path.join(home, '.nutilsrc')) if os.path.isfile(path))
params = inspect.signature(func).parameters.values()
if '-h' in sys.argv[skip:] or '--help' in sys.argv[skip:]:
print('usage: {} (...)'.format(' '.join(sys.argv[:skip])))
print()
for param in params:
cls = param.default.__class__
print(' --{:<20}'.format(param.name + '=' + cls.__name__.upper() if cls != bool else '(no)' + param.name), end=' ')
if param.annotation != param.empty:
print(param.annotation, end=' ')
print('[{}]'.format(param.default))
sys.exit(1)
kwargs = {param.name: param.default for param in params}
cli_config = {}
for arg in sys.argv[skip:]:
name, sep, value = arg.lstrip('-').partition('=')
if not sep:
value = not name.startswith('no')
if not value:
name = name[2:]
if name in kwargs:
default = kwargs[name]
args = kwargs
else:
try:
default = getattr(config, name)
except AttributeError:
print('invalid argument {!r}'.format(arg))
sys.exit(2)
args = cli_config
try:
if isinstance(default, bool) and not isinstance(value, bool):
raise Exception('boolean value should be specifiec as --{0}/--no{0}'.format(name))
args[name] = default.__class__(value)
except Exception as e:
print('invalid argument for {!r}: {}'.format(name, e))
sys.exit(2)
with config(*configs, **cli_config):
status = call(func, kwargs, scriptname=os.path.basename(sys.argv[0]), funcname=None if skip==1 else func.__name__)
sys.exit(status)
def choose(*functions, loaduserconfig=True):
'''parse command line arguments and call one of multiple functions'''
assert functions, 'no functions specified'
funcnames = [func.__name__ for func in functions]
if len(sys.argv) == 1 or sys.argv[1] in ('-h', '--help'):
print('usage: {} [{}] (...)'.format(sys.argv[0], '|'.join(funcnames)))
sys.exit(1)
try:
ifunc = funcnames.index(sys.argv[1])
except ValueError:
print('invalid argument {!r}; choose from {}'.format(sys.argv[1], ', '.join(funcnames)))
sys.exit(2)
run(functions[ifunc], skip=2, loaduserconfig=loaduserconfig)
def call(func, kwargs, scriptname, funcname=None):
'''set up compute environment and call function'''
outdir = config.outdir or os.path.join(os.path.expanduser(config.outrootdir), scriptname)
with contextlib.ExitStack() as stack:
stack.enter_context(cache.enable(os.path.join(outdir, config.cachedir)) if config.cache else cache.disable())
stack.enter_context(matrix.backend(config.matrix))
stack.enter_context(log.set(log.FilterLog(log.RichOutputLog() if config.richoutput else log.StdoutLog(), minlevel=5-config.verbose)))
if config.htmloutput:
htmllog = stack.enter_context(log.HtmlLog(outdir, title=scriptname, htmltitle='<a href="http://www.nutils.org">{}</a> {}'.format(SVGLOGO, html.escape(scriptname)), favicon=FAVICON))
uri = (config.outrooturi.rstrip('/') + '/' + scriptname if config.outrooturi else pathlib.Path(outdir).resolve().as_uri()) + '/' + htmllog.filename
if config.richoutput:
t0 = time.perf_counter()
bar = lambda running: '{0} [{1}] {2[0]}:{2[1]:02d}:{2[2]:02d}'.format(uri, 'RUNNING' if running else 'STOPPED', _hms(time.perf_counter()-t0))
stack.enter_context(stickybar.activate(bar, update=1))
else:
log.info('opened log at', uri)
htmllog.write('<ul style="list-style-position: inside; padding-left: 0px; margin-top: 0px;">{}</ul>'.format(''.join(
'<li>{}={} <span style="color: gray;">{}</span></li>'.format(param.name, kwargs.get(param.name, param.default), param.annotation)
for param in inspect.signature(func).parameters.values())), level=1, escape=False)
stack.enter_context(log.add(htmllog))
stack.enter_context(warnings.via(log.warning))
stack.callback(signal.signal, signal.SIGINT, signal.signal(signal.SIGINT, _sigint_handler))
log.info('nutils v{}'.format(_version()))
log.info('start', time.ctime())
try:
func(**kwargs)
except (KeyboardInterrupt, SystemExit, pdb.bdb.BdbQuit):
log.error('killed by user')
return 1
except:
log.error(traceback.format_exc())
if config.pdb:
print(_mkbox(
'YOUR PROGRAM HAS DIED. The Python debugger',
'allows you to examine its post-mortem state',
'to figure out why this happened. Type "h"',
'for an overview of commands to get going.'))
pdb.post_mortem()
return 2
else:
log.info('finish', time.ctime())
return 0
SVGLOGO = '''\
<svg style="vertical-align: middle;" width="32" height="32" xmlns="http://www.w3.org/2000/svg">
<path d="M7.5 19 v-6 a6 6 0 0 1 12 0 v6 M25.5 13 v6 a6 6 0 0 1 -12 0 v-6" fill="none" stroke-width="3" stroke-linecap="round"/>
</svg>'''
FAVICON = 'data:image/png;base64,' \
'iVBORw0KGgoAAAANSUhEUgAAADAAAAAwCAQAAAD9CzEMAAACAElEQVRYw+2YS04bQRCGP2wJ' \
'gbAimS07WABXGMLzAgiBcgICFwDEEiGiDCScggWPHVseC1AIZ8AIJBA2hg1kF5DiycLYqppp' \
'M91j2KCp3rSq//7/VldPdfVAajHW0nAkywDjeHSTBx645IRdfvPvLWTbWeSewNDuWKC9Wfov' \
'3BjJa+2aqWa2bInKq/QBARV8MknoM2zHktfaVhKJ79b0AQEr7nsfpthjml466KCPr+xHNmrS' \
'7eTo0J4xFMEMUwiFu81eYFFNPSJvROU5Vrh5W/qsOvdnDegBOjkXyDJZO4Fhta7RV7FDCvvZ' \
'TmBdhTbODgT6R9zJr9qA8G2LfiurlCji0yq8O6LvKT4zHlQEeoXfr3t94e1TUSAWDzyJKTnh' \
'L9W9t8KbE+i/iieCr6XroEEKb9qfee8LJxVIBVKBjyRQqnuKavxZpTiZ1Ez4Typ9KoGN+sCG' \
'Evgj+l2ib8ZLxCOhi8KnaLgoTkVino7Fzwr0L7st/Cmm7MeiDwV6zU5gUF3wYw6Fg2dbztyJ' \
'SQWHcsb6fC6odR3T2YBeF2RzLiXltZpaYCSCGVWrD7hyKSlhKvJiOGCGfnLk6GdGhbZaFE+4' \
'fo7fnMr65STf+5Y1/Way9PPOT6uqTYbCHW5X7nsftjbmKRvJy8yZT05Lgnh4jOPR8/JAv+CE' \
'XU6ppH81Etp/wL7MKaEwo4sAAAAASUVORK5CYII='
# vim:sw=2:sts=2:et
| 44.028169 | 187 | 0.683301 |
85ceb804c95eaa5e6ed011f7728feba8c174befd | 6,336 | py | Python | experiments/alpha_analysis.py | oeg-upm/tada-entity | 6e538129229bed49bf1aa960fcd97a8468eca765 | [
"Apache-2.0"
] | 3 | 2019-06-11T10:19:25.000Z | 2022-02-28T22:58:29.000Z | experiments/alpha_analysis.py | oeg-upm/tada-entity | 6e538129229bed49bf1aa960fcd97a8468eca765 | [
"Apache-2.0"
] | 7 | 2019-02-04T08:57:54.000Z | 2021-11-01T12:42:03.000Z | experiments/alpha_analysis.py | oeg-upm/tada-entity | 6e538129229bed49bf1aa960fcd97a8468eca765 | [
"Apache-2.0"
] | null | null | null | """
This script analyses optimal alphas for each class and draws them in a box and whisker plot
"""
import pandas as pd
import argparse
import seaborn as sns
import matplotlib.pyplot as plt
import itertools
def analyse_alpha_for_all(falpha, classes, draw_fname, midalpha):
"""
:param fmeta: path to the meta file
:param classes: a dict of fnames and their classes
:return:
"""
df_all = pd.read_csv(falpha)
for fsid in range(1, 6):
df = df_all[df_all.fsid == fsid]
al_per_cls = aggregate_alpha_per_class(df, classes)
analyse_alpha(al_per_cls, draw_fname+"_fsid%d" % (fsid), midalpha)
# analyse_alpha(al_per_cls, "wcv2_alpha_%s_original_fsid%d" % (fattr,fsid))
# analyse_alpha(al_per_cls, "wcv2_alpha_fsid%d" % fsid)
# break
def aggregate_alpha_per_class(df, classes):
"""
:param df: DataFrame of a meta file
:param calsses: a dict of fnames and their classes
:return:
"""
"""fname,colid,fsid,from_alpha,to_alpha"""
d = dict()
for idx, row in df.iterrows():
# print("fname: <%s>" % row['fname'])
# DEBUG
print("classes: ")
print(classes)
c = classes[row['fname']]
if c not in d:
d[c] = {'from_alpha': [], 'to_alpha': [], 'mid_alpha': []}
d[c]['from_alpha'].append(row['from_alpha'])
d[c]['to_alpha'].append(row['to_alpha'])
d[c]['mid_alpha'].append((row['from_alpha'] + row['to_alpha'])/2)
return d
def main():
"""
Parse the arguments
:return:
"""
parser = argparse.ArgumentParser(description='Alpha Analysis')
# parser.add_argument('--debug', action="store_true", default=False, help="Whether to enable debug messages.")
parser.add_argument('falpha', help="The path to the alpha results file.")
parser.add_argument('fmeta', help="The path to the meta file which contain the classes.")
parser.add_argument('dataset', choices=["wcv1", "wcv2", "st19-r1", "st19-r2", "st19-r3", "st19-r4"],
help="The name of the dataset as the meta file differ for each")
parser.add_argument('--draw', default="test.svg", help="The filename prefix to draw (without the extension)")
parser.add_argument('--midalpha', action="store_true", default=False,
help="Whether to report the mid ranges of the optimal alpha or just the ranges")
parser.print_usage()
parser.print_help()
args = parser.parse_args()
workflow(args.falpha, args.fmeta, args.draw, args.midalpha, args.dataset)
if __name__ == "__main__":
main()
| 34.622951 | 114 | 0.574337 |
85cf779b9a1cc2e9b35950583be014be08b8ba73 | 1,009 | py | Python | p039m/combination_sum.py | l33tdaima/l33tdaima | 0a7a9573dc6b79e22dcb54357493ebaaf5e0aa90 | [
"MIT"
] | 1 | 2020-02-20T12:04:46.000Z | 2020-02-20T12:04:46.000Z | p039m/combination_sum.py | l33tdaima/l33tdaima | 0a7a9573dc6b79e22dcb54357493ebaaf5e0aa90 | [
"MIT"
] | null | null | null | p039m/combination_sum.py | l33tdaima/l33tdaima | 0a7a9573dc6b79e22dcb54357493ebaaf5e0aa90 | [
"MIT"
] | null | null | null | from typing import List
# TESTS
tests = [
([2, 3, 6, 7], 7, [[2, 2, 3], [7]]),
([2, 3, 5], 8, [[2, 2, 2, 2], [2, 3, 3], [3, 5]]),
([2], 1, []),
([1], 1, [[1]]),
([1], 2, [[1, 1]]),
]
for candidates, target, expected in tests:
sol = Solution()
actual = sol.combinationSum(candidates, target)
print("Combinations in", candidates, "sum to", target, "->", actual)
assert actual == expected
| 27.27027 | 84 | 0.489594 |
85cff0c0609514a2aa77da41be4f85d685342405 | 12,197 | py | Python | sdk/python/pulumi_proxmox/ct/container.py | meyskens/pulumi-proxmox | bf48570690350be68fa554e1cec376212eb449ab | [
"ECL-2.0",
"Apache-2.0"
] | 16 | 2021-01-11T11:26:19.000Z | 2022-01-23T02:32:34.000Z | sdk/python/pulumi_proxmox/ct/container.py | meyskens/pulumi-proxmox | bf48570690350be68fa554e1cec376212eb449ab | [
"ECL-2.0",
"Apache-2.0"
] | 2 | 2021-01-29T08:15:46.000Z | 2021-10-17T16:33:19.000Z | sdk/python/pulumi_proxmox/ct/container.py | meyskens/pulumi-proxmox | bf48570690350be68fa554e1cec376212eb449ab | [
"ECL-2.0",
"Apache-2.0"
] | 4 | 2021-04-06T00:36:05.000Z | 2021-12-16T14:25:07.000Z | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union
from .. import _utilities, _tables
from . import outputs
from ._inputs import *
__all__ = ['Container']
| 44.677656 | 145 | 0.649832 |
85d0ab0de0a1394f2406e3a5b3e99179d2c59390 | 486 | py | Python | tests/test/stateful/conftest.py | ActorForth/brownie | ef0d5af3bb48edcd11abf985626fc99dbc577c7d | [
"MIT"
] | 1,595 | 2020-06-01T19:41:53.000Z | 2022-03-31T16:09:54.000Z | tests/test/stateful/conftest.py | ActorForth/brownie | ef0d5af3bb48edcd11abf985626fc99dbc577c7d | [
"MIT"
] | 532 | 2020-05-30T12:06:17.000Z | 2022-03-31T22:33:41.000Z | tests/test/stateful/conftest.py | ActorForth/brownie | ef0d5af3bb48edcd11abf985626fc99dbc577c7d | [
"MIT"
] | 303 | 2020-06-17T00:38:34.000Z | 2022-03-31T10:59:48.000Z | #!/usr/bin/python3
import pytest
from hypothesis import settings
# derandomizing prevents flaky test outcomes
# we are testing hypothesis itself, not testing with hypothesis
settings.register_profile("derandomize", derandomize=True)
| 19.44 | 63 | 0.709877 |
85d1bb79ecc810612d2ce67b9924416144e6d28f | 7,706 | py | Python | singleimagemodel.py | severinaklingler/kaggle-ocular-disease | a6641f6005d1a7f2399b4de9e804ab3ac7f20dd2 | [
"Apache-2.0"
] | null | null | null | singleimagemodel.py | severinaklingler/kaggle-ocular-disease | a6641f6005d1a7f2399b4de9e804ab3ac7f20dd2 | [
"Apache-2.0"
] | null | null | null | singleimagemodel.py | severinaklingler/kaggle-ocular-disease | a6641f6005d1a7f2399b4de9e804ab3ac7f20dd2 | [
"Apache-2.0"
] | null | null | null | from logging import getLevelName
import numpy as np
import os
import tensorflow as tf
import pathlib
import pandas as pd
import re
import matplotlib.pyplot as plt
from tensorflow.keras.models import Model
from tensorflow.keras.layers import Input
from tensorflow.keras.layers import Dense
from tensorflow.keras.layers import Flatten , Conv1D
from tensorflow.keras.layers import concatenate
from tensorflow.keras.layers import Conv2D
from tensorflow.keras.layers import MaxPooling2D,MaxPooling1D
from tensorflow.keras.utils import plot_model
from sklearn.metrics import accuracy_score
from sklearn.metrics import classification_report
import datetime
import argparse
# Global config (TODO)
random_seed = 77
data_path = "./input/ocular-disease-recognition-odir5k/preprocessed_images/"
data_path_tensor = tf.constant(data_path)
data_dir = pathlib.Path(data_path)
AUTOTUNE = tf.data.AUTOTUNE
batch_size = 32
img_height = 224
img_width = 224
class_count = 8
image_channels = 3
num_threads = 4
label_dict = {}
# tf.config.run_functions_eagerly(True)
if __name__ == '__main__':
main() | 32.514768 | 139 | 0.706982 |
85d25582e5cd6979f09e8f1ee727114f02ab78b7 | 1,377 | py | Python | graph/models.py | insung151/piro | aecbf8ce27e6e47856e2afd4a6e9e406bffa7a40 | [
"MIT"
] | null | null | null | graph/models.py | insung151/piro | aecbf8ce27e6e47856e2afd4a6e9e406bffa7a40 | [
"MIT"
] | null | null | null | graph/models.py | insung151/piro | aecbf8ce27e6e47856e2afd4a6e9e406bffa7a40 | [
"MIT"
] | null | null | null | import datetime
from django.db import models
from django.utils import timezone
# from .backends import update_data
| 28.102041 | 102 | 0.664488 |
85d2e42b2d5769672b5d6fd5964f344d0f20bc08 | 546 | py | Python | gore/models/project.py | akx/gentry | f4205f5a14054231d064657347862a15ecf4c0e0 | [
"MIT"
] | 4 | 2017-07-26T13:23:06.000Z | 2019-02-21T14:55:34.000Z | gore/models/project.py | akx/gentry | f4205f5a14054231d064657347862a15ecf4c0e0 | [
"MIT"
] | 26 | 2017-08-02T08:52:06.000Z | 2022-03-04T15:13:26.000Z | gore/models/project.py | akx/gentry | f4205f5a14054231d064657347862a15ecf4c0e0 | [
"MIT"
] | null | null | null | from django.db import models
from django.utils import timezone
from gentry.utils import make_absolute_uri
| 26 | 95 | 0.661172 |
85d2ee56a1605c4085ef6834b7da596c8770a900 | 17,167 | py | Python | features/steps/prs_steps.py | spidezad/python-pptx | eab3f55b84b54906876d5486172d5d0c457d55f8 | [
"BSD-2-Clause"
] | 1 | 2021-05-17T06:33:32.000Z | 2021-05-17T06:33:32.000Z | features/steps/prs_steps.py | spidezad/python-pptx | eab3f55b84b54906876d5486172d5d0c457d55f8 | [
"BSD-2-Clause"
] | null | null | null | features/steps/prs_steps.py | spidezad/python-pptx | eab3f55b84b54906876d5486172d5d0c457d55f8 | [
"BSD-2-Clause"
] | null | null | null | import os
from datetime import datetime, timedelta
from behave import given, when, then
from hamcrest import (
assert_that, equal_to, has_item, is_, is_not, greater_than, less_than
)
from StringIO import StringIO
from pptx import packaging
from pptx import Presentation
from pptx.constants import MSO_AUTO_SHAPE_TYPE as MAST, MSO, PP
from pptx.util import Inches
thisdir = os.path.split(__file__)[0]
scratch_dir = absjoin(thisdir, '../_scratch')
test_file_dir = absjoin(thisdir, '../../test/test_files')
basic_pptx_path = absjoin(test_file_dir, 'test.pptx')
no_core_props_pptx_path = absjoin(test_file_dir, 'no-core-props.pptx')
saved_pptx_path = absjoin(scratch_dir, 'test_out.pptx')
test_image_path = absjoin(test_file_dir, 'python-powered.png')
test_text = "python-pptx was here!"
# logging.debug("saved_pptx_path is ==> '%s'\n", saved_pptx_path)
# given ===================================================
# when ====================================================
# then ====================================================
| 33.926877 | 74 | 0.732044 |
85d564c27e33a35fe1a491c27c616e6e1a78f815 | 2,604 | py | Python | DailyProgrammer/DP20160818B.py | DayGitH/Python-Challenges | bc32f1332a92fcc2dfa6f5ea4d95f8a8d64c3edf | [
"MIT"
] | 2 | 2020-12-23T18:59:22.000Z | 2021-04-14T13:16:09.000Z | DailyProgrammer/DP20160818B.py | DayGitH/Python-Challenges | bc32f1332a92fcc2dfa6f5ea4d95f8a8d64c3edf | [
"MIT"
] | null | null | null | DailyProgrammer/DP20160818B.py | DayGitH/Python-Challenges | bc32f1332a92fcc2dfa6f5ea4d95f8a8d64c3edf | [
"MIT"
] | null | null | null | """
[2016-08-18] Challenge #279 [Intermediate] Text Reflow
https://www.reddit.com/r/dailyprogrammer/comments/4ybbcz/20160818_challenge_279_intermediate_text_reflow/
#Description:
Text reflow means to break up lines of text so that they fit within a certain width. It is useful in e.g. mobile
browsers. When you zoom in on a web page the lines will become too long to fit the width of the screen, unless the text
is broken up into shorter lines.
#Input:
You will be given a text with a maximum line width of 80 characters.
#Output:
Produce the same text with a maximum line width of 40 characters
#Challenge Input:
In the beginning God created the heavens and the earth. Now the earth was
formless and empty, darkness was over the surface of the deep, and the Spirit of
God was hovering over the waters.
And God said, "Let there be light," and there was light. God saw that the light
was good, and he separated the light from the darkness. God called the light
"day," and the darkness he called "night." And there was evening, and there was
morning - the first day.
#Challenge Output:
In the beginning God created the heavens
and the earth. Now the earth was
formless and empty, darkness was over
the surface of the deep, and the Spirit
of God was hovering over the waters.
And God said, "Let there be light," and
there was light. God saw that the light
was good, and he separated the light
from the darkness. God called the light
"day," and the darkness he called
"night." And there was evening, and
there was morning - the first day.
#Bonus:
Let's get rid of the jagged right margin of the text and make the output prettier. Output the text with full
justification; Adjusting the word spacing so that the text is flush against both the left and the right margin.
#Bonus Output:
In the beginning God created the heavens
and the earth. Now the earth was
formless and empty, darkness was over
the surface of the deep, and the Spirit
of God was hovering over the waters.
And God said, "Let there be light," and
there was light. God saw that the light
was good, and he separated the light
from the darkness. God called the light
"day," and the darkness he called
"night." And there was evening, and
there was morning - the first day.
#Finally
This challenge is posted by /u/slampropp
Also have a good challenge idea?
Consider submitting it to /r/dailyprogrammer_ideas
"""
if __name__ == "__main__":
main()
| 40.061538 | 119 | 0.720814 |
85d71db4dff31a27689c64809381f6863f31ac08 | 3,177 | py | Python | PomodoroTimer/Python/main2.py | zcribe/SmallProjectsCollection | fbd6bc9884468eba7519728e295b36b24043af27 | [
"MIT"
] | null | null | null | PomodoroTimer/Python/main2.py | zcribe/SmallProjectsCollection | fbd6bc9884468eba7519728e295b36b24043af27 | [
"MIT"
] | null | null | null | PomodoroTimer/Python/main2.py | zcribe/SmallProjectsCollection | fbd6bc9884468eba7519728e295b36b24043af27 | [
"MIT"
] | null | null | null | from time import time, sleep
from math import floor
import argparse
import csv
import datetime
# Constants
TIME_WORK = 25
TIME_REST = 5
TIME_REST_LONG = 30
ONE_MINUTE = 60
SESSIONS_WORK_MAX = 4
LOOP_LIMIT = 9999
# Console
parser = argparse.ArgumentParser(description='===== Pomodoro timer CLI =====')
parser.add_argument('-wt', '-worktime', type=int, help=f'Minutes of work in a work sessions (default {TIME_WORK})',
default=TIME_WORK, nargs='?')
parser.add_argument('-rt', '-resttime', type=int, help=f'Minutes of rest in a rest sessions (default {TIME_REST})',
default=TIME_REST, nargs='?')
parser.add_argument('-rtl', '-resttimelong', type=int,
help=f'Minutes of rest in a long rest sessions (default {TIME_REST_LONG})',
default=TIME_REST_LONG, nargs='?')
parser.add_argument('-mws', '-maxworksessions', type=int,
help=f'Number of work sessions cycles before long rest session (default {SESSIONS_WORK_MAX})',
default=SESSIONS_WORK_MAX, nargs='?')
parser.add_argument('-ll', '-looplimit', type=int,
help=f'Maximum number of total sessions (default 9999)', default=LOOP_LIMIT, nargs='?')
parser.add_argument('-log', '-logsessions', type=bool,
help='Should sessions be logged (False)', default=False, nargs='?')
arguments = vars(parser.parse_args())
time_work = arguments['wt']
time_rest = arguments['rt']
time_rest_long = arguments['rtl']
sessions_work_max = arguments['mws']
loop_lim = arguments['ll']
# Core
if __name__ == "__main__":
run()
| 33.09375 | 115 | 0.664463 |
85d8b4fdcc862d0733f6de5e3fdf2b8f4d3ba6b3 | 2,454 | py | Python | PinVidderer/PinVidderer.py | Gestas/PinVidderer | c980906cd77bf9a8cb66be022676e57e9a54702e | [
"MIT"
] | null | null | null | PinVidderer/PinVidderer.py | Gestas/PinVidderer | c980906cd77bf9a8cb66be022676e57e9a54702e | [
"MIT"
] | null | null | null | PinVidderer/PinVidderer.py | Gestas/PinVidderer | c980906cd77bf9a8cb66be022676e57e9a54702e | [
"MIT"
] | null | null | null | #!/usr/bin/env python
import signal
import click
from .client import Client
from .utils import Utils
utils = Utils()
# Route Ctrl-C (SIGINT) through the application's cleanup handler.
signal.signal(signal.SIGINT, utils.signal_handler)
# Click decorator that injects a shared Config object into command functions.
# NOTE(review): Config and cli are not defined in this chunk - presumably
# defined elsewhere in the file (stripped from this view).
pass_config = click.make_pass_decorator(Config, ensure=True)
if __name__ == "__main__":
    cli()
| 25.831579 | 119 | 0.698859 |
85db5b6d5b5a64186bb3b9c04d0a279e4a5f0c0a | 998 | py | Python | hw1/1.6/encrpyt_equals_decrypt.py | rocke97/crypto | 89c4e595adf74558e12ceb1762025fd2f0275fec | [
"MIT"
] | null | null | null | hw1/1.6/encrpyt_equals_decrypt.py | rocke97/crypto | 89c4e595adf74558e12ceb1762025fd2f0275fec | [
"MIT"
] | null | null | null | hw1/1.6/encrpyt_equals_decrypt.py | rocke97/crypto | 89c4e595adf74558e12ceb1762025fd2f0275fec | [
"MIT"
] | null | null | null | from itertools import count
from itertools import count
from string import ascii_lowercase

# Show for which Caesar-shift keys "encrypt" is its own inverse: applying the
# same forward shift twice restores the plaintext only when 2 * shift is a
# multiple of 26 (i.e. shift 0 or 13).
plain_text = 'july'
results_file = open('results.txt', 'w')  # kept from the original; never written to

# Bidirectional letter <-> number (0-25) lookup tables.
letters_to_numbers = dict(zip(ascii_lowercase, count(0)))
numbers_to_letters = dict(zip(count(0), ascii_lowercase))
plain_text_numbers = [letters_to_numbers[letter] for letter in plain_text]

for shift in range(26):
    # Encrypt then "decrypt" by shifting forward again: net rotation 2*shift.
    round_trip = ''.join(
        numbers_to_letters[(value + 2 * shift) % 26] for value in plain_text_numbers
    )
    if round_trip == plain_text:
        print('At shift = ' + str(shift) + ':')
        print('Plain text: ' + plain_text)
        print('Attempted Plain Text Decrypt: ' + round_trip)
85db89656ff34bccb3df57eb36eff9c756872dce | 2,663 | py | Python | generator.py | mann1/DD_SIM_Template | 84c7787b6b3c52f08e7031114894c98416c02fcf | [
"MIT"
] | null | null | null | generator.py | mann1/DD_SIM_Template | 84c7787b6b3c52f08e7031114894c98416c02fcf | [
"MIT"
] | null | null | null | generator.py | mann1/DD_SIM_Template | 84c7787b6b3c52f08e7031114894c98416c02fcf | [
"MIT"
] | null | null | null | import os, pickle
import numpy as np
import tensorflow as tf
if __name__ == "__main__":
train_generator = Generator('datasets/train')
val_generator = Generator('datasets/val')
print(len(train_generator))
print(len(val_generator))
data_batch, target_batch = train_generator.__getitem__(0)
print(data_batch.shape)
print(target_batch.shape)
| 32.876543 | 108 | 0.613969 |
85db99fa2aa9b948ffca4017b69512e862fe9571 | 5,096 | py | Python | src/mlb/schedule/schedule_view.py | benbrandt22/MagTagMLB | 1ec347743bc7df9339fb8e3de0f86ea037b7694f | [
"MIT"
] | null | null | null | src/mlb/schedule/schedule_view.py | benbrandt22/MagTagMLB | 1ec347743bc7df9339fb8e3de0f86ea037b7694f | [
"MIT"
] | null | null | null | src/mlb/schedule/schedule_view.py | benbrandt22/MagTagMLB | 1ec347743bc7df9339fb8e3de0f86ea037b7694f | [
"MIT"
] | null | null | null | from mlb.models.game_detail import GameDetail
import time
import board
import displayio
from adafruit_display_text import label
from adafruit_display_shapes.roundrect import RoundRect
import fonts.fonts as FONTS
from mlb.schedule.schedule_view_model import ScheduleViewModel
from time_utils import day_of_week, month_name_short, relative_day, utc_to_local, month_name, hour_12, ampm | 38.315789 | 114 | 0.649333 |
85dc9bd56f92f2f2d3c556c7f4e9a56721c6e747 | 1,307 | py | Python | bootcamp/chapter-1/strings.py | pushkar2112/Python-practice | 75f88eaa2b4f3c47570b1a11e0e221436551ce89 | [
"Apache-2.0"
] | 1 | 2021-11-23T08:36:43.000Z | 2021-11-23T08:36:43.000Z | bootcamp/chapter-1/strings.py | pushkar2112/Python-practice | 75f88eaa2b4f3c47570b1a11e0e221436551ce89 | [
"Apache-2.0"
] | 1 | 2021-07-18T12:39:40.000Z | 2021-09-08T09:48:16.000Z | bootcamp/chapter-1/strings.py | pushkar2112/Python-practice | 75f88eaa2b4f3c47570b1a11e0e221436551ce89 | [
"Apache-2.0"
] | null | null | null | # Strings are used in Python to record text information, such as names.
# Strings in Python are actually a sequence, which basically means Python keeps track
# of every element in the string as a sequence.
# For example, Python understands the string "hello' to be a sequence of letters in a specific order.
# This means we will be able to use indexing to grab particular letters
# (like the first letter, or the last letter).
s1 = 'hello'
s2 = "This is a sample string"
print('This is another sample string')
print('it"s an example to show how to use a quote in a quote')
# len function on string
print(len(s2))
# string sllicing
# string indexes starts from 0 when going left to right
# -1 from right
# [start:stop:step]
print(s2[5::])
# Reversing a string(shortcut)
print(s2[::-1])
# STRING PROPERTIES
#It's important to note that strings have an important property known as immutability.
# This means that once a string is created, the elements within it can not be changed or replaced.
# String Concatenation
print(s1 + s2)
# REPITITION
print(s1 * 5)
# Built-in Methods
cap = 'HELLO PEOPLE'
print(cap.lower())
cap = cap.lower()
print(cap.upper())
print(cap.split())
print(cap.split('h'))
#String formatting
print('this is sample formatting and {}'.format(s2)) | 25.627451 | 102 | 0.719204 |
85dcdeab8f386b3045fce501c4a13cd8e441b56a | 1,138 | py | Python | FindFT_NyquistFreq.py | PrabhjotKaurGosal/AudioAnalysisScripts | a752f62b2634022c1c2737b21998da218cef4dff | [
"MIT"
] | null | null | null | FindFT_NyquistFreq.py | PrabhjotKaurGosal/AudioAnalysisScripts | a752f62b2634022c1c2737b21998da218cef4dff | [
"MIT"
] | null | null | null | FindFT_NyquistFreq.py | PrabhjotKaurGosal/AudioAnalysisScripts | a752f62b2634022c1c2737b21998da218cef4dff | [
"MIT"
] | null | null | null | # This code finds the Fourier Tranform of a signal and the Nyquist frequency
import matplotlib.pyplot as plt
import numpy as np
import librosa
import librosa as lr
from scipy import signal
from scipy.fft import fft, ifft
import math
import matplotlib.pyplot as plt
if __name__ == "__main__":
# Read the audio files
ch1, sfreq = lr.load("ch1.wav", sr=44100)
ch2, sfreq = lr.load("ch2.wav", sr=44100)
ch3, sfreq = lr.load("ch3.wav", sr=44100)
# # # Find the spectrogram of the signal
f,t, Sxx = signal.spectrogram(ch1, fs=sfreq)
plt.pcolormesh(t, f, Sxx, shading='gouraud')
plt.ylabel('Frequency [Hz]')
plt.xlabel('Time [sec]')
plt.show()
ind_max = np.unravel_index(np.argmax(Sxx, axis=None), Sxx.shape)
ind_min = np.unravel_index(np.argmin(Sxx, axis=None), Sxx.shape)
row_max = ind_max[0]
col_max = ind_max[1]
row_min = ind_min[0]
col_min = ind_min[1]
Bandwidth = Sxx[row_max][col_max] - Sxx[row_min][col_min]
fsample = 2*Bandwidth # This is called Nyquist frequency
print("The sampling frequency of the signal must be greater than: ", fsample) | 33.470588 | 81 | 0.681019 |
85dddc830d151d3b583e5d23116cb924afd1cfe8 | 2,106 | py | Python | src/platform_controller/scripts/controlTiltMotors.py | ahmohamed1/activeStereoVisionPlatform | 6c928ca242e4de68c7b15a8748bff1d9f7fa1382 | [
"MIT"
] | null | null | null | src/platform_controller/scripts/controlTiltMotors.py | ahmohamed1/activeStereoVisionPlatform | 6c928ca242e4de68c7b15a8748bff1d9f7fa1382 | [
"MIT"
] | null | null | null | src/platform_controller/scripts/controlTiltMotors.py | ahmohamed1/activeStereoVisionPlatform | 6c928ca242e4de68c7b15a8748bff1d9f7fa1382 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
import rospy
import actionlib
from control_msgs.msg import *
from std_msgs.msg import Float64
from sensor_msgs.msg import JointState
PI = 3.14159265359
if __name__=='__main__':
main()
exit()
| 23.931818 | 104 | 0.746439 |
85dde154e71416994a5fa1e8b1afe91eea13927c | 14,888 | py | Python | py/Parser.py | Sqazine/ComputeDuck | d307d88a24601d433aa7507ea90000207a34e1f0 | [
"Apache-2.0"
] | 2 | 2021-12-05T12:38:26.000Z | 2022-03-09T02:24:44.000Z | py/Parser.py | Sqazine/ComputeDuck | d307d88a24601d433aa7507ea90000207a34e1f0 | [
"Apache-2.0"
] | null | null | null | py/Parser.py | Sqazine/ComputeDuck | d307d88a24601d433aa7507ea90000207a34e1f0 | [
"Apache-2.0"
] | null | null | null | from ast import Lambda
from enum import IntEnum
from typing import Any
from Ast import Stmt
from Ast import Expr
from Token import Token, TokenType
from Utils import Assert
from Ast import AstType, ArrayExpr, BoolExpr, ExprStmt, FunctionCallExpr, FunctionStmt, GroupExpr, IdentifierExpr, IfStmt, IndexExpr, InfixExpr, NilExpr, NumExpr, PrefixExpr, ReturnStmt, ScopeStmt, StrExpr, StructCallExpr, StructStmt, VarStmt, WhileStmt, RefExpr,LambdaExpr
| 39.076115 | 273 | 0.637493 |
85e03a75a96c393560650c8bb391a58fe00c64f1 | 302 | py | Python | code/color.py | Archkitten/sleep | dd81d8fe379d8e37c58b101d78fe258588d6c1bc | [
"MIT"
] | null | null | null | code/color.py | Archkitten/sleep | dd81d8fe379d8e37c58b101d78fe258588d6c1bc | [
"MIT"
] | null | null | null | code/color.py | Archkitten/sleep | dd81d8fe379d8e37c58b101d78fe258588d6c1bc | [
"MIT"
] | null | null | null | # COLORS
# ANSI SGR escape sequences for the eight standard foreground colors.
# SGR codes 30-37 map, in order, to black through white.
black, red, green, yellow, blue, magenta, cyan, white = (
    "\033[%dm" % sgr_code for sgr_code in range(30, 38)
)
# Newline shorthand (note: despite the name, this is a plain "\n",
# not an ANSI reset sequence).
nc = "\n"

# COLOR TESTING
| 17.764706 | 56 | 0.566225 |
85e1dc6359b959fbe3bde169c1c1df0d7df72888 | 253 | py | Python | database/urls.py | shrishtickling/train_coding | ba2918ce13379940f359e2ae253987691a00f3a9 | [
"Apache-2.0"
] | null | null | null | database/urls.py | shrishtickling/train_coding | ba2918ce13379940f359e2ae253987691a00f3a9 | [
"Apache-2.0"
] | null | null | null | database/urls.py | shrishtickling/train_coding | ba2918ce13379940f359e2ae253987691a00f3a9 | [
"Apache-2.0"
] | null | null | null | from django.urls import path
from . import views
app_name = 'database'
urlpatterns = [
path('update/', views.update),
path('update2/', views.update2),
path('update3/', views.update3),
path('upload-user/', views.create_user_dataset)
]
| 19.461538 | 51 | 0.675889 |
85e2f348622632176aeb4fc8c874b128321e99b9 | 135 | py | Python | CodeWars/7 Kyu/Unlucky Days.py | anubhab-code/Competitive-Programming | de28cb7d44044b9e7d8bdb475da61e37c018ac35 | [
"MIT"
] | null | null | null | CodeWars/7 Kyu/Unlucky Days.py | anubhab-code/Competitive-Programming | de28cb7d44044b9e7d8bdb475da61e37c018ac35 | [
"MIT"
] | null | null | null | CodeWars/7 Kyu/Unlucky Days.py | anubhab-code/Competitive-Programming | de28cb7d44044b9e7d8bdb475da61e37c018ac35 | [
"MIT"
] | null | null | null | from datetime import datetime | 33.75 | 80 | 0.733333 |
85e2fec7419e462992cdef82f856f348913b6d84 | 714 | py | Python | backend/errors.py | cryptSky/hlsa_task7 | 40365033e24ec147640f828cccc69f3711eedfc0 | [
"MIT"
] | 1 | 2021-05-20T06:04:13.000Z | 2021-05-20T06:04:13.000Z | backend/errors.py | cryptSky/hlsa_task7 | 40365033e24ec147640f828cccc69f3711eedfc0 | [
"MIT"
] | null | null | null | backend/errors.py | cryptSky/hlsa_task7 | 40365033e24ec147640f828cccc69f3711eedfc0 | [
"MIT"
] | null | null | null | from werkzeug.exceptions import HTTPException
# Error catalogue keyed by exception-class name.  Each entry carries the
# client-facing message and the HTTP status code to respond with.
_ERROR_DEFINITIONS = (
    ("InternalServerError", "Oops something wrong", 500),
    ("SchemaValidationError", "Required fields missing", 400),
    ("UserNotFoundError", "User not found in database", 400),
    ("EmailAlreadyExistError",
     "User with specified email already exists in database", 400),
)

errors = {
    name: {"message": message, "status": status}
    for name, message, status in _ERROR_DEFINITIONS
}
85e31f8319151021136e63792aab66a8fe4825ad | 421 | py | Python | scripts/read_radar.py | jdiasn/raincoat | b0249c88f1a5ca22a720285e87be4b06b67705b5 | [
"MIT"
] | 1 | 2020-04-22T05:41:08.000Z | 2020-04-22T05:41:08.000Z | scripts/read_radar.py | jdiasn/raincoat | b0249c88f1a5ca22a720285e87be4b06b67705b5 | [
"MIT"
] | null | null | null | scripts/read_radar.py | jdiasn/raincoat | b0249c88f1a5ca22a720285e87be4b06b67705b5 | [
"MIT"
] | 4 | 2019-01-01T11:33:14.000Z | 2021-01-04T20:34:43.000Z | from raincoat.radarFunctions import getVarTimeRange, getRadarVar
import pandas as pd
data = getRadarVar('../samplefiles/radar/181202_000000_P09_ZEN_compact.nc',
'2001.01.01. 00:00:00',
'Ze')
start = pd.to_datetime('2018-12-02 00:00:00', format='%Y-%m-%d %H:%M:%S')
stop = pd.to_datetime('2018-12-02 01:00:00',format='%Y-%m-%d %H:%M:%S')
data = getVarTimeRange(data,1,2000, start, stop)
| 35.083333 | 75 | 0.655582 |
85e397373b9dc700b3ec2e1bd8bc94f48fdddec5 | 1,527 | py | Python | gecko/geckolib/driver/protocol/reminders.py | mmillmor/home_assistant-components | 625f97413bd6516a2358220a80819b85cc8072c6 | [
"Apache-2.0"
] | null | null | null | gecko/geckolib/driver/protocol/reminders.py | mmillmor/home_assistant-components | 625f97413bd6516a2358220a80819b85cc8072c6 | [
"Apache-2.0"
] | null | null | null | gecko/geckolib/driver/protocol/reminders.py | mmillmor/home_assistant-components | 625f97413bd6516a2358220a80819b85cc8072c6 | [
"Apache-2.0"
] | 1 | 2022-03-07T20:04:05.000Z | 2022-03-07T20:04:05.000Z | """ Gecko REQRM/RMREQ handlers """
import logging
import struct
from .packet import GeckoPacketProtocolHandler
REQRM_VERB = b"REQRM"
RMREQ_VERB = b"RMREQ"
_LOGGER = logging.getLogger(__name__)
| 30.54 | 87 | 0.606418 |
85e52bfde40a74e45c8231c717edf8c32b7d97fa | 376 | py | Python | components/studio/apps/migrations/0005_auto_20210209_1244.py | aitmlouk/stackn | c8029394a15b03796a4864938f9db251b65c7354 | [
"Apache-2.0"
] | 25 | 2020-05-08T22:24:54.000Z | 2022-03-11T18:16:58.000Z | components/studio/apps/migrations/0005_auto_20210209_1244.py | aitmlouk/stackn | c8029394a15b03796a4864938f9db251b65c7354 | [
"Apache-2.0"
] | 75 | 2020-05-08T22:15:59.000Z | 2021-11-22T10:00:04.000Z | components/studio/apps/migrations/0005_auto_20210209_1244.py | aitmlouk/stackn | c8029394a15b03796a4864938f9db251b65c7354 | [
"Apache-2.0"
] | 12 | 2020-11-04T13:09:46.000Z | 2022-03-14T16:22:40.000Z | # Generated by Django 2.2.13 on 2021-02-09 12:44
from django.db import migrations
| 19.789474 | 48 | 0.598404 |
85e79f4d2b450460c3e188d3ec311565e5eee0d2 | 30,714 | py | Python | SoundServer.py | yoyoberenguer/SoundServer | 3a824a8f519f205d5f4c277d314cb92732a157b1 | [
"MIT"
] | null | null | null | SoundServer.py | yoyoberenguer/SoundServer | 3a824a8f519f205d5f4c277d314cb92732a157b1 | [
"MIT"
] | null | null | null | SoundServer.py | yoyoberenguer/SoundServer | 3a824a8f519f205d5f4c277d314cb92732a157b1 | [
"MIT"
] | null | null | null | # encoding: utf-8
__version__ = "1.0.1"
try:
import pygame
from pygame import mixer
except ImportError:
raise ImportError("\n<pygame> library is missing on your system."
"\nTry: \n C:\\pip install pygame on a window command prompt.")
from time import time
| 39.226054 | 119 | 0.513088 |
85e90c8a65010ce9ecba5749d22457498fa4d999 | 2,931 | py | Python | tests/extmethods/run.py | dariobig/pyangbind | db0808f719bb963dac85606fddd65a1930a84aef | [
"Apache-2.0"
] | 1 | 2020-04-01T05:45:41.000Z | 2020-04-01T05:45:41.000Z | tests/extmethods/run.py | dariobig/pyangbind | db0808f719bb963dac85606fddd65a1930a84aef | [
"Apache-2.0"
] | null | null | null | tests/extmethods/run.py | dariobig/pyangbind | db0808f719bb963dac85606fddd65a1930a84aef | [
"Apache-2.0"
] | 3 | 2016-11-01T23:51:35.000Z | 2018-05-23T10:09:08.000Z | #!/usr/bin/env python
import os
import sys
import getopt
TESTNAME = "extmethods"
# generate bindings in this folder
def main():
    """Generate pyangbind bindings for the extmethods test YANG module, then
    exercise the generated classes' extmethods hooks (commit/presave/postsave/
    oam_check and argument echoing), cleaning up the generated files unless
    -k/--keepfiles is given."""
    try:
        opts, args = getopt.getopt(sys.argv[1:], "k", ["keepfiles"])
    except getopt.GetoptError as e:
        sys.exit(127)
    # -k / --keepfiles: keep the generated bindings.py after the run.
    k = False
    for o, a in opts:
        if o in ["-k", "--keepfiles"]:
            k = True
    # Resolve interpreter and tool locations from the environment.
    pythonpath = os.environ.get("PATH_TO_PYBIND_TEST_PYTHON") if \
        os.environ.get('PATH_TO_PYBIND_TEST_PYTHON') is not None \
        else sys.executable
    pyangpath = os.environ.get('PYANGPATH') if \
        os.environ.get('PYANGPATH') is not None else False
    pyangbindpath = os.environ.get('PYANGBINDPATH') if \
        os.environ.get('PYANGBINDPATH') is not None else False
    assert pyangpath is not False, "could not find path to pyang"
    assert pyangbindpath is not False, "could not resolve pyangbind directory"
    # Build and run the pyang command that writes bindings.py next to this file.
    this_dir = os.path.dirname(os.path.realpath(__file__))
    cmd = "%s " % pythonpath
    cmd += "%s --plugindir %s/pyangbind/plugin" % (pyangpath, pyangbindpath)
    cmd += " -f pybind -o %s/bindings.py" % this_dir
    cmd += " -p %s" % this_dir
    cmd += " --use-extmethods"
    cmd += " %s/%s.yang" % (this_dir, TESTNAME)
    os.system(cmd)
    # Attach an extmethods implementation at path /item/one.
    # NOTE(review): extmethodcls is not defined in this chunk - presumably a
    # class defined above (stripped from this view).
    extdict = {
        '/item/one': extmethodcls()
    }
    from bindings import extmethods
    x = extmethods(extmethods=extdict)
    # (hook name, should the _<name> method exist, expected return value)
    results = [
        ("commit", True, "COMMIT_CALLED"),
        ("presave", True, "PRESAVE_CALLED"),
        ("postsave", True, "POSTSAVE_CALLED"),
        ("oam_check", True, "OAM_CHECK_CALLED"),
        ("doesnotexist", False, "")
    ]
    for chk in results:
        method = getattr(x.item.one, "_" + chk[0], None)
        # NOTE(review): this message has two %s placeholders but a single
        # non-tuple argument, so the format itself would raise TypeError if
        # this assertion ever failed.
        assert (method is not None) == chk[1], \
            "Method %s retrieved incorrectly, method was: %s" % method
        if method is not None:
            result = method()
            assert result == chk[2], "Incorrect return from %s -> %s != %s" \
                % (chk[0], result, chk[2])
    # Positional and keyword arguments must be echoed back with caller info.
    expected_return = {'args': ('one',), 'kwargs': {'caller': ['item', 'one'],
                       'two': 2, 'path_helper': False}}
    assert x.item.one._echo('one', two=2) == expected_return, \
        "args+kwargs not echoed correctly"
    # Attributes with no extmethods entry must not be settable.
    try:
        x.item.two = False
        assert False, \
            "incorrectly set an attribute that did not exist in extmethods"
    except AttributeError:
        pass
    # Clean up generated artifacts unless the user asked to keep them.
    if not k:
        os.system("/bin/rm %s/bindings.py" % this_dir)
        os.system("/bin/rm %s/bindings.pyc" % this_dir)
if __name__ == '__main__':
    main()
| 29.019802 | 76 | 0.604572 |
85eb93c822a019fc750d57de9e82b6de5c0352f3 | 790 | py | Python | scripts/solved/031_TRAN.py | akikuno/rosalind | 7015dc63e493d870e5789e99f2ee523a9b1f5ab9 | [
"MIT"
] | null | null | null | scripts/solved/031_TRAN.py | akikuno/rosalind | 7015dc63e493d870e5789e99f2ee523a9b1f5ab9 | [
"MIT"
] | null | null | null | scripts/solved/031_TRAN.py | akikuno/rosalind | 7015dc63e493d870e5789e99f2ee523a9b1f5ab9 | [
"MIT"
] | null | null | null | # https://rosalind.info/problems/tran/
file = "data/tran.txt"
def read_fasta(file: str):
    """Parse a FASTA file into parallel header and sequence lists.

    Args:
        file: path of the FASTA file to read.

    Returns:
        A tuple ``(headers, seqs)`` where ``headers[i]`` is the i-th record
        name (without the leading ``>``) and ``seqs[i]`` is that record's
        sequence with line wrapping removed.
    """
    with open(file) as handle:  # fix: the original reused `f` for both the
        lines = handle.read().splitlines()  # handle and the loop variable
    headers = []
    seqs = []
    new_record = True  # True until the first sequence line of a record is seen
    for line in lines:
        # fix: FASTA headers *start* with '>'; the original used `">" in line`,
        # which would misclassify any sequence/comment line containing '>'.
        if line.startswith(">"):
            headers.append(line[1:])
            new_record = True
        elif new_record:
            seqs.append(line)
            new_record = False
        else:
            seqs[-1] += line  # continuation of a wrapped sequence
    return headers, seqs
# Compare the two aligned sequences base-by-base and tally point mutations:
# transitions (A<->G, C<->T) versus transversions (everything else), then
# print the transition/transversion ratio (Rosalind TRAN).
_, seq = read_fasta(file)
seq1, seq2 = seq
transition = 0
transversion = 0
import re
for base1, base2 in zip(seq1, seq2):
    if base1 == base2:
        continue
    mutation = base1 + base2
    # Two-character pair: fullmatch against the transition pairs is
    # equivalent to the original alternation match.
    if re.fullmatch(r"AG|GA|CT|TC", mutation):
        transition += 1
    else:
        transversion += 1
print(transition / transversion)
85eee91df955bbbecc3244390928eb3edef3f3cf | 5,730 | py | Python | trueWorld.py | str1k/dwatravel | 47be1377a60431ffcb3ad09846be7647d44e5760 | [
"MIT"
] | null | null | null | trueWorld.py | str1k/dwatravel | 47be1377a60431ffcb3ad09846be7647d44e5760 | [
"MIT"
] | null | null | null | trueWorld.py | str1k/dwatravel | 47be1377a60431ffcb3ad09846be7647d44e5760 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
import requests
from bs4 import BeautifulSoup
import re
import time
import MySQLdb
s = requests.Session()
login_url = "http://www.trueworldagency.com/member/login_ok.asp"
login_data = {'upw': '07092519', 'uid': '07092519'}
s.post(login_url, login_data)
counter = 0
trip_bookingID = []
trip_period = []
trip_programDN = []
trip_programName = []
trip_HasDC = []
trip_price = []
trip_seatAvailable = []
result = s.get("http://www.trueworldagency.com/index.asp?area=1#gnb").text
soup = BeautifulSoup(result, "lxml")
while(True):
counter = counter + 1
if counter = 50000:
counter = 0
if counter%50 == 0:
s.post(login_url, login_data)
conn = MySQLdb.connect(host= "localhost",
user="dwatravel",
passwd="ni00065996",
db="trueworld_DB")
x = conn.cursor()
for tag in soup.findAll('tr'):
if "chk" in str(tag):
allTd = list(tag.find_all('td'))
#Get booking ID
regRet = re.search("(?<=\()(.*)(?=\))",str(allTd[0]))
bookingItem = regRet.group().split(',')
bookingItem[1] = bookingItem[1].replace(" ","")
bookingItem[2] = bookingItem[2].replace("\'","")
bookingItem[2] = bookingItem[2].replace(" ","")
trip_bookingID.append(bookingItem)
#Get Trip period
periodItem = []
val = str(allTd[1]).split(" ")
regRet = re.search("\d{2}\/\w+\/\d{4}",val[0])
periodItem.append(regRet.group())
regRet = re.search("\d{2}\/\w+\/\d{4}",val[2])
periodItem.append(regRet.group())
trip_period.append(periodItem)
#Get Trip Code
trip_programDN.append(str(allTd[2]).replace("<td>","").replace("</td>",""))
#Get Trip Name
regRet = re.search("(?<=\>)(.*)(?=\<)",str(allTd[3]))
nameItem = regRet.group()
trip_programName.append(nameItem)
#Get price
if "</span>" in str(allTd[4]):
priceItem = []
val = str(allTd[4]).split("</span>")
regRet = re.search("\d+,\d+",val[0])
priceItem.append(regRet.group())
regRet = re.search("\d+,\d+",val[1])
priceItem.append(regRet.group())
trip_price.append(priceItem)
trip_HasDC.append(True)
else:
regRet = re.search("\d+,\d+",str(allTd[4]))
trip_price.append(regRet.group())
trip_HasDC.append(False)
#Get Available seat
if "Booking" in str(allTd[6]):
trip_seatAvailable.append(re.search("\d+",str(allTd[5])).group())
elif "Waiting" in str(allTd[6]):
trip_seatAvailable.append("0")
else:
trip_seatAvailable.append(re.search("\d+",str(allTd[5])).group())
#Get Flight code
'''
if "Agency com." in str(tag):
print(tag)
'''
'''
for i in range(0,len(trip_bookingID)):
print("Program Name : "+ str(trip_programName[i]))
print("Booking ID :"+ str(trip_bookingID[i][0]) + ": :" + str(trip_bookingID[i][1]) + ": :" + str(trip_bookingID[i][2]))
print("From " + trip_period[i][0] + " To " + trip_period[i][1])
print("Period : " + trip_programDN[i])
if trip_HasDC[i]:
print("Price : " + trip_price[i][0] + " Discounted Price : " + trip_price[i][1])
else:
print("Price : " + trip_price[i])
print("Available Seat : " + trip_seatAvailable[i])
print("\n\n")
'''
tour_desNum = "0"
for tag in soup.findAll('p'):
if "onclick" in str(tag):
regRet = re.search("\d+",str(tag))
tour_desNum = regRet.group()
print(tour_desNum)
result = s.get("http://www.trueworldagency.com/viewer/program_tour.asp?seq="+tour_desNum).text
tourDes = BeautifulSoup(result, "lxml")
for tag in tourDes.findAll('p'):
if "program" in str(tag):
print (str(tag.text))
x.execute("SELECT * FROM trip_trueworld")
queryRet = x.fetchall()
for i in range(0,len(trip_bookingID)):
booking_key = str(trip_bookingID[i][0]) + "gnb" + str(trip_bookingID[i][1])
trip_desc = str(trip_bookingID[i][0]) + "gnb"
existed = 0
foundMatchedSeat = 0
for row in queryRet:
if booking_key == row[0]:
existed = 1
foundMatchedSeat = row[5]
if existed == 1:
#UPDATE seat
#print(foundMatchedSeat + " "+ trip_seatAvailable[i])
if foundMatchedSeat != trip_seatAvailable[i]:
x.execute ("UPDATE trip_trueworld SET seat_available=%s WHERE booking_key=%s",\
(trip_seatAvailable[i], booking_key))
#x.execute ("UPDATE trip_trueworld SET seat_available=%s WHERE booking_key=%s",(1232,'1gnb1900'))
print(" Successfully update row : "+booking_key)
else:
print(" Everything is up-to-date : "+booking_key )
conn.commit()
time.sleep(0.1)
else:
#INSERT row
dep = time.strptime(trip_period[i][0], "%d/%b/%Y")
ret = time.strptime(trip_period[i][0], "%d/%b/%Y")
if trip_HasDC[i]:
x.execute("""INSERT INTO trip_trueworld VALUES (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)""",\
(booking_key,str(trip_bookingID[i][0]),str(trip_bookingID[i][1]),str(trip_bookingID[i][2]),"gnb",trip_seatAvailable[i], \
time.strftime('%Y-%m-%d %H:%M:%S', dep),time.strftime('%Y-%m-%d %H:%M:%S', ret),trip_price[i][0].replace(",",""),trip_price[i][1].replace(",",""),trip_price[i][0].replace(",",""),trip_price[i][1].replace(",",""), \
4900,8900,6900,900,300,trip_desc))
else:
x.execute("""INSERT INTO trip_trueworld VALUES (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)""",\
(booking_key,str(trip_bookingID[i][0]),str(trip_bookingID[i][1]),str(trip_bookingID[i][2]),"gnb",trip_seatAvailable[i], \
time.strftime('%Y-%m-%d %H:%M:%S', dep),time.strftime('%Y-%m-%d %H:%M:%S', ret),trip_price[i].replace(",",""),trip_price[i].replace(",",""),trip_price[i].replace(",",""),trip_price[i].replace(",",""), \
4900,8900,6900,900,300,trip_desc))
conn.commit()
time.sleep(0.1)
conn.close()
for i in range(1, 15):
print("Waiting for next update in " + str(15-i) + " sec")
time.sleep(1) | 35.590062 | 218 | 0.621466 |
85ef49b97d17705c81cdeeb0ece8add9c7768f1d | 6,718 | py | Python | extract_data_1.1.py | stanlee321/bolivia_power | 4c86be2be8b81fead5ba9f1d50f32233cd54c1fc | [
"MIT"
] | null | null | null | extract_data_1.1.py | stanlee321/bolivia_power | 4c86be2be8b81fead5ba9f1d50f32233cd54c1fc | [
"MIT"
] | null | null | null | extract_data_1.1.py | stanlee321/bolivia_power | 4c86be2be8b81fead5ba9f1d50f32233cd54c1fc | [
"MIT"
] | null | null | null |
# Code for extract the information from the web
# with the <id> information into the bolivia_power_1.csv file
# input: bolivia_power_1.id.csv
# output 6x.npy array file:
# <nodes_ids.lat,lon> <node.tags>
# <way.ids> <way.ref> <way.tags>
# ...
# v. 1.1
#import pandas as pd
import numpy as np
import pandas as pd
# Data from Bolivia_power
path_to_csv_power_data = '/notebooks/Power/data/bolivia_power_1.csv'
df_bolivia_power= pd.read_csv(path_to_csv_power_data,delimiter=',',sep=',', error_bad_lines=False)
df_bolivia_power.columns = ['type','id','name_1','name_2','name_3','name_4']
df_bolivia_power.head()
# As array Type and id
df2_type = np.asarray(df_bolivia_power['type'])
df2_id = np.asarray(df_bolivia_power['id'])
# Return to Pandas DataFrame
data_frame_type = pd.DataFrame(df2_type)
data_frame_id = pd.DataFrame(df2_id)
print(len(df2_type))
# AS a unique DataFrame
M = np.ones((len(df2_type),2))
data_frame = pd.DataFrame(M, columns=['type', 'id'])
data_frame['type'] = data_frame_type
data_frame['id'] = data_frame_id
data_frame.head()
## Extracting the data from the web
import urllib.request
from urllib.error import URLError, HTTPError
print("starting to download the files...")
# function fur Convert to pandasdataframe from str
##################FUR NODES #####################
import xml.etree.ElementTree as ET
#################################################
extract_data()
print('finished node,way,relation')
print('saving list arrays into disk....')
#node, ways, relations = extract_data()
"""""
xml_data = node[0]
etree = ET.fromstring(xml_data) #create an ElementTree object
d = pd.DataFrame(list(iter_docs(etree)))
data_list=[] # create list for append every dataframe
for i in range(1,len(node)):
xml_data = node[i]
etree = ET.fromstring(xml_data) #create an ElementTree object
doc_df = pd.DataFrame(list(iter_docs(etree)))
data_list.append(doc_df)
d = d.append(data_list[-1],ignore_index=True)
d.head()
d.to_csv('/notebooks/Power/data/power_node.csv', sep=',', encoding='utf-8',index = False)
#########################################################################################
##############################################FUR WAYS#####################################################################
def iter_docs_way(author):
author_attr = author.attrib
for doc in author.iterfind('.//way'):
doc_dict = author_attr.copy()
doc_dict.update(doc.attrib)
doc_dict['data'] = doc.text
yield doc_dict
xml_data = node[0]
etree = ET.fromstring(xml_data) #create an ElementTree object
w = pd.DataFrame(list(iter_docs(etree)))
data_list_way=[] # create list for append every dataframe
for i in range(1,len(way)):
xml_data = node[i]
etree = ET.fromstring(xml_data) #create an ElementTree object
doc_df = pd.DataFrame(list(iter_docs_way(etree)))
data_list.append(doc_df)
w = w.append(data_list[-1],ignore_index=True)
w.head()
w.to_csv('/notebooks/Power/data/power_way.csv', sep=',', encoding='utf-8',index = False)
#########################################################################################
########################################################## FUR Relation ##################################################
def iter_docs_rel(author):
author_attr = author.attrib
for doc in author.iterfind('.//way'):
doc_dict = author_attr.copy()
doc_dict.update(doc.attrib)
doc_dict['data'] = doc.text
yield doc_dict
xml_data = node[0]
etree = ET.fromstring(xml_data) #create an ElementTree object
r = pd.DataFrame(list(iter_docs_rel(etree)))
data_list_way=[] # create list for append every dataframe
for i in range(1,len(relation)):
xml_data = node[i]
etree = ET.fromstring(xml_data) #create an ElementTree object
doc_df = pd.DataFrame(list(iter_docs_rel(etree)))
data_list.append(doc_df)
r = r.append(data_list[-1],ignore_index=True)
r.head()
r.to_csv('/notebooks/Power/data/power_rel.csv', sep=',', encoding='utf-8',index = False) """ | 28.108787 | 124 | 0.539744 |
85f0ac57ac9d5511f94d39253027463025311137 | 194 | py | Python | tests/api/utils/test_config.py | Devansh3712/py-cmc | e3f9687914d92cd95bd5a7c04e6103345ba43a3d | [
"MIT"
] | 2 | 2022-02-14T07:13:12.000Z | 2022-02-14T07:20:34.000Z | tests/api/utils/test_config.py | Devansh3712/py-cmc | e3f9687914d92cd95bd5a7c04e6103345ba43a3d | [
"MIT"
] | 6 | 2022-02-21T10:50:43.000Z | 2022-03-03T15:44:09.000Z | tests/api/utils/test_config.py | Devansh3712/py-cmc | e3f9687914d92cd95bd5a7c04e6103345ba43a3d | [
"MIT"
] | 2 | 2022-02-20T01:43:35.000Z | 2022-03-13T09:34:51.000Z | from api.utils.config import settings
| 24.25 | 39 | 0.71134 |
85f402a990563be3704e3ce90f8e5fbc80ebcb6e | 526 | py | Python | Practice/Problem Solving/MaximizingXOR.py | avantikasharma/HackerRank-Solutions | a980859ac352688853fcbcf3c7ec6d95685f99ea | [
"MIT"
] | 1 | 2018-07-08T15:44:15.000Z | 2018-07-08T15:44:15.000Z | Practice/Problem Solving/MaximizingXOR.py | avantikasharma/HackerRank-Solutions | a980859ac352688853fcbcf3c7ec6d95685f99ea | [
"MIT"
] | null | null | null | Practice/Problem Solving/MaximizingXOR.py | avantikasharma/HackerRank-Solutions | a980859ac352688853fcbcf3c7ec6d95685f99ea | [
"MIT"
] | 2 | 2018-08-10T06:49:34.000Z | 2020-10-01T04:50:59.000Z | #!/bin/python3
import math
import os
import random
import re
import sys
# Complete the maximizingXor function below.
if __name__ == '__main__':
    # HackerRank I/O harness: read l and r from stdin and write the result to
    # the file named by the OUTPUT_PATH environment variable.
    fptr = open(os.environ['OUTPUT_PATH'], 'w')
    l = int(input())
    r = int(input())
    # NOTE(review): maximizingXor is not defined in this chunk - the solution
    # function appears to have been stripped from this view.
    result = maximizingXor(l, r)
    fptr.write(str(result) + '\n')
    fptr.close()
| 17.533333 | 47 | 0.58365 |
85f4fbb2d3d898f2e76022a95446b29026af0760 | 2,216 | py | Python | Sketches/MH/Layout/Visualisation/Graph/GridRenderer.py | sparkslabs/kamaelia_orig | 24b5f855a63421a1f7c6c7a35a7f4629ed955316 | [
"Apache-2.0"
] | 12 | 2015-10-20T10:22:01.000Z | 2021-07-19T10:09:44.000Z | Sketches/MH/Layout/Visualisation/Graph/GridRenderer.py | sparkslabs/kamaelia_orig | 24b5f855a63421a1f7c6c7a35a7f4629ed955316 | [
"Apache-2.0"
] | 2 | 2015-10-20T10:22:55.000Z | 2017-02-13T11:05:25.000Z | Sketches/MH/Layout/Visualisation/Graph/GridRenderer.py | sparkslabs/kamaelia_orig | 24b5f855a63421a1f7c6c7a35a7f4629ed955316 | [
"Apache-2.0"
] | 6 | 2015-03-09T12:51:59.000Z | 2020-03-01T13:06:21.000Z | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright 2010 British Broadcasting Corporation and Kamaelia Contributors(1)
#
# (1) Kamaelia Contributors are listed in the AUTHORS file and at
# http://www.kamaelia.org/AUTHORS - please extend this file,
# not this notice.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# -------------------------------------------------------------------------
# Simple topography viewer server - takes textual commands from a single socket
# and renders the appropriate graph
import pygame
| 38.206897 | 83 | 0.584386 |
85f5bac9de2e5415cb10c2fbf5dabb5fee1eb3a8 | 436 | py | Python | config.py | anggerwicaksono/vehicle-brand-recognition-yolov4-python | b58a76481bd499ff77deb037f5791119a9572d0c | [
"MIT"
] | null | null | null | config.py | anggerwicaksono/vehicle-brand-recognition-yolov4-python | b58a76481bd499ff77deb037f5791119a9572d0c | [
"MIT"
] | null | null | null | config.py | anggerwicaksono/vehicle-brand-recognition-yolov4-python | b58a76481bd499ff77deb037f5791119a9572d0c | [
"MIT"
] | null | null | null | # Copyright 2020 by Spectrico
# Licensed under the MIT License
model_file = "model-weights-spectrico-car-colors-recognition-mobilenet_v3-224x224-180420.pb" # path to the car color classifier
label_file = "labelsC.txt" # path to the text file listing the supported class labels (car colors - the previous "makes and models" wording appears copied from a different classifier; TODO confirm)
input_layer = "input_1"  # name of the graph's input tensor
output_layer = "Predictions/Softmax/Softmax"  # name of the graph's output (softmax) tensor
classifier_input_size = (224, 224) # input size of the classifier
| 48.444444 | 128 | 0.784404 |
85f686d400d73419843a0643d08f81afb4fe05ef | 4,417 | py | Python | interface_report_interactive.py | hpreston/network_info_scripts | b25076eb6f55a7f7335f6cae1a4c3c00ce9aa191 | [
"MIT"
] | 20 | 2019-05-11T03:08:52.000Z | 2022-01-13T13:44:22.000Z | interface_report_interactive.py | hpreston/network_info_scripts | b25076eb6f55a7f7335f6cae1a4c3c00ce9aa191 | [
"MIT"
] | 4 | 2020-02-26T23:25:59.000Z | 2021-12-13T19:59:01.000Z | interface_report_interactive.py | hpreston/network_info_scripts | b25076eb6f55a7f7335f6cae1a4c3c00ce9aa191 | [
"MIT"
] | 8 | 2019-05-20T02:27:40.000Z | 2021-07-07T18:49:45.000Z | #! /usr/bin/env python
"""Exploring Genie's ability to gather details and write to CSV
This script is meant to be run line by line interactively in a Python
interpretor (such as iPython) to learn how the Genie and csv libraries work.
This script assumes you have a virl simulation running and a testbed file
created.
Example:
virl up --provision virlfiles/5_router_mesh
virl generate pyats -o testbed.yaml
Copyright (c) 2018 Cisco and/or its affiliates.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
# Import the Genie library
from genie.conf import Genie
# Create a testbed object
testbed = Genie.init("testbed.yaml")
# Take a look at the devices that are in the testbed
print(testbed.devices)
# Create a "convenience" variable for one device
iosv1 = testbed.devices["iosv-1"]
# Connect to the router
iosv1.connect()
# Check that you are connected
# (a bare expression like this only echoes its value in an interactive shell)
iosv1.connected
# Run the "show interfaces" command and "parse" results to Python object
interfaces = iosv1.parse("show interfaces")
# Print the parsed data
print(interfaces)
# That's a lot of data, let's explore it some..
# Look at the first set of dictionary keys available
interfaces.keys()
# Now let's checkout one interface in a pretty printed way
from pprint import pprint
pprint(interfaces["GigabitEthernet0/0"])
# Much nicer... now let's just get the mac-address for one interface
interfaces["GigabitEthernet0/0"]["mac_address"]
# Suppose we wanted the IP address...
interfaces["GigabitEthernet0/0"]["ipv4"]
# Now let's create a CSV file of the MAC Addresses for each interface
# Import in the CSV library
import csv
# Name our CSV file
interface_file = "interfaces.csv"
# Let's setup the headers for our CSV file
report_fields = ["Interface", "MAC Address"]
# Now let's open up our file and create our report
# This whole block of text from `with` and everything
# indented under it will run at once. Copy or type it all in.
# DON'T FORGET TO SPACE OVER IF TYPING MANUALLY
# NOTE(review): the csv docs recommend open(..., newline="") to avoid blank
# rows on Windows - confirm before reusing this outside the lab
with open(interface_file, "w") as f:
    # Create a DictWriter object
    writer = csv.DictWriter(f, report_fields)
    # Write the header row
    writer.writeheader()
    # Loop over each interface and write a row
    for interface, details in interfaces.items():
        writer.writerow({"Interface": interface, "MAC Address": details["mac_address"]})
# Uh oh.. did you get a "KeyError: 'mac_address'"?
# That's because Loopbacks do NOT have mac_addresses.
# See for yourself...
interfaces["Loopback0"].keys()
# So we need to create our code so we can handle interfaces without mac-addresses
# Several ways you COULD do it, here's one. A "try... except... " block
# (opening with "w" again truncates the file, so the report is rebuilt from scratch)
with open(interface_file, "w") as f:
    writer = csv.DictWriter(f, report_fields)
    writer.writeheader()
    for interface, details in interfaces.items():
        # Try to write a row with a mac-address
        try:
            writer.writerow(
                {
                    "Interface": interface,
                    "MAC Address": details["mac_address"],
                }
            )
        except KeyError:
            # If there isn't one... use "N/A"
            writer.writerow(
                {
                    "Interface": interface,
                    "MAC Address": "N/A"}
            )
# Great... let's see what was written.
# Open up the file again for "r"eading (also the default)
with open(interface_file, "r") as f:
    # Just print it out
    print(f.read())
# Great job!
| 33.462121 | 88 | 0.713607 |
85f74ccca3d8f227ec09283215d9c1ace1b61121 | 1,159 | py | Python | app/priu.py | robhaswell/powerstrip-restrict-image-user | d6a5dbb19330f1ee5b384095c1010636af12120d | [
"Apache-2.0"
] | null | null | null | app/priu.py | robhaswell/powerstrip-restrict-image-user | d6a5dbb19330f1ee5b384095c1010636af12120d | [
"Apache-2.0"
] | null | null | null | app/priu.py | robhaswell/powerstrip-restrict-image-user | d6a5dbb19330f1ee5b384095c1010636af12120d | [
"Apache-2.0"
] | null | null | null | import os, sys
import json as _json
from flask import Flask, Response, request
app = Flask(__name__)
app.debug = True
import lib
if __name__ == "__main__":
    # The allowed image user must be supplied via the USER environment
    # variable (e.g. `docker run -e USER=<user>`).
    try:
        app.config['ALLOWED_USER'] = os.environ['USER']
    except KeyError:
        # Missing configuration: explain how to provide it and exit non-zero.
        sys.stdout.write("""Error: Configuration environment variable USER not provided.
Specify an image username on the Docker command-line by using docker run -e USER=<user>.
Use the user "_" to only allow official Docker images.
""")
        sys.exit(1)
    # Run the Flask app on port 80.
    app.run(port=80)
| 26.340909 | 88 | 0.637619 |
85f7c87317fb94af50f148e6f619929fe75f47af | 1,316 | py | Python | app/gather/api/serializers.py | eHealthAfrica/gather | 88d96009c5f9832b564d13fa66d63841a7fbcd90 | [
"Apache-2.0"
] | 2 | 2019-09-25T18:37:30.000Z | 2019-09-25T18:37:39.000Z | app/gather/api/serializers.py | eHealthAfrica/gather | 88d96009c5f9832b564d13fa66d63841a7fbcd90 | [
"Apache-2.0"
] | 41 | 2015-07-29T14:10:05.000Z | 2021-09-13T07:07:41.000Z | app/gather/api/serializers.py | eHealthAfrica/gather | 88d96009c5f9832b564d13fa66d63841a7fbcd90 | [
"Apache-2.0"
] | 2 | 2019-11-12T23:09:35.000Z | 2020-03-11T16:39:35.000Z | # Copyright (C) 2019 by eHealth Africa : http://www.eHealthAfrica.org
#
# See the NOTICE file distributed with this work for additional information
# regarding copyright ownership.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aether.sdk.multitenancy.serializers import (
DynamicFieldsModelSerializer,
MtPrimaryKeyRelatedField,
MtModelSerializer,
)
from .models import Survey, Mask
| 28 | 75 | 0.729483 |
85f9b76b3b5d2af9bcf373c178d927992a22b317 | 280 | py | Python | CCC/CCC '19 J3 - Cold Compress.py | Joon7891/Competitive-Programming | d860b7ad932cd5a6fb91fdc8c53101da57f4a408 | [
"MIT"
] | 2 | 2021-04-13T00:19:56.000Z | 2021-04-13T01:19:45.000Z | CCC/CCC '19 J3 - Cold Compress.py | Joon7891/Competitive-Programming | d860b7ad932cd5a6fb91fdc8c53101da57f4a408 | [
"MIT"
] | null | null | null | CCC/CCC '19 J3 - Cold Compress.py | Joon7891/Competitive-Programming | d860b7ad932cd5a6fb91fdc8c53101da57f4a408 | [
"MIT"
] | 1 | 2020-08-26T12:36:08.000Z | 2020-08-26T12:36:08.000Z | n = int(input())
for _ in range(n):
line = input()
prev = ''
counter = 0
for char in line:
if char != prev:
if prev != '':
print(counter, prev, end = ' ')
prev = char
counter = 1
else:
counter += 1
print(counter, prev) | 15.555556 | 39 | 0.478571 |
85fd9ccfe64a572bc3232cd253f5cd2894061049 | 1,514 | py | Python | src/utils/jupyter_setup.py | paxtonedgar/MisInfo | 81b32fa3e7d0d204feb83e10169093f45727a2ea | [
"MIT"
] | null | null | null | src/utils/jupyter_setup.py | paxtonedgar/MisInfo | 81b32fa3e7d0d204feb83e10169093f45727a2ea | [
"MIT"
] | null | null | null | src/utils/jupyter_setup.py | paxtonedgar/MisInfo | 81b32fa3e7d0d204feb83e10169093f45727a2ea | [
"MIT"
] | null | null | null | # built-in
import os
import logging
# installed
import pandas as pd
import seaborn as sns
from matplotlib import pylab
# custom
import src.settings
from src.utils.log_utils import setup_logging, LogLevel
from src.utils.config_loader import ConfigLoader, Config
def setup_jupyter(
    root_dir: str, config_path: str = None,
    logging_level: LogLevel = logging.DEBUG
) -> Config:
    """
    Setup needed for Jupyter.

    Loads the project configuration, configures logging from
    ``<root_dir>/logging.json`` and applies seaborn/pandas/matplotlib
    display defaults suitable for notebooks.
    :param root_dir: directory that contains the ``logging.json`` logging config
    :type root_dir: str
    :param config_path: path handed to ``ConfigLoader.load_config``; ``None``
        lets the loader pick its default, defaults to None
    :type config_path: str, optional
    :param logging_level: level passed on to ``setup_logging``, defaults to logging.DEBUG
    :type logging_level: LogLevel, optional
    :return: the loaded project configuration
    :rtype: Config
    """
    src.settings.init()
    cfg = ConfigLoader.load_config(config_path)
    print('Config loaded.')
    setup_logging(
        os.path.join(root_dir, 'logging.json'), logging_level=logging_level
    )
    # other setup
    # Seaborn theme: defaults first, then the 'muted' palette, then a larger
    # default figure size (order matters - each call layers on the previous).
    sns.set()
    palette = sns.color_palette('muted')
    sns.set_palette(palette)
    sns.set(rc={'figure.figsize': (12, 8)})
    # Pandas display tweaks for notebook output.
    pd.options.display.float_format = '{:.4f}'.format
    # NOTE(review): newer pandas expects 'display.max_colwidth'; the bare
    # 'max_colwidth' key is deprecated - confirm against the pinned version.
    pd.set_option('max_colwidth', 800)
    pd.set_option('display.max_rows', 200)
    # Larger fonts for matplotlib figures.
    params = {
        'legend.fontsize': 16,
        'figure.figsize': (10, 8),
        'axes.labelsize': 16,
        'axes.titlesize': 16,
        'xtick.labelsize': 16,
        'ytick.labelsize': 16
    }
    pylab.rcParams.update(params)
    print('Setup done')
    return cfg
| 26.103448 | 75 | 0.664465 |
85fdbccdde41392a6f2e6723a8450dd58d4c3c85 | 3,169 | py | Python | EclipseOJ/contests/models.py | cs251-eclipse/eclipseOJ | ad93bf65014e87051278026f87b6b92afdaed349 | [
"MIT"
] | null | null | null | EclipseOJ/contests/models.py | cs251-eclipse/eclipseOJ | ad93bf65014e87051278026f87b6b92afdaed349 | [
"MIT"
] | null | null | null | EclipseOJ/contests/models.py | cs251-eclipse/eclipseOJ | ad93bf65014e87051278026f87b6b92afdaed349 | [
"MIT"
] | 1 | 2020-06-06T21:05:09.000Z | 2020-06-06T21:05:09.000Z | from django.db import models
from django.contrib.auth.models import User
from core.models import Profile
from array import *
from datetime import datetime
from django.utils import timezone
| 39.123457 | 270 | 0.688545 |
85fe97d41b6486d5e18a9ac451b9332abc6e4cd3 | 2,659 | py | Python | oldnumba/special.py | meawoppl/numba | bb8df0aee99133c6d52465ae9f9df2a7996339f3 | [
"BSD-2-Clause"
] | 1 | 2015-01-29T06:52:36.000Z | 2015-01-29T06:52:36.000Z | oldnumba/special.py | meawoppl/numba | bb8df0aee99133c6d52465ae9f9df2a7996339f3 | [
"BSD-2-Clause"
] | null | null | null | oldnumba/special.py | meawoppl/numba | bb8df0aee99133c6d52465ae9f9df2a7996339f3 | [
"BSD-2-Clause"
] | null | null | null | # -*- coding: utf-8 -*-
"""
Special compiler-recognized numba functions and attributes.
"""
from __future__ import print_function, division, absolute_import
__all__ = ['NULL', 'typeof', 'python', 'nopython', 'addressof', 'prange']
import ctypes
from numba import error
#------------------------------------------------------------------------
# Pointers
#------------------------------------------------------------------------
NULL = NumbaDotNULL()
def addressof(obj, propagate=True):
    """
    Take the address of a compiled jit function.
    :param obj: the jit function
    :param propagate: must be True; passing False (writing unraisable
        exceptions to stderr instead of propagating them) is not yet
        implemented and raises ValueError
    :return: ctypes function pointer
    """
    # Imported lazily inside the function; presumably to avoid a circular
    # import at module load time - TODO confirm against the package layout.
    from numba import numbawrapper
    if not propagate:
        raise ValueError("Writing unraisable exceptions is not yet supported")
    if not isinstance(obj, (numbawrapper.NumbaCompiledWrapper,
                            numbawrapper.numbafunction_type)):
        raise TypeError("Object is not a jit function")
    # Materialise the native function pointer on first use and cache it
    # on the wrapper object.
    if obj.lfunc_pointer is None:
        assert obj.lfunc is not None, obj
        from numba.codegen import llvmcontext
        llvm_context = llvmcontext.LLVMContextManager()
        obj.lfunc_pointer = llvm_context.get_pointer_to_function(obj.lfunc)
    # Cast the raw pointer to a ctypes function type matching the signature.
    ctypes_sig = obj.signature.to_ctypes()
    return ctypes.cast(obj.lfunc_pointer, ctypes_sig)
#------------------------------------------------------------------------
# Types
#------------------------------------------------------------------------
def typeof(value):
    """Infer and return the Numba type of ``value``.

    Intended for use outside of compiled Numba code: the type is inferred
    from the runtime object itself.
    """
    # Imported lazily so that merely importing this module stays cheap.
    from numba import typesystem

    system = typesystem.numba_typesystem
    return system.typeof(value)
#------------------------------------------------------------------------
# python/nopython context managers
#------------------------------------------------------------------------
python = NoopContext("python")
nopython = NoopContext("nopython")
#------------------------------------------------------------------------
# prange
#------------------------------------------------------------------------ | 27.989474 | 78 | 0.532907 |
85ff76b7f34f9abc8f910e03a1576bfe726a0de5 | 7,602 | py | Python | mistletoe/renderers/base.py | executablebooks/mistletoe-ebp | 229812436726fd9b1af85c6e66ff8c81b415758d | [
"MIT"
] | 2 | 2020-05-19T02:06:47.000Z | 2020-06-27T10:01:59.000Z | mistletoe/renderers/base.py | executablebooks/mistletoe-ebp | 229812436726fd9b1af85c6e66ff8c81b415758d | [
"MIT"
] | 5 | 2020-03-10T22:43:16.000Z | 2020-03-21T22:09:09.000Z | mistletoe/renderers/base.py | ExecutableBookProject/mistletoe-ebp | 229812436726fd9b1af85c6e66ff8c81b415758d | [
"MIT"
] | null | null | null | """
Base class for renderers.
"""
from itertools import chain
import re
import sys
from typing import Optional
from mistletoe import block_tokens, block_tokens_ext, span_tokens, span_tokens_ext
from mistletoe.parse_context import ParseContext, set_parse_context
def render_setext_heading(self, token):
    """Render a ``SetextHeading`` token.

    Setext headings carry the same information as ATX headings, so the
    token is simply forwarded to ``render_heading``.
    """
    return self.render_heading(token)
def render_code_fence(self, token):
    """Render a ``CodeFence`` token.

    Fenced code blocks share their rendering with indented code blocks,
    so the token is forwarded to ``render_block_code``.
    """
    return self.render_block_code(token)
def __getattr__(self, name):
    """Fall back gracefully for unknown ``render_*`` attributes.

    Any missing attribute whose name looks like a render method resolves
    to ``self.unimplemented_renderer``; every other missing attribute
    raises ``AttributeError`` as usual.
    """
    if not name.startswith("render_"):
        # Attach the current traceback, if any, to the re-raised error.
        raise AttributeError(name).with_traceback(sys.exc_info()[2])
    return self.unimplemented_renderer
| 34.089686 | 83 | 0.64654 |
85ff94648db8e42f7e087780f32ca9e870cb3118 | 2,123 | py | Python | deep-scratch/steps/step50.py | jayChung0302/myml | 6575706aec707186037607e49342f77cde34ff52 | [
"MIT"
] | null | null | null | deep-scratch/steps/step50.py | jayChung0302/myml | 6575706aec707186037607e49342f77cde34ff52 | [
"MIT"
] | null | null | null | deep-scratch/steps/step50.py | jayChung0302/myml | 6575706aec707186037607e49342f77cde34ff52 | [
"MIT"
] | null | null | null | if '__file__' in globals():
import os, sys
sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
import math
import numpy as np
import matplotlib.pyplot as plt
import dezero
from dezero import optimizers
import dezero.functions as F
import dezero.datasets as datasets
from dezero.models import MLP
from dezero.dataloaders import DataLoader as DataLoader
t = [1, 2, 3]
x = iter(t)
print(next(x))
print(next(x))
print(next(x))
obj = MyIterator(5)
for x in obj:
print(x)
y = np.array([[0.2, 0.8, 0], [0.1, 0.9, 0], [0.8, 0.1, 0.1]])
t = np.array([1, 2, 0])
acc = F.accuracy(y, t)
print(acc)
# Hyperparameters for training a 3-class classifier on the Spiral dataset.
max_epoch = 300
batch_size = 30
hidden_size = 10
lr = 1.0
train_set = dezero.datasets.Spiral(train=True)
test_set = dezero.datasets.Spiral(train=False)
train_loader = DataLoader(train_set, batch_size)
test_loader = DataLoader(test_set, batch_size)
model = MLP((hidden_size, 3))
optimizer = optimizers.SGD(lr).setup(model)
for ep in range(max_epoch):
    # --- training pass: accumulate loss/accuracy weighted by batch size ---
    sum_loss, sum_acc = 0, 0
    for x, t in train_loader:
        y = model(x)
        loss = F.softmax_cross_entropy(y, t)
        acc = F.accuracy(y, t)
        model.cleargrads()
        loss.backward()
        optimizer.update()
        sum_loss += float(loss.data) * len(t)
        sum_acc += float(acc.data) * len(t)
    print(f"epoch:{ep+1}")
    print(f"train loss:{sum_loss/len(train_set):.4f}, accuracy:{sum_acc/len(train_set):.4f}")
    # --- evaluation pass: gradients are disabled while scoring the test set ---
    sum_loss, sum_acc = 0, 0
    with dezero.no_grad():
        for x, t in test_loader:
            y = model(x)
            loss = F.softmax_cross_entropy(y, t)
            acc = F.accuracy(y, t)
            sum_loss += float(loss.data) * len(t)
            sum_acc += float(acc.data) * len(t)
    print(f"test loss: {sum_loss/len(test_set):.4f}, accuracy: {sum_acc/len(test_set):.4f}")
| 25.890244 | 93 | 0.621291 |
c80394afc399b78e52b09f0399ffa60c6bd99be4 | 2,807 | py | Python | src/old/mpas-source/testing_and_setup/compass/landice/MISMIP3D/plot_speed_profiles.py | meteorologytoday/E3SM-sicn | 61acadf73929399586c8972f263f0d65696cba38 | [
"MIT"
] | null | null | null | src/old/mpas-source/testing_and_setup/compass/landice/MISMIP3D/plot_speed_profiles.py | meteorologytoday/E3SM-sicn | 61acadf73929399586c8972f263f0d65696cba38 | [
"MIT"
] | null | null | null | src/old/mpas-source/testing_and_setup/compass/landice/MISMIP3D/plot_speed_profiles.py | meteorologytoday/E3SM-sicn | 61acadf73929399586c8972f263f0d65696cba38 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
'''
Plots velocity profiles for a diagnostic solve for a range of resolutions, with and without GLP.
'''
import numpy as np
import netCDF4
#import datetime
# import math
# from pylab import *
from optparse import OptionParser
import matplotlib.pyplot as plt
from matplotlib import cm
# from matplotlib.contour import QuadContourSet
# import time
reslist = (10000, 5000, 2000, 1000, 500, 250)
GLbit = 256
secInYr = 3600.0 * 24.0 * 365.0 # Note: this may be slightly wrong for some calendar types!
parser = OptionParser()
parser.add_option("-f", "--file", dest="filename", help="file to visualize", metavar="FILE")
parser.add_option("-t", "--time", dest="time", help="time step to visualize (0 based)", metavar="TIME")
parser.add_option("-s", "--save", action="store_true", dest="saveimages", help="include this flag to save plots as files")
parser.add_option("-n", "--nodisp", action="store_true", dest="hidefigs", help="include this flag to not display plots (usually used with -s)")
options, args = parser.parse_args()
if not options.filename:
print "No filename provided. Using output.nc."
options.filename = "output.nc"
if not options.time:
print "No time provided. Using time 0."
time_slice = 0
else:
time_slice = int(options.time)
################### DEFINE FUNCTIONS ######################
colors = [ cm.jet(x) for x in np.linspace(0.0, 1.0, len(reslist)) ]
fig = plt.figure(1, facecolor='w')
ax = fig.add_subplot(111)
for i in range(len(reslist)):
res = reslist[i]
# no glp first
fname = "{}m.nc".format(res)
print "Processing file", fname
x, u = get_data(fname)
plt.plot(x, u, '.-', color=colors[i], label="{}m, no GLP".format(res))
# glp next
fname = "{}m-glp.nc".format(res)
print "Processing file", fname
x, u = get_data(fname)
plt.plot(x, u, '.--', color=colors[i], label="{}m, GLP".format(res))
plt.xlabel('X-position (km)')
plt.ylabel('Speed (m/yr)')
plt.title('Profile at y=0')
plt.legend()
plt.draw()
if options.saveimages:
print "Saving figures to files."
plt.savefig('GL-position.png')
if options.hidefigs:
print "Plot display disabled with -n argument."
else:
plt.show()
| 29.239583 | 143 | 0.662629 |
c804b2aba892b4eb59eed92a1fc2059a9fcab787 | 2,713 | py | Python | halfpipe/model/__init__.py | fossabot/Halfpipe-1 | 9e9fae20467d2c73b67fcb2cc73ed7144d79db3a | [
"FTL"
] | null | null | null | halfpipe/model/__init__.py | fossabot/Halfpipe-1 | 9e9fae20467d2c73b67fcb2cc73ed7144d79db3a | [
"FTL"
] | null | null | null | halfpipe/model/__init__.py | fossabot/Halfpipe-1 | 9e9fae20467d2c73b67fcb2cc73ed7144d79db3a | [
"FTL"
] | null | null | null | # -*- coding: utf-8 -*-
# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
"""
"""
from .exclude import ExcludeSchema, rating_indices
from .spec import SpecSchema, loadspec, savespec
from .tags import BoldTagsSchema, FuncTagsSchema, entities, entity_longnames, resultdict_entities
from .file import (
File,
BidsFileSchema,
AnatFileSchema,
T1wFileSchema,
FuncFileSchema,
BoldFileSchema,
TxtEventsFileSchema,
TsvEventsFileSchema,
MatEventsFileSchema,
FmapFileSchema,
PhaseFmapFileSchema,
PhaseDiffFmapFileSchema,
EPIFmapFileSchema,
BaseFmapFileSchema,
RefFileSchema,
SpreadsheetFileSchema,
FileSchema,
)
from .setting import (
SettingSchema,
BaseSettingSchema,
SmoothingSettingSchema,
BandpassFilterSettingSchema,
GrandMeanScalingSettingSchema,
)
from .metadata import (
MetadataSchema,
templates,
direction_codes,
axis_codes,
space_codes,
slice_order_strs
)
from .resultdict import ResultdictSchema
from .filter import FilterSchema, GroupFilterSchema, TagFilterSchema, MissingFilterSchema
from .contrast import TContrastSchema, InferredTypeContrastSchema
from .model import (
Model,
ModelSchema,
FixedEffectsModelSchema,
MixedEffectsModelSchema,
LinearMixedEffectsModelSchema,
)
from .feature import Feature, FeatureSchema
from .variable import VariableSchema
# Explicit public API of this package: every name imported from the
# submodules above is re-exported here.
__all__ = [
    "ExcludeSchema",
    "rating_indices",
    "SpecSchema",
    "loadspec",
    "savespec",
    "BoldTagsSchema",
    "FuncTagsSchema",
    "entities",
    "entity_longnames",
    "resultdict_entities",
    "File",
    "BidsFileSchema",
    "AnatFileSchema",
    "T1wFileSchema",
    "FuncFileSchema",
    "BoldFileSchema",
    "TxtEventsFileSchema",
    "TsvEventsFileSchema",
    "MatEventsFileSchema",
    "FmapFileSchema",
    "PhaseFmapFileSchema",
    "PhaseDiffFmapFileSchema",
    "EPIFmapFileSchema",
    "BaseFmapFileSchema",
    "RefFileSchema",
    "FileSchema",
    "SettingSchema",
    "BaseSettingSchema",
    "SmoothingSettingSchema",
    "BandpassFilterSettingSchema",
    "GrandMeanScalingSettingSchema",
    "MetadataSchema",
    "templates",
    "direction_codes",
    "axis_codes",
    "space_codes",
    "slice_order_strs",
    "ResultdictSchema",
    "FilterSchema",
    "GroupFilterSchema",
    "TagFilterSchema",
    "TContrastSchema",
    "MissingFilterSchema",
    "InferredTypeContrastSchema",
    "Model",
    "ModelSchema",
    "FixedEffectsModelSchema",
    "MixedEffectsModelSchema",
    "LinearMixedEffectsModelSchema",
    "SpreadsheetFileSchema",
    "VariableSchema",
    "Feature",
    "FeatureSchema",
]
| 23.798246 | 97 | 0.708072 |
c804be87c5478ddfa9fadf38397429243edc770e | 4,363 | py | Python | play.py | cp1r8/metadungeon | e68a35c815d60bccb883436fde782868bff7f81f | [
"CC0-1.0"
] | null | null | null | play.py | cp1r8/metadungeon | e68a35c815d60bccb883436fde782868bff7f81f | [
"CC0-1.0"
] | null | null | null | play.py | cp1r8/metadungeon | e68a35c815d60bccb883436fde782868bff7f81f | [
"CC0-1.0"
] | null | null | null | #!/usr/bin/env python3
from game import World
from game.creatures import Humanoid, Unit
from game.creatures.adventurers import Adventurer, Party
from game.objects.containers import Container
from game.places.underground import Dungeon
from game.dice import d4
from pathlib import Path
import pickle
import sys
import ui
if __name__ == '__main__':
    # Saved games live in the user's home directory as a single pickle;
    # passing '--reset' on the command line ignores any existing save.
    game_file = Path.home() / '.local' / 'metadungeon.pickle'
    if game_file.exists() and '--reset' not in sys.argv:
        # Resume: the world and the party are pickled together as one tuple.
        # NOTE(review): 'input' shadows the builtin here - harmless, but
        # worth renaming.
        with game_file.open('rb') as input:
            world, party = pickle.load(input)
    else:
        # New game: create a world containing a single dungeon.
        world = World()
        dungeon = Dungeon(world)
        world.add(dungeon)
        if '--shop' in sys.argv:
            auto_equip = False
            # TODO start in town (purchase equipment manually)
        else:
            auto_equip = True
        location = dungeon.entrance
        # Party composition is chosen by command-line flag; the default is
        # a level-1 party of 2d4+4 adventurers.
        if '--basic' in sys.argv:
            party = Party.basic(location, auto_equip)
        elif '--expert' in sys.argv:
            party = Party.expert(location, auto_equip)
        elif '--funnel' in sys.argv:
            party = Party.assemble(0, sum(4*d4) + 4, location, auto_equip)
        elif '--hlc' in sys.argv:
            party = Party.highLevelClient(location, auto_equip)
        elif '--hlf' in sys.argv:
            party = Party.highLevelFighter(location, auto_equip)
        elif '--hlm' in sys.argv:
            party = Party.highLevelMuser(location, auto_equip)
        else:
            party = Party.assemble(1, sum(2*d4) + 4, location, auto_equip)
        location.add(party)
    #for testing
    if '--zap' in sys.argv:
        # Each '--zap' occurrence deals one point of damage to every member
        # of every unit at the party's location.
        damage = sys.argv.count('--zap')
        for entity in party.location.entities:
            if isinstance(entity, Unit):
                for member in entity.members:
                    member.hit(damage)
    # Execute at most one action named on the command line, then advance
    # game time by ten minutes and refresh the available actions.
    actions = party.location.actions(party)
    for arg in sys.argv:
        if arg in actions:
            actions[arg]()
            world.age(minutes=10)
            actions = party.location.actions(party)
            break
    # Persist the (possibly updated) game state before rendering the UI.
    with game_file.open('wb') as output:
        pickle.dump((world, party), output)
    # --- text UI: header, location, and the menu of available actions ---
    print(f"{str(world):<19} {world.now}")
    print('-' * 39)
    print(str(party.location))
    print()
    print('[ ' + ' ] [ '.join(sorted(actions.keys())) + ' ]')
    print('=' * 39)
    print()
    # Non-unit entities first (e.g. containers and their contents).
    for entity in sorted(party.location.entities, key=lambda entity: entity.id):
        if isinstance(entity, Unit):
            continue
        print(str(entity))
        if isinstance(entity, Container):
            for item in entity.contents:
                ui.print_inventory_item(item)
        print('-' * 39)
        print()
    # Then every unit, with optional per-member detail blocks.
    for entity in sorted(party.location.entities, key=lambda entity: entity.id):
        if not isinstance(entity, Unit):
            continue
        print(str(entity))
        # TODO unit "health bar"
        # TODO unit status (e.g., lost/flee)
        if '--stats' in sys.argv:
            print(ui.unitstats(entity))
        print('-' * 39)
        print()
        for member in sorted(entity.members, key=lambda member: member.id):
            print(str(member))
            # A member who has taken more hits than their hit dice shows a
            # negative total; otherwise show hits remaining (adjusted for
            # any partial hit).
            if member.hits_taken > member.hit_dice:
                hit_points = f"{member.hit_dice - member.hits_taken:d}/{member.hit_dice:d}"
            else:
                hit_points = f"{member.hits_remaining - member.partial_hit:d}/{member.hit_dice:d}"
            print(
                f"[{ui.health_bar(member, 28)}]",
                f"{hit_points:>5} hp",
            )
            if '--stats' in sys.argv:
                print(ui.statblock(member))
            if isinstance(member, Adventurer):
                if '--abilities' in sys.argv:
                    print(ui.abilities(member))
                if '--level' in sys.argv:
                    # TODO calculate "bounty"
                    print(
                        f"{member.profile}",
                        f"1UP:{member.silver_for_next_level:,.0f}$"
                    )
            if isinstance(member, Humanoid):
                if '--inventory' in sys.argv:
                    ui.print_inventory(member, True)
                    print('-' * 39)
                elif '--arms' in sys.argv:
                    ui.print_inventory(member)
                    print()
    print('=' * 39)
    print()
| 28.89404 | 98 | 0.544579 |
c80564b90308d62159da1bcee2b8ccb5be3d2c50 | 87 | py | Python | HelloPython/Hello.py | itsdamslife/python-ml-udacity | d16b7619a05e1b42fcbd0846f64726bf9741701a | [
"MIT"
] | null | null | null | HelloPython/Hello.py | itsdamslife/python-ml-udacity | d16b7619a05e1b42fcbd0846f64726bf9741701a | [
"MIT"
] | null | null | null | HelloPython/Hello.py | itsdamslife/python-ml-udacity | d16b7619a05e1b42fcbd0846f64726bf9741701a | [
"MIT"
] | null | null | null |
HelloPy().hello() | 12.428571 | 30 | 0.597701 |
c80624c4bad650eb5277c12ff9ddd20884d61424 | 590 | py | Python | freeze.py | eudemonia-research/hec | e65df8e4584746dcb2785327cfcffac10a66c689 | [
"MIT"
] | 2 | 2015-11-05T16:24:31.000Z | 2022-02-05T19:01:58.000Z | freeze.py | eudemonia-research/hec | e65df8e4584746dcb2785327cfcffac10a66c689 | [
"MIT"
] | null | null | null | freeze.py | eudemonia-research/hec | e65df8e4584746dcb2785327cfcffac10a66c689 | [
"MIT"
] | null | null | null | from cx_Freeze import setup, Executable
import requests.certs
# Dependencies are automatically detected, but it might need
# fine tuning.
# Build options: bundle the MSVC runtime and ship requests' CA bundle as
# 'cacert.pem' next to the executable so HTTPS keeps working once frozen.
buildOptions = dict(packages = [], excludes = [], include_msvcr=True,
    include_files=[(requests.certs.where(),'cacert.pem')])
import sys
# 'Win32GUI' suppresses the console window on Windows; other platforms use
# the default console base.
base = 'Win32GUI' if sys.platform=='win32' else None
executables = [
    Executable('scripts\\hecs.py', base=base, targetName = 'hecs.exe')
]
setup(name='hecs',
      version = '1.0',
      description = 'Hecs',
      options = dict(build_exe = buildOptions),
      executables = executables)
| 29.5 | 74 | 0.676271 |
c8063918c58e85541c9a3697d5b6790d29d944d4 | 103 | py | Python | chapter2/chapter2_type_hints_01.py | GoodMonsters/Building-Data-Science-Applications-with-FastAPI | d2218d225c5b93723ecf46c19619ed5d3f2473e6 | [
"MIT"
] | 107 | 2021-03-26T20:18:51.000Z | 2022-03-26T03:38:08.000Z | chapter2/chapter2_type_hints_01.py | GoodMonsters/Building-Data-Science-Applications-with-FastAPI | d2218d225c5b93723ecf46c19619ed5d3f2473e6 | [
"MIT"
] | 4 | 2021-06-09T08:48:21.000Z | 2021-12-27T09:04:43.000Z | chapter2/chapter2_type_hints_01.py | GoodMonsters/Building-Data-Science-Applications-with-FastAPI | d2218d225c5b93723ecf46c19619ed5d3f2473e6 | [
"MIT"
] | 58 | 2021-03-12T20:51:19.000Z | 2022-03-27T15:49:49.000Z |
print(greeting("John")) # "Hello, John"
| 17.166667 | 40 | 0.61165 |
c806d8b85faac4749d3297eee869e84a9a44277c | 2,742 | py | Python | Elasticsearch/elasticsearchconnector.py | krajai/testt | 3aaf5fd7fe85e712c8c1615852b50f9ccb6737e5 | [
"BSD-3-Clause"
] | 1,114 | 2020-09-28T07:32:23.000Z | 2022-03-31T22:35:50.000Z | Elasticsearch/elasticsearchconnector.py | krajai/testt | 3aaf5fd7fe85e712c8c1615852b50f9ccb6737e5 | [
"BSD-3-Clause"
] | 298 | 2020-10-29T09:39:17.000Z | 2022-03-31T15:24:44.000Z | Elasticsearch/elasticsearchconnector.py | krajai/testt | 3aaf5fd7fe85e712c8c1615852b50f9ccb6737e5 | [
"BSD-3-Clause"
] | 153 | 2020-09-29T06:07:39.000Z | 2022-03-31T17:41:16.000Z | # Import elasticsearch module
from elasticsearch import Elasticsearch,ImproperlyConfigured,TransportError
import json | 35.153846 | 119 | 0.73523 |
c8086e05aec0c6df0ba73279af11a14fb3f20635 | 15,813 | py | Python | Lab3.py | jamieaclifford/Crystal | d9ea44187a7673f523ad350834d38730c2d31126 | [
"MIT"
] | null | null | null | Lab3.py | jamieaclifford/Crystal | d9ea44187a7673f523ad350834d38730c2d31126 | [
"MIT"
] | null | null | null | Lab3.py | jamieaclifford/Crystal | d9ea44187a7673f523ad350834d38730c2d31126 | [
"MIT"
] | null | null | null |
from numpy import *
from scipy.signal import correlate2d
from numpy.random import randint,choice,uniform
from matplotlib.pyplot import *
import matplotlib.pyplot as plt
from os import system
system('rm animation.gif')
# example of how code should work
model = LatticeGas(1024,0.3)
print model.state
#model.show()
kmc = KineticMonteCarlo(model)
for n in range(0,100) :
kmc.time_step()
model.show()
#plot of empty empty space against time
f=plt.figure()
x, y = zip(*model.data)
plt.scatter(x, y)
plt.savefig('Islands_vs_Time.jpg')
f.show()
g=plt.figure()
x,y=zip(*model.sing)
plt.scatter(x,y)
plt.savefig('single_vs_time.jpg')
g.show()
h=plt.figure()
x,y=zip(*model.zero)
plt.plot(x,y)
plt.savefig('EmptySpace_vs_time.jpg')
h.show()
system('convert -delay 2 -loop 1 *.png animation.gif')
system('rm *.png')
| 34.526201 | 207 | 0.568393 |
c8093b0fe4419003974199d64ec5c9a63aa70c9e | 4,434 | py | Python | pyvino_utils/models/recognition/gaze_estimation.py | venky4121994/openvinoface | a620138b94f865fb19e6165abde2237c85ca8764 | [
"MIT"
] | 4 | 2020-08-31T17:19:57.000Z | 2020-10-03T13:59:10.000Z | pyvino_utils/models/recognition/gaze_estimation.py | B0N0AI/pyvino_utils | 0d42741eb446b038eae2917b621d9c1ffbc42452 | [
"MIT"
] | 2 | 2020-09-13T08:04:36.000Z | 2020-09-13T08:04:58.000Z | pyvino_utils/models/recognition/gaze_estimation.py | mmphego/pyvino_utils | 0d42741eb446b038eae2917b621d9c1ffbc42452 | [
"MIT"
] | null | null | null | import time
import cv2
import numpy as np
from ..openvino_base.base_model import Base
| 32.844444 | 86 | 0.554804 |
c8095fa9e80674ff147ce29f4d9409ee896f3519 | 1,982 | py | Python | src/testing/task_plot_share_of_educ_participants_with_rapid_test.py | covid-19-impact-lab/sid-germany | aef4bbfb326adaf9190c6d8880e15b3d6f150d28 | [
"MIT"
] | 4 | 2021-04-24T14:43:47.000Z | 2021-07-03T14:05:21.000Z | src/testing/task_plot_share_of_educ_participants_with_rapid_test.py | covid-19-impact-lab/sid-germany | aef4bbfb326adaf9190c6d8880e15b3d6f150d28 | [
"MIT"
] | 4 | 2021-04-27T10:34:45.000Z | 2021-08-31T16:40:28.000Z | src/testing/task_plot_share_of_educ_participants_with_rapid_test.py | covid-19-impact-lab/sid-germany | aef4bbfb326adaf9190c6d8880e15b3d6f150d28 | [
"MIT"
] | null | null | null | import warnings
import matplotlib.pyplot as plt
import pandas as pd
import pytask
import seaborn as sns
from src.config import BLD
from src.config import PLOT_END_DATE
from src.config import PLOT_SIZE
from src.config import PLOT_START_DATE
from src.config import SRC
from src.plotting.plotting import style_plot
from src.testing.shared import get_piecewise_linear_interpolation
| 30.492308 | 85 | 0.712916 |
c80966397626d332b933ed9036f4e46b5c441750 | 734 | py | Python | app/models/brand.py | ertyurk/bugme | 5a3ef3e089e0089055074c1c896c3fdc76600e93 | [
"MIT"
] | null | null | null | app/models/brand.py | ertyurk/bugme | 5a3ef3e089e0089055074c1c896c3fdc76600e93 | [
"MIT"
] | null | null | null | app/models/brand.py | ertyurk/bugme | 5a3ef3e089e0089055074c1c896c3fdc76600e93 | [
"MIT"
] | null | null | null | from typing import Optional
from pydantic import BaseModel, Field
| 22.9375 | 54 | 0.553134 |
c80b2595bdb7003b8c3cf6b926f272d5aafaf2b7 | 2,917 | py | Python | src/autoschedulers/nelli2021/cmdscale.py | InteonCo/Halide | cb38ed9942e878bf40674d75da1cdd2527b81d0a | [
"Apache-2.0"
] | 1 | 2021-12-30T09:27:33.000Z | 2021-12-30T09:27:33.000Z | src/autoschedulers/nelli2021/cmdscale.py | InteonCo/Halide | cb38ed9942e878bf40674d75da1cdd2527b81d0a | [
"Apache-2.0"
] | 3 | 2021-03-16T22:01:02.000Z | 2021-09-23T19:17:10.000Z | src/autoschedulers/nelli2021/cmdscale.py | InteonCo/Halide | cb38ed9942e878bf40674d75da1cdd2527b81d0a | [
"Apache-2.0"
] | null | null | null | # thanks to Francis Song for this function
# source: http://www.nervouscomputer.com/hfs/cmdscale-in-python/
from __future__ import division
import numpy as np
def cmdscale(D):
    """Classical multidimensional scaling (MDS).

    Recovers a Euclidean embedding from a symmetric distance matrix by
    double-centering the squared distances and eigendecomposing the
    resulting Gram matrix.

    Parameters
    ----------
    D : (n, n) array
        Symmetric distance matrix.

    Returns
    -------
    Y : (n, p) array
        Configuration matrix, one column per recovered dimension.  Only
        the p dimensions whose eigenvalues of B are positive are kept.
        Each dimension is determined only up to an overall sign
        (a reflection).
    e : (p,) array
        The positive eigenvalues of B, in descending order.
    """
    num_points = len(D)

    # Double-centering operator: J = I - (1/n) * ones.
    centering = np.eye(num_points) - np.ones((num_points, num_points)) / num_points

    # Gram matrix B = Y Y^T, obtained by double-centering the squared distances.
    gram = -centering @ (D**2) @ centering / 2

    # eigh is appropriate because B is symmetric; sort the spectrum descending.
    eigenvalues, eigenvectors = np.linalg.eigh(gram)
    order = np.argsort(eigenvalues)[::-1]
    eigenvalues = eigenvalues[order]
    eigenvectors = eigenvectors[:, order]

    # Keep only components with positive eigenvalues; non-positive ones have
    # no real Euclidean interpretation.
    keep, = np.where(eigenvalues > 0)
    coords = eigenvectors[:, keep] @ np.diag(np.sqrt(eigenvalues[keep]))

    return coords, eigenvalues[eigenvalues > 0]
| 56.096154 | 95 | 0.255399 |
c80c247892056d339d30163cadca271c880389d5 | 443 | py | Python | flaskapp/app.py | Chetan-Gahane/Detection-Of-Phishing-Websites | 327c6bbd4fe77d465e290466f26a387760103ad7 | [
"MIT"
] | null | null | null | flaskapp/app.py | Chetan-Gahane/Detection-Of-Phishing-Websites | 327c6bbd4fe77d465e290466f26a387760103ad7 | [
"MIT"
] | null | null | null | flaskapp/app.py | Chetan-Gahane/Detection-Of-Phishing-Websites | 327c6bbd4fe77d465e290466f26a387760103ad7 | [
"MIT"
] | null | null | null |
from flask import Flask
from flask import Flask, flash, redirect, render_template, request, session, abort
import os
import newtrain
app = Flask(__name__)
if __name__ == "__main__":
app.secret_key = os.urandom(12)
app.run(debug=True) | 20.136364 | 82 | 0.683973 |
c80efb1904e757cf4b1d0964fcfc32592997f16d | 178 | py | Python | lib/__init__.py | irap-omp/deconv3d | 0ab5322f99e28a19ce5540d9a27dcbe340542d0a | [
"MIT"
] | 3 | 2016-09-19T08:52:37.000Z | 2018-12-07T09:33:20.000Z | lib/__init__.py | irap-omp/deconv3d | 0ab5322f99e28a19ce5540d9a27dcbe340542d0a | [
"MIT"
] | 1 | 2015-04-09T12:30:14.000Z | 2015-04-09T14:05:30.000Z | lib/__init__.py | irap-omp/deconv3d | 0ab5322f99e28a19ce5540d9a27dcbe340542d0a | [
"MIT"
] | 3 | 2017-12-13T14:26:24.000Z | 2019-07-26T18:15:37.000Z |
from os.path import abspath, dirname
with open(dirname(abspath(__file__))+'/../VERSION', 'r') as version_file:
__version__ = version_file.read().replace('\n', '').strip()
| 25.428571 | 73 | 0.685393 |
c80fb814451534b7b615ad0d3cac56241fef8195 | 9,150 | py | Python | selection/algorithms/tests/test_sqrt_lasso.py | wfithian/selective-inference | 19ea427118b04716b23b394f37aafcf126d29a53 | [
"BSD-3-Clause"
] | null | null | null | selection/algorithms/tests/test_sqrt_lasso.py | wfithian/selective-inference | 19ea427118b04716b23b394f37aafcf126d29a53 | [
"BSD-3-Clause"
] | null | null | null | selection/algorithms/tests/test_sqrt_lasso.py | wfithian/selective-inference | 19ea427118b04716b23b394f37aafcf126d29a53 | [
"BSD-3-Clause"
] | 1 | 2019-07-13T04:14:12.000Z | 2019-07-13T04:14:12.000Z | from __future__ import division
import numpy as np
import numpy.testing.decorators as dec
import nose.tools as nt
import statsmodels as sm
import matplotlib.pyplot as plt
from selection.algorithms.sqrt_lasso import (sqrt_lasso, choose_lambda,
estimate_sigma, data_carving, split_model)
from selection.algorithms.lasso import instance
from selection.constraints.quasi_affine import constraints_unknown_sigma
from selection.truncated import T as truncated_T
from selection.sampling.tests.test_sample_sphere import _generate_constraints
def test_gaussian_approx(n=100,p=200,s=10):
    """
    Smoke test of the Gaussian approximation for sqrt-lasso p-values.

    Simulates a sparse linear model with ``n`` observations, ``p``
    features and ``s`` truly active coefficients, fits a sqrt-lasso at a
    theoretically chosen lambda, and collects selective p-values for the
    variables selected beyond the first ``s``.

    Returns
    -------
    P, P_gaussian : list
        Exact and Gaussian-approximation p-values (empty unless every
        true variable was selected).
    intervals : list
        Gaussian selection intervals for the truly active variables.
    beta : ndarray
        The true coefficient vector used in the simulation.
    """
    sigma = 3
    # Pure-noise response; the signal X @ beta is added once X is built.
    y = np.random.standard_normal(n) * sigma
    beta = np.zeros(p)
    #beta[:s] = 8 * (2 * np.random.binomial(1, 0.5, size=(s,)) - 1)
    beta[:s] = 18
    # Correlated design: a shared per-row component correlates the columns;
    # columns are then rescaled so each has norm ~1 (std * sqrt(n)).
    X = np.random.standard_normal((n,p)) + 0.3 * np.random.standard_normal(n)[:,None]
    X /= (X.std(0)[None,:] * np.sqrt(n))
    y += np.dot(X, beta)
    lam_theor = choose_lambda(X, quantile=0.75)
    L = sqrt_lasso(y, X, lam_theor)
    L.fit(tol=1.e-10, min_its=80)
    P = []
    P_gaussian = []
    intervals = []
    if L.active.shape[0] > 0:
        # Sanity check: the fitted solution must satisfy the selection
        # event's affine constraints.
        np.testing.assert_array_less( \
            np.dot(L.constraints.linear_part, L.y),
            L.constraints.offset)
        # Only score p-values when the support contains all true variables.
        if set(range(s)).issubset(L.active):
            # NOTE(review): the comprehension variable ``p`` shadows the
            # dimension parameter ``p``; harmless here but worth renaming.
            P = [p[1] for p in L.active_pvalues[s:]]
            P_gaussian = [p[1] for p in L.active_gaussian_pval[s:]]
            intervals = [u for u in L.active_gaussian_intervals if u[0] in range(s)]
    return P, P_gaussian, intervals, beta
| 33.888889 | 114 | 0.500656 |
c8100632cb345df1cb4918dfaf696ed8e91b2f92 | 8,607 | py | Python | training/anticausal_classifier_train.py | SANCHES-Pedro/Diff-SCM | a7e7e6ed3a2cd1c21e3bf7a3ed8ed8b29a22cb69 | [
"Apache-2.0"
] | 6 | 2022-02-22T05:07:05.000Z | 2022-03-29T09:48:03.000Z | training/anticausal_classifier_train.py | SANCHES-Pedro/Diff-SCM | a7e7e6ed3a2cd1c21e3bf7a3ed8ed8b29a22cb69 | [
"Apache-2.0"
] | null | null | null | training/anticausal_classifier_train.py | SANCHES-Pedro/Diff-SCM | a7e7e6ed3a2cd1c21e3bf7a3ed8ed8b29a22cb69 | [
"Apache-2.0"
] | 2 | 2022-02-20T08:45:54.000Z | 2022-03-09T09:51:13.000Z | """
Train a noised image classifier on ImageNet.
"""
import os
import blobfile as bf
import torch as th
import torch.distributed as dist
import torch.nn.functional as F
from torch.nn.parallel.distributed import DistributedDataParallel as DDP
from torch.optim import AdamW
import torch
from pathlib import Path
import sys
sys.path.append(str(Path.cwd()))
from configs import default_mnist_configs
from utils import logger, dist_util
from utils.script_util import create_anti_causal_predictor, create_gaussian_diffusion
from utils.fp16_util import MixedPrecisionTrainer
from models.resample import create_named_schedule_sampler
from training.train_util import parse_resume_step_from_filename, log_loss_dict
from datasets import loader
"""
for i, (sub_batch, sub_labels, sub_t) in enumerate(
split_microbatches(config.classifier.training.microbatch, batch, labels, t)
):
if not config.classifier.noise_conditioning:
sub_t = None
if prefix == "train" and config.classifier.training.adversarial_training:
sub_batch_perturbed = adversarial_attacker.perturb(model, sub_batch, sub_labels, sub_t)
logits_perturbed = model(sub_batch_perturbed, timesteps=sub_t)
loss += F.cross_entropy(logits_perturbed, sub_labels, reduction="none")
loss /= 2
adversarial_sub_labels = get_random_vector_excluding(sub_labels)
adversarial_sub_batch = fgsm_attack(sub_batch, sub_batch.grad.data)
adversarial_logits = model(adversarial_sub_batch, timesteps=sub_t)
"""
# FGSM attack code
if __name__ == "__main__":
main()
| 38.084071 | 121 | 0.668526 |
c81323b7eda0896694f1dbe20031469d75f77fed | 3,015 | py | Python | pythonclient/karmen/karmen.py | jrcichra/karmen | 4d25d635509ebffa295b085ae7fa3932e3a36344 | [
"MIT"
] | 3 | 2020-03-02T13:09:07.000Z | 2021-12-27T16:27:23.000Z | pythonclient/karmen/karmen.py | jrcichra/karmen | 4d25d635509ebffa295b085ae7fa3932e3a36344 | [
"MIT"
] | 5 | 2020-03-02T04:53:54.000Z | 2021-12-17T23:57:12.000Z | pythonclient/karmen/karmen.py | jrcichra/karmen | 4d25d635509ebffa295b085ae7fa3932e3a36344 | [
"MIT"
] | null | null | null | #!/usr/bin/python3 -u
import threading
import time
import queue
import socket
import grpc
import karmen.karmen_pb2 as pb
import karmen.karmen_pb2_grpc as pb_grpc
if __name__ == "__main__":
k = Karmen(name="bob")
print(k.ping())
k.addAction(sleep, "sleep")
k.register()
print(k.runEvent("pleaseSleep"))
| 30.15 | 83 | 0.60995 |
c81398777499e20a3165a99b2f64b68aeafcfd64 | 54 | py | Python | emma.py | patrickdijusto/python.hub | 776a6a843dbd8895c414dcb8e7730c8cb2f3894d | [
"MIT"
] | null | null | null | emma.py | patrickdijusto/python.hub | 776a6a843dbd8895c414dcb8e7730c8cb2f3894d | [
"MIT"
] | null | null | null | emma.py | patrickdijusto/python.hub | 776a6a843dbd8895c414dcb8e7730c8cb2f3894d | [
"MIT"
] | null | null | null |
fun('Emma')
| 10.8 | 22 | 0.62963 |
c813a1eed5677e2bde6506474cc6f8326d3c6475 | 6,680 | py | Python | proxy-server/proxy_server.py | MS17-010/python-misc | 15fbf3215359c97b75d4809756644626e6b577ed | [
"MIT"
] | null | null | null | proxy-server/proxy_server.py | MS17-010/python-misc | 15fbf3215359c97b75d4809756644626e6b577ed | [
"MIT"
] | null | null | null | proxy-server/proxy_server.py | MS17-010/python-misc | 15fbf3215359c97b75d4809756644626e6b577ed | [
"MIT"
] | null | null | null | import socket
import threading
import signal
import sys
import fnmatch
import utils
from time import gmtime, strftime, localtime
import logging
config = {
"HOST_NAME" : "192.168.0.136",
"BIND_PORT" : 12345,
"MAX_REQUEST_LEN" : 1024,
"CONNECTION_TIMEOUT" : 5,
"BLACKLIST_DOMAINS" : [ "blocked.com" ],
"HOST_ALLOWED" : [ "*" ],
"COLORED_LOGGING" : "true"
}
logging.basicConfig(level=logging.DEBUG,
format='[%(CurrentTime)-10s] (%(ThreadName)-10s) %(message)s',
)
def proxy_thread(self, conn, client_addr):
    """
    Handle one browser request on its own thread.

    Reads the raw HTTP request from ``conn``, rejects blacklisted URLs
    and disallowed clients, parses the destination host/port from the
    request line, relays the request to the origin server and streams
    the response back to the browser.

    Parameters
    ----------
    conn : socket
        Accepted connection to the browser.
    client_addr : tuple
        ``(host, port)`` of the connecting client.
    """
    request = conn.recv(config['MAX_REQUEST_LEN'])  # raw request from browser
    first_line = request.split('\n')[0]             # request line, e.g. "GET http://... HTTP/1.1"
    url = first_line.split(' ')[1]                  # request target

    # Reject requests whose URL contains any blacklisted domain.
    for domain in config['BLACKLIST_DOMAINS']:
        if domain in url:
            self.log("FAIL", client_addr, "BLACKLISTED: " + first_line)
            conn.close()
            # TODO: Create response for 403 Forbidden
            return

    # Reject clients that are not in the allowed-hosts list.
    if not self._ishostAllowed(client_addr[0]):
        # TODO: Create response for 403 Forbidden (note: conn is left open here)
        return

    self.log("WARNING", client_addr, "REQUEST: " + first_line)

    # Parse "scheme://host:port/path" into webserver + port.
    http_pos = url.find("://")
    if http_pos == -1:
        temp = url
    else:
        temp = url[(http_pos + 3):]  # strip the scheme

    port_pos = temp.find(":")        # position of an explicit port, if any

    webserver_pos = temp.find("/")   # end of the host[:port] part
    if webserver_pos == -1:
        webserver_pos = len(temp)

    if port_pos == -1 or webserver_pos < port_pos:
        port = 80                    # no explicit port: default HTTP
        webserver = temp[:webserver_pos]
    else:                            # explicit port given
        port = int((temp[(port_pos + 1):])[:webserver_pos - port_pos - 1])
        webserver = temp[:port_pos]

    # Bug fix: define s before the try block so the except handler can
    # always reference it.  Previously, if socket.socket(...) itself
    # raised, `if s:` in the handler hit a NameError.
    s = None
    try:
        # Connect to the origin web server and forward the request.
        s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        s.settimeout(config['CONNECTION_TIMEOUT'])
        s.connect((webserver, port))
        s.sendall(request)

        # Stream the response back to the browser until the server closes.
        while 1:
            data = s.recv(config['MAX_REQUEST_LEN'])
            if (len(data) > 0):
                conn.send(data)
            else:
                break
        s.close()
        conn.close()
    except socket.error as error_msg:
        self.log("ERROR", client_addr, error_msg)
        if s:
            s.close()
        if conn:
            conn.close()
        self.log("WARNING", client_addr, "Peer Reset: " + first_line)
def _getClientName(self, cli_addr):
    """Return a display name for the connected client.

    ``cli_addr`` (the client's ``(host, port)`` pair) is currently
    ignored and a fixed placeholder is returned for every peer.
    """
    return "Client"
def shutdown(self, signum, frame):
    """
    Signal handler that shuts the proxy down gracefully.

    Joins every worker thread, closes the listening socket and exits the
    process.  The ``(signum, frame)`` signature matches the interface
    expected by ``signal.signal``.
    """
    self.log("WARNING", -1, 'Shutting down gracefully...')
    # Join all running threads before exiting.  threading.enumerate()
    # yields every live thread, so no explicit bookkeeping list is needed.
    # The main thread must be skipped: per the threading docs, join()
    # raises RuntimeError when a thread attempts to join itself (it would
    # deadlock), and joining a not-yet-started thread raises the same
    # exception.
    main_thread = threading.currentThread() # Wait for all clients to exit
    for t in threading.enumerate():
        if t is main_thread:
            continue
        self.log("FAIL", -1, 'joining ' + t.getName())
        t.join()
    self.serverSocket.close()
    sys.exit(0)
def log(self, log_level, client, msg):
    """
    Format and emit one log line via the ``logging`` module.

    Parameters
    ----------
    log_level : str
        Severity tag understood by ``utils.colorizeLog`` (e.g. "WARNING",
        "FAIL", "ERROR").
    client : tuple or int
        The client's ``(host, port)`` pair, or ``-1`` when logging from
        the main thread.
    msg : str
        The message to log.
    """
    # Extra fields consumed by the logging format string configured above
    # ("[%(CurrentTime)-10s] (%(ThreadName)-10s) %(message)s").
    LoggerDict = {
        'CurrentTime' : strftime("%a, %d %b %Y %X", localtime()),
        'ThreadName' : threading.currentThread().getName()
    }
    if client == -1: # Main Thread
        formatedMSG = msg
    else: # Child threads or Request Threads: prefix with the peer address
        formatedMSG = '{0}:{1} {2}'.format(client[0], client[1], msg)
    logging.debug('%s', utils.colorizeLog(config['COLORED_LOGGING'], log_level, formatedMSG), extra=LoggerDict)
if __name__ == "__main__":
server = Server(config)
server.listenForClient()
| 37.954545 | 137 | 0.550898 |
c8146b0afe8645af236462b961f5cb0186da93f4 | 9,336 | py | Python | basestation/xbox.py | ksurct-officers/Mercury2018-19 | ee90803c1b14727663c65ebc396cdb92c2b79667 | [
"Apache-2.0"
] | null | null | null | basestation/xbox.py | ksurct-officers/Mercury2018-19 | ee90803c1b14727663c65ebc396cdb92c2b79667 | [
"Apache-2.0"
] | null | null | null | basestation/xbox.py | ksurct-officers/Mercury2018-19 | ee90803c1b14727663c65ebc396cdb92c2b79667 | [
"Apache-2.0"
] | null | null | null | ''' xbox.py
Responsible for getting information directly from controller
Courtesy Aaron Schif, former member
Don't touch this file, it's perfect.
'''
from math import isclose
from collections import namedtuple
import sdl2
from sdl2 import ext
ButtonEvent = namedtuple('ButtonEvent', ['time', 'state'])
AxisEvent = namedtuple('AxisEvent', ['time', 'state'])
HatEvent = namedtuple('HatEvent', ['time', 'state'])
DeviceEvent = namedtuple('DeviceEvent', ['number'])
def Test():
import time
Controller.init()
controller = Controller(0)
while True:
# time.sleep(1)
controller.update()
robot = {}
# General buttons
robot['x'] = 1 if controller.x() else 0
robot['y'] = 1 if controller.y() else 0
robot['a'] = 1 if controller.a() else 0
robot['b'] = 1 if controller.b() else 0
# Triggers
robot['r_trigger'] = int(controller.right_trigger() >> 3)
robot['l_trigger'] = int(controller.left_trigger() >> 3)
# Analog sticks
r_stick_x = round(controller.right_x(), 1)
r_stick_y = round(controller.right_y(), 1)
l_stick_x = round(controller.left_x(), 1)
l_stick_y = round(controller.left_y(), 1)
robot['r_stick'] = (int(10*r_stick_x) if abs(r_stick_x) > 0.1 else 0,
int(-10*r_stick_y) if abs(r_stick_y) > 0.1 else 0 )
robot['l_stick'] = (int(10*l_stick_x) if abs(l_stick_x) > 0.1 else 0,
int(-10*l_stick_y) if abs(l_stick_y) > 0.1 else 0 )
# Bumpers
robot['r_bump'] = 1 if controller.right_bumper() else 0
robot['l_bump'] = 1 if controller.left_bumper() else 0
# D-pad
robot['left'] = 1 if str(controller.hat).strip() == 'l' else 0
robot['right'] = 1 if str(controller.hat).strip() == 'r' else 0
robot['up'] = 1 if str(controller.hat).strip() == 'u' else 0
robot['down'] = 1 if str(controller.hat).strip() == 'd' else 0
# # Left bumper combinations
# robot['lbx'] = 1 if controller.left_bumper() and controller.x() else 0
# robot['lby'] = 1 if controller.left_bumper() and controller.y() else 0
# robot['lbb'] = 1 if controller.left_bumper() and controller.b() else 0
# robot['lba'] = 1 if controller.left_bumper() and controller.a() else 0
# # Right bumper combinations
# robot['rbx'] = 1 if controller.right_bumper() and controller.x() else 0
# robot['rby'] = 1 if controller.right_bumper() and controller.y() else 0
# robot['rbb'] = 1 if controller.right_bumper() and controller.b() else 0
# robot['rba'] = 1 if controller.right_bumper() and controller.a() else 0
if(robot):
print(robot)
if __name__ == "__main__":
Test()
| 28.638037 | 88 | 0.60347 |
c817b460ee65b13241ef6e94463df88bf762261b | 765 | py | Python | legacy/legacy/recommenders/visual_gmf.py | csmithchicago/openrec | 5a9cf03abe0db0636107985f9f19d6351e4afe68 | [
"MIT"
] | null | null | null | legacy/legacy/recommenders/visual_gmf.py | csmithchicago/openrec | 5a9cf03abe0db0636107985f9f19d6351e4afe68 | [
"MIT"
] | 6 | 2020-01-28T22:51:16.000Z | 2022-02-10T00:11:19.000Z | legacy/legacy/recommenders/visual_gmf.py | csmithchicago/openrec | 5a9cf03abe0db0636107985f9f19d6351e4afe68 | [
"MIT"
] | null | null | null | from openrec.legacy.recommenders import VisualPMF
from openrec.legacy.modules.interactions import PointwiseGeCE
| 34.772727 | 86 | 0.589542 |
c818c2c94bfac62e873d6b6ae455389a5b8e8196 | 732 | py | Python | tests/test_tag.py | danielwe/explore-courses-api | e08d219b154e7fdb16690e4cd02aa239366f6747 | [
"MIT"
] | 7 | 2019-06-17T07:45:54.000Z | 2022-01-31T01:09:22.000Z | tests/test_tag.py | illiteratecoder/Explore-Courses-API | b2dc41092882e4b2b7945609e4e85b8ac1702bc7 | [
"MIT"
] | null | null | null | tests/test_tag.py | illiteratecoder/Explore-Courses-API | b2dc41092882e4b2b7945609e4e85b8ac1702bc7 | [
"MIT"
] | 1 | 2021-11-14T22:23:59.000Z | 2021-11-14T22:23:59.000Z | from xml.etree import ElementTree as ET
from explorecourses import *
| 20.914286 | 56 | 0.592896 |
c818d2ec8f5ff64e655e15444fca7720cb2c47a4 | 385 | py | Python | server/server.py | BShadid/TFKAKsubmission | b324ea477f5658c4b0b46fd05cde60c4c8a43b93 | [
"MIT"
] | null | null | null | server/server.py | BShadid/TFKAKsubmission | b324ea477f5658c4b0b46fd05cde60c4c8a43b93 | [
"MIT"
] | null | null | null | server/server.py | BShadid/TFKAKsubmission | b324ea477f5658c4b0b46fd05cde60c4c8a43b93 | [
"MIT"
] | null | null | null | from flask import Flask
import easyWik
app = Flask(__name__)
if __name__ == "__main__":
context = ('yourserver.crt','yourserver.key')
app.run(host='0.0.0.0',port='5000',ssl_context=context)
| 20.263158 | 59 | 0.664935 |
c8205acb89329008fc256d7baa124e1eca07ffcd | 1,521 | py | Python | slybot/slybot/linkextractor/xml.py | coolkunal64/ht | b7c52d5604dd75ea4086a6ff92eaa2db85bb145c | [
"BSD-3-Clause"
] | 1 | 2017-11-03T13:00:21.000Z | 2017-11-03T13:00:21.000Z | slybot/slybot/linkextractor/xml.py | coolkunal64/ht | b7c52d5604dd75ea4086a6ff92eaa2db85bb145c | [
"BSD-3-Clause"
] | 2 | 2021-03-31T20:04:55.000Z | 2021-12-13T20:47:09.000Z | slybot/slybot/linkextractor/xml.py | coolkunal64/ht | b7c52d5604dd75ea4086a6ff92eaa2db85bb145c | [
"BSD-3-Clause"
] | 2 | 2017-11-03T13:00:23.000Z | 2020-08-28T19:59:40.000Z | """
Link extraction for auto scraping
"""
from scrapy.link import Link
from scrapy.selector import Selector
from slybot.linkextractor.base import BaseLinkExtractor
| 37.097561 | 123 | 0.680473 |
c821e277e58638d63f9549aad5f4d477bf38817b | 643 | py | Python | advancing_hero/settings.py | hentt30/ces22-project | 06f13ebdf06bbab182b83c882846701ab69e8631 | [
"MIT"
] | 1 | 2021-04-12T01:27:34.000Z | 2021-04-12T01:27:34.000Z | advancing_hero/settings.py | hentt30/ces22-project | 06f13ebdf06bbab182b83c882846701ab69e8631 | [
"MIT"
] | null | null | null | advancing_hero/settings.py | hentt30/ces22-project | 06f13ebdf06bbab182b83c882846701ab69e8631 | [
"MIT"
] | 2 | 2021-04-01T00:13:51.000Z | 2021-04-11T07:15:54.000Z | TITLE = 'Knight of Valhalla'
SCREEN_ROWS = 9
SCREEN_COLUMNS = 16
SIZE = screen_width, screen_height = 64 * 16, 64 * 9
FPS = 60
tile_size = 64
## Speeds
WORLD_SPEED = 1
DEFAULT_PLAYER_SPEED = 5
ASPHALT_SPEED = 1 * DEFAULT_PLAYER_SPEED
GRASS_SPEED = 1 * DEFAULT_PLAYER_SPEED
DIRT_SPEED = 0.6 * DEFAULT_PLAYER_SPEED
WATER_SPEED = 0.5 * DEFAULT_PLAYER_SPEED
## Block names
ASPHALT = 'black_rock'
BRICK = 'gray_rock'
GRASS = 'grass'
DIRT = 'sand'
WATER = 'water'
LAVA = 'lava'
level_1 = 'advancing_hero/world/world.json'
WHITE = (255, 255, 255)
BLACK = (0, 0, 0)
RED = (255, 0, 0)
GREEN = (0, 255, 0)
BLUE = (0, 0, 255)
## DEBUG
DEBUG = False
| 18.911765 | 52 | 0.695179 |
c821ed2774a2669777a45f15bf9913ade184edde | 1,319 | py | Python | questions/construct-the-rectangle/Solution.py | marcus-aurelianus/leetcode-solutions | 8b43e72fe1f51c84abc3e89b181ca51f09dc7ca6 | [
"MIT"
] | 141 | 2017-12-12T21:45:53.000Z | 2022-03-25T07:03:39.000Z | questions/construct-the-rectangle/Solution.py | marcus-aurelianus/leetcode-solutions | 8b43e72fe1f51c84abc3e89b181ca51f09dc7ca6 | [
"MIT"
] | 32 | 2015-10-05T14:09:52.000Z | 2021-05-30T10:28:41.000Z | questions/construct-the-rectangle/Solution.py | marcus-aurelianus/leetcode-solutions | 8b43e72fe1f51c84abc3e89b181ca51f09dc7ca6 | [
"MIT"
] | 56 | 2015-09-30T05:23:28.000Z | 2022-03-08T07:57:11.000Z | """
A web developer needs to know how to design a web page's size. So, given a specific rectangular web pages area, your job by now is to design a rectangular web page, whose length L and width W satisfy the following requirements:
The area of the rectangular web page you designed must equal to the given target area.
The width W should not be larger than the length L, which means L >= W.
The difference between length L and width W should be as small as possible.
Return an array [L, W] where L and W are the length and width of theweb page you designed in sequence.
Example 1:
Input: area = 4
Output: [2,2]
Explanation: The target area is 4, and all the possible ways to construct it are [1,4], [2,2], [4,1].
But according to requirement 2, [1,4] is illegal; according to requirement 3, [4,1] is not optimal compared to [2,2]. So the length L is 2, and the width W is 2.
Example 2:
Input: area = 37
Output: [37,1]
Example 3:
Input: area = 122122
Output: [427,286]
Constraints:
1 <= area <= 107
"""
| 27.479167 | 228 | 0.644428 |
c823fba7bcffc4b21c83b60516082a369618b755 | 891 | py | Python | flask_pdv/ext/resources/transacao.py | evaristofm/api_transacao | 3a8d1d1459f5f58c8df7473fe6f7ea1a438738d2 | [
"MIT"
] | null | null | null | flask_pdv/ext/resources/transacao.py | evaristofm/api_transacao | 3a8d1d1459f5f58c8df7473fe6f7ea1a438738d2 | [
"MIT"
] | null | null | null | flask_pdv/ext/resources/transacao.py | evaristofm/api_transacao | 3a8d1d1459f5f58c8df7473fe6f7ea1a438738d2 | [
"MIT"
] | null | null | null | from flask_restful import Resource, marshal
from flask_pdv.ext.api import requests
from flask_pdv.ext.db.models import TransacaoModel
from flask_pdv.ext.db.schemas import transacao_field
from flask_pdv.ext.db import db
| 26.205882 | 85 | 0.65881 |
c825a6df3c14933bdcbd115b36ca8c69f6c6f233 | 2,434 | py | Python | limiter/rate_limiter.py | sousa-andre/requests-limiter | ad3a5982a40e88111eca63b258e1226e15a8befa | [
"MIT"
] | 4 | 2020-11-14T18:13:27.000Z | 2021-01-03T19:13:39.000Z | limiter/rate_limiter.py | sousa-andre/requests-limiter | ad3a5982a40e88111eca63b258e1226e15a8befa | [
"MIT"
] | null | null | null | limiter/rate_limiter.py | sousa-andre/requests-limiter | ad3a5982a40e88111eca63b258e1226e15a8befa | [
"MIT"
] | 2 | 2021-01-03T19:13:46.000Z | 2021-01-31T12:24:23.000Z | from functools import wraps
from time import sleep
from typing import List
from .rate_limit import RateLimit
from .exceptions import RateLimitHit
| 32.453333 | 101 | 0.582991 |