hexsha
stringlengths 40
40
| size
int64 5
2.06M
| ext
stringclasses 11
values | lang
stringclasses 1
value | max_stars_repo_path
stringlengths 3
251
| max_stars_repo_name
stringlengths 4
130
| max_stars_repo_head_hexsha
stringlengths 40
78
| max_stars_repo_licenses
listlengths 1
10
| max_stars_count
int64 1
191k
⌀ | max_stars_repo_stars_event_min_datetime
stringlengths 24
24
⌀ | max_stars_repo_stars_event_max_datetime
stringlengths 24
24
⌀ | max_issues_repo_path
stringlengths 3
251
| max_issues_repo_name
stringlengths 4
130
| max_issues_repo_head_hexsha
stringlengths 40
78
| max_issues_repo_licenses
listlengths 1
10
| max_issues_count
int64 1
116k
⌀ | max_issues_repo_issues_event_min_datetime
stringlengths 24
24
⌀ | max_issues_repo_issues_event_max_datetime
stringlengths 24
24
⌀ | max_forks_repo_path
stringlengths 3
251
| max_forks_repo_name
stringlengths 4
130
| max_forks_repo_head_hexsha
stringlengths 40
78
| max_forks_repo_licenses
listlengths 1
10
| max_forks_count
int64 1
105k
⌀ | max_forks_repo_forks_event_min_datetime
stringlengths 24
24
⌀ | max_forks_repo_forks_event_max_datetime
stringlengths 24
24
⌀ | content
stringlengths 1
1.05M
| avg_line_length
float64 1
1.02M
| max_line_length
int64 3
1.04M
| alphanum_fraction
float64 0
1
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
d084cbae54b480c7285413e8320b17434455ebf4
| 1,666
|
py
|
Python
|
python_exercises/main.py
|
nchristie/slide-python-intro
|
dd52781b5d25435f97aa83cfff58c175fa7fdd1c
|
[
"MIT"
] | 1
|
2018-06-07T12:40:37.000Z
|
2018-06-07T12:40:37.000Z
|
python_exercises/main.py
|
nchristie/slide-python-intro
|
dd52781b5d25435f97aa83cfff58c175fa7fdd1c
|
[
"MIT"
] | 3
|
2018-06-07T14:39:19.000Z
|
2019-01-15T16:35:23.000Z
|
python_exercises/main.py
|
nchristie/slide-python-intro
|
dd52781b5d25435f97aa83cfff58c175fa7fdd1c
|
[
"MIT"
] | 9
|
2018-05-30T17:12:27.000Z
|
2021-07-01T03:22:48.000Z
|
"""
Press run above to start
"""
from exercises.question_runner import run
from question_directory import (
boolean_operators,
boolean_review,
changing_lists,
dictionaries,
equality_and_booleans,
for_loops,
functions,
functions_quick_review,
greater_than_less_than_and_booleans,
inbuilt_functions_and_operators,
indexing_lists,
variables_equality_and_booleans,
while_loops,
)
from unit_tests.test_instructor_code import * # noqa
if input("\n\nPress enter to start\n") != "test":
# LESSON ONE
# https://kathrinschuler.github.io/slide-python-intro/#/10/3
run(equality_and_booleans.TASKS, equality_and_booleans.BLURB)
run(greater_than_less_than_and_booleans.TASKS, greater_than_less_than_and_booleans.BLURB)
# https://kathrinschuler.github.io/slide-python-intro/#/11/4
run(variables_equality_and_booleans.TASKS, variables_equality_and_booleans.BLURB)
run(boolean_operators.TASKS, boolean_operators.BLURB)
# LESSON TWO
run(inbuilt_functions_and_operators.TASKS, inbuilt_functions_and_operators.BLURB)
# LESSON THREE
# https://kathrinschuler.github.io/slide-python-intro/#/25/4
run(boolean_review.TASKS, boolean_review.BLURB)
run(while_loops.TASKS, while_loops.BLURB)
run(for_loops.TASKS, for_loops.BLURB)
run(functions.TASKS, functions.BLURB)
# LESSON FOUR
run(indexing_lists.TASKS, indexing_lists.BLURB)
run(functions_quick_review.TASKS, functions_quick_review.BLURB)
run(changing_lists.TASKS, changing_lists.BLURB)
run(dictionaries.TASKS, dictionaries.BLURB)
else:
if __name__ == "__main__":
unittest.main() # noqa
| 32.038462
| 93
| 0.758703
|
d085434851368c058d877fff6243d0f3f90e2c23
| 462
|
py
|
Python
|
config.py
|
LandRegistry/historian-alpha
|
3f2c2060d0f747772e8362dbe4867aea24731180
|
[
"MIT"
] | null | null | null |
config.py
|
LandRegistry/historian-alpha
|
3f2c2060d0f747772e8362dbe4867aea24731180
|
[
"MIT"
] | null | null | null |
config.py
|
LandRegistry/historian-alpha
|
3f2c2060d0f747772e8362dbe4867aea24731180
|
[
"MIT"
] | 1
|
2021-04-11T06:07:16.000Z
|
2021-04-11T06:07:16.000Z
|
import os
| 25.666667
| 63
| 0.727273
|
d085e4d4c18167f75fdb378a2d6a53bb684ea18f
| 1,124
|
py
|
Python
|
scripts/removeComments.py
|
doggy8088/azure-devops-cli-extension
|
2f6b1a6ffbc49ae454df640a8bb00dac991d6514
|
[
"MIT"
] | 326
|
2019-04-10T12:38:23.000Z
|
2022-03-31T23:07:49.000Z
|
scripts/removeComments.py
|
doggy8088/azure-devops-cli-extension
|
2f6b1a6ffbc49ae454df640a8bb00dac991d6514
|
[
"MIT"
] | 562
|
2019-04-10T07:36:12.000Z
|
2022-03-28T07:37:54.000Z
|
scripts/removeComments.py
|
doggy8088/azure-devops-cli-extension
|
2f6b1a6ffbc49ae454df640a8bb00dac991d6514
|
[
"MIT"
] | 166
|
2019-04-10T07:59:40.000Z
|
2022-03-16T14:17:13.000Z
|
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
import os
for path, subdirs, files in os.walk('.'):
for name in files:
file_path = os.path.join(path, name)
if file_path.endswith(".py") and "devops_sdk" in file_path:
print('removing comments from ' + file_path)
remove_comment_from_file(file_path)
| 36.258065
| 94
| 0.544484
|
d086ce5911f7ac6a2a4bd8994adb6dc6191adc49
| 7,407
|
py
|
Python
|
scripts/update_tables.py
|
EnsemblGSOC/tony-gsoc-2018
|
7b727e3a82654a4f102d735fb0b2c4ab12470ff6
|
[
"Apache-2.0"
] | 1
|
2018-08-12T08:34:51.000Z
|
2018-08-12T08:34:51.000Z
|
scripts/update_tables.py
|
EnsemblGSOC/tony-gsoc-2018
|
7b727e3a82654a4f102d735fb0b2c4ab12470ff6
|
[
"Apache-2.0"
] | 19
|
2018-05-11T12:46:28.000Z
|
2018-08-13T11:28:44.000Z
|
scripts/update_tables.py
|
EnsemblGSOC/tony-gsoc-2018
|
7b727e3a82654a4f102d735fb0b2c4ab12470ff6
|
[
"Apache-2.0"
] | 1
|
2018-09-22T04:58:55.000Z
|
2018-09-22T04:58:55.000Z
|
"""
.. See the NOTICE file distributed with this work for additional information
regarding copyright ownership.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from __future__ import print_function
import ast
import sys
import xml.etree.ElementTree as ET
from datetime import datetime
import requests
from sqlalchemy import create_engine, Table, MetaData, func, or_
from sqlalchemy.orm import sessionmaker
from base import *
# setup config
config_path = sys.argv[1]
with open(config_path) as configfile:
config = ast.literal_eval(configfile.read())
tony_assembly = config["tony_assembly"]
results_dir = config ["results_dir"]
udocker_root = config["udocker_root"]
toil_dir = config["toil_dir"]
workflow_dir = config["workflow_dir"]
log_dir = config["log_dir"]
registry = config["registry"]
def xml_download(ena_accession):
"""
pulling xml record from ENA
:param ena_accession:
:return:
"""
try:
xml = ET.fromstring(requests.get("https://www.ebi.ac.uk/ena/data/view/{}&display=xml".format(ena_accession),
stream=True, timeout=60).content)
return xml
except requests.exceptions.ReadTimeout:
stderr.write("Could not download XML file with accession {}\n".format(ena_accession))
return None
def xml_download_retry(ena_accession):
"""
pulling xml record from ENA, some of the records take a longer time to connect, this retry set timeout to be 5 mins
:param ena_accession:
:return:
"""
try:
xml = ET.fromstring(requests.get("https://www.ebi.ac.uk/ena/data/view/{}&display=xml".format(ena_accession),
stream=True, timeout=300).content)
return xml
except requests.exceptions.ReadTimeout:
stderr.write("Could not download XML file with accession {}\n".format(ena_accession))
return None
def chromosome_number(xml):
"""
find the number of chromosomes within the assembly. If the assembly is assembled to scaffold level, returns 0
:param xml:
:return:
"""
try:
chroms_number = len(xml.find("ASSEMBLY").find("CHROMOSOMES").findall("CHROMOSOME"))
return chroms_number
except AttributeError:
return 0
def chromosome_data(xml):
"""
extract md5 and length of the chromosome from the chromosome's xml record
:param xml:
:return:
"""
for xref in xml.find("entry").findall("xref"):
if xref.attrib["db"] == "MD5":
md5 = xref.attrib["id"]
break
length = xml.find("entry").attrib["sequenceLength"]
return md5, int(length)
stderr = open("{log_dir}/log_update_tables.txt".format(log_dir=log_dir), "a")
stderr.write(str(datetime.now()) + "\n")
stderr.write("====\n")
registry_engine = create_engine(registry)
assembly = Table("assembly", MetaData(), autoload=True, autoload_with=registry_engine)
engine = create_engine(tony_assembly)
session = sessionmaker(bind=engine)
s = session()
old_accessions = s.query(GCA.accession).all()
r_session = sessionmaker(bind=registry_engine)
rs = r_session()
sub_concat = func.concat(assembly.c.chain, ".", assembly.c.version)
new_accessions = rs.query(sub_concat).filter(sub_concat.notin_(old_accessions)).all()
rs.close()
s = session()
for entry in new_accessions:
gca = GCA()
gca.accession = entry[0]
# print(gca.accession)
gca_xml = xml_download(gca.accession)
if gca_xml is not None: # only add to GCA table if the xml record of the assembly exists
try:
gca.assembly_level = gca_xml.find("ASSEMBLY").find("ASSEMBLY_LEVEL").text
except AttributeError:
gca.assembly_level = "No Level"
stderr.write("{} has no assembly_level attribute, not added to database\n".format(gca.accession))
if gca.assembly_level in ["chromosome", "complete genome"]:
gca.records = chromosome_number(gca_xml)
s.add(gca)
# print(gca.accession, gca.assembly_level, gca.records)
for chrom_record in get_chromosomes(gca_xml):
chromosome = Chromosome()
chromosome.GCA_accession = gca.accession
chromosome.accession = chrom_record.attrib["accession"]
# print(chromosome.accession)
chromosome.name = chrom_record.find("NAME").text
chromosome.status = 1
chrom_xml = xml_download(chromosome.accession)
if chrom_xml is not None:
try:
chromosome.md5, chromosome.length = chromosome_data(chrom_xml)
except AttributeError:
stderr.write("Chromosome {} doesn't exit or has corrupted xml file. Chromosome was added "
"without md5 and length.\n".format(chromosome.accession))
s.add(chromosome)
# print(chromosome.accession, chromosome.GCA_accession,
# chromosome.name, chromosome.length, chromosome.md5)
if not s.query(Jobs).filter(Jobs.chromosome_accession == chromosome.accession).all():
for job in ["get_fasta", "GC", "trf", "CpG"]:
s.add(Jobs(chromosome_accession=chromosome.accession,
job_name=job))
# print(chromosome.accession, job)
elif gca.assembly_level in ["scaffold", "contig"]:
gca.records = get_scaffold_number(gca_xml)
s.add(gca)
for job in ["get_fasta", "GC", "trf", "CpG"]:
s.add(Jobs(chromosome_accession=gca.accession,
job_name=job))
# print(gca.accession, gca.assembly_level, gca.records)
s.commit()
else:
stderr.write("{} was not added because XML record is unavailable\n".format(gca.accession))
stderr.flush()
# retry download chromosome xml record with a longer timeout
for chromosome in s.query(Chromosome).filter(or_(Chromosome.md5 == None, Chromosome.length == None)).all():
chrom_xml = xml_download_retry(chromosome.accession)
if chrom_xml is not None:
try:
chromosome.md5, chromosome.length = chromosome_data(chrom_xml)
except AttributeError:
stderr.write("Chromosome {} doesn't exit or has corrupted xml file. Chromosome data was not added\n"
.format(chromosome.accession))
s.commit()
stderr.flush()
s.close()
stderr.close()
| 38.780105
| 119
| 0.652491
|
d086d7aab191c7687a9c71aa884d413b256ec344
| 8,518
|
py
|
Python
|
lda_loader.py
|
abramhindle/organize-conference-sessions-by-paper-topics
|
076adf1049fe9588c04f37840e938b20dd32d102
|
[
"Apache-2.0"
] | 1
|
2017-12-24T23:48:33.000Z
|
2017-12-24T23:48:33.000Z
|
lda_loader.py
|
abramhindle/organize-conference-sessions-by-paper-topics
|
076adf1049fe9588c04f37840e938b20dd32d102
|
[
"Apache-2.0"
] | 1
|
2017-12-29T15:31:24.000Z
|
2018-01-03T17:39:58.000Z
|
lda_loader.py
|
abramhindle/organize-conference-sessions-by-paper-topics
|
076adf1049fe9588c04f37840e938b20dd32d102
|
[
"Apache-2.0"
] | 2
|
2017-12-29T15:14:00.000Z
|
2021-01-16T13:33:50.000Z
|
# Copyright (C) 2014 Alex Wilson
# Copyright (C) 2012-14 Abram Hindle
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
import json, os
from config import Config
from databases import BulkOps, ElasticSearch
from topics_controller import *
import lda
import common, project, task_queue
# tasks for task_queue
if __name__ == '__main__':
import argparse
parser = argparse.ArgumentParser('LDA analyser')
parser.add_argument('project', help='project name')
parser.add_argument('--topics', type=int, default=100,
help='number of topics to generate (no effect on incremental)')
parser.add_argument('--incremental', help='do an incremental analysis')
Config.add_args(parser)
args = parser.parse_args()
config = Config.build(args)
if args.incremental:
LDAIncrementalTask(args.project).run()
else:
print 'running LDA analysis on {} with {} topics'.format(
args.project, args.topics)
LDATask(args.project, args.topics).run()
| 34.909836
| 110
| 0.646513
|
d086eb141826a0dd9e722b35bf04940deba291b8
| 1,003
|
py
|
Python
|
api-gateway/services/mail_service.py
|
Niweera/DNSTool-Middleware-API
|
0e83d9f62fb65d9223b86a7876b3f30b2771befb
|
[
"Apache-2.0"
] | null | null | null |
api-gateway/services/mail_service.py
|
Niweera/DNSTool-Middleware-API
|
0e83d9f62fb65d9223b86a7876b3f30b2771befb
|
[
"Apache-2.0"
] | 9
|
2021-06-12T05:39:59.000Z
|
2021-08-14T09:20:00.000Z
|
api-gateway/services/mail_service.py
|
Niweera/DNSTool-Middleware-API
|
0e83d9f62fb65d9223b86a7876b3f30b2771befb
|
[
"Apache-2.0"
] | 2
|
2021-05-22T15:33:50.000Z
|
2021-08-28T08:51:25.000Z
|
from flask_mail import Message
from mailer import mailer
from middleware.error_handling import write_log
| 31.34375
| 130
| 0.564307
|
d0879d6a8986f5d89d403d25cf640af496e2854b
| 2,390
|
py
|
Python
|
tests/system/test_grpc_streams.py
|
danoscarmike/gapic-generator-python
|
805645d5571dde05c6fb947c81f0f41f2ba10a98
|
[
"Apache-2.0"
] | null | null | null |
tests/system/test_grpc_streams.py
|
danoscarmike/gapic-generator-python
|
805645d5571dde05c6fb947c81f0f41f2ba10a98
|
[
"Apache-2.0"
] | null | null | null |
tests/system/test_grpc_streams.py
|
danoscarmike/gapic-generator-python
|
805645d5571dde05c6fb947c81f0f41f2ba10a98
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from google import showcase
metadata = (("showcase-trailer", "hello world"),)
| 32.297297
| 74
| 0.706695
|
d088e305cc713c10d76eb917ab7c20df555901b9
| 1,469
|
py
|
Python
|
day02/python/subesokun/solution.py
|
clssn/aoc-2019
|
a978e5235855be937e60a1e7f88d1ef9b541be15
|
[
"MIT"
] | 22
|
2019-11-27T08:28:46.000Z
|
2021-04-27T05:37:08.000Z
|
day02/python/subesokun/solution.py
|
sancho1241/aoc-2019
|
e0f63824c8250e0f84a42805e1a7ff7d9232002c
|
[
"MIT"
] | 77
|
2019-11-16T17:22:42.000Z
|
2021-05-10T20:36:36.000Z
|
day02/python/subesokun/solution.py
|
sancho1241/aoc-2019
|
e0f63824c8250e0f84a42805e1a7ff7d9232002c
|
[
"MIT"
] | 43
|
2019-11-27T06:36:51.000Z
|
2021-11-03T20:56:48.000Z
|
INPUT_FILE_NAME = 'input.txt'
puzzle_input = None
with open(INPUT_FILE_NAME) as input_file:
puzzle_input = list(map(lambda val: int(val), input_file.readline().rstrip('\n').split(',')))
memory_solution_part1 = puzzle_input.copy()
memory_solution_part1[1] = 12
memory_solution_part1[2] = 2
solution_part_1 = run_program(memory_solution_part1)
print('Solution to part 1: %i' % (solution_part_1[0],))
(noun, verb) = find_noun_verb(19690720, puzzle_input)
solution_part_2 = 100 * noun + verb
print('Solution to part 2: %i' % (solution_part_2,))
| 31.934783
| 163
| 0.673247
|
d08a58e73bc10ca50c82c660e481d5fd9ab9cf8d
| 6,299
|
py
|
Python
|
opendata_module/anonymizer/iio/mongodb_manager.py
|
ria-ee/monitor
|
d5cb9384abf38394b35e760729649136cbbc7548
|
[
"MIT"
] | 10
|
2017-12-01T11:59:54.000Z
|
2021-11-08T10:30:35.000Z
|
opendata_module/anonymizer/iio/mongodb_manager.py
|
ria-ee/monitor
|
d5cb9384abf38394b35e760729649136cbbc7548
|
[
"MIT"
] | 16
|
2019-11-15T08:45:33.000Z
|
2021-06-10T18:06:03.000Z
|
opendata_module/anonymizer/iio/mongodb_manager.py
|
ria-ee/monitor
|
d5cb9384abf38394b35e760729649136cbbc7548
|
[
"MIT"
] | 13
|
2017-11-22T08:46:57.000Z
|
2021-12-16T06:51:07.000Z
|
from pymongo import MongoClient
import pymongo
import datetime
import sqlite3 as sql
import os
import signal
from signal import SIGABRT, SIGILL, SIGINT, SIGSEGV, SIGTERM, SIGHUP
import traceback
from anonymizer.utils import logger_manager
import sys
ROOT_DIR = os.path.abspath(os.path.dirname(__file__))
ATEXIT_SINGLETON = None
for sig in (SIGABRT, SIGILL, SIGINT, SIGSEGV, SIGTERM, SIGHUP):
signal.signal(sig, store_last_processed_timestamp)
| 40.378205
| 118
| 0.640895
|
d08c99da891c1e82a3ced1b3133024eb4b5c39fb
| 826
|
py
|
Python
|
viewer/models.py
|
davhanks/digitalmyworld
|
0f718be3967d399dddc6105f0b9d4cbc0ab35764
|
[
"Apache-2.0"
] | null | null | null |
viewer/models.py
|
davhanks/digitalmyworld
|
0f718be3967d399dddc6105f0b9d4cbc0ab35764
|
[
"Apache-2.0"
] | null | null | null |
viewer/models.py
|
davhanks/digitalmyworld
|
0f718be3967d399dddc6105f0b9d4cbc0ab35764
|
[
"Apache-2.0"
] | null | null | null |
from django.db import models
from django.contrib.auth.models import User
from django.conf import settings
# from polls import models as pmod
# questions = pmod.Question.objects.all()
# pmod.Question.objects.filter(question_text='This is the third question')
# q1 = pmod.Question.objects.get(id=2)
# .exclude() you can chain them together
| 35.913043
| 80
| 0.733656
|
d08cfb1d89201e35321b4d716317704e5a60a247
| 552
|
py
|
Python
|
tests/test_dbapi_sqlite.py
|
cyanodbc/cyanodbc
|
6ed49ded15a545edf4b78886868daebc8c5d4874
|
[
"MIT"
] | 2
|
2020-07-10T17:36:00.000Z
|
2020-08-12T14:57:48.000Z
|
tests/test_dbapi_sqlite.py
|
detule/cyanodbc
|
e7713c3cc3333a018409ec50ee1e5836a8d85f06
|
[
"MIT"
] | 15
|
2018-09-09T12:05:15.000Z
|
2020-07-07T12:06:16.000Z
|
tests/test_dbapi_sqlite.py
|
detule/cyanodbc
|
e7713c3cc3333a018409ec50ee1e5836a8d85f06
|
[
"MIT"
] | 1
|
2020-07-02T10:58:07.000Z
|
2020-07-02T10:58:07.000Z
|
import cyanodbc
import dbapi20
from distro import linux_distribution
import pytest
| 26.285714
| 60
| 0.684783
|
d08f1ec8fdeeec92aebf3f03615a051f9221f14d
| 802
|
py
|
Python
|
problems/p050.py
|
davisschenk/project-euler-python
|
1375412e6c8199ab02250bd67223c758d4df1725
|
[
"MIT"
] | null | null | null |
problems/p050.py
|
davisschenk/project-euler-python
|
1375412e6c8199ab02250bd67223c758d4df1725
|
[
"MIT"
] | null | null | null |
problems/p050.py
|
davisschenk/project-euler-python
|
1375412e6c8199ab02250bd67223c758d4df1725
|
[
"MIT"
] | 2
|
2020-10-08T23:35:03.000Z
|
2020-10-09T00:28:36.000Z
|
from problem import Problem
from utils.primes import sieve_of_eratosthenes, simple_is_prime
| 30.846154
| 82
| 0.588529
|
d091d22d60b6b043a2100712328787d0097e7ec3
| 2,312
|
py
|
Python
|
acapy_client/models/indy_pres_attr_spec.py
|
dbluhm/acapy-client
|
d92ef607ba2ff1152ec15429f2edb20976991424
|
[
"Apache-2.0"
] | 4
|
2021-08-05T09:20:34.000Z
|
2021-08-08T19:37:29.000Z
|
acapy_client/models/indy_pres_attr_spec.py
|
dbluhm/acapy-client
|
d92ef607ba2ff1152ec15429f2edb20976991424
|
[
"Apache-2.0"
] | null | null | null |
acapy_client/models/indy_pres_attr_spec.py
|
dbluhm/acapy-client
|
d92ef607ba2ff1152ec15429f2edb20976991424
|
[
"Apache-2.0"
] | 2
|
2021-08-12T18:18:45.000Z
|
2021-08-14T13:22:28.000Z
|
from typing import Any, Dict, List, Type, TypeVar, Union
import attr
from ..types import UNSET, Unset
T = TypeVar("T", bound="IndyPresAttrSpec")
| 27.2
| 77
| 0.602076
|
d0955d256ce56a7190e83eb9977c69b7f233f594
| 4,929
|
py
|
Python
|
close_approach.py
|
jepster/python_project_near_earth_objects
|
6e48da50685f15226bbc0adc66231c661596ac67
|
[
"MIT"
] | null | null | null |
close_approach.py
|
jepster/python_project_near_earth_objects
|
6e48da50685f15226bbc0adc66231c661596ac67
|
[
"MIT"
] | null | null | null |
close_approach.py
|
jepster/python_project_near_earth_objects
|
6e48da50685f15226bbc0adc66231c661596ac67
|
[
"MIT"
] | null | null | null |
from helpers import cd_to_datetime, datetime_to_str
| 39.432
| 79
| 0.581862
|
d096f230d88b0cb0b44ad27a15da83bc18edf195
| 11,023
|
py
|
Python
|
terncy/terncy.py
|
rxwen/python-terncy
|
69be427c39118f122554a300e6e82ec24ad48bc1
|
[
"MIT"
] | 1
|
2020-06-30T07:06:14.000Z
|
2020-06-30T07:06:14.000Z
|
terncy/terncy.py
|
rxwen/python-terncy
|
69be427c39118f122554a300e6e82ec24ad48bc1
|
[
"MIT"
] | null | null | null |
terncy/terncy.py
|
rxwen/python-terncy
|
69be427c39118f122554a300e6e82ec24ad48bc1
|
[
"MIT"
] | 1
|
2020-12-26T11:20:42.000Z
|
2020-12-26T11:20:42.000Z
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import asyncio
import logging
import json
import ssl
import uuid
from terncy.version import __version__
import terncy.event as event
import ipaddress
from datetime import datetime
from enum import Enum
from zeroconf import ServiceBrowser, Zeroconf
import aiohttp
import websockets
_LOGGER = logging.getLogger(__name__)
TERNCY_HUB_SVC_NAME = "_websocket._tcp.local."
WAIT_RESP_TIMEOUT_SEC = 5
_discovery_engine = None
_discovery_browser = None
discovered_homecenters = {}
class Terncy:
def __init__(self, client_id, dev_id, ip, port=443, username="", token=""):
self.client_id = client_id
self.dev_id = dev_id
self.ip = ip
self.port = port
self.username = username
self.token = token
self.token_id = -1
self.token_state = TokenState.INVALID
self._connection = None
self._pending_requests = {}
self._event_handler = None
| 34.021605
| 122
| 0.547945
|
d0974c0fc40e9d8831ae2a7987183cbff51e0997
| 110
|
py
|
Python
|
scripts/item/consume_2430769.py
|
Snewmy/swordie
|
ae01ed4ec0eb20a18730e8cd209eea0b84a8dd17
|
[
"MIT"
] | 9
|
2021-04-26T11:59:29.000Z
|
2021-12-20T13:15:27.000Z
|
scripts/item/consume_2430769.py
|
Snewmy/swordie
|
ae01ed4ec0eb20a18730e8cd209eea0b84a8dd17
|
[
"MIT"
] | null | null | null |
scripts/item/consume_2430769.py
|
Snewmy/swordie
|
ae01ed4ec0eb20a18730e8cd209eea0b84a8dd17
|
[
"MIT"
] | 6
|
2021-07-14T06:32:05.000Z
|
2022-02-06T02:32:56.000Z
|
if sm.getSlotsLeftToAddByInvType(2) < 8:
sm.dispose()
sm.addInventorySlotsByInvType(8, 2)
sm.consumeItem()
| 27.5
| 40
| 0.763636
|
d0995eaa88ebe8af4a5de82f32eaa66b6c6943ba
| 5,310
|
py
|
Python
|
ssc/visualization/vispy_voxel.py
|
DavidGillsjo/bssc-net
|
e1ffa643a2c8e3df34225f0756bad0dec9f801a2
|
[
"MIT"
] | 5
|
2021-02-25T01:59:46.000Z
|
2022-02-09T12:23:30.000Z
|
ssc/visualization/vispy_voxel.py
|
DavidGillsjo/bssc-net
|
e1ffa643a2c8e3df34225f0756bad0dec9f801a2
|
[
"MIT"
] | null | null | null |
ssc/visualization/vispy_voxel.py
|
DavidGillsjo/bssc-net
|
e1ffa643a2c8e3df34225f0756bad0dec9f801a2
|
[
"MIT"
] | 1
|
2021-04-10T04:11:02.000Z
|
2021-04-10T04:11:02.000Z
|
import vispy
vispy.use(app='egl')
from moviepy.editor import VideoClip
import numpy as np
from vispy import scene, io, visuals
from vispy.color import *
import cv2
# Check the application correctly picked up egl
assert vispy.app.use_app().backend_name == 'egl', 'Not using EGL'
if __name__ == '__main__':
import argparse
parser = argparse.ArgumentParser(description='Test plotting voxels')
parser.add_argument('gt_file', type=str, help='Path to gt file')
parser.add_argument('--animate', action='store_true', help='Yield GIF instead of PNG')
args = parser.parse_args()
from ssc.data.suncg_mapping import SUNCGMapping
import os
labels = SUNCGMapping()
gt_npz = np.load(args.gt_file)
scatter_plot_voxels(gt_npz['voxels'], labels.get_classes(), gt_npz['vox_min'], gt_npz['vox_unit'], save_path = os.getcwd() , animate = args.animate)
| 38.759124
| 170
| 0.634087
|
d09a191e2a3804cf26b16a157b643e61f06cbb1c
| 69,906
|
py
|
Python
|
__init__.py
|
state-of-the-art/BlendNet
|
0a303e34081b820370c9528c807f276eefb122dc
|
[
"Apache-2.0"
] | 66
|
2019-10-30T13:39:13.000Z
|
2022-03-23T18:33:03.000Z
|
__init__.py
|
state-of-the-art/BlendNet
|
0a303e34081b820370c9528c807f276eefb122dc
|
[
"Apache-2.0"
] | 97
|
2019-10-18T16:48:50.000Z
|
2022-02-13T18:58:45.000Z
|
__init__.py
|
state-of-the-art/BlendNet
|
0a303e34081b820370c9528c807f276eefb122dc
|
[
"Apache-2.0"
] | 12
|
2019-11-23T12:53:52.000Z
|
2021-08-09T21:15:48.000Z
|
bl_info = {
'name': 'BlendNet - distributed cloud render',
'author': 'www.state-of-the-art.io',
'version': (0, 4, 0),
'warning': 'development version',
'blender': (2, 80, 0),
'location': 'Properties --> Render --> BlendNet Render',
'description': 'Allows to easy allocate resources in cloud and '
'run the cycles rendering with getting preview '
'and results',
'wiki_url': 'https://github.com/state-of-the-art/BlendNet/wiki',
'tracker_url': 'https://github.com/state-of-the-art/BlendNet/issues',
'category': 'Render',
}
if 'bpy' in locals():
import importlib
importlib.reload(BlendNet)
importlib.reload(blend_file)
else:
from . import (
BlendNet,
)
from .BlendNet import blend_file
import os
import time
import tempfile
from datetime import datetime
import bpy
from bpy.props import (
BoolProperty,
IntProperty,
StringProperty,
EnumProperty,
PointerProperty,
CollectionProperty,
)
def loadProvidersSettings():
'''Get the available providers settings to set and load them during registration of the class'''
all_settings = BlendNet.addon.getProvidersSettings()
for provider, provider_settings in all_settings.items():
for key, data in provider_settings.items():
path = 'provider_' + provider + '_' + key
print('DEBUG: registering provider config:', path)
if data.get('type') in ('string', 'path'):
BlendNetAddonPreferences.__annotations__[path] = StringProperty(
name = data.get('name'),
description = data.get('description'),
subtype = 'FILE_PATH' if data['type'] == 'path' else 'NONE',
update = BlendNet.addon.updateProviderSettings,
)
elif data.get('type') == 'choice':
BlendNetAddonPreferences.__annotations__[path] = EnumProperty(
name = data.get('name'),
description = data.get('description'),
items = data.get('values'),
update = BlendNet.addon.updateProviderSettings,
)
# Additional field to store string value (otherwise it's hard on init when
# value of enum is integer and has no items to choose from)
BlendNetAddonPreferences.__annotations__[path+'_value'] = StringProperty(
name = data.get('name'),
description = data.get('description'),
)
else:
print('ERROR: Unknown provider "%s" setting "%s" type: %s' % (provider, key, data.get('type')))
def initPreferences():
'''Will init the preferences with defaults'''
prefs = bpy.context.preferences.addons[__package__].preferences
# Set defaults for preferences
# Update resource_provider anyway to set the addon var
prefs.resource_provider = prefs.resource_provider or BlendNet.addon.getAddonDefaultProvider()
# Since default for property will be regenerated every restart
# we generate new session id if the current one is empty
if prefs.session_id == '':
prefs.session_id = ''
if prefs.manager_password_hidden == '':
prefs.manager_password_hidden = ''
if prefs.agent_password_hidden == '':
prefs.agent_password_hidden = ''
BlendNet.addon.fillAvailableBlenderDists()
# Getting provider info to make sure all the settings are ok
# for current provider configuration
BlendNet.addon.getProviderInfo()
if __name__ == '__main__':
register()
| 38.988288
| 131
| 0.608918
|
d09d02c68d6758f8f9b41f85738e74eee8e7455d
| 3,601
|
py
|
Python
|
codonlib/codonlib.py
|
tmsincomb/codonlib
|
026bc475b3255831d749455b4c76250d56e4b91e
|
[
"MIT"
] | null | null | null |
codonlib/codonlib.py
|
tmsincomb/codonlib
|
026bc475b3255831d749455b4c76250d56e4b91e
|
[
"MIT"
] | null | null | null |
codonlib/codonlib.py
|
tmsincomb/codonlib
|
026bc475b3255831d749455b4c76250d56e4b91e
|
[
"MIT"
] | null | null | null |
"""Main module."""
from collections import defaultdict
from functools import cache
from itertools import product
from operator import itemgetter
from typing import List
import numpy as np
from Bio.Data.CodonTable import unambiguous_dna_by_id
| 34.295238
| 120
| 0.580117
|
d0a007717c7cd5164028357f50c0a77b0adfbf09
| 243
|
py
|
Python
|
M7 - python script.py
|
kfmahre/movies_neural_net
|
749d2a4e05bb24537c03c6069443da6956084055
|
[
"MIT"
] | null | null | null |
M7 - python script.py
|
kfmahre/movies_neural_net
|
749d2a4e05bb24537c03c6069443da6956084055
|
[
"MIT"
] | null | null | null |
M7 - python script.py
|
kfmahre/movies_neural_net
|
749d2a4e05bb24537c03c6069443da6956084055
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
Created on Sun Dec 15 01:37:59 2019
@author: kfmah
"""
stuff = list()
stuff.append('python')
stuff.append('chuck')
stuff.sort()
print (stuff[0])
print (stuff.__getitem__(0))
print (list.__getitem__(stuff,0))
| 12.15
| 35
| 0.650206
|
d0a233e2c4be9f3270a7527a6c44f88fc90c7f07
| 179
|
py
|
Python
|
python/ql/src/Security/CWE-022/examples/tarslip_bad.py
|
vadi2/codeql
|
a806a4f08696d241ab295a286999251b56a6860c
|
[
"MIT"
] | 4,036
|
2020-04-29T00:09:57.000Z
|
2022-03-31T14:16:38.000Z
|
python/ql/src/Security/CWE-022/examples/tarslip_bad.py
|
vadi2/codeql
|
a806a4f08696d241ab295a286999251b56a6860c
|
[
"MIT"
] | 2,970
|
2020-04-28T17:24:18.000Z
|
2022-03-31T22:40:46.000Z
|
python/ql/src/Security/CWE-022/examples/tarslip_bad.py
|
ScriptBox99/github-codeql
|
2ecf0d3264db8fb4904b2056964da469372a235c
|
[
"MIT"
] | 794
|
2020-04-29T00:28:25.000Z
|
2022-03-30T08:21:46.000Z
|
import tarfile
with tarfile.open('archive.zip') as tar:
#BAD : This could write any file on the filesystem.
for entry in tar:
tar.extract(entry, "/tmp/unpack/")
| 22.375
| 55
| 0.664804
|
d0a27cee092dbbcf3e9f8c3f67fcac4a8d78a9b8
| 2,206
|
py
|
Python
|
thermidor/classes/clusterer_socket.py
|
rcorrero/thermidor
|
01381da6dcef6d256c4438f90b46197ea89c4e7d
|
[
"BSD-3-Clause"
] | null | null | null |
thermidor/classes/clusterer_socket.py
|
rcorrero/thermidor
|
01381da6dcef6d256c4438f90b46197ea89c4e7d
|
[
"BSD-3-Clause"
] | null | null | null |
thermidor/classes/clusterer_socket.py
|
rcorrero/thermidor
|
01381da6dcef6d256c4438f90b46197ea89c4e7d
|
[
"BSD-3-Clause"
] | null | null | null |
# Author: Richard Correro
from sklearn.base import ClusterMixin
from .transformer_socket import TransformerSocket
| 30.219178
| 76
| 0.593835
|
d0a31acaeb6c9427098272c4fe196ed9fa1d7cff
| 17,639
|
py
|
Python
|
Telstra_Messaging/configuration.py
|
yashints/MessagingAPI-SDK-python
|
6cb41ed90fd237e57a6ce4ca383fa035cd842a7d
|
[
"Apache-2.0"
] | null | null | null |
Telstra_Messaging/configuration.py
|
yashints/MessagingAPI-SDK-python
|
6cb41ed90fd237e57a6ce4ca383fa035cd842a7d
|
[
"Apache-2.0"
] | null | null | null |
Telstra_Messaging/configuration.py
|
yashints/MessagingAPI-SDK-python
|
6cb41ed90fd237e57a6ce4ca383fa035cd842a7d
|
[
"Apache-2.0"
] | null | null | null |
# coding: utf-8
"""
Telstra Messaging API
# Introduction <table><tbody><tr><td class = 'into_api' style='border:none;padding:0 0 0 0'><p>Send and receive SMS and MMS messages globally using Telstra's enterprise grade Messaging API. It also allows your application to track the delivery status of both sent and received messages. Get your dedicated Australian number, and start sending and receiving messages today.</p></td><td class = 'into_api_logo' style='width: 20%;border:none'><img class = 'api_logo' style='margin: -26px 0 0 0' src = 'https://test-telstra-retail-tdev.devportal.apigee.io/sites/default/files/messagingapi-icon.png'></td></tr></tbody></table> # Features The Telstra Messaging API provides the features below. | Feature | Description | | --- | --- | | `Dedicated Number` | Provision a mobile number for your account to be used as `from` address in the API | | `Send Messages` | Sending SMS or MMS messages | | `Receive Messages` | Telstra will deliver messages sent to a dedicated number or to the `notifyURL` defined by you | | `Broadcast Messages` | Invoke a single API call to send a message to a list of numbers provided in `to` | | `Delivery Status` | Query the delivery status of your messages | | `Callbacks` | Provide a notification URL and Telstra will notify your app when a message status changes | | `Alphanumeric Identifier` | Differentiate yourself by providing an alphanumeric string in `from`. This feature is only available on paid plans | | `Concatenation` | Send messages up to 1900 characters long and Telstra will automaticaly segment and reassemble them | | `Reply Request` | Create a chat session by associating `messageId` and `to` number to track responses received from a mobile number. 
We will store this association for 8 days | | `Character set` | Accepts all Unicode characters as part of UTF-8 | | `Bounce-back response` | See if your SMS hits an unreachable or unallocated number (Australia Only) | | `Queuing` | Messaging API will automatically queue and deliver each message at a compliant rate. | | `Emoji Encoding` | The API supports the encoding of the full range of emojis. Emojis in the reply messages will be in their UTF-8 format. | ## Delivery Notification or Callbacks The API provides several methods for notifying when a message has been delivered to the destination. 1. When you send a message there is an opportunity to specify a `notifyURL`. Once the message has been delivered the API will make a call to this URL to advise of the message status. 2. If you do not specify a URL you can always call the `GET /status` API to get the status of the message. # Getting Access to the API 1. Register at [https://dev.telstra.com](https://dev.telstra.com). 2. After registration, login to [https://dev.telstra.com](https://dev.telstra.com) and navigate to the **My apps** page. 3. Create your application by clicking the **Add new app** button 4. Select **API Free Trial** Product when configuring your application. This Product includes the Telstra Messaging API as well as other free trial APIs. Your application will be approved automatically. 5. There is a maximum of 1000 free messages per developer. Additional messages and features can be purchased from [https://dev.telstra.com](https://dev.telstra.com). 6. Note your `Client key` and `Client secret` as these will be needed to provision a number for your application and for authentication. Now head over to **Getting Started** where you can find a postman collection as well as some links to sample apps and SDKs to get you started. Happy Messaging! # Frequently Asked Questions **Q: Is creating a subscription via the Provisioning call a required step?** A. Yes. 
You will only be able to start sending messages if you have a provisioned dedicated number. Use Provisioning to create a dedicated number subscription, or renew your dedicated number if it has expired. **Q: When trying to send an SMS I receive a `400 Bad Request` response. How can I fix this?** A. You need to make sure you have a provisioned dedicated number before you can send an SMS. If you do not have a provisioned dedicated number and you try to send a message via the API, you will get the error below in the response: <pre><code class=\"language-sh\">{ \"status\":\"400\", \"code\":\"DELIVERY-IMPOSSIBLE\", \"message\":\"Invalid \\'from\\' address specified\" }</code></pre> Use Provisioning to create a dedicated number subscription, or renew your dedicated number if it has expired. **Q: How long does my dedicated number stay active for?** A. When you provision a dedicated number, by default it will be active for 30 days. You can use the `activeDays` parameter during the provisioning call to increment or decrement the number of days your dedicated number will remain active. Note that Free Trial apps will have 30 days as the maximum `activeDays` they can add to their provisioned number. If the Provisioning call is made several times within that 30-Day period, it will return the `expiryDate` in the Unix format and will not add any activeDays until after that `expiryDate`. **Q: Can I send a broadcast message using the Telstra Messaging API?** A. Yes. Recipient numbers can be in the form of an array of strings if a broadcast message needs to be sent, allowing you to send to multiple mobile numbers in one API call. A sample request body for this will be: `{\"to\":[\"+61412345678\",\"+61487654321\"],\"body\":\"Test Message\"}` **Q: Can I send SMS and MMS to all countries?** A. You can send SMS and MMS to all countries EXCEPT to countries which are subject to global sanctions namely: Burma, Cte d'Ivoire, Cuba, Iran, North Korea, Syria. 
**Q: Can I use `Alphanumeric Identifier` from my paid plan via credit card?** A. `Alphanumeric Identifier` is only available on Telstra Account paid plans, not through credit card paid plans. **Q: What is the maximum sized MMS that I can send?** A. This will depend on the carrier that will receive the MMS. For Telstra it's up to 2MB, Optus up to 1.5MB and Vodafone only allows up to 500kB. You will need to check with international carriers for thier MMS size limits. **Q: How is the size of an MMS calculated?** A. Images are scaled up to approximately 4/3 when base64 encoded. Additionally, there is approximately 200 bytes of overhead on each MMS. Assuming the maximum MMS that can be sent on Telstras network is 2MB, then the maximum image size that can be sent will be approximately 1.378MB (1.378 x 1.34 + 200, without SOAP encapsulation). **Q: How is an MMS classified as Small or Large?** A. MMSes with size below 600kB are classed as Small whereas those that are bigger than 600kB are classed as Large. They will be charged accordingly. **Q: Are SMILs supported by the Messaging API?** A. While there will be no error if you send an MMS with a SMIL presentation, the actual layout or sequence defined in the SMIL may not display as expected because most of the new smartphone devices ignore the SMIL presentation layer. SMIL was used in feature phones which had limited capability and SMIL allowed a *powerpoint type* presentation to be provided. Smartphones now have the capability to display video which is the better option for presentations. It is recommended that MMS messages should just drop the SMIL. **Q: How do I assign a delivery notification or callback URL?** A. You can assign a delivery notification or callback URL by adding the `notifyURL` parameter in the body of the request when you send a message. Once the message has been delivered, a notification will then be posted to this callback URL. 
**Q: What is the difference between the `notifyURL` parameter in the Provisoning call versus the `notifyURL` parameter in the Send Message call?** A. The `notifyURL` in the Provisoning call will be the URL where replies to the provisioned number will be posted. On the other hand, the `notifyURL` in the Send Message call will be the URL where the delivery notification will be posted, e.g. when an SMS has already been delivered to the recipient. # Getting Started Below are the steps to get started with the Telstra Messaging API. 1. Generate an OAuth2 token using your `Client key` and `Client secret`. 2. Use the Provisioning call to create a subscription and receive a dedicated number. 3. Send a message to a specific mobile number. ## Run in Postman <a href=\"https://app.getpostman.com/run-collection/ded00578f69a9deba256#?env%5BMessaging%20API%20Environments%5D=W3siZW5hYmxlZCI6dHJ1ZSwia2V5IjoiY2xpZW50X2lkIiwidmFsdWUiOiIiLCJ0eXBlIjoidGV4dCJ9LHsiZW5hYmxlZCI6dHJ1ZSwia2V5IjoiY2xpZW50X3NlY3JldCIsInZhbHVlIjoiIiwidHlwZSI6InRleHQifSx7ImVuYWJsZWQiOnRydWUsImtleSI6ImFjY2Vzc190b2tlbiIsInZhbHVlIjoiIiwidHlwZSI6InRleHQifSx7ImVuYWJsZWQiOnRydWUsImtleSI6Imhvc3QiLCJ2YWx1ZSI6InRhcGkudGVsc3RyYS5jb20iLCJ0eXBlIjoidGV4dCJ9LHsiZW5hYmxlZCI6dHJ1ZSwia2V5IjoiQXV0aG9yaXphdGlvbiIsInZhbHVlIjoiIiwidHlwZSI6InRleHQifSx7ImVuYWJsZWQiOnRydWUsImtleSI6Im9hdXRoX2hvc3QiLCJ2YWx1ZSI6InNhcGkudGVsc3RyYS5jb20iLCJ0eXBlIjoidGV4dCJ9LHsiZW5hYmxlZCI6dHJ1ZSwia2V5IjoibWVzc2FnZV9pZCIsInZhbHVlIjoiIiwidHlwZSI6InRleHQifV0=\"><img src=\"https://run.pstmn.io/button.svg\" alt=\"Run in Postman\"/></a> ## Sample Apps - [Perl Sample App](https://github.com/telstra/MessagingAPI-perl-sample-app) - [Happy Chat App](https://github.com/telstra/messaging-sample-code-happy-chat) - [PHP Sample App](https://github.com/developersteve/telstra-messaging-php) ## SDK Repos - [Messaging API - PHP SDK](https://github.com/telstra/MessagingAPI-SDK-php) - [Messaging API - Python SDK](https://github.com/telstra/MessagingAPI-SDK-python) - [Messaging 
API - Ruby SDK](https://github.com/telstra/MessagingAPI-SDK-ruby) - [Messaging API - NodeJS SDK](https://github.com/telstra/MessagingAPI-SDK-node) - [Messaging API - .Net2 SDK](https://github.com/telstra/MessagingAPI-SDK-dotnet) - [Messaging API - Java SDK](https://github.com/telstra/MessagingAPI-SDK-Java) ## Blog Posts For more information on the Messaging API, you can read these blog posts: - [Callbacks Part 1](https://dev.telstra.com/content/understanding-messaging-api-callbacks-part-1) - [Callbacks Part 2](https://dev.telstra.com/content/understanding-messaging-api-callbacks-part-2) # noqa: E501
OpenAPI spec version: 2.2.9
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import copy
import logging
import multiprocessing
import sys
import urllib3
import six
from six.moves import http_client as httplib
def get_basic_auth_token(self):
"""Gets HTTP basic authentication header (string).
:return: The token for basic HTTP authentication.
"""
return urllib3.util.make_headers(
basic_auth=self.username + ':' + self.password
).get('authorization')
def auth_settings(self):
"""Gets Auth Settings dict for api client.
:return: The Auth Settings information dict.
"""
return {
'auth':
{
'type': 'oauth2',
'in': 'header',
'key': 'Authorization',
'value': 'Bearer ' + self.access_token
},
}
def to_debug_report(self):
"""Gets the essential information for debugging.
:return: The report for debugging.
"""
return "Python SDK Debug Report:\n"\
"OS: {env}\n"\
"Python Version: {pyversion}\n"\
"Version of the API: 2.2.9\n"\
"SDK Package Version: 1.0.6".\
format(env=sys.platform, pyversion=sys.version)
| 73.495833
| 10,226
| 0.704122
|
d0a3ddac31091e48614d1cfeaf2b19071cf7215f
| 1,598
|
py
|
Python
|
gym_minigrid/envs/mygridworld.py
|
nathan-miller23/gym-minigrid
|
4ed9e6a511be88a49903f107003951977d86d842
|
[
"Apache-2.0"
] | null | null | null |
gym_minigrid/envs/mygridworld.py
|
nathan-miller23/gym-minigrid
|
4ed9e6a511be88a49903f107003951977d86d842
|
[
"Apache-2.0"
] | null | null | null |
gym_minigrid/envs/mygridworld.py
|
nathan-miller23/gym-minigrid
|
4ed9e6a511be88a49903f107003951977d86d842
|
[
"Apache-2.0"
] | null | null | null |
from gym_minigrid.minigrid import *
from gym_minigrid.register import register
| 30.730769
| 162
| 0.646433
|
d0a76a2ccea01e9241f0631317ad97ebe4b3a680
| 86
|
py
|
Python
|
dai10shou/code10-1.py
|
naoshige314/workshop01
|
5c7be08f99eb164b7901628de26cecfd04fa926f
|
[
"MIT"
] | null | null | null |
dai10shou/code10-1.py
|
naoshige314/workshop01
|
5c7be08f99eb164b7901628de26cecfd04fa926f
|
[
"MIT"
] | null | null | null |
dai10shou/code10-1.py
|
naoshige314/workshop01
|
5c7be08f99eb164b7901628de26cecfd04fa926f
|
[
"MIT"
] | 2
|
2021-06-10T11:53:02.000Z
|
2021-06-20T15:43:39.000Z
|
#
n=7
#
G=[[] for _ in range(n)]
G[0]=[1,2]
G[1]=[0,3]
G[2]=[0,4,5]
#etc.
| 7.818182
| 24
| 0.476744
|
d0a7c3be5b482ff8c7670f19fff95f3a54a4face
| 5,854
|
py
|
Python
|
tawsocks/tcp_relay.py
|
comeacrossyun/skyun
|
96ef2e98da2826863850c8b917bf1cba8a8a796b
|
[
"MIT"
] | 1
|
2021-05-23T15:50:25.000Z
|
2021-05-23T15:50:25.000Z
|
tawsocks/tcp_relay.py
|
comeacrossyun/skyun
|
96ef2e98da2826863850c8b917bf1cba8a8a796b
|
[
"MIT"
] | null | null | null |
tawsocks/tcp_relay.py
|
comeacrossyun/skyun
|
96ef2e98da2826863850c8b917bf1cba8a8a796b
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# coding=utf-8
# @Time : 2019-06-04
# @Author : hongshu
import sys
import asyncio
from tawsocks import common
| 39.823129
| 118
| 0.591561
|
d0a857e2bc8d397632de8161aef9ad14a63435bd
| 1,033
|
py
|
Python
|
exchange/utils/http_util.py
|
inasie/PyExchange
|
7d40517c8145f92ac8068d5a0f25cc44ecddc82b
|
[
"MIT"
] | 12
|
2018-07-31T14:56:33.000Z
|
2021-05-24T23:47:51.000Z
|
exchange/utils/http_util.py
|
inasie/PyExchange
|
7d40517c8145f92ac8068d5a0f25cc44ecddc82b
|
[
"MIT"
] | 1
|
2018-08-17T09:16:04.000Z
|
2018-08-17T09:16:04.000Z
|
exchange/utils/http_util.py
|
inasie/PyExchange
|
7d40517c8145f92ac8068d5a0f25cc44ecddc82b
|
[
"MIT"
] | 8
|
2018-07-31T14:57:37.000Z
|
2021-05-24T23:47:52.000Z
|
# -*- coding: utf-8 -*-
import requests
import json
import logging
| 25.825
| 73
| 0.522749
|
d0aa2fe0ec18a3eff28a11f7344074671b7a9434
| 452
|
py
|
Python
|
src/glitchygames/movement/vertical.py
|
terrysimons/ghettogames
|
23773119d1994251b43c42db39c1c99c08386c24
|
[
"BSD-3-Clause"
] | 1
|
2019-07-06T02:01:27.000Z
|
2019-07-06T02:01:27.000Z
|
src/glitchygames/movement/vertical.py
|
terrysimons/ghettogames
|
23773119d1994251b43c42db39c1c99c08386c24
|
[
"BSD-3-Clause"
] | 4
|
2021-12-31T04:18:01.000Z
|
2022-03-29T13:40:32.000Z
|
src/glitchygames/movement/vertical.py
|
terrysimons/glitchygames
|
23773119d1994251b43c42db39c1c99c08386c24
|
[
"BSD-3-Clause"
] | 1
|
2019-07-12T19:41:09.000Z
|
2019-07-12T19:41:09.000Z
|
"""
Vertical:
Adds movement functions along the vertical (Y) axis to a game object
"""
| 18.833333
| 68
| 0.634956
|
d0aadc614a084b433d38993f99643e2433d5d14d
| 3,828
|
py
|
Python
|
pip_services3_datadog/clients/DataDogLogClient.py
|
pip-services3-python/pip-services3-datadog-python
|
5d4549685b8486f1fc663b0e50ea52d019095909
|
[
"MIT"
] | null | null | null |
pip_services3_datadog/clients/DataDogLogClient.py
|
pip-services3-python/pip-services3-datadog-python
|
5d4549685b8486f1fc663b0e50ea52d019095909
|
[
"MIT"
] | null | null | null |
pip_services3_datadog/clients/DataDogLogClient.py
|
pip-services3-python/pip-services3-datadog-python
|
5d4549685b8486f1fc663b0e50ea52d019095909
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
import datetime
from typing import Optional, List, Any
from pip_services3_commons.config import ConfigParams
from pip_services3_commons.convert import StringConverter
from pip_services3_commons.errors import ConfigException
from pip_services3_commons.refer import IReferences
from pip_services3_components.auth import CredentialResolver
from pip_services3_rpc.clients import RestClient
from pip_services3_datadog.clients.DataDogLogMessage import DataDogLogMessage
| 35.119266
| 97
| 0.648642
|
d0aaf1fbb6455df5e13ef467cbacc908e5245647
| 2,802
|
py
|
Python
|
gui/composition_worker.py
|
ivanovwaltz/wavelet_sound_microscope
|
ff14d82135193a3d20543e84a9e6a81f884b1cf7
|
[
"MIT"
] | null | null | null |
gui/composition_worker.py
|
ivanovwaltz/wavelet_sound_microscope
|
ff14d82135193a3d20543e84a9e6a81f884b1cf7
|
[
"MIT"
] | null | null | null |
gui/composition_worker.py
|
ivanovwaltz/wavelet_sound_microscope
|
ff14d82135193a3d20543e84a9e6a81f884b1cf7
|
[
"MIT"
] | null | null | null |
import logging
import os
from functools import partial
from PIL.Image import Image
from PyQt5.QtCore import QObject, pyqtSignal, QThread
from PyQt5.QtWidgets import QProgressDialog
from .threading import QThreadedWorkerDebug as QThreadedWorker
from analyze.composition import Composition, Spectrogram
from analyze.media.sound import Sound, SoundResampled
from utils import ProgressProxy
SAMPLERATE = 1024 * 16
log = logging.getLogger(__name__)
| 25.472727
| 77
| 0.664525
|
d0ab2fa07ec216f8af2558d0b182fc6b664345b5
| 61,891
|
py
|
Python
|
metalpipe/node.py
|
zacernst/nanostream
|
382389b09c42b55c6bdb64c7b0017d4810c7165f
|
[
"MIT"
] | 2
|
2019-04-12T19:32:55.000Z
|
2019-12-24T16:50:09.000Z
|
metalpipe/node.py
|
zacernst/metalpipe
|
382389b09c42b55c6bdb64c7b0017d4810c7165f
|
[
"MIT"
] | 10
|
2019-04-03T01:25:52.000Z
|
2019-12-16T05:09:35.000Z
|
metalpipe/node.py
|
zacernst/nanostream
|
382389b09c42b55c6bdb64c7b0017d4810c7165f
|
[
"MIT"
] | 1
|
2019-04-17T12:55:19.000Z
|
2019-04-17T12:55:19.000Z
|
"""
Node module
===========
The ``node`` module contains the ``MetalNode`` class, which is the foundation
for MetalPipe.
"""
import time
import datetime
import uuid
import importlib
import logging
import os
import threading
import pprint
import sys
import copy
import random
import functools
import csv
import MySQLdb
import re
import io
import yaml
import types
import inspect
import prettytable
import requests
import graphviz
from timed_dict.timed_dict import TimedDict
from metalpipe.message.batch import BatchStart, BatchEnd
from metalpipe.message.message import MetalPipeMessage
from metalpipe.node_queue.queue import MetalPipeQueue
from metalpipe.message.canary import Canary
from metalpipe.utils.set_attributes import set_kwarg_attributes
from metalpipe.utils.data_structures import Row, MySQLTypeSystem
from metalpipe.utils import data_structures as ds
# from metalpipe.metalpipe_recorder import RedisFixturizer
from metalpipe.utils.helpers import (
load_function,
replace_by_path,
remap_dictionary,
set_value,
get_value,
to_bool,
aggregate_values,
)
DEFAULT_MAX_QUEUE_SIZE = int(os.environ.get("DEFAULT_MAX_QUEUE_SIZE", 128))
MONITOR_INTERVAL = 1
STATS_COUNTER_MODULO = 4
LOGJAM_THRESHOLD = 0.25
SHORT_DELAY = 0.1
PROMETHEUS = False
def no_op(*args, **kwargs):
"""
No-op function to serve as default ``get_runtime_attrs``.
"""
return None
def _get_message_content(self, one_item):
# Get the content of a specific keypath, if one has
# been defined in the ``MetalNode`` initialization.
message_content = (
get_value(one_item.message_content, self.input_message_keypath)
if len(self.input_message_keypath) > 0
else one_item.message_content
)
if (
isinstance(message_content, (dict,))
and len(message_content) == 1
and "__value__" in message_content
):
message_content = message_content["__value__"]
return message_content
def wait_for_pipeline_finish(self):
while not self.pipeline_finished:
time.sleep(SHORT_DELAY)
def input_queues_empty(self):
"""
Tests whether there are any messages on any of the node's input
queues.
Returns:
bool: ``True`` if input queues are all empty.
"""
return all(queue.empty for queue in self.input_queue_list)
def cleanup(self):
"""
If there is any cleanup (closing files, shutting down database connections),
necessary when the node is stopped, then the node's class should provide
a ``cleanup`` method. By default, the method is just a logging statement.
"""
self.log_info("in null cleanup")
yield NothingToSeeHere()
def _cleanup(self):
self.log_info("Cleanup called after shutdown.")
for i in self.cleanup():
yield i
# Send termination message here
if self.send_termination_message:
yield Terminated(self)
for q in self.output_queue_list:
while not q.empty:
pass
self.log_info("setting cleanup_called to True")
self.cleanup_called = True
def log_info(self, message=""):
logging.info(
"{node_name}: {message}".format(node_name=self.name, message=message)
)
def terminate_pipeline(self, error=False):
"""
This method can be called on any node in a pipeline, and it will cause
all of the nodes to terminate if they haven't stopped already.
Args:
error (bool): Not yet implemented.
"""
self.log_info("terminate_pipeline called..." + str(self.name))
for node in self.all_connected():
node.terminate = True
for q in node.output_queue_list:
q.drain()
# if not node.finished:
# node.stopped_at = datetime.datetime.now()
# print('setting node.terminate')
# node.terminate = True
def process_item(self, *args, **kwargs):
"""
Default no-op for nodes.
"""
pass
def stream(self):
"""
Called in each ``MetalNode`` thread.
"""
self.status = "running"
if getattr(self, "_import_pydatalog", False):
from pyDatalog import pyDatalog, Logic
Logic(self.logic_engine)
try:
for output, previous_message in self.start():
logging.debug("In MetalNode.stream.stream() --> " + str(output))
for output_queue in self.output_queue_list:
self.messages_sent_counter += 1
output_queue.put(
output,
block=True,
timeout=None,
queue_event=self.queue_event,
previous_message=previous_message,
)
# if 1 or not isinstance(output, (NothingToSeeHere,)) and output is not None:
except Exception as error:
self.status = "error"
self.stopped_at = datetime.datetime.now()
raise error
self.status = "success"
self.stopped_at = datetime.datetime.now()
def all_connected(self, seen=None):
"""
Returns all the nodes connected (directly or indirectly) to ``self``.
This allows us to loop over all the nodes in a pipeline even if we
have a handle on only one. This is used by ``global_start``, for
example.
Args:
seen (set): A set of all the nodes that have been identified as
connected to ``self``.
Returns:
(set of ``MetalNode``): All the nodes connected to ``self``. This
includes ``self``.
"""
seen = seen or set()
if isinstance(self, (DynamicClassMediator,)):
for node_name, node_dict in self.node_dict.items():
node_obj = node_dict["obj"]
seen = seen | node_obj.all_connected(seen=seen)
else:
if self not in seen:
seen.add(self)
for node in self.input_node_list + self.output_node_list:
if node in seen:
continue
seen.add(node)
seen = seen | node.all_connected(seen=seen)
return seen
def broadcast(self, broadcast_message):
"""
Puts the message into all the input queues for all connected nodes.
"""
for node in self.all_connected():
for input_queue in node.input_queue_list:
input_queue.put(broadcast_message)
def global_start(
self, prometheus=False, pipeline_name=None, max_time=None, fixturize=False,
):
"""
Starts every node connected to ``self``. Mainly, it:
1. calls ``start()`` on each node
#. sets some global variables
#. optionally starts some experimental code for monitoring
"""
def prometheus_init():
"""
Experimental code for enabling Prometheus monitoring.
"""
from prometheus_client import (
start_http_server,
Summary,
Gauge,
Histogram,
Counter,
)
for node in self.all_connected():
node.prometheus_objects = {}
summary = Summary(
node.name + "_incoming", "Summary of incoming messages"
)
node.prometheus_objects["incoming_message_summary"] = summary
node.prometheus_objects["outgoing_message_summary"] = Gauge(
node.name + "_outgoing", "Summary of outgoing messages"
)
start_http_server(8000)
if PROMETHEUS:
prometheus_init()
# thread_dict = self.thread_dict
global_dict = {}
run_id = uuid.uuid4().hex
for node in self.all_connected():
# Set the pipeline name on the attribute of each node
node.pipeline_name = pipeline_name or uuid.uuid4().hex
# Set a unique run_id
node.run_id = run_id
node.fixturize = fixturize
node.global_dict = global_dict # Establishing shared globals
logging.debug("global_start:" + str(self))
# Create thread event here?
thread = threading.Thread(
target=MetalNode.stream, args=(node,), daemon=False
)
thread.start()
node.thread_dict = self.thread_dict
self.thread_dict[node.name] = thread
node.status = "running"
monitor_thread = threading.Thread(
target=MetalNode.thread_monitor,
args=(self,),
kwargs={"max_time": max_time},
daemon=True,
)
monitor_thread.start()
def draw_pipeline(self):
"""
Draw the pipeline structure using graphviz.
"""
dot = graphviz.Digraph()
for node in self.all_connected():
dot.node(node.name, node.name, shape="box")
for node in self.all_connected():
for target_node in node.output_node_list:
dot.edge(node.name, target_node.name)
dot.render("pipeline_drawing.gv", view=True)
def thread_monitor(self, max_time=None):
"""
This function loops over all of the threads in the pipeline, checking
that they are either ``finished`` or ``running``. If any have had an
abnormal exit, terminate the entire pipeline.
"""
counter = 0
error = False
time_started = time.time()
while not self.pipeline_finished:
logging.debug("MONITOR THREAD")
time.sleep(MONITOR_INTERVAL)
counter += 1
if max_time is not None:
print("checking max_time...")
if time.time() - time_started >= max_time:
self.pipeline_finished = True
print("finished because of max_time")
for node in self.all_connected():
node.finished = True
continue
# Check whether all the workers have ``.finished``
# self.pipeline_finished = all(
# node.finished for node in self.all_connected())
if counter % STATS_COUNTER_MODULO == 0:
table = prettytable.PrettyTable(
["Node", "Class", "Received", "Sent", "Queued", "Status", "Time",]
)
for node in sorted(list(self.all_connected()), key=lambda x: x.name):
if node.status == "running":
status_color = bcolors.WARNING
elif node.status == "stopped":
status_color = ""
elif node.status == "error":
status_color = bcolors.FAIL
error = True
elif node.status == "success":
status_color = bcolors.OKGREEN
else:
assert False
if node.logjam >= LOGJAM_THRESHOLD:
logjam_color = bcolors.FAIL
else:
logjam_color = ""
table.add_row(
[
logjam_color + node.name + bcolors.ENDC,
node.__class__.__name__,
node.messages_received_counter,
node.messages_sent_counter,
node.input_queue_size,
status_color + node.status + bcolors.ENDC,
node.time_running,
]
)
self.log_info("\n" + str(table))
if error:
logging.error("Terminating due to error.")
self.terminate_pipeline(error=True)
# self.pipeline_finished = True
break
# Check for blocked nodes
for node in self.all_connected():
input_queue_full = [
input_queue.approximately_full()
for input_queue in node.input_queue_list
]
output_queue_full = [
output_queue.approximately_full()
for output_queue in node.output_queue_list
]
logjam = (
not node.is_source
and all(input_queue_full)
and not any(output_queue_full)
)
node.logjam_score["polled"] += 1
logging.debug("LOGJAM SCORE: {logjam}".format(logjam=str(node.logjam)))
if logjam:
node.logjam_score["logjam"] += 1
logging.debug(
"LOGJAM {logjam} {name}".format(logjam=logjam, name=node.name)
)
self.log_info("Pipeline finished.")
self.log_info("Sending terminate signal to nodes.")
self.log_info("Messages that are being processed will complete.")
# HERE
if error:
self.log_info("Abnormal exit")
sys.exit(1)
else:
self.log_info("Normal exit.")
sys.exit(0)
class CounterOfThings(MetalNode):
class FunctionOfMessage(MetalNode):
class MockNode(MetalNode):
"""
This is only intended for doing unit tests, etc.
"""
class InsertData(MetalNode):
class RandomSample(MetalNode):
"""
Lets through only a random sample of incoming messages. Might be useful
for testing, or when only approximate results are necessary.
"""
class SubstituteRegex(MetalNode):
class CSVToDictionaryList(MetalNode):
class SequenceEmitter(MetalNode):
"""
Emits ``sequence`` ``max_sequences`` times, or forever if
``max_sequences`` is ``None``.
"""
class GetEnvironmentVariables(MetalNode):
"""
This node reads environment variables and stores them in the message.
The required keyword argument for this node is ``environment_variables``,
which is a list of -- you guessed it! -- environment variables. By
default, they will be read and stored in the outgoing message under
keys with the same names as the environment variables. E.g. ``FOO_VAR``
will be stored in the message ``{"FOO_BAR": whatever}``.
Optionally, you can provide a dictionary to the ``mappings`` keyword
argument, which maps environment variable names to new names. E.g.
if ``mappings = {"FOO_VAR": "bar_var"}``, then the value of ``FOO_VAR``
will be stored in the message ``{"bar_var": whatever}``.
If the environment variable is not defined, then its value will be
set to ``None``.
Args:
mappings (dict): An optional dictionary mapping environment variable
names to new names.
environment_variables (list): A list of environment variable names.
"""
class SimpleTransforms(MetalNode):
class Serializer(MetalNode):
"""
Takes an iterable thing as input, and successively yields its items.
"""
class AggregateValues(MetalNode):
"""
Does that.
"""
class Filter(MetalNode):
"""
Applies tests to each message and filters out messages that don't pass
Built-in tests:
key_exists
value_is_true
value_is_not_none
Example:
{'test': 'key_exists',
'key': mykey}
"""
def process_item(self):
if self.test in ["key_exists", "value_is_not_none", "value_is_true"]:
result = (
getattr(self, "_" + self.test)(self.__message__, self.test_keypath)
== self.value
)
else:
raise Exception("Unknown test: {test_name}".format(test_name=test))
if result:
logging.debug("Sending message through")
yield self.message
else:
logging.debug("Blocking message: " + str(self.__message__))
yield NothingToSeeHere()
class StreamMySQLTable(MetalNode):
# def get_schema(self):
# self.cursor.execute(self.table_schema_query)
# table_schema = self.cursor.fetchall()
# return table_schema
class PrinterOfThings(MetalNode):
class ConstantEmitter(MetalNode):
"""
Send a thing every n seconds
"""
class TimeWindowAccumulator(MetalNode):
"""
Every N seconds, put the latest M seconds data on the queue.
"""
def get_node_dict(node_config):
node_dict = {}
for node_config in node_config["nodes"]:
node_class = globals()[node_config["class"]]
node_name = node_config["name"]
node_dict[node_name] = {}
node_dict[node_name]["class"] = node_class
frozen_arguments = node_config.get("frozen_arguments", {})
node_dict[node_name]["frozen_arguments"] = frozen_arguments
node_obj = node_class(**frozen_arguments)
node_dict[node_name]["remapping"] = node_config.get("arg_mapping", {})
return node_dict
def kwarg_remapper(f, **kwarg_mapping):
reverse_mapping = {value: key for key, value in kwarg_mapping.items()}
logging.debug("kwarg_mapping:" + str(kwarg_mapping))
parameters = [i for i, _ in list(inspect.signature(f).parameters.items())]
for kwarg in parameters:
if kwarg not in kwarg_mapping:
reverse_mapping[kwarg] = kwarg
return remapped_function
def template_class(
class_name, parent_class, kwargs_remapping, frozen_arguments_mapping
):
kwargs_remapping = kwargs_remapping or {}
frozen_init = functools.partial(parent_class.__init__, **frozen_arguments_mapping)
if isinstance(parent_class, (str,)):
parent_class = globals()[parent_class]
cls = type(class_name, (parent_class,), {})
setattr(cls, "__init__", kwarg_remapper(frozen_init, **kwargs_remapping))
return cls
def class_factory(raw_config):
new_class = type(raw_config["name"], (DynamicClassMediator,), {})
new_class.node_dict = get_node_dict(raw_config)
new_class.class_name = raw_config["name"]
new_class.edge_list_dict = raw_config.get("edges", [])
new_class.raw_config = raw_config
for node_name, node_config in new_class.node_dict.items():
_class = node_config["class"]
cls = template_class(
node_name,
_class,
node_config["remapping"],
node_config["frozen_arguments"],
)
setattr(cls, "raw_config", raw_config)
node_config["cls_obj"] = cls
# Inject?
globals()[new_class.__name__] = new_class
return new_class
if __name__ == "__main__":
pass
| 35.185333
| 97
| 0.587194
|
d0ac701e358934362e43d3495ffc036dc298f1b4
| 184
|
py
|
Python
|
Curso_de_Python_ Curso_em_Video/PythonTeste/condicoesEx001.py
|
DanilooSilva/Cursos_de_Python
|
8f167a4c6e16f01601e23b6f107578aa1454472d
|
[
"MIT"
] | null | null | null |
Curso_de_Python_ Curso_em_Video/PythonTeste/condicoesEx001.py
|
DanilooSilva/Cursos_de_Python
|
8f167a4c6e16f01601e23b6f107578aa1454472d
|
[
"MIT"
] | null | null | null |
Curso_de_Python_ Curso_em_Video/PythonTeste/condicoesEx001.py
|
DanilooSilva/Cursos_de_Python
|
8f167a4c6e16f01601e23b6f107578aa1454472d
|
[
"MIT"
] | null | null | null |
import random

# Simple guessing game: the player tries to guess the computer's 1-5 pick.
numeroPc = random.randint(1, 5)
# FIX: restored accented characters garbled by a bad encoding round-trip
# ("nmero" -> "número", "Parabns" -> "Parabéns").
numeroUsuario = int(input('Digite um número: '))
print('Parabéns vc acertou!' if numeroPc == numeroUsuario else 'O Computador venceu')
| 23
| 85
| 0.73913
|
d0ae7da86f116336cc253188f9639ca495e26131
| 5,739
|
py
|
Python
|
src/get_device_id.py
|
icemanzz/SPS_NM_Scripts
|
f7b9e05e441d35715ee98bf1e2a73765a3a8d7c9
|
[
"Apache-2.0"
] | null | null | null |
src/get_device_id.py
|
icemanzz/SPS_NM_Scripts
|
f7b9e05e441d35715ee98bf1e2a73765a3a8d7c9
|
[
"Apache-2.0"
] | null | null | null |
src/get_device_id.py
|
icemanzz/SPS_NM_Scripts
|
f7b9e05e441d35715ee98bf1e2a73765a3a8d7c9
|
[
"Apache-2.0"
] | null | null | null |
import pyipmi
import pyipmi.interfaces
import os
import re
import datetime
import os.path
import time
import math
import numpy
import mmap
import array
import getopt
import sys
#Inmport path
sys.path.append('../src')
from aardvark_initial import *
#Inmport path
sys.path.append('../')
from os_parameters_define import *
from utility_function import *
from nm_ipmi_raw_to_str import *
from error_messages_define import *
from nm_functions import *
from config import *
## Define Delay Time check function
## Define Delay Time check function
## Define Input parameters lenth check
## _Main_ ##
# Initial aardvark
#ipmi = aardvark_ipmi_init(target_me_addr, target_me_bridge_channel)
# Check delay time parameter
# Validate the CLI arguments, then parse the delay and loop-count parameters.
sts = parameter_check(sys.argv)
if(sts == PASS):
    print 'Check Delay Time parameter setting'
    sts, delay_time = delay_check(str(sys.argv[1]))
    print ( "delay time = %d " %(delay_time) )
    sts, loop_number = loop_check(str(sys.argv[2]))
    print ("loop_number = " , loop_number)
else:
    sts = ERROR
# Main polling loop: repeatedly issue Get Device ID over IPMI and report
# the firmware/feature fields of the response.
if(sts == PASS):
    print 'Start to Send Get Device ID..'
    while loop_number :
        sts, sps_version, platform, dcmi, nm, image = get_device_id_py(ipmi)
        # Add delay time 5 secs to make sure me go back to stable mode
        time.sleep(delay_time)
        # Show Result
        print('SPS Version = '+ sps_version)
        print('platform = %d' %platform )
        print('dcmi =%d' %dcmi)
        print('nm = %d' %nm)
        print('image = %d' %(image))
        # The literal string 'loop' means run forever; otherwise count down.
        if( loop_number == 'loop' ):
            loop_number = True
        else:
            loop_number = loop_number -1
            # NOTE(review): as indented, the error check only runs on the
            # countdown branch -- confirm it should not also abort endless
            # ('loop') mode on error.
            if(sts == ERROR ):
                loop_number = False
                break
    else:
        print' Done! '
| 31.021622
| 194
| 0.638439
|
d0af8ccc38db80b7705a16b0b92de3ffc09909b1
| 321
|
py
|
Python
|
submissions/arc068/b.py
|
m-star18/atcoder
|
08e475810516602fa088f87daf1eba590b4e07cc
|
[
"Unlicense"
] | 1
|
2021-05-10T01:16:28.000Z
|
2021-05-10T01:16:28.000Z
|
submissions/arc068/b.py
|
m-star18/atcoder
|
08e475810516602fa088f87daf1eba590b4e07cc
|
[
"Unlicense"
] | 3
|
2021-05-11T06:14:15.000Z
|
2021-06-19T08:18:36.000Z
|
submissions/arc068/b.py
|
m-star18/atcoder
|
08e475810516602fa088f87daf1eba590b4e07cc
|
[
"Unlicense"
] | null | null | null |
import sys
from collections import Counter

read = sys.stdin.buffer.read
readline = sys.stdin.buffer.readline
readlines = sys.stdin.buffer.readlines
sys.setrecursionlimit(10 ** 7)


def solve(n, a):
    """Return the maximum number of distinct values that can remain.

    Duplicates are removed two at a time, so when the number of surplus
    cards ``n - distinct`` is odd, one distinct value must be sacrificed
    to keep the removal count even.
    """
    distinct = len(Counter(a))
    if (n - distinct) % 2 == 1:
        distinct -= 1
    return distinct


def main():
    """Read ``n`` followed by ``n`` integers from stdin and print the answer."""
    n, *a = map(int, read().split())
    print(solve(n, a))


# FIX: guard the stdin read so the module can be imported (e.g. by tests)
# without consuming stdin at import time.
if __name__ == "__main__":
    main()
| 21.4
| 38
| 0.697819
|
d0af98534b90208cb7e4f06f1ab2ae7e3d283c93
| 9,730
|
py
|
Python
|
server/plato/test/test_domains.py
|
zhlooking/plato
|
9daf0dfd8b376603453eadf2d981c71d3adb2632
|
[
"MIT"
] | null | null | null |
server/plato/test/test_domains.py
|
zhlooking/plato
|
9daf0dfd8b376603453eadf2d981c71d3adb2632
|
[
"MIT"
] | null | null | null |
server/plato/test/test_domains.py
|
zhlooking/plato
|
9daf0dfd8b376603453eadf2d981c71d3adb2632
|
[
"MIT"
] | null | null | null |
import json
from plato import db
from plato.model.user import User
from plato.test.base import BaseTestCase
from plato.test.utils import add_user, add_domain
| 39.076305
| 84
| 0.481809
|
d0b20375cd75fe0eef53b990d01615a34d6461be
| 442
|
py
|
Python
|
create.py
|
chen-robert/hackcmu21
|
0728af0aa4f61b1969d0b73f7e8688fee90c1cb9
|
[
"MIT"
] | null | null | null |
create.py
|
chen-robert/hackcmu21
|
0728af0aa4f61b1969d0b73f7e8688fee90c1cb9
|
[
"MIT"
] | null | null | null |
create.py
|
chen-robert/hackcmu21
|
0728af0aa4f61b1969d0b73f7e8688fee90c1cb9
|
[
"MIT"
] | null | null | null |
import sqlite3
import datetime

# Shared column layout for both tables.
# NOTE: ID is DEPRECATED
_SCHEMA = (
    "(id TEXT, lat NUMERIC, lon NUMERIC, alt NUMERIC, "
    "time TIMESTAMP DEFAULT CURRENT_TIMESTAMP)"
)

conn = sqlite3.connect('database.db')
print("Opened database successfully")
try:
    for table in ("simulated", "locations"):
        conn.execute("CREATE TABLE " + table + " " + _SCHEMA)
    # Explicit commit: don't rely on the driver's implicit DDL autocommit.
    conn.commit()
    print("Table created successfully")
finally:
    # Close the connection even if table creation fails (e.g. table exists).
    conn.close()
| 40.181818
| 129
| 0.791855
|
d0b20f9be0257673e00f2b9f9aa968fab5295bbd
| 3,091
|
py
|
Python
|
tests/components/test_ts_component.py
|
T4rk1n/dazzler
|
69c49422dc19c910445ab265b1d3481041de8f43
|
[
"MIT"
] | 15
|
2019-12-19T11:57:30.000Z
|
2021-11-15T23:34:41.000Z
|
tests/components/test_ts_component.py
|
T4rk1n/dazzler
|
69c49422dc19c910445ab265b1d3481041de8f43
|
[
"MIT"
] | 196
|
2019-09-21T15:10:14.000Z
|
2022-03-31T11:07:48.000Z
|
tests/components/test_ts_component.py
|
T4rk1n/dazzler
|
69c49422dc19c910445ab265b1d3481041de8f43
|
[
"MIT"
] | 7
|
2019-10-30T19:38:15.000Z
|
2021-12-01T04:54:16.000Z
|
# A bit of duplication of the component system tests to ensure
# typescript components are transpiled properly to Python.
# Types are tested in test_mypy.
import json
import re
import pytest
from . import ts_components as tsc
def test_tsc_enum_docstring():
    """Enum-typed props must list their allowed values in the docstring."""
    doc = tsc.TypedComponent.__init__.__doc__
    for prop in ("enumeration", "defined_enum"):
        assert ":param %s: (Possible values: 'foo', 'bar')" % prop in doc
| 31.865979
| 117
| 0.693303
|
d0b22aa7904b846e9743534781f5c71318798017
| 9,371
|
py
|
Python
|
python test generator/main.py
|
ElDonad/Tixel-Dungeon
|
ad622e570a06bf7722cdf15dcc33547ba14aada4
|
[
"MIT"
] | null | null | null |
python test generator/main.py
|
ElDonad/Tixel-Dungeon
|
ad622e570a06bf7722cdf15dcc33547ba14aada4
|
[
"MIT"
] | null | null | null |
python test generator/main.py
|
ElDonad/Tixel-Dungeon
|
ad622e570a06bf7722cdf15dcc33547ba14aada4
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
import random
import numpy as np
import colorama
from colorama import Fore, Back
import copy
colorama.init()
LEFT = 'lft'
RIGHT = 'rgt'
UP = 'up'
DOWN = 'dwn'
HORIZONTAL = 'horizontal'
VERTICAL = 'vertical'
print("beginning...")
level = []
for x in range(50):
level.append([])
for y in range(50):
level[x].append(".")
rooms = []
corridors = []
for a in range(0,1000):
generateLevel()
print("finished generation !")
print(len(rooms))
count = 1
for room in rooms:
for x in range(room.x, room.x + room.w):
level[x][room.y] = str(count)#""
level[x][room.y + room.h] = str(count)
for y in range(room.y, room.y + room.h):
level[room.x][y] = str(count)
level[room.x + room.w][y] = str(count)
count += 1
print("nombre de corridors : ", len(corridors))
for corridor in corridors:
print("nombre de corridors : ", len(corridor.straights))
for straight in corridor.straights:
print("origine", straight.x, ', ',straight.y,"oritentation : ",straight.orientation, "length : ", straight.length)
if straight.orientation == VERTICAL:
for y in range(straight.y,straight.y + straight.length, np.sign(straight.length)):
for x in range(straight.x -1,straight.x + 1 + 1,2):
#level[x][y] = ""
pass
level[straight.x][y]=Fore.RED + "." + Fore.WHITE
elif straight.orientation == HORIZONTAL:
print("horizontal")
for x in range(straight.x,straight.x + straight.length, np.sign(straight.length)):
for y in range(straight.y -1,straight.y + 1 + 1,2):
#level[x][y] = ""
pass
level[x][straight.y]= Fore.RED + "." + Fore.WHITE
for line in level:
lineC = " "
print(lineC.join(line))
rooms = []
corridors = []
level = []
for x in range(50):
level.append([])
for y in range(50):
level[x].append(".")
print("loop position : ",a)
input()
| 33.230496
| 146
| 0.555224
|
d0b36a7b39c48086c567c97c9b01212d0a865743
| 255
|
py
|
Python
|
src/py/vmw/ui/vmw_launcher.py
|
jp-uom/variant_matrix_wizard
|
c5d7ac509be6d6a2020ab38f49c28df090a03c1d
|
[
"MIT"
] | 1
|
2017-12-27T11:56:33.000Z
|
2017-12-27T11:56:33.000Z
|
src/py/vmw/ui/vmw_launcher.py
|
jp-uom/variant_matrix_wizard
|
c5d7ac509be6d6a2020ab38f49c28df090a03c1d
|
[
"MIT"
] | null | null | null |
src/py/vmw/ui/vmw_launcher.py
|
jp-uom/variant_matrix_wizard
|
c5d7ac509be6d6a2020ab38f49c28df090a03c1d
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
import wx

import vmwizard as vmw

if __name__ == '__main__':
    # Standard wx bootstrap: application object first, then the top frame.
    app = wx.App(False)
    main_frame = wx.Frame(None, wx.ID_ANY, "Variant Matrix")
    wizard = vmw.Wizard(main_frame)  # keep a reference for the frame's lifetime
    main_frame.Show(True)
    main_frame.Centre()
    app.MainLoop()
| 15.9375
| 55
| 0.639216
|
d0b370195e62577b0993491b41073f0838231b20
| 2,308
|
py
|
Python
|
Modules/Scripted/DMRIInstall/DMRIInstall.py
|
TheInterventionCentre/NorMIT-Plan-App
|
765ed9a5dccc1cc134b65ccabe93fc132baeb2ea
|
[
"MIT"
] | null | null | null |
Modules/Scripted/DMRIInstall/DMRIInstall.py
|
TheInterventionCentre/NorMIT-Plan-App
|
765ed9a5dccc1cc134b65ccabe93fc132baeb2ea
|
[
"MIT"
] | null | null | null |
Modules/Scripted/DMRIInstall/DMRIInstall.py
|
TheInterventionCentre/NorMIT-Plan-App
|
765ed9a5dccc1cc134b65ccabe93fc132baeb2ea
|
[
"MIT"
] | null | null | null |
import os
import string
import textwrap
import unittest
import vtk, qt, ctk, slicer
from slicer.ScriptedLoadableModule import *
import logging
#
# DMRIInstall
#
| 29.589744
| 96
| 0.731369
|
d0b43ab4f6dd3ba972b2dc8c30789b6cc19eaa03
| 24,698
|
py
|
Python
|
opcalendar/models.py
|
buahaha/allianceauth-opcalendar
|
44e50e06eac4b5c0e6b809e5ca2638af5e49145f
|
[
"MIT"
] | null | null | null |
opcalendar/models.py
|
buahaha/allianceauth-opcalendar
|
44e50e06eac4b5c0e6b809e5ca2638af5e49145f
|
[
"MIT"
] | null | null | null |
opcalendar/models.py
|
buahaha/allianceauth-opcalendar
|
44e50e06eac4b5c0e6b809e5ca2638af5e49145f
|
[
"MIT"
] | null | null | null |
import requests
import json
from typing import Tuple
from datetime import timedelta, datetime
from django.db import models
from django.urls import reverse
from django.contrib.auth.models import User
from django.utils import timezone
from django.utils.translation import ugettext_lazy as _
from django.utils.html import strip_tags
from django.contrib.auth.models import Group
from esi.errors import TokenExpiredError, TokenInvalidError
from esi.models import Token
from allianceauth.authentication.models import CharacterOwnership
from allianceauth.eveonline.models import EveCharacter, EveCorporationInfo
from allianceauth.services.hooks import get_extension_logger
from allianceauth.authentication.models import State
from .providers import esi
from .decorators import fetch_token_for_owner
logger = get_extension_logger(__name__)
class Owner(models.Model):
"""A corporation that holds the calendars"""
ERROR_NONE = 0
ERROR_TOKEN_INVALID = 1
ERROR_TOKEN_EXPIRED = 2
ERROR_INSUFFICIENT_PERMISSIONS = 3
ERROR_NO_CHARACTER = 4
ERROR_ESI_UNAVAILABLE = 5
ERROR_OPERATION_MODE_MISMATCH = 6
ERROR_UNKNOWN = 99
ERRORS_LIST = [
(ERROR_NONE, "No error"),
(ERROR_TOKEN_INVALID, "Invalid token"),
(ERROR_TOKEN_EXPIRED, "Expired token"),
(ERROR_INSUFFICIENT_PERMISSIONS, "Insufficient permissions"),
(ERROR_NO_CHARACTER, "No character set for fetching data from ESI"),
(ERROR_ESI_UNAVAILABLE, "ESI API is currently unavailable"),
(
ERROR_OPERATION_MODE_MISMATCH,
"Operaton mode does not match with current setting",
),
(ERROR_UNKNOWN, "Unknown error"),
]
corporation = models.OneToOneField(
EveCorporationInfo,
default=None,
null=True,
blank=True,
on_delete=models.CASCADE,
help_text="Corporation owning the calendar",
related_name="+",
)
character = models.ForeignKey(
CharacterOwnership,
on_delete=models.SET_DEFAULT,
default=None,
null=True,
blank=True,
help_text="Character used for syncing the calendar",
related_name="+",
)
event_visibility = models.ForeignKey(
EventVisibility,
on_delete=models.CASCADE,
null=True,
blank=True,
help_text=_("Visibility filter that dictates who is able to see this event"),
)
operation_type = models.ForeignKey(
EventCategory,
null=True,
blank=True,
on_delete=models.CASCADE,
help_text=_(
"Event category that will be assigned for all of the events from this owner."
),
)
is_active = models.BooleanField(
default=True,
help_text=("whether this owner is currently included in the sync process"),
)
def token(self, scopes=None) -> Tuple[Token, int]:
"""returns a valid Token for the owner"""
token = None
error = None
# abort if character is not configured
if self.character is None:
logger.error("%s: No character configured to sync", self)
error = self.ERROR_NO_CHARACTER
# abort if character does not have sufficient permissions
elif self.corporation and not self.character.user.has_perm(
"opcalendar.add_ingame_calendar_owner"
):
logger.error(
"%s: This character does not have sufficient permission to sync corporation calendars",
self,
)
error = self.ERROR_INSUFFICIENT_PERMISSIONS
# abort if character does not have sufficient permissions
elif not self.character.user.has_perm("opcalendar.add_ingame_calendar_owner"):
logger.error(
"%s: This character does not have sufficient permission to sync personal calendars",
self,
)
error = self.ERROR_INSUFFICIENT_PERMISSIONS
else:
try:
# get token
token = (
Token.objects.filter(
user=self.character.user,
character_id=self.character.character.character_id,
)
.require_scopes(scopes)
.require_valid()
.first()
)
except TokenInvalidError:
logger.error("%s: Invalid token for fetching calendars", self)
error = self.ERROR_TOKEN_INVALID
except TokenExpiredError:
logger.error("%s: Token expired for fetching calendars", self)
error = self.ERROR_TOKEN_EXPIRED
else:
if not token:
logger.error("%s: No token found with sufficient scopes", self)
error = self.ERROR_TOKEN_INVALID
return token, error
class IngameEvents(models.Model):
event_id = models.PositiveBigIntegerField(
primary_key=True, help_text="The EVE ID of the event"
)
owner = models.ForeignKey(
Owner,
on_delete=models.CASCADE,
help_text="Event holder",
)
event_start_date = models.DateTimeField()
event_end_date = models.DateTimeField(blank=True, null=True)
title = models.CharField(max_length=128)
text = models.TextField()
event_owner_id = models.IntegerField(null=True)
owner_type = models.CharField(max_length=128)
owner_name = models.CharField(max_length=128)
host = models.ForeignKey(
EventHost,
on_delete=models.CASCADE,
default=1,
help_text=_("Host entity for the event"),
)
importance = models.CharField(max_length=128)
duration = models.CharField(max_length=128)
| 33.285714
| 309
| 0.604907
|
d0b49d08acf472e125d49a19fc95585b9f897f91
| 5,603
|
py
|
Python
|
scripts/mot_neural_solver/pl_module/pair_nuclei.py
|
taimurhassan/crc
|
930be78505dd17655542a38b0fc1ded9cf19a9a2
|
[
"MIT"
] | 1
|
2022-03-16T10:40:23.000Z
|
2022-03-16T10:40:23.000Z
|
scripts/mot_neural_solver/pl_module/pair_nuclei.py
|
taimurhassan/crc
|
930be78505dd17655542a38b0fc1ded9cf19a9a2
|
[
"MIT"
] | null | null | null |
scripts/mot_neural_solver/pl_module/pair_nuclei.py
|
taimurhassan/crc
|
930be78505dd17655542a38b0fc1ded9cf19a9a2
|
[
"MIT"
] | null | null | null |
import sacred
from sacred import Experiment
import os.path as osp
import pandas as pd
import scipy.io as sio
import numpy as np
from sacred import SETTINGS
SETTINGS.CONFIG.READ_ONLY_CONFIG=False
| 29.489474
| 124
| 0.493129
|
d0b6dbf00473c06ab74f4d07421e49558388e75e
| 90
|
py
|
Python
|
test.py
|
Jiyao17/fl-grouping
|
37ada217cdf9121c9d7119f311228e87ba4a8e83
|
[
"MIT"
] | null | null | null |
test.py
|
Jiyao17/fl-grouping
|
37ada217cdf9121c9d7119f311228e87ba4a8e83
|
[
"MIT"
] | null | null | null |
test.py
|
Jiyao17/fl-grouping
|
37ada217cdf9121c9d7119f311228e87ba4a8e83
|
[
"MIT"
] | 1
|
2022-01-29T22:31:43.000Z
|
2022-01-29T22:31:43.000Z
|
import numpy as np

# 3x3 grid of the integers 1..9; advanced ("fancy") indexing with a list
# of row indices selects rows 0 and 2 in a single operation.
arrs = np.arange(1, 10).reshape(3, 3)
print(arrs[[0, 2]])
| 18
| 50
| 0.511111
|
d0b8c5cb52a0f84127322d8ea824dbfd7a2fbbb9
| 1,087
|
py
|
Python
|
onnxmltools/convert/coreml/operator_converters/ArrayFeatureExtractor.py
|
szha/onnxmltools
|
b04d05bda625cbc006955ce0a220277739a95825
|
[
"MIT"
] | 3
|
2019-02-27T21:03:43.000Z
|
2020-04-07T22:16:50.000Z
|
onnxmltools/convert/coreml/operator_converters/ArrayFeatureExtractor.py
|
szha/onnxmltools
|
b04d05bda625cbc006955ce0a220277739a95825
|
[
"MIT"
] | null | null | null |
onnxmltools/convert/coreml/operator_converters/ArrayFeatureExtractor.py
|
szha/onnxmltools
|
b04d05bda625cbc006955ce0a220277739a95825
|
[
"MIT"
] | 2
|
2020-10-01T09:24:55.000Z
|
2021-04-17T13:57:31.000Z
|
# -------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
# --------------------------------------------------------------------------
from ....proto import onnx_proto
from ...common._registration import register_converter
register_converter('arrayFeatureExtractor', convert_array_feature_extractor)
| 41.807692
| 117
| 0.678933
|
d0b987edf568de32ee6c05d30261bbe4ded56c15
| 2,682
|
py
|
Python
|
constants.py
|
xuefei1/Graph-Seq2Attn
|
336c69877e483c95d9996ee205d2a005342f08af
|
[
"MIT"
] | 1
|
2020-01-06T07:49:46.000Z
|
2020-01-06T07:49:46.000Z
|
constants.py
|
xuefei1/Graph-Seq2Attn
|
336c69877e483c95d9996ee205d2a005342f08af
|
[
"MIT"
] | 1
|
2020-04-16T10:15:27.000Z
|
2020-04-16T16:41:42.000Z
|
constants.py
|
xuefei1/Graph-Seq2Attn
|
336c69877e483c95d9996ee205d2a005342f08af
|
[
"MIT"
] | null | null | null |
# one identifier for one types of dict
# for instance, DK_SOME_KEY means this is a key for a data_dict
DK_BATCH_SIZE = "batch_size"
DK_PAD = "pad" # DK: general purpose data_dict
DK_SRC_WID = "src_wid" # src = msg + ctx
DK_SRC_WID_MASK = "src_wid_mask"
DK_SRC_SEQ_MASK = "src_seq_mask"
DK_MSG_WID = "msg_wid" # msg is usually shorter than ctx
DK_MSG_WID_MASK = "msg_wid_mask"
DK_CTX_WID = "ctx_wid" # msg is usually shorter than ctx
DK_CTX_WID_MASK = "ctx_wid_mask"
DK_SRC_POS = "src_pos"
DK_SRC_NER = "src_ner"
DK_SRC_SEG_LISTS = "src_seg_lists"
DK_TGT_GEN_WID = "tgt_gen_wid"
DK_TGT_CPY_WID = "tgt_cpy_wid"
DK_TGT_CPY_GATE = "tgt_cpy_gate"
DK_TGT_N_TOKEN = "tgt_n_token"
DK_TGT_SEG_LISTS = "tgt_seg_lists"
DK_SRC_IOB = "src_iob" # iob: SQuAD QG specific
DK_DOC_WID = "doc_wid"
DK_DOC_SEG_LISTS = "doc_seg_lists"
DK_DOC_WID_MASK = "doc_wid_mask"
DK_DOC_SENTS_WID = "doc_sents_wid"
DK_DOC_SENTS_WID_MASK = "doc_sents_wid_mask"
DK_TITLE_WID = "title_wid"
DK_TQ_SEG_LISTS = "title_seg_lists"
DK_TITLE_WID_MASK = "title_wid_mask"
DK_CONCEPT_SEG_LISTS = "concept_seg_lists"
DK_TGT_CONCEPT_GEN_WID = "tgt_concept_gen_wid" # concept gen specific
DK_TGT_CONCEPT_CPY_WID = "tgt_concept_cpy_wid"
DK_TGT_CONCEPT_CPY_GATE = "tgt_concept_cpy_gate"
DK_TGT_CONCEPT_N_TOKEN = "tgt_concept_n_token"
DK_TGT_TITLE_GEN_WID = "tgt_title_gen_wid" # title gen specific
DK_TGT_TITLE_CPY_WID = "tgt_title_cpy_wid"
DK_TGT_TITLE_CPY_GATE = "tgt_title_cpy_gate"
DK_TGT_TITLE_N_TOKEN = "tgt_title_n_token"
DK_SENT_DEPEND_GRAPH_LIST = "sent_depend_graph_list"
DK_DOC_KW_DIST_GRAPH = "doc_kw_dist_graph"
DK_DOC_SENT_MEAN_TFIDF_SIM_GRAPH = "doc_sent_mean_tfidf_sim_graph"
DK_DOC_SENT_PAIR_TFIDF_SIM_GRAPH = "doc_sent_pair_tfidf_sim_graph"
DK_DOC_SENT_WORD_OVERLAP_GRAPH = "doc_sent_word_overlap_graph"
DK_G2S_WID_GRAPH = "graph2seq_wid_graph"
SQGK_SRC_W_LIST = "src_word_list" # SQGK: SQuAD data reader keys
SQGK_SRC_IOB_LIST = "src_iob_list"
SQGK_SRC_POS_LIST = "src_pos_list"
SQGK_SRC_NER_LIST = "src_ner_list"
SQGK_TGT_W_LIST = "tgt_word_list"
SQGK_DATA_LIST = "data_list"
SQGK_IOB_T2I = "iob_t2i"
SQGK_POS_T2I = "pos_t2i"
SQGK_NER_T2I = "ner_t2i"
CHKPT_COMPLETED_EPOCHS = "completed_epochs" # CHKPT: checkpoint dict keys
CHKPT_MODEL = "model"
CHKPT_OPTIMIZER = "optimizer"
CHKPT_METADATA = "metadata"
CHKPT_PARAMS = "params"
CHKPT_BEST_EVAL_RESULT = "best_eval_result"
CHKPT_BEST_EVAL_EPOCH = "best_eval_epoch"
CHKPT_PAST_EVAL_RESULTS = "past_eval_results"
GK_EDGE_WEIGHT = "edge_weight" # GK: graph keys
GK_EDGE_WORD_PAIR = "edge_word_pair"
GK_EDGE_GV_IDX_PAIR = "edge_v_idx_pair"
GK_EDGE_TYPE = "edge_type"
GK_EDGE_DIR = "edge_directed"
GK_EDGE_UNDIR = "edge_undirected"
GK_SENT_DEP = "sentence_depends"
| 37.774648
| 73
| 0.818792
|
d0ba6c15a6c14b45dd62608fe761ce634451a9c5
| 794
|
py
|
Python
|
register/views/list.py
|
Bartlett-Christopher/coaching_manual
|
43e6dd582f74afa3e0c89203cd01380638f8ed7f
|
[
"MIT"
] | null | null | null |
register/views/list.py
|
Bartlett-Christopher/coaching_manual
|
43e6dd582f74afa3e0c89203cd01380638f8ed7f
|
[
"MIT"
] | 6
|
2020-05-18T05:38:26.000Z
|
2021-09-22T19:02:10.000Z
|
register/views/list.py
|
Bartlett-Christopher/coaching_manual
|
43e6dd582f74afa3e0c89203cd01380638f8ed7f
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
.. module:: register.views.list
:synopsis: View to list all registered users
.. moduleauthor:: Chris Bartlett
"""
from django.urls import reverse
from django.views.generic import TemplateView
from register.api.utils.make_request import make_request
| 25.612903
| 61
| 0.667506
|
d0bd703517c8b3f6a8e778d87ff497a305805d45
| 12,308
|
py
|
Python
|
tests/evaluator_test.py
|
NightShade256/prymate
|
deeb81ab685854599d803719971e85ead6699a90
|
[
"MIT"
] | 6
|
2020-06-22T14:54:55.000Z
|
2021-12-13T12:33:21.000Z
|
tests/evaluator_test.py
|
NightShade256/prymate
|
deeb81ab685854599d803719971e85ead6699a90
|
[
"MIT"
] | null | null | null |
tests/evaluator_test.py
|
NightShade256/prymate
|
deeb81ab685854599d803719971e85ead6699a90
|
[
"MIT"
] | 1
|
2020-10-11T18:31:57.000Z
|
2020-10-11T18:31:57.000Z
|
import unittest
from prymate import evaluator, objects
from prymate.lexer import Lexer
from prymate.parser import Parser
if __name__ == "__main__":
unittest.main()
| 32.474934
| 87
| 0.456451
|
d0be74bdfe9cb84b8767afe5f63676a2412c89f4
| 1,074
|
py
|
Python
|
chrome/common/extensions/docs/examples/extensions/native_messaging/echo.py
|
codenote/chromium-test
|
0637af0080f7e80bf7d20b29ce94c5edc817f390
|
[
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | 1
|
2018-03-10T13:08:49.000Z
|
2018-03-10T13:08:49.000Z
|
chrome/common/extensions/docs/examples/extensions/native_messaging/echo.py
|
codenote/chromium-test
|
0637af0080f7e80bf7d20b29ce94c5edc817f390
|
[
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | null | null | null |
chrome/common/extensions/docs/examples/extensions/native_messaging/echo.py
|
codenote/chromium-test
|
0637af0080f7e80bf7d20b29ce94c5edc817f390
|
[
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | 1
|
2020-11-04T07:25:45.000Z
|
2020-11-04T07:25:45.000Z
|
#!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# A simple native client in python.
# All this client does is echo the text it receives back at the extension.
import sys
import struct
if __name__ == '__main__':
Main()
| 25.571429
| 74
| 0.633147
|
d0bf49499bee89967eb7d175fcbf84d7e2af6904
| 1,140
|
py
|
Python
|
covid_19/users_app/models.py
|
MikePolyakov/djanjo_project
|
4d80cac9142bacdaa91b5f7be0c7377a365c3db9
|
[
"MIT"
] | null | null | null |
covid_19/users_app/models.py
|
MikePolyakov/djanjo_project
|
4d80cac9142bacdaa91b5f7be0c7377a365c3db9
|
[
"MIT"
] | 6
|
2021-06-04T23:11:44.000Z
|
2022-03-12T00:29:55.000Z
|
covid_19/users_app/models.py
|
id2k1149/django_project
|
4d80cac9142bacdaa91b5f7be0c7377a365c3db9
|
[
"MIT"
] | null | null | null |
from django.db import models
from django.contrib.auth.models import AbstractUser
from django.db.models.signals import post_save
from django.dispatch import receiver
# Create your models here.
from django.dispatch import receiver
# ( , )
# @receiver(post_save, sender=AppUser)
# def create_profile(sender, instance, **kwargs):
# print(' ')
# if not Profile.objects.filter(user=instance).exists():
# Profile.objects.create(user=instance)
| 30.810811
| 79
| 0.728947
|
d0bf9ddf2a1b5e4b50f545954e0579d25793cb8e
| 1,748
|
py
|
Python
|
Wrappers/Python/setup.py
|
lauramurgatroyd/CILViewer
|
3aafa4693498a55ffd270c55118399dd807dee5f
|
[
"Apache-2.0"
] | null | null | null |
Wrappers/Python/setup.py
|
lauramurgatroyd/CILViewer
|
3aafa4693498a55ffd270c55118399dd807dee5f
|
[
"Apache-2.0"
] | null | null | null |
Wrappers/Python/setup.py
|
lauramurgatroyd/CILViewer
|
3aafa4693498a55ffd270c55118399dd807dee5f
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
# Copyright 2017 Edoardo Pasca
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Created on Wed Jun 7 09:57:13 2017
@author: ofn77899
"""
from distutils.core import setup
#from setuptools import setup, find_packages
import os
import sys
cil_version = "20.07.4"
setup(
name="ccpi-viewer",
version=cil_version,
packages=['ccpi','ccpi.viewer', 'ccpi.viewer.utils'],
install_requires=['numpy','vtk'],
# Project uses reStructuredText, so ensure that the docutils get
# installed or upgraded on the target machine
#install_requires=['docutils>=0.3'],
# package_data={
# # If any package contains *.txt or *.rst files, include them:
# '': ['*.txt', '*.rst'],
# # And include any *.msg files found in the 'hello' package, too:
# 'hello': ['*.msg'],
# },
zip_safe = False,
# metadata for upload to PyPI
author="Edoardo Pasca",
author_email="edo.paskino@gmail.com",
description='CCPi Core Imaging Library - VTK Viewer Module',
license="Apache v2.0",
keywords="3D data viewer",
url="http://www.ccpi.ac.uk", # project home page, if any
# could also include long_description, download_url, classifiers, etc.
)
| 31.214286
| 76
| 0.677346
|
d0c0175702c2bf4073b22292f0de9bec50aa18ec
| 4,658
|
py
|
Python
|
src/das/model_analyzer/analyzer_args.py
|
saifullah3396/doc_shap
|
0b65912bc9abc8721b5a8aec008a438fa13e8cbf
|
[
"Apache-2.0"
] | null | null | null |
src/das/model_analyzer/analyzer_args.py
|
saifullah3396/doc_shap
|
0b65912bc9abc8721b5a8aec008a438fa13e8cbf
|
[
"Apache-2.0"
] | null | null | null |
src/das/model_analyzer/analyzer_args.py
|
saifullah3396/doc_shap
|
0b65912bc9abc8721b5a8aec008a438fa13e8cbf
|
[
"Apache-2.0"
] | null | null | null |
"""
Defines the dataclass for holding training related arguments.
"""
import json
import math
import sys
from dataclasses import asdict, dataclass, field
from enum import Enum
from typing import Any, Dict, List, Optional, Union
from das.models.model_args import ModelArguments
from das.utils.basic_utils import create_logger
logger = create_logger(__name__)
SUPPORTED_MODEL_ARGUMENTS = {
"generate_metrics": GenerateMetricsTaskArguments,
"generate_robustness_metrics": GenerateRobustnessMetricsTaskArguments,
"generate_shap_values": GenerateShapValuesTaskArguments,
"generate_shap_visualizations": GenerateShapVisualizationsTaskArguments,
"feature_perturbation": FeaturePerturbationTaskArguments,
"similar_images_clustering": SimilarImagesClusteringTaskArguments,
"feature_perturbation_analysis": FeaturePerturbationAnalysisTaskArguments,
}
| 30.444444
| 86
| 0.738729
|
d0c3051d812d65b6baa90af1922f0a2918135e6d
| 128
|
py
|
Python
|
django_cenvars/tools/sanitize.py
|
martinphellwig/django-cenvars
|
2b7ae6e719fa6ae7ffb8f0cedad615114064dab1
|
[
"BSD-2-Clause"
] | null | null | null |
django_cenvars/tools/sanitize.py
|
martinphellwig/django-cenvars
|
2b7ae6e719fa6ae7ffb8f0cedad615114064dab1
|
[
"BSD-2-Clause"
] | null | null | null |
django_cenvars/tools/sanitize.py
|
martinphellwig/django-cenvars
|
2b7ae6e719fa6ae7ffb8f0cedad615114064dab1
|
[
"BSD-2-Clause"
] | null | null | null |
"""
Perform sanitization check prior of releasing the app as ready.
"""
def main():
    """Perform sanitization checks"""
| 16
| 63
| 0.695313
|
d0c48f41277d0c455a9e37dabfa1c49d07148ba0
| 1,982
|
py
|
Python
|
src/server/ClientHandler.py
|
ENDERZOMBI102/chatapp
|
3f54e72a8d3b10457cf88ec5f87b2984cc84a51f
|
[
"MIT"
] | 1
|
2021-06-20T05:47:53.000Z
|
2021-06-20T05:47:53.000Z
|
src/server/ClientHandler.py
|
ENDERZOMBI102/chatapp
|
3f54e72a8d3b10457cf88ec5f87b2984cc84a51f
|
[
"MIT"
] | null | null | null |
src/server/ClientHandler.py
|
ENDERZOMBI102/chatapp
|
3f54e72a8d3b10457cf88ec5f87b2984cc84a51f
|
[
"MIT"
] | null | null | null |
import asyncio
import traceback
from asyncio import StreamWriter, StreamReader, Task
from .BaseClientHandler import BaseClientHandler
from data import Message
| 30.492308
| 95
| 0.700807
|
d0c5b0e690a24fec09fd97682f7f29681f7e57f6
| 8,658
|
py
|
Python
|
strangeflix/room/consumers.py
|
samsoldeinstein/webster2020
|
9795635e806caa261bb33d629f3d1f2bd603638c
|
[
"MIT"
] | 6
|
2020-11-02T16:40:56.000Z
|
2020-11-07T06:59:00.000Z
|
strangeflix/room/consumers.py
|
samsoldeinstein/webster2020
|
9795635e806caa261bb33d629f3d1f2bd603638c
|
[
"MIT"
] | null | null | null |
strangeflix/room/consumers.py
|
samsoldeinstein/webster2020
|
9795635e806caa261bb33d629f3d1f2bd603638c
|
[
"MIT"
] | 2
|
2020-11-03T05:20:25.000Z
|
2020-11-03T05:38:47.000Z
|
# chat/consumers.py
import json
from channels.generic.websocket import AsyncWebsocketConsumer
from .models import RoomControl
from channels.db import database_sync_to_async
| 36.378151
| 102
| 0.473897
|
d0c819007b9eb94c341aa70c5a8a5172d3857e95
| 7,342
|
py
|
Python
|
src/cnnclustering/hooks.py
|
janjoswig/CNN
|
06ab0e07da46141cca941e99ac1a11ddc7ce233d
|
[
"MIT"
] | 4
|
2020-06-16T13:33:57.000Z
|
2021-01-05T18:19:57.000Z
|
src/cnnclustering/hooks.py
|
janjoswig/CNN
|
06ab0e07da46141cca941e99ac1a11ddc7ce233d
|
[
"MIT"
] | 12
|
2019-10-22T09:15:09.000Z
|
2020-07-02T09:42:44.000Z
|
src/cnnclustering/hooks.py
|
janjoswig/CommonNNClustering
|
06ab0e07da46141cca941e99ac1a11ddc7ce233d
|
[
"MIT"
] | null | null | null |
import numpy as np
from cnnclustering._primitive_types import P_AINDEX, P_AVALUE
from cnnclustering import _types, _fit
COMPONENT_ALT_KW_MAP = {
"input": "input_data",
"data": "input_data",
"n": "neighbours",
"na": "neighbours",
"nb": "neighbour_neighbours",
"getter": "neighbours_getter",
"ogetter": "neighbours_getter_other",
"ngetter": "neighbours_getter",
"ongetter": "neighbours_getter_other",
"dgetter": "distance_getter",
"checker": "similarity_checker",
"q": "queue",
}
COMPONENT_KW_TYPE_ALIAS_MAP = {
"neighbour_neighbours": "neighbours",
"neighbour_getter_other": "neighbours_getter",
}
COMPONENT_NAME_TYPE_MAP = {
"input_data": {
"components_mview": _types.InputDataExtComponentsMemoryview,
"neighbourhoods_mview": _types.InputDataExtNeighbourhoodsMemoryview
},
"neighbours_getter": {
"brute_force": _types.NeighboursGetterExtBruteForce,
"lookup": _types.NeighboursGetterExtLookup,
},
"distance_getter": {
"metric": _types.DistanceGetterExtMetric,
"lookup": _types.DistanceGetterExtLookup,
},
"neighbours": {
"vector": _types.NeighboursExtVector,
"uset": _types.NeighboursExtCPPUnorderedSet,
"vuset": _types.NeighboursExtVectorCPPUnorderedSet,
},
"metric": {
"dummy": _types.MetricExtDummy,
"precomputed": _types.MetricExtPrecomputed,
"euclidean": _types.MetricExtEuclidean,
"euclidean_r": _types.MetricExtEuclideanReduced,
"euclidean_periodic_r": _types.MetricExtEuclideanPeriodicReduced,
"euclidean_reduced": _types.MetricExtEuclideanReduced,
"euclidean_periodic_reduced": _types.MetricExtEuclideanPeriodicReduced,
},
"similarity_checker": {
"contains": _types.SimilarityCheckerExtContains,
"switch": _types.SimilarityCheckerExtSwitchContains,
"screen": _types.SimilarityCheckerExtScreensorted,
},
"queue": {
"fifo": _types.QueueExtFIFOQueue
},
"fitter": {
"bfs": _fit.FitterExtBFS,
"bfs_debug": _fit.FitterExtBFSDebug
}
}
def prepare_pass(data):
"""Dummy preparation hook
Use if no preparation of input data is desired.
Args:
data: Input data that should be prepared.
Returns:
(data,), {}
"""
return (data,), {}
def prepare_points_from_parts(data):
    r"""Prepare input data points

    Use when point components are passed as sequence of parts, e.g. as

        >>> input_data, meta = prepare_points_from_parts([[[0, 0],
        ...                                                [1, 1]],
        ...                                               [[2, 2],
        ...                                                [3, 3]]])
        >>> input_data
        array([[0, 0],
               [1, 1],
               [2, 2],
               [3, 3]])

        >>> meta
        {'edges': [2, 2]}

    Recognised data formats are:

        * Sequence of length *d*:
          interpreted as 1 point with *d* components.
        * 2D Sequence (sequence of sequences all of same length) with
          length *n* (rows) and width *d* (columns):
          interpreted as *n* points with *d* components.
        * Sequence of 2D sequences all of same width:
          interpreted as parts (groups) of points.

    The returned input data format is compatible with:

        * `cnnclustering._types.InputDataExtPointsMemoryview`

    Args:
        data: Input data that should be prepared.

    Returns:
        * Formatted input data (NumPy array of shape
          :math:`\sum n_\mathrm{part}, d`)
        * Dictionary of meta-information

    Notes:
        Does not catch deeper nested formats.
    """
    # Non-sequence input: let the TypeError from `len` propagate.
    # (A previous explicit `except TypeError: raise error` handler was
    # a no-op and has been removed.)
    d1 = len(data)

    parts = None
    if d1 == 0:
        # Empty sequence -> a single empty part
        parts = [np.array([[]])]

    if parts is None:
        try:
            d2 = [len(x) for x in data]
            all_d2_equal = (len(set(d2)) == 1)
        except TypeError:
            # Elements have no length -> 1D sequence, i.e. one point
            # with d1 components
            parts = [np.array([data])]

    if parts is None:
        try:
            d3 = [len(y) for x in data for y in x]
            all_d3_equal = (len(set(d3)) == 1)
        except TypeError:
            if not all_d2_equal:
                raise ValueError(
                    "Dimension mismatch"
                )
            # 2D sequence of sequences of same length -> n points
            parts = [np.asarray(data)]

    if parts is None:
        if not all_d3_equal:
            raise ValueError(
                "Dimension mismatch"
            )
        # Sequence of 2D sequences of same width -> parts of points
        parts = [np.asarray(x) for x in data]

    # Record part sizes so the flat stacked array can be split again.
    meta = {"edges": [x.shape[0] for x in parts]}

    data_args = (np.asarray(np.vstack(parts), order="C", dtype=P_AVALUE),)
    data_kwargs = {"meta": meta}

    return data_args, data_kwargs
def prepare_neighbourhoods(data):
    """Prepare neighbourhood information by padding

    Args:
        data: Expects a sequence of sequences with neighbour indices.

    Returns:
        Data as a 2D NumPy array of shape (#points, max. number of neighbours)
        and a 1D array with the actual number of neighbours for each point (data
        args). Also returns meta information (data kwargs).
    """
    n_neighbours = [len(s) for s in data]
    # `default=0` keeps an empty input valid; a plain `max()` would
    # raise ValueError on an empty sequence.
    pad_to = max(n_neighbours, default=0)
    # Right-pad every neighbour list with zeros so all rows have equal
    # length; the true counts are preserved in `n_neighbours`.
    data = [
        np.pad(a, (0, pad_to - n_neighbours[i]), mode="constant", constant_values=0)
        for i, a in enumerate(data)
    ]
    meta = {}
    data_args = (
        np.asarray(data, order="C", dtype=P_AINDEX),
        np.asarray(n_neighbours, dtype=P_AINDEX)
    )
    data_kwargs = {"meta": meta}
    return data_args, data_kwargs
| 28.679688
| 84
| 0.566194
|
d0c895b700d9298c6544f69260721fb2fce2376e
| 15,620
|
py
|
Python
|
cybergis/jobs.py
|
cybergis/jupyterlib
|
b39cf9c525b52fc9f67a388a751126df00b498f2
|
[
"NCSA"
] | 5
|
2017-11-08T15:32:09.000Z
|
2019-12-20T03:05:34.000Z
|
cybergis/jobs.py
|
cybergis/jupyterlib
|
b39cf9c525b52fc9f67a388a751126df00b498f2
|
[
"NCSA"
] | null | null | null |
cybergis/jobs.py
|
cybergis/jupyterlib
|
b39cf9c525b52fc9f67a388a751126df00b498f2
|
[
"NCSA"
] | 1
|
2019-12-20T02:46:56.000Z
|
2019-12-20T02:46:56.000Z
|
#!/usr/bin/env python
from __future__ import print_function
from ipywidgets import *
from IPython.display import display
from getpass import getpass
import glob
import os
import stat
import paramiko
from string import Template
from os.path import expanduser
from pkg_resources import resource_string
from IPython.core.magic import (register_line_magic, register_cell_magic,register_line_cell_magic)
import hashlib
from itertools import izip,cycle
from IPython.display import IFrame
USERNAME = os.environ['USER']
CONF_DIR='.rg_conf'
CONF_MOD=int('700', 8) # exclusive access
CONF_FILE='%s/%s'%(CONF_DIR, USERNAME)
#ROGER_PRJ='/projects/class/jhub/users'
#JUPYTER_HOME='/mnt/jhub/users'
ROGER_PRJ='/projects/jupyter'
JUPYTER_HOME='/home'
#@register_line_magic
#def roger(line):
# Roger()
#del roger
| 37.368421
| 151
| 0.541613
|
d0c95eb3b0bfb04075898983cf10d20a892318cb
| 5,419
|
py
|
Python
|
compile.py
|
Wizard-collab/wizard_2
|
a2cb23362e178a0205f6dd0b9b4328c329b5b142
|
[
"MIT"
] | 1
|
2021-10-13T15:07:32.000Z
|
2021-10-13T15:07:32.000Z
|
compile.py
|
Wizard-collab/wizard_2
|
a2cb23362e178a0205f6dd0b9b4328c329b5b142
|
[
"MIT"
] | null | null | null |
compile.py
|
Wizard-collab/wizard_2
|
a2cb23362e178a0205f6dd0b9b4328c329b5b142
|
[
"MIT"
] | null | null | null |
import subprocess
import os
import shutil
import time
import yaml
import sys
import logging
logger = logging.getLogger(__name__)
if __name__ == '__main__':
compile()
| 29.291892
| 92
| 0.677985
|
d0cbf9627e932b48a14476699153120bd9f96cba
| 990
|
py
|
Python
|
ibms_project/ibms/migrations/0011_auto_20190814_1110.py
|
parksandwildlife/ibms
|
caea0cb15deed1744ee73a6a44c264650391f71d
|
[
"Apache-2.0"
] | 2
|
2019-09-07T20:39:29.000Z
|
2021-09-16T12:02:16.000Z
|
ibms_project/ibms/migrations/0011_auto_20190814_1110.py
|
ropable/ibms
|
8cb2c24ad0202e961c4cf7e3c79385f5716b8c63
|
[
"Apache-2.0"
] | 11
|
2020-06-18T06:53:01.000Z
|
2022-02-11T01:55:42.000Z
|
ibms_project/ibms/migrations/0011_auto_20190814_1110.py
|
ropable/ibms
|
8cb2c24ad0202e961c4cf7e3c79385f5716b8c63
|
[
"Apache-2.0"
] | 5
|
2016-01-18T04:36:48.000Z
|
2017-09-07T06:38:28.000Z
|
# Generated by Django 2.1.11 on 2019-08-14 03:10
from django.db import migrations
| 28.285714
| 60
| 0.579798
|
d0d20743fdd39b355e497598543bd007290f251f
| 840
|
py
|
Python
|
src/discolight/loaders/annotation/widthheightcsv.py
|
denzel-datature/discolight
|
7c8309d3f883263b2e4cae0b289f17be1d1c07ea
|
[
"MIT"
] | 27
|
2020-07-23T08:09:25.000Z
|
2022-03-01T08:24:43.000Z
|
src/discolight/loaders/annotation/widthheightcsv.py
|
denzel-datature/discolight
|
7c8309d3f883263b2e4cae0b289f17be1d1c07ea
|
[
"MIT"
] | 7
|
2020-08-05T07:26:55.000Z
|
2020-12-31T04:20:40.000Z
|
src/discolight/loaders/annotation/widthheightcsv.py
|
denzel-datature/discolight
|
7c8309d3f883263b2e4cae0b289f17be1d1c07ea
|
[
"MIT"
] | 6
|
2020-07-27T04:30:01.000Z
|
2020-08-13T02:39:25.000Z
|
"""A CSV annotation writer that reads the bbox in x, y, w, h format."""
from discolight.annotations import BoundingBox
from .types import CSVRow, CSVAnnotationLoader
| 28.965517
| 78
| 0.633333
|
d0d3e5c8138c7d0eda8194549ae4292083be2818
| 1,286
|
py
|
Python
|
test/test_day09.py
|
frangiz/AdventOfCode2017
|
5fc171d4a83bfb9a408b4647ded4cb3efd12247e
|
[
"MIT"
] | null | null | null |
test/test_day09.py
|
frangiz/AdventOfCode2017
|
5fc171d4a83bfb9a408b4647ded4cb3efd12247e
|
[
"MIT"
] | null | null | null |
test/test_day09.py
|
frangiz/AdventOfCode2017
|
5fc171d4a83bfb9a408b4647ded4cb3efd12247e
|
[
"MIT"
] | null | null | null |
from days import day09
from ddt import ddt, data, unpack
import unittest
import util
| 29.906977
| 67
| 0.437014
|
d0d550ba7652a9b60f892093b2e1479dc926d08c
| 751
|
py
|
Python
|
venv/lib/python2.7/dist-packages/landscape/lib/fd.py
|
pengwu/scapy_env
|
3db9c5dea2e219048a2387649d6d89be342903d9
|
[
"MIT"
] | null | null | null |
venv/lib/python2.7/dist-packages/landscape/lib/fd.py
|
pengwu/scapy_env
|
3db9c5dea2e219048a2387649d6d89be342903d9
|
[
"MIT"
] | null | null | null |
venv/lib/python2.7/dist-packages/landscape/lib/fd.py
|
pengwu/scapy_env
|
3db9c5dea2e219048a2387649d6d89be342903d9
|
[
"MIT"
] | null | null | null |
"""A utility module which has FD-related functions.
This module mostly exists for L{clean_fds}, so it can be imported without
accidentally getting a reactor or something else that might create a critical
file descriptor.
"""
import os
import resource
def clean_fds():
    """Close all non-stdio file descriptors.

    This should be called at the beginning of a program to avoid inheriting any
    unwanted file descriptors from the invoking process. Unfortunately, this
    is really common in unix!
    """
    rlimit_nofile = resource.getrlimit(resource.RLIMIT_NOFILE)[1]
    if rlimit_nofile == resource.RLIM_INFINITY:
        # An unlimited hard limit is reported as RLIM_INFINITY (-1);
        # feeding it to min() would yield a negative bound and the loop
        # below would silently close nothing.
        rlimit_nofile = 4096
    total_descriptors = min(4096, rlimit_nofile)
    for fd in range(3, total_descriptors):
        try:
            os.close(fd)
        except OSError:
            # fd was not open; nothing to do.
            pass
| 28.884615
| 79
| 0.713715
|
d0d55d407b26fa73a5076bdbaa2919b847abf548
| 6,760
|
py
|
Python
|
jps-people-importer.py
|
UniversalSuperBox/jps-people-importer
|
eb7128122d00879798a88b599d90e53c139a00da
|
[
"MIT"
] | null | null | null |
jps-people-importer.py
|
UniversalSuperBox/jps-people-importer
|
eb7128122d00879798a88b599d90e53c139a00da
|
[
"MIT"
] | null | null | null |
jps-people-importer.py
|
UniversalSuperBox/jps-people-importer
|
eb7128122d00879798a88b599d90e53c139a00da
|
[
"MIT"
] | null | null | null |
"""
This script creates users in a JAMF Pro Server instance from an LDAP query.
"""
# Copyright 2020 Dalton Durst
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
import sys
from collections import namedtuple
from multiprocessing.pool import ThreadPool
from typing import List
from json.decoder import JSONDecodeError
import ldap
import requests
from ldap.controls import SimplePagedResultsControl
from conf import (
JAMF_PASSWORD,
JAMF_URL,
JAMF_USERNAME,
LDAP_BIND_PASSWORD,
LDAP_BIND_URI,
LDAP_BIND_USERNAME,
LDAP_FILTER,
LDAP_INSECURE,
LDAP_SEARCH_DN_LIST,
)
JAMF_AUTH = requests.auth.HTTPBasicAuth(JAMF_USERNAME, JAMF_PASSWORD)
SESSION = requests.Session()
User = namedtuple("User", ["sAMAccountName", "email", "last_name", "first_name"])
def eprint(*args, **kwargs):
    """Behave exactly like ``print``, except output goes to stderr.

    All positional and keyword arguments are forwarded unchanged.
    """
    print(*args, file=sys.stderr, **kwargs)
def results_for_dn(directory: ldap.ldapobject, base_dn: str, filter: str) -> List[User]:
    """Returns a list of User objects found in the directory object for filter

    Results are fetched with RFC 2696 simple paged results (5000 entries
    per page) so directories that cap the entries per response can still
    be read in full.

    :param directory: A ldap.LDAPObject that has already been bound to a
        directory.
    :param base_dn: The base of the directory tree to run the search filter
        against.
    :param filter: The LDAP search filter to run on base_dn using directory.
    """
    req_ctrl = SimplePagedResultsControl(True, size=5000, cookie="")

    known_ldap_resp_ctrls = {
        SimplePagedResultsControl.controlType: SimplePagedResultsControl,
    }

    def _send_search():
        # FIX: previously both search calls used the module-level
        # LDAP_FILTER and silently ignored the `filter` argument; the
        # parameter is now honoured.
        return directory.search_ext(
            base_dn, ldap.SCOPE_SUBTREE, filterstr=filter, serverctrls=[req_ctrl]
        )

    msgid = _send_search()

    results = []
    while True:
        __, result_data, __, serverctrls = directory.result3(
            msgid, resp_ctrl_classes=known_ldap_resp_ctrls
        )

        results.extend(
            User(
                ldap_entry["sAMAccountName"][0].decode(),
                ldap_entry["mail"][0].decode(),
                ldap_entry["sn"][0].decode(),
                ldap_entry["givenName"][0].decode(),
            )
            for __, ldap_entry in result_data
        )

        page_controls = [
            control
            for control in serverctrls
            if control.controlType == SimplePagedResultsControl.controlType
        ]

        if not page_controls:
            eprint("Warning: Server ignores RFC 2696 control.")
            break
        if not page_controls[0].cookie:
            # Empty cookie: the server has no more pages.
            break

        # Copy cookie from response control to request control and
        # request the next page.
        req_ctrl.cookie = page_controls[0].cookie
        msgid = _send_search()

    return results
def create_user_in_jamf(user: User):
    """ Creates a user in the JPS

    :param user: A User object which will be used to create the JPS user.

    This function uses the following module variables:
    * SESSION must be a requests.Session instance
    * JAMF_AUTH must be a requests.auth interface instance
    * JAMF_URL must be the full base URL of a JAMF instance.
    """
    # Directory-sourced names/emails may contain &, < or > — escape them
    # so the generated XML stays well-formed (and cannot inject markup).
    from xml.sax.saxutils import escape

    eprint("Attempting to create", user.sAMAccountName)
    xml = """
    <user>
        <name>{name}</name>
        <full_name>{last_name}, {first_name}</full_name>
        <email>{email}</email>
    </user>
    """.format(
        name=escape(user.sAMAccountName),
        last_name=escape(user.last_name),
        first_name=escape(user.first_name),
        email=escape(user.email),
    ).encode()
    # id/-1 asks the JPS to auto-assign the next free user id.
    r = SESSION.post(
        JAMF_URL + "/JSSResource/users/id/-1",
        data=xml,
        headers={"Content-Type": "application/xml", "Accept": "application/xml"},
        auth=JAMF_AUTH,
    )
    try:
        r.raise_for_status()
    except requests.exceptions.RequestException as e:
        # Log the failure but keep going so other users can be created.
        eprint("Failed to create user with username", user.sAMAccountName)
        eprint(e)
        eprint(r.text)
    else:
        # Success: emit the created username on stdout.
        print(user.sAMAccountName)
if __name__ == "__main__":
main()
| 31.009174
| 88
| 0.657988
|
d0d82bd9b120db172a8b9b2c6622284777e11985
| 442
|
py
|
Python
|
l8.py
|
snowleung/mypychallenge
|
9482e267906a23fc10041f49f7d308c596447f16
|
[
"MIT"
] | null | null | null |
l8.py
|
snowleung/mypychallenge
|
9482e267906a23fc10041f49f7d308c596447f16
|
[
"MIT"
] | null | null | null |
l8.py
|
snowleung/mypychallenge
|
9482e267906a23fc10041f49f7d308c596447f16
|
[
"MIT"
] | null | null | null |
# coding:utf-8
'''
from http://www.pythonchallenge.com/pc/def/integrity.html
'''
un = 'BZh91AY&SYA\xaf\x82\r\x00\x00\x01\x01\x80\x02\xc0\x02\x00 \x00!\x9ah3M\x07<]\xc9\x14\xe1BA\x06\xbe\x084'
pw = 'BZh91AY&SY\x94$|\x0e\x00\x00\x00\x81\x00\x03$ \x00!\x9ah3M\x13<]\xc9\x14\xe1BBP\x91\xf08'
if __name__ == '__main__':
print bz2_un()
print bz2_pw()
| 23.263158
| 110
| 0.665158
|
d0d8be8cd8fd46f56d5540bc555ac35643dd277b
| 1,387
|
py
|
Python
|
app.py
|
arian-nasr/Temporary-SMS
|
cedbe68b3e329362049c86e0974396bc660875da
|
[
"MIT"
] | null | null | null |
app.py
|
arian-nasr/Temporary-SMS
|
cedbe68b3e329362049c86e0974396bc660875da
|
[
"MIT"
] | null | null | null |
app.py
|
arian-nasr/Temporary-SMS
|
cedbe68b3e329362049c86e0974396bc660875da
|
[
"MIT"
] | 1
|
2021-09-10T05:02:48.000Z
|
2021-09-10T05:02:48.000Z
|
from flask import Flask, jsonify, request
from flask_cors import CORS
from twilio.twiml.messaging_response import MessagingResponse, Message
from twilio.rest import Client
import sqlconnector as sql
from datetime import datetime
import os
# configuration
DEBUG = True
# Twilio credentials come from the environment so they never live in
# source control; Client() is constructed at import time.
twilio_sid = os.environ.get('TWILIO_SID')
twilio_secret = os.environ.get('TWILIO_SECRET')
client = Client(twilio_sid, twilio_secret)
# instantiate the app
app = Flask(__name__)
app.config.from_object(__name__)
# enable CORS
# NOTE(review): wildcard origins on every route — presumably intentional
# for development; confirm before production use.
CORS(app, resources={r'/*': {'origins': '*'}})
if __name__ == '__main__':
    # NOTE(review): hard-coded LAN bind address — TODO confirm this is
    # the intended host for deployment.
    app.run(host="192.168.0.21")
| 28.895833
| 79
| 0.716655
|
d0dd4e4b7186f7188547583db738e69bad28912d
| 1,174
|
py
|
Python
|
testLib.py
|
quarker/stream-metrics
|
ae03748b75f840dbff346bedb195f9414243553f
|
[
"Apache-2.0"
] | null | null | null |
testLib.py
|
quarker/stream-metrics
|
ae03748b75f840dbff346bedb195f9414243553f
|
[
"Apache-2.0"
] | 1
|
2018-02-27T20:57:06.000Z
|
2018-02-27T20:57:06.000Z
|
testLib.py
|
dnguyen219/stream-metrics
|
ae03748b75f840dbff346bedb195f9414243553f
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
from scapy.all import *
import dpkt
import argparse
import sys
if __name__ == '__main__':
sys.exit(main())
| 28.634146
| 123
| 0.563884
|
d0df0de6b9dc212d463d40040bc158f2287e5e3f
| 1,101
|
py
|
Python
|
scripts/txtool/txtool/get_logs.py
|
baajur/cita
|
763c7866e6ea59ff96de085b4c72665f4e2f69ba
|
[
"Apache-2.0"
] | 930
|
2017-07-25T08:27:55.000Z
|
2019-11-26T10:07:48.000Z
|
scripts/txtool/txtool/get_logs.py
|
baajur/cita
|
763c7866e6ea59ff96de085b4c72665f4e2f69ba
|
[
"Apache-2.0"
] | 484
|
2017-07-25T14:32:44.000Z
|
2019-11-14T11:16:45.000Z
|
scripts/txtool/txtool/get_logs.py
|
QingYanL/testCITA
|
6d2e82c87831553c8d34749c56c4e5c8b94ece9c
|
[
"Apache-2.0"
] | 184
|
2017-07-26T01:37:36.000Z
|
2019-11-19T07:07:49.000Z
|
#!/usr/bin/env python3
# coding=utf-8
from __future__ import print_function
from jsonrpcclient.http_client import HTTPClient
from url_util import endpoint
import argparse
import simplejson
if __name__ == "__main__":
main()
| 22.02
| 64
| 0.659401
|
d0df17b69424625513414f688731105176ee7001
| 319
|
py
|
Python
|
demo/flask_app/flask_api/model.py
|
joshyjoseph/react-docker-swagger-demo
|
7ba7dce6ff1457fd6bfa2af0873f60c07f918ade
|
[
"MIT"
] | null | null | null |
demo/flask_app/flask_api/model.py
|
joshyjoseph/react-docker-swagger-demo
|
7ba7dce6ff1457fd6bfa2af0873f60c07f918ade
|
[
"MIT"
] | null | null | null |
demo/flask_app/flask_api/model.py
|
joshyjoseph/react-docker-swagger-demo
|
7ba7dce6ff1457fd6bfa2af0873f60c07f918ade
|
[
"MIT"
] | null | null | null |
from flask_sqlalchemy import SQLAlchemy
db = SQLAlchemy()
| 24.538462
| 74
| 0.680251
|
d0e03d5a5825d11b0df50f5373fcf12a4e9bb5fb
| 114
|
py
|
Python
|
rev_powersystems/__init__.py
|
NREL-SIIP/reV-PowerSystems
|
39e2577082743f638426f14c8b01a1576a985558
|
[
"BSD-3-Clause"
] | null | null | null |
rev_powersystems/__init__.py
|
NREL-SIIP/reV-PowerSystems
|
39e2577082743f638426f14c8b01a1576a985558
|
[
"BSD-3-Clause"
] | null | null | null |
rev_powersystems/__init__.py
|
NREL-SIIP/reV-PowerSystems
|
39e2577082743f638426f14c8b01a1576a985558
|
[
"BSD-3-Clause"
] | null | null | null |
__version__ = "0.0.1"
from .revx_output_siip import SIIPTimeSeriesMetadata, concat, max_fiber_size, match_points
| 28.5
| 90
| 0.824561
|
d0e07e8ca8f962f207b1a467ec124c229cd57722
| 2,095
|
py
|
Python
|
w2/w2/t1.py
|
mvgrigoriev/ml-course
|
fc5cf01d0de0eb5771389ea3d978e0bd291fdf2b
|
[
"MIT"
] | null | null | null |
w2/w2/t1.py
|
mvgrigoriev/ml-course
|
fc5cf01d0de0eb5771389ea3d978e0bd291fdf2b
|
[
"MIT"
] | null | null | null |
w2/w2/t1.py
|
mvgrigoriev/ml-course
|
fc5cf01d0de0eb5771389ea3d978e0bd291fdf2b
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
Created on Sun Jan 07 07:52:52 2018
@author: MVGrigoriev
@task: kNN method
"""
import pandas
import numpy as np
from sklearn.neighbors import KNeighborsClassifier # Import class from scikit-learn
from sklearn.model_selection import KFold # Import KFold function
from sklearn.model_selection import cross_val_score # Import metrics for cross validation
from sklearn.preprocessing import scale # Import Scale function
data = pandas.read_csv('wine.data', header=None)  # Import data
target = data[0]  # Class label lives in the first column
features = data.drop(0, axis=1)  # Remaining columns are the features
kf = KFold(n_splits=5, shuffle=True, random_state=42)


def _best_knn_accuracy(features, target, kf, max_k=50):
    """Return (best_k, best_mean_accuracy) over k = 1..max_k.

    For each k, the score is the mean cross-validated accuracy of a
    k-nearest-neighbours classifier under the given KFold splitter.
    (The redundant `fit` before `cross_val_score` was removed:
    `cross_val_score` clones and refits the estimator itself.)
    """
    accuracies = []
    for k in range(1, max_k + 1):
        model = KNeighborsClassifier(n_neighbors=k)
        scores = cross_val_score(model, features, target, cv=kf, scoring='accuracy')
        accuracies.append(np.mean(scores))
    best = max(accuracies)
    return accuracies.index(best) + 1, best


def _write_answer(filename, value):
    """Write a single answer value to filename without a trailing newline."""
    with open(filename, 'w') as f:
        print(value, file=f, end='')


# At what k is the maximum quality obtained without normalization of
# characteristics, and what is that maximum quality (0..1)?
opt_k, opt_acc = _best_knn_accuracy(features, target, kf)
_write_answer('2_1.txt', opt_k)
_write_answer('2_2.txt', round(opt_acc, 2))

# Same questions after scaling each feature to zero mean / unit variance.
features = scale(features)
opt_k, opt_acc = _best_knn_accuracy(features, target, kf)
_write_answer('2_3.txt', opt_k)
_write_answer('2_4.txt', round(opt_acc, 2))
| 36.754386
| 115
| 0.719809
|
d0e15314e67099f053fc8acbea6a1a91c7a8ed52
| 1,946
|
py
|
Python
|
tutorial/tutorial.py
|
isabella232/sosp21-artifact
|
1b4a11c648e456c9ff9d74f16b09f4238d6694a0
|
[
"BSD-3-Clause"
] | 1
|
2021-09-20T07:57:50.000Z
|
2021-09-20T07:57:50.000Z
|
tutorial/tutorial.py
|
digi-project/sosp21-artifact
|
1b4a11c648e456c9ff9d74f16b09f4238d6694a0
|
[
"BSD-3-Clause"
] | 1
|
2022-03-21T11:33:33.000Z
|
2022-03-21T11:33:33.000Z
|
tutorial/tutorial.py
|
isabella232/sosp21-artifact
|
1b4a11c648e456c9ff9d74f16b09f4238d6694a0
|
[
"BSD-3-Clause"
] | 2
|
2021-12-09T12:54:52.000Z
|
2022-03-21T08:43:31.000Z
|
# ipython utils
import os
import sys
import time
import yaml
import datetime
from pathlib import Path
from IPython import get_ipython
from IPython.core.magic import (register_line_magic, register_cell_magic,
register_line_cell_magic)
import warnings; warnings.simplefilter('ignore')
start = time.time()
os.environ.update({
"GROUP": "tutorial",
"VERSION": "v1",
"KOPFLOG": "false",
"DOCKER_TLS_VERIFY": "1",
"DOCKER_HOST": "tcp://127.0.0.1:32770",
"DOCKER_CERT_PATH": str(Path(os.environ["HOME"], ".minikube/certs")),
"MINIKUBE_ACTIVE_DOCKERD": "minikube",
"IMAGEPULL": "Never",
"REPO": "tutorial",
})
workdir = (Path(os.environ["GOPATH"],
"src", "digi.dev",
"tutorial", "workdir"))
os.environ["WORKDIR"] = str(workdir)
| 26.297297
| 73
| 0.613052
|
d0e275de32ffad1ac148c2e85a79a876fec1fd53
| 362
|
py
|
Python
|
examples/rotation.py
|
aallan/picamera2
|
d64fbe669e071402d11c043cf044f52f6b2edc57
|
[
"BSD-2-Clause"
] | null | null | null |
examples/rotation.py
|
aallan/picamera2
|
d64fbe669e071402d11c043cf044f52f6b2edc57
|
[
"BSD-2-Clause"
] | null | null | null |
examples/rotation.py
|
aallan/picamera2
|
d64fbe669e071402d11c043cf044f52f6b2edc57
|
[
"BSD-2-Clause"
] | null | null | null |
#!/usr/bin/python3
# Run the camera with a 180 degree rotation.
from qt_gl_preview import *
from picamera2 import *
import time
picam2 = Picamera2()
preview = QtGlPreview(picam2)
preview_config = picam2.preview_configuration()
preview_config["transform"] = libcamera.Transform(hflip=1, vflip=1)
picam2.configure(preview_config)
picam2.start()
time.sleep(5)
| 20.111111
| 67
| 0.779006
|
d0e40ed7df88adf45e5432f3af67cf4214a7c00a
| 331
|
py
|
Python
|
examples/system-prompt.py
|
davidbrochart/python-prompt-toolkit
|
8498692b31671fee7c5a426300a9df2ee290eae2
|
[
"BSD-3-Clause"
] | 2
|
2020-04-12T01:23:25.000Z
|
2021-05-22T13:46:00.000Z
|
examples/system-prompt.py
|
davidbrochart/python-prompt-toolkit
|
8498692b31671fee7c5a426300a9df2ee290eae2
|
[
"BSD-3-Clause"
] | null | null | null |
examples/system-prompt.py
|
davidbrochart/python-prompt-toolkit
|
8498692b31671fee7c5a426300a9df2ee290eae2
|
[
"BSD-3-Clause"
] | 2
|
2016-12-30T23:57:44.000Z
|
2021-05-22T13:50:21.000Z
|
#!/usr/bin/env python
from __future__ import unicode_literals
from prompt_toolkit import prompt
if __name__ == '__main__':
print('If you press meta-! or esc-! at the following prompt, you can enter system commands.')
answer = prompt('Give me some input: ', enable_system_bindings=True)
print('You said: %s' % answer)
| 33.1
| 97
| 0.725076
|
d0e4e293078cbb35e7bb94fd2a5b26005400333e
| 3,294
|
py
|
Python
|
searcheval/test/test_metrics.py
|
VikasNeha/searcheval
|
90f3be8e57dd70179f707ef73241306cdd2ec915
|
[
"Apache-2.0"
] | 1
|
2018-01-18T18:37:11.000Z
|
2018-01-18T18:37:11.000Z
|
searcheval/test/test_metrics.py
|
VikasNeha/searcheval
|
90f3be8e57dd70179f707ef73241306cdd2ec915
|
[
"Apache-2.0"
] | 1
|
2022-01-11T10:37:11.000Z
|
2022-01-11T17:11:01.000Z
|
searcheval/test/test_metrics.py
|
VikasNeha/searcheval
|
90f3be8e57dd70179f707ef73241306cdd2ec915
|
[
"Apache-2.0"
] | 1
|
2022-01-11T10:46:05.000Z
|
2022-01-11T10:46:05.000Z
|
import unittest
import searcheval.metrics as sm
if __name__ == '__main__':
unittest.main()
| 32.94
| 77
| 0.608682
|
d0e54036779246dea8bdd23ebf8e7a5ba24254b9
| 1,054
|
py
|
Python
|
debpkgr/compat.py
|
sassoftware/python-debpkgr
|
220d57b461c2f323a30fb44b2d1126ca4a0f9ea6
|
[
"Apache-2.0"
] | 7
|
2017-03-09T11:28:42.000Z
|
2019-10-26T02:12:09.000Z
|
debpkgr/compat.py
|
sassoftware/python-debpkgr
|
220d57b461c2f323a30fb44b2d1126ca4a0f9ea6
|
[
"Apache-2.0"
] | 12
|
2017-03-24T07:45:41.000Z
|
2019-12-20T15:44:11.000Z
|
debpkgr/compat.py
|
sassoftware/python-debpkgr
|
220d57b461c2f323a30fb44b2d1126ca4a0f9ea6
|
[
"Apache-2.0"
] | 5
|
2017-03-09T11:28:15.000Z
|
2021-02-18T13:14:34.000Z
|
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# flake8: noqa
from six import (
add_metaclass,
iteritems,
raise_from,
string_types,
text_type,
)
from six.moves import configparser
from six.moves.reprlib import Repr
from six.moves.urllib.parse import parse_qs, urlsplit, urlunsplit
from six.moves.urllib.parse import urlparse, urlencode
from six.moves.urllib.request import urlopen, urlretrieve
from six.moves.urllib.error import HTTPError
try:
maketrans = str.maketrans
except AttributeError:
from string import maketrans
| 30.114286
| 74
| 0.766603
|
d0e6ec9507e696a89752b35b6c0b3c155c6656fe
| 16,732
|
py
|
Python
|
nomic/proposal.py
|
HactarCE/Quobot
|
e13f28990f212b92835dd9c8fcbdc53bc37d5ab8
|
[
"MIT"
] | null | null | null |
nomic/proposal.py
|
HactarCE/Quobot
|
e13f28990f212b92835dd9c8fcbdc53bc37d5ab8
|
[
"MIT"
] | null | null | null |
nomic/proposal.py
|
HactarCE/Quobot
|
e13f28990f212b92835dd9c8fcbdc53bc37d5ab8
|
[
"MIT"
] | null | null | null |
from collections import OrderedDict
from dataclasses import dataclass
from datetime import datetime
from enum import Enum
from typing import Optional, Set
import discord
import functools
from .gameflags import GameFlagsManager
from .playerdict import PlayerDict
from .repoman import GameRepoManager
from constants import colors, emoji, info, strings
import utils
VOTE_ALIASES = {
'+': 'for',
'-': 'against',
'abstain': 'abstain',
'against': 'against',
'del': 'remove',
'delete': 'remove',
'for': 'for',
'remove': 'remove',
'rm': 'remove',
}
VOTE_TYPES = ('for', 'against', 'abstain')
class ProposalManager(GameRepoManager):
    """Manager exposing 1-indexed access to ``self.proposals``."""
    def has_proposal(self, n: int) -> bool:
        """Return True if ``n`` is a valid (1-based) proposal number."""
        # Explicit isinstance check also rejects non-int inputs (e.g. floats).
        return isinstance(n, int) and 1 <= n <= len(self.proposals)
    def get_proposal(self, n: int) -> Optional[Proposal]:
        """Return proposal number ``n`` (1-based), or None if out of range."""
        # Falls through to an implicit ``return None`` for invalid ``n``.
        if self.has_proposal(n):
            return self.proposals[n - 1]
| 35.449153
| 97
| 0.573392
|
d0e74c23345f71b01c04f878f36260962612bba5
| 897
|
py
|
Python
|
vexmpp/features/stream_mgmt.py
|
nicfit/vexmpp
|
e67070d2822da8356345976fb15d365935b550a6
|
[
"MIT"
] | null | null | null |
vexmpp/features/stream_mgmt.py
|
nicfit/vexmpp
|
e67070d2822da8356345976fb15d365935b550a6
|
[
"MIT"
] | 349
|
2017-02-18T22:48:17.000Z
|
2021-12-13T19:50:23.000Z
|
vexmpp/features/stream_mgmt.py
|
nicfit/vexmpp
|
e67070d2822da8356345976fb15d365935b550a6
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
from ..stanzas import Stanza
from ..errors import makeStanzaError
from ..protocols.stream_mgmt import NS_URI
| 32.035714
| 70
| 0.625418
|
d0e7767371bb84c6b1d217086e05c8cb123f2e3b
| 293
|
py
|
Python
|
napari_allencell_segmenter/model/channel.py
|
neuromusic/napari-allencell-segmenter
|
c732408023c828c07ec2a425f4f426174d94946b
|
[
"BSD-3-Clause"
] | 8
|
2021-06-29T09:24:22.000Z
|
2022-03-22T23:43:10.000Z
|
napari_allencell_segmenter/model/channel.py
|
neuromusic/napari-allencell-segmenter
|
c732408023c828c07ec2a425f4f426174d94946b
|
[
"BSD-3-Clause"
] | 97
|
2021-02-18T02:39:31.000Z
|
2021-06-18T21:38:41.000Z
|
napari_allencell_segmenter/model/channel.py
|
neuromusic/napari-allencell-segmenter
|
c732408023c828c07ec2a425f4f426174d94946b
|
[
"BSD-3-Clause"
] | 2
|
2021-09-14T22:07:22.000Z
|
2022-02-07T16:41:02.000Z
|
from dataclasses import dataclass
| 19.533333
| 60
| 0.627986
|
d0e808e3235cc1782c3a0aac8f2ccd3eaf6e8e7d
| 705
|
py
|
Python
|
tests/feature_extraction/pattern/test_pattern.py
|
fidsusj/HateSpeechDetection
|
1306a8a901aed856e51ee8fe16158ff267fb5405
|
[
"BSD-3-Clause"
] | null | null | null |
tests/feature_extraction/pattern/test_pattern.py
|
fidsusj/HateSpeechDetection
|
1306a8a901aed856e51ee8fe16158ff267fb5405
|
[
"BSD-3-Clause"
] | 17
|
2020-11-08T16:55:54.000Z
|
2021-05-28T05:58:17.000Z
|
tests/feature_extraction/pattern/test_pattern.py
|
fidsusj/HateSpeechDetection
|
1306a8a901aed856e51ee8fe16158ff267fb5405
|
[
"BSD-3-Clause"
] | 2
|
2020-12-18T10:42:58.000Z
|
2021-05-24T19:32:57.000Z
|
from unittest import TestCase
import pandas as pd
from feature_extraction.pattern.pattern import Pattern
from preprocessing.corpus import build_corpus
| 27.115385
| 88
| 0.639716
|
d0e812a6800aac72cae877576878f53d8cd3bd64
| 11,525
|
py
|
Python
|
main.py
|
C3ald/Token-API
|
5bb34ac1276b23a6f3c780c8d7011d621f02ab90
|
[
"MIT"
] | 4
|
2021-12-20T22:51:20.000Z
|
2021-12-30T17:55:34.000Z
|
main.py
|
C3ald/Token-API
|
5bb34ac1276b23a6f3c780c8d7011d621f02ab90
|
[
"MIT"
] | 14
|
2021-12-08T18:30:00.000Z
|
2022-01-06T05:27:08.000Z
|
main.py
|
C3ald/Token-API
|
5bb34ac1276b23a6f3c780c8d7011d621f02ab90
|
[
"MIT"
] | null | null | null |
from starlette.responses import Response
from passlib.hash import pbkdf2_sha256
from starlette.websockets import WebSocketDisconnect
from blockchain import Blockchain
# from wallet import Wallet
from fastapi import FastAPI, WebSocket
import uvicorn
import socket
import requests as r
from pydantic import BaseModel
from fastapi.templating import Jinja2Templates
import json
import asyncio
# from Utilities.algorithims import Algs
import time as t
import random
import base64
from sys import getsizeof
# from Utilities.cryptography_testing import Make_Keys
# from Utilities.cryptography_testing import primary_addresses
# from Utilities.cryptography_testing import Check_Wallet_Balance
# from Utilities.cryptography_testing import Ring_CT
# from Utilities.cryptography_testing import Decoy_addresses
from Utilities.cryptography_testing import *
from fastapi_signals import *
ring_ct = Ring_CT()
checkbalance = Check_Wallet_Balance()
create_keys = Make_Keys()
primary_addr = primary_addresses()
decoy_addresses = Decoy_addresses()
#imported templates
#from fastapi.staticfiles import StaticFiles #imported staticfiles
# {
# "node": [
# "http://127.0.0.1:8000", "http://127.0.0.1:8001"
# ]
#}
# OpenAPI tag metadata: one entry per tag so each tag keeps its own
# description in the generated docs.
# FIX: this was previously a single dict literal with repeated
# 'name'/'description' keys, which silently collapsed to the last pair
# ("contracts") — the other five tags lost their descriptions.
tags_metadata = [
    {"name": "information",
     "description": "This will allow you to get info about the blockchain"},
    {"name": "wallet",
     "description": "this will allow you to access your wallet and make wallets"},
    {"name": "transaction",
     "description": "transactions"},
    {"name": "mining",
     "description": "mining"},
    {"name": "nodes",
     "description": "adding nodes and replacing the chain"},
    {"name": "contracts",
     "description": "smart contracts on the blockchain"},
]
# CONSTANTS
SERVER_NAME = 'Token Network'
SERVER_HOST = '0.0.0.0'  # bind on all interfaces
SERVER_PORT = 8000
SERVER_RELOAD = False  # uvicorn auto-reload disabled
DESCRIPTION = "Welcome to The Token Network, a blockchain network with a cryptocurrency called Token, it's like Dogecoin and Bitcoin but faster than Bitcoin and harder to mine than Dogecoin, welcome to the Future of the world."
# NOTE(review): the `from Utilities.algorithims import Algs` import above is
# commented out; `Algs` must therefore come from one of the wildcard imports —
# confirm it resolves at runtime.
algs = Algs()
S = socket.socket(socket.AF_INET, socket.SOCK_STREAM)  # TCP socket; its use is not visible in this excerpt
hostname = socket.gethostname()
IP = socket.gethostbyname(hostname)  # this machine's primary IP address
# wallet = Wallet()
# class Phrase(BaseModel):
#     phrase: str
# NOTE(review): `FastAPI`, `Blockchain` (and `uvicorn` below) are not imported
# by name in this file; presumably re-exported by the wildcard imports — confirm.
app = FastAPI(title=SERVER_NAME, openapi_tags=tags_metadata, description=DESCRIPTION)
templates = Jinja2Templates(directory="templates/")
blockchain = Blockchain()
""" Wallets should be made offline. """
# @app.post('/recover_wallet', tags=['wallet'])
# async def recover_wallet(recover:Recover):
# """ recover wallet with passphrase and publickey """
# is_valid = wallets.recover_wallet_with_passphrase(recover.passphrase)
# if is_valid == True:
# return {'message': 'Wallet recovery is successful!', 'private key': wallets.privatekey, 'public key': wallets.publickey, 'passphrase': recover.passphrase}
# else:
# return 'invalid publickey or passphrase!'
if __name__ == '__main__':
    # hostname = socket.gethostname()
    # IP = socket.gethostbyname(hostname)
    # blockchain.replace_chain()
    # Serve the FastAPI app defined above.
    # NOTE(review): `uvicorn` is never imported by name in this file; it is
    # presumably re-exported by one of the wildcard imports — confirm.
    uvicorn.run('main:app', host=SERVER_HOST, port=SERVER_PORT, reload=SERVER_RELOAD)
    # ran = run
    # while run == ran:
    #     update = blockchain.replace_chain()
    #     t.sleep(60.0)
| 29.551282
| 227
| 0.702907
|
d0e8204ae150a3e8c57ae24fe1a684bdf4ee48d0
| 4,650
|
py
|
Python
|
utils/config.py
|
ebadawy/JointModeling
|
5140e596113a6dabbc503a1fb1a3234efabf0f0b
|
[
"Apache-2.0"
] | null | null | null |
utils/config.py
|
ebadawy/JointModeling
|
5140e596113a6dabbc503a1fb1a3234efabf0f0b
|
[
"Apache-2.0"
] | null | null | null |
utils/config.py
|
ebadawy/JointModeling
|
5140e596113a6dabbc503a1fb1a3234efabf0f0b
|
[
"Apache-2.0"
] | 1
|
2020-04-11T09:40:17.000Z
|
2020-04-11T09:40:17.000Z
|
import json
from bunch import Bunch
import os
def get_config_from_json(json_file):
    """
    Load configuration from a JSON file.

    :param json_file: path to the JSON config file
    :return: tuple of (config namespace, raw config dictionary)
    """
    # Parse the raw JSON into a plain dict.
    with open(json_file, 'r') as fh:
        raw_dict = json.load(fh)
    # Bunch provides attribute-style access; default_values (defined
    # elsewhere in this module) fills in any missing settings.
    cfg = default_values(Bunch(raw_dict))
    return cfg, raw_dict
| 73.809524
| 133
| 0.529892
|
d0e9d30679706cc8bbbaa272614c4af5c8ce41cd
| 7,096
|
py
|
Python
|
app/user/views.py
|
cosmos-sajal/magic-link
|
346e828673f298bae9ec3075db8d5e837e4b7aaf
|
[
"MIT"
] | 2
|
2020-10-19T07:35:59.000Z
|
2020-10-24T17:43:41.000Z
|
app/user/views.py
|
cosmos-sajal/magic-link
|
346e828673f298bae9ec3075db8d5e837e4b7aaf
|
[
"MIT"
] | null | null | null |
app/user/views.py
|
cosmos-sajal/magic-link
|
346e828673f298bae9ec3075db8d5e837e4b7aaf
|
[
"MIT"
] | null | null | null |
import json
from django.views import View
from django.shortcuts import redirect, render
from django.core.exceptions import ObjectDoesNotExist
from django.contrib import messages
from django.http.response import HttpResponseRedirect
from rest_framework.renderers import TemplateHTMLRenderer
from rest_framework import status
from rest_framework.views import APIView
from rest_framework.response import Response
from rest_framework.authtoken.models import Token
from user.services.user_service import UserService, TokenService
from user.services.cookies_service import CookiesService
from user.forms.user_forms import LoginForm, MagicLinkForm, RegisterUserForm
from helpers.cache_adapter import CacheAdapter
from user.serializers import RegisterUserSerializer, LoginUserSerializer, \
GenerateMagicLinkSerializer
from user.services.magic_link_service import MagicLinkService
from worker.send_email import send_email
| 28.612903
| 82
| 0.604284
|
d0ebd202e54ba07a8cd29fa1c18be451fa7b6215
| 560
|
py
|
Python
|
dvadmin-backend/apps/vadmin/system/models/__init__.py
|
yuanlaimantou/vue-django-admin
|
3757caf5d5ca2682ffbb6e017ef03ff9a3715cc9
|
[
"MIT"
] | 193
|
2021-02-25T17:36:47.000Z
|
2022-03-31T09:54:48.000Z
|
dvadmin-backend/apps/vadmin/system/models/__init__.py
|
yuanlaimantou/vue-django-admin
|
3757caf5d5ca2682ffbb6e017ef03ff9a3715cc9
|
[
"MIT"
] | 6
|
2021-04-23T12:35:14.000Z
|
2021-09-16T03:27:28.000Z
|
dvadmin-backend/apps/vadmin/system/models/__init__.py
|
yuanlaimantou/vue-django-admin
|
3757caf5d5ca2682ffbb6e017ef03ff9a3715cc9
|
[
"MIT"
] | 59
|
2021-03-29T09:25:00.000Z
|
2022-03-24T06:53:27.000Z
|
from apps.vadmin.system.models.celery_log import CeleryLog
from apps.vadmin.system.models.config_settings import ConfigSettings
from apps.vadmin.system.models.dict_data import DictData
from apps.vadmin.system.models.dict_details import DictDetails
from apps.vadmin.system.models.logininfor import LoginInfor
from apps.vadmin.system.models.message_push import MessagePush
from apps.vadmin.system.models.message_push import MessagePushUser
from apps.vadmin.system.models.operation_log import OperationLog
from apps.vadmin.system.models.save_file import SaveFile
| 56
| 68
| 0.871429
|
d0efa324455bfc009730dba34af132a2fd676468
| 260
|
py
|
Python
|
app/hunter/urls.py
|
edynox/iis
|
594200506b641cbac249dc6e95d229bea1edeb28
|
[
"MIT"
] | null | null | null |
app/hunter/urls.py
|
edynox/iis
|
594200506b641cbac249dc6e95d229bea1edeb28
|
[
"MIT"
] | null | null | null |
app/hunter/urls.py
|
edynox/iis
|
594200506b641cbac249dc6e95d229bea1edeb28
|
[
"MIT"
] | null | null | null |
from django.conf.urls import url
from .views import profile, hunterList, changePass
# URL routes for the hunter app.
# Note: '^password' and '^list' have no trailing '$', so they are prefix
# matches (e.g. /passwordXYZ also resolves to changePass).
urlpatterns = [
    url(r'^$', profile, name='hunter_profile'),
    url(r'^password', changePass, name='hunter_password'),
    url(r'^list', hunterList, name='hunter_list')
]
| 28.888889
| 58
| 0.703846
|
d0f002a005c22f45ad7c152982173fb33768f83a
| 1,822
|
py
|
Python
|
src/aves/features/sparse.py
|
sergioangulo/aves
|
43a14ec9c82929136a39590b15fe7f92182aae20
|
[
"CC-BY-3.0"
] | 34
|
2020-10-23T08:57:03.000Z
|
2022-03-23T17:07:20.000Z
|
src/aves/features/sparse.py
|
sergioangulo/aves
|
43a14ec9c82929136a39590b15fe7f92182aae20
|
[
"CC-BY-3.0"
] | 3
|
2021-12-02T22:42:25.000Z
|
2021-12-10T02:37:01.000Z
|
src/aves/features/sparse.py
|
sergioangulo/aves
|
43a14ec9c82929136a39590b15fe7f92182aae20
|
[
"CC-BY-3.0"
] | 11
|
2021-03-25T02:40:34.000Z
|
2022-01-03T22:41:29.000Z
|
from scipy.sparse import dok_matrix
import pandas as pd
from cytoolz import itemmap
| 25.305556
| 83
| 0.625137
|
d0f021bab54a09a9592e986f398fbcf5edaf9bb8
| 1,059
|
py
|
Python
|
backent/api/migrations/0010_populate_event_tags.py
|
namafutatsu/backent
|
9db38d0db8d9193fad7cd41aa2e4b55c75dfe01d
|
[
"MIT"
] | null | null | null |
backent/api/migrations/0010_populate_event_tags.py
|
namafutatsu/backent
|
9db38d0db8d9193fad7cd41aa2e4b55c75dfe01d
|
[
"MIT"
] | null | null | null |
backent/api/migrations/0010_populate_event_tags.py
|
namafutatsu/backent
|
9db38d0db8d9193fad7cd41aa2e4b55c75dfe01d
|
[
"MIT"
] | null | null | null |
from django.db import migrations
from backent.api import enums
| 34.16129
| 76
| 0.777148
|
d0f08bf1893b85dd5be23260d025c66fdf373d25
| 938
|
py
|
Python
|
test_ukz/test_ukzlang/test_uk_pipes.py
|
clauderichard/Ultrakazoid
|
619f1afd1fd55afb06e7d27b2bc30eee9929f660
|
[
"MIT"
] | null | null | null |
test_ukz/test_ukzlang/test_uk_pipes.py
|
clauderichard/Ultrakazoid
|
619f1afd1fd55afb06e7d27b2bc30eee9929f660
|
[
"MIT"
] | null | null | null |
test_ukz/test_ukzlang/test_uk_pipes.py
|
clauderichard/Ultrakazoid
|
619f1afd1fd55afb06e7d27b2bc30eee9929f660
|
[
"MIT"
] | null | null | null |
from .test_uk_base import TestUkBase
################################
| 24.051282
| 44
| 0.574627
|
d0f14602b75f864977df1e3824cf1f1c1da55ef2
| 25,329
|
py
|
Python
|
wltp/autograph.py
|
ankostis/wltp
|
c95462cadbcab32d4fc94f8ea8bf9d85a0a3763e
|
[
"Apache-2.0"
] | null | null | null |
wltp/autograph.py
|
ankostis/wltp
|
c95462cadbcab32d4fc94f8ea8bf9d85a0a3763e
|
[
"Apache-2.0"
] | null | null | null |
wltp/autograph.py
|
ankostis/wltp
|
c95462cadbcab32d4fc94f8ea8bf9d85a0a3763e
|
[
"Apache-2.0"
] | 1
|
2015-02-20T11:47:33.000Z
|
2015-02-20T11:47:33.000Z
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2019-2020 European Commission (JRC);
# Licensed under the EUPL (the 'Licence');
# You may not use this work except in compliance with the Licence.
# You may obtain a copy of the Licence at: http://ec.europa.eu/idabc/eupl
"""
Harvest functions & annotate their :term:`dependencies <dependency>` to build :term:`pipeline`\\s.
>>> from wltp.autograph import *
>>> __name__ = "wltp.autograph"
"""
import functools as fnt
import inspect
import logging
import re
import sys
from collections import ChainMap
from inspect import Parameter
from pathlib import Path
from types import ModuleType
from typing import (
Any,
Callable,
Collection,
Iterable,
List,
Mapping,
Pattern,
Set,
Tuple,
Union,
cast,
)
from boltons.iterutils import first
from boltons.setutils import IndexedSet as iset
from graphtik import keyword, optional, sfx, sfxed
from graphtik.base import Operation, func_name
from graphtik.fnop import FnOp, reparse_operation_data
from graphtik.modifier import is_sfx
from .utils import Literal, Token, asdict, aslist, astuple
try:
from re import Pattern as RegexPattern
except ImportError:
# PY3.6
from typing import Pattern as RegexPattern
# Module-level logger, named after this module.
log = logging.getLogger(__name__)
# Directory containing this module; used to decide "is this in my project".
_my_project_dir = Path(__file__).parent
# Function-key matcher: one str/regex pattern, or an iterable of them.
_FnKey = Union[Union[str, Pattern], Iterable[Union[str, Pattern]]]
def camel_2_snake_case(word):
    """
    Convert a CamelCase identifier into snake_case.

    >>> camel_2_snake_case("HTTPResponseCodeXYZ")
    'http_response_code_xyz'

    From https://stackoverflow.com/a/1176023/548792
    """
    # Insert '_' before an uppercase letter that either follows a
    # lowercase/digit, or (not at the start) precedes a lowercase letter.
    boundary = r"((?<=[a-z0-9])[A-Z]|(?!^)[A-Z](?=[a-z]))"
    underscored = re.sub(boundary, r"_\1", word)
    return underscored.lower()
def _is_in_my_project(item) -> bool:
    """UNUSED — True if *item* is defined in a file under this project's directory."""
    try:
        source_path = inspect.getfile(item)
    except TypeError:
        # Builtins (e.g. `sys`) have no source file.
        return False
    try:
        Path(source_path).relative_to(_my_project_dir)
    except ValueError:
        # Raised when the path is outside the project tree.
        return False
    return True
# Sentinel meaning "argument not given" (distinct from a legitimate None).
_unset = Token("unset")  # TODO: replace `_unset` with ...
def autographed(
    fn=_unset,
    *,
    name=None,
    needs=_unset,
    provides=_unset,
    renames=_unset,
    returns_dict=_unset,
    aliases=_unset,
    inp_sideffects=_unset,
    out_sideffects=_unset,
    domain: Union[str, int, Collection] = None,
    **kws,
):
    """
    Decorator adding ``_autograph`` func-attribute with overrides for :class:`Autograph`.

    :param name:
        the name of the operation.

        - If the same `name` has already been defined for the same `domain`,
          it is overwritten; otherwise, a new decoration is appended, so that
          :meth:`.Autograph.yield_wrapped_ops()` will produce more than one operations.
        - if not given, it will be derived from the `fn` on wrap-time.
    :param domain:
        one or more list-ified domains to assign decors into
        (instead of the "default" domain);
        it allows to reuse the same function to build different operation,
        when later wrapped into an operation by :class:`.Autograph`.
    :param renames:
        mappings to rename both any matching the final `needs` & `provides`
    :param inp_sideffects:
        appended into `needs`; if a tuple, makes it a :class:`.sfxed`
    :param out_sideffects:
        appended into `provides`; if a tuple, makes it a :class:`.sfxed`
    :param kws:
        the rest arguments of :class:`graphtik.operation`, such as::

            endured, parallel, marshalled, node_props

        The rest arguments (e.g. `needs`, etc) are coming from :class:`graphtik.operation`.
    """
    # Collect every explicitly-given override (anything not left at the
    # `_unset` sentinel), excluding the bookkeeping locals themselves.
    kws.update(
        {
            k: v
            for k, v in locals().items()
            if v is not _unset and k not in "kws fn name domain".split()
        }
    )
    # NOTE(review): `decorator` is not defined in this excerpt; in the full
    # module it must be a helper that records `kws` under fn._autograph[domain]
    # — confirm against the complete source before editing this function.
    if fn is _unset:
        # Called with arguments: @autographed(...) — return the real decorator.
        return decorator
    # Called bare: @autographed — decorate immediately.
    return decorator(fn)
def get_autograph_decors(
    fn, default=None, domain: Union[str, int, Collection] = None
) -> dict:
    """
    Return the decors stored on *fn* for the first matching domain.

    :param default:
        returned when *fn* is not autographed, or no domain matches
    :param domain:
        a single domain or a collection of them (list-ified by ``astuple``)
    :return:
        the decors dict found under ``fn._autograph[<domain>]`` for the first
        matching domain, else *default*; the layout on *fn* is::

            <fn>():
                _autograph (function-attribute)
                    <domain> (dict)
                        <name> (dict)
                            <decors> (dict)
    """
    for key in astuple(domain, "domain"):
        decors_by_domain = getattr(fn, "_autograph", _unset)
        if decors_by_domain is not _unset and key in decors_by_domain:
            return decors_by_domain[key]
    return default
"""
Example code hidden from Sphinx:
>>> from graphtik import compose
>>> aug = Autograph(['calc_', 'upd_'], {
... 'calc_p_available':{'provides': 'p_avail'},
... 'calc_p_resist': {'provides': 'p_resist'},
... 'calc_inertial_power': {'provides': 'p_inert'},
... })
>>> ops = [aug.wrap_funcs(funcs.items()]
>>> netop = compose('wltp', *(op for op in ops if op.provides))
"""
| 34.182186
| 98
| 0.569189
|
d0f233170bb0ccf428c1b03e76710d47e94cea40
| 2,637
|
py
|
Python
|
gen_embeddings.py
|
dominiccarrano/backdoor-nn-geometry
|
d1fa0754f1d57a9b303e2eb71edf0787a86529c8
|
[
"MIT"
] | 1
|
2021-05-28T14:57:57.000Z
|
2021-05-28T14:57:57.000Z
|
gen_embeddings.py
|
dominiccarrano/backdoor-nn-geometry
|
d1fa0754f1d57a9b303e2eb71edf0787a86529c8
|
[
"MIT"
] | null | null | null |
gen_embeddings.py
|
dominiccarrano/backdoor-nn-geometry
|
d1fa0754f1d57a9b303e2eb71edf0787a86529c8
|
[
"MIT"
] | null | null | null |
import pandas as pd
import os
import torch
import numpy as np
import argparse
from trojai_utils import *
# ---- Command-line arguments ------------------------------------------------
parser = argparse.ArgumentParser(description="Generate embeddings")
parser.add_argument('--embedding-type', type=str,
                    help='Model architecture (one of "BERT", "DistilBERT", "GPT-2")')
parser.add_argument('--n', type=int, default=1000,
                    help='Number of embeddings of each sentiment to generate')
parser.add_argument('--batch-size', type=int, default=50,
                    help='Size of batches to feed into the language model for embedding generation')
args = parser.parse_args()
# ---- Load in the data (edit these placeholder paths before running) --------
base_huggingface_path = "your path with the huggingface transformer files"
base_data_path = "your file path with the reviews datasets"
sentiment_data = pd.read_csv(os.path.join(base_data_path, "train_datasets.csv"))
# Split by sentiment and draw `n` random rows of each class.
# NOTE(review): assumes the CSV has boolean `sentiment` and text `reviewText`
# columns — confirm against the dataset.
pos_data = sentiment_data[sentiment_data.sentiment==True].sample(args.n)
neg_data = sentiment_data[sentiment_data.sentiment==False].sample(args.n)
# Review texts as plain strings; labels: 1.0 = positive, 0.0 = negative.
pos_reviews = list(np.asarray(pos_data.reviewText, dtype=str))
pos_labels = torch.ones(args.n)
neg_reviews = list(np.asarray(neg_data.reviewText, dtype=str))
neg_labels = torch.zeros(args.n)
# ---- Make embeddings -------------------------------------------------------
# BERT-style models carry the summary token first ([CLS]); GPT-2 does not.
# Exact pooling is decided inside batch_embeddings (trojai_utils, not visible here).
cls_first = (args.embedding_type == "DistilBERT") or (args.embedding_type == "BERT")
tokenizer, embedding = get_LM(args.embedding_type, base_huggingface_path)
pos_embeddings = batch_embeddings(pos_reviews, args.n, args.batch_size, tokenizer, embedding, cls_first)
neg_embeddings = batch_embeddings(neg_reviews, args.n, args.batch_size, tokenizer, embedding, cls_first)
# ---- Save results, one file per tensor, under the chosen architecture ------
base_embedding_path = "your path to save embeddings to"
torch.save(pos_embeddings, os.path.join(base_embedding_path, args.embedding_type, "pos_embeddings{}.pt".format(args.n)))
torch.save(neg_embeddings, os.path.join(base_embedding_path, args.embedding_type, "neg_embeddings{}.pt".format(args.n)))
torch.save(pos_labels, os.path.join(base_embedding_path, args.embedding_type, "pos_labels{}.pt".format(args.n)))
torch.save(neg_labels, os.path.join(base_embedding_path, args.embedding_type, "neg_labels{}.pt".format(args.n)))
| 49.754717
| 120
| 0.759954
|
d0f527a740c29092c88c485c40c531a07e3a243b
| 1,584
|
py
|
Python
|
example/image-classification/test_score.py
|
Abusnina/mxnet
|
7f8d94a24bf64fe0f24712a7952a09725c2df9bd
|
[
"Apache-2.0"
] | 399
|
2017-05-30T05:12:48.000Z
|
2022-01-29T05:53:08.000Z
|
smd_hpi/examples/binary-imagenet1k/test_score.py
|
yanghaojin/BMXNet
|
102f8d0ed59529bbd162c37bf07ae58ad6c4caa1
|
[
"Apache-2.0"
] | 58
|
2017-05-30T23:25:32.000Z
|
2019-11-18T09:30:54.000Z
|
smd_hpi/examples/binary-imagenet1k/test_score.py
|
yanghaojin/BMXNet
|
102f8d0ed59529bbd162c37bf07ae58ad6c4caa1
|
[
"Apache-2.0"
] | 107
|
2017-05-30T05:53:22.000Z
|
2021-06-24T02:43:31.000Z
|
"""
test pretrained models
"""
from __future__ import print_function
import mxnet as mx
from common import find_mxnet, modelzoo
from common.util import download_file, get_gpus
from score import score
if __name__ == '__main__':
    gpus = get_gpus()
    assert len(gpus) > 0  # scoring requires at least one GPU
    batch_size = 16 * len(gpus)  # scale total batch with GPU count
    gpus = ','.join([str(i) for i in gpus])  # comma-separated device list
    # NOTE(review): download_data and the test_imagenet1k_* helpers are not
    # defined or imported in the lines visible here — confirm they exist in
    # the full module before running.
    download_data()
    test_imagenet1k_resnet(gpus=gpus, batch_size=batch_size)
    test_imagenet1k_inception_bn(gpus=gpus, batch_size=batch_size)
| 33.702128
| 84
| 0.599747
|
d0f7159e2aa65ab951c742e07f51245c54e91b4b
| 293
|
py
|
Python
|
rest_framework_siren/renderers.py
|
clarkperkins/django-rest-framework-siren
|
a0bc3d9990d88cfda7204fd0cf78ef08c38084b5
|
[
"Apache-2.0"
] | 2
|
2017-01-29T03:08:12.000Z
|
2019-03-28T20:12:01.000Z
|
rest_framework_siren/renderers.py
|
clarkperkins/django-rest-framework-siren
|
a0bc3d9990d88cfda7204fd0cf78ef08c38084b5
|
[
"Apache-2.0"
] | null | null | null |
rest_framework_siren/renderers.py
|
clarkperkins/django-rest-framework-siren
|
a0bc3d9990d88cfda7204fd0cf78ef08c38084b5
|
[
"Apache-2.0"
] | null | null | null |
"""
Provides Siren rendering support.
"""
from __future__ import unicode_literals
from rest_framework.renderers import JSONRenderer
| 18.3125
| 49
| 0.726962
|
d0f7870601183663c27b2407e2c8458ad2ea5542
| 4,050
|
py
|
Python
|
src/ekpmeasure/experiments/ferroelectric/_tester/_wfs.py
|
cjfinnell/ekpmeasure
|
e6611c053cad28e06f4f8a94764ebe3805cddb15
|
[
"MIT"
] | null | null | null |
src/ekpmeasure/experiments/ferroelectric/_tester/_wfs.py
|
cjfinnell/ekpmeasure
|
e6611c053cad28e06f4f8a94764ebe3805cddb15
|
[
"MIT"
] | null | null | null |
src/ekpmeasure/experiments/ferroelectric/_tester/_wfs.py
|
cjfinnell/ekpmeasure
|
e6611c053cad28e06f4f8a94764ebe3805cddb15
|
[
"MIT"
] | null | null | null |
import numpy as np
# Public waveform generators exported by this module; each returns a numpy
# array of 12-bit DAC codes centred on 2047.
__all__ = (
    "standard_bipolar_sine",
    "double_bipolar_sine",
    "standard_bipolar",
    "double_bipolar",
    "semicircle",
    "double_semicircle",
    "gaussian",
    "double_gaussian",
)
def semicircle(a, T):
    """Return a bipolar semicircle wave with amplitude ``a`` (V) and period ``T`` (ms).

    The first half-period is a positive semicircle, the second half a negative
    one, scaled to 12-bit DAC codes centred on 2047 (10 V maps to code 2047).

    args:
        a (float): Amplitude in Volts
        T (float): Period in ms (must be >= 0.01)
    """
    if T < 0.01:
        raise ValueError("limit of Ferroelectric Tester")
    n_points = int(T * 1000)  # one sample per microsecond
    amp = int(2047 * a / 10)  # volts -> DAC-code amplitude
    half, quarter = n_points / 2, n_points / 4
    # Unit-amplitude shape: +semicircle over the first half, -semicircle after.
    shape = [
        np.sqrt(1 - ((i - quarter) / quarter) ** 2)
        if i <= half
        else -1 * np.sqrt(1 - ((i - half - quarter) / quarter) ** 2)
        for i in range(n_points)
    ]
    # Scale and offset into DAC codes.
    return np.array([amp * s + 2047 for s in shape])


def double_semicircle(a, T):
    """Return two consecutive semicircle cycles spanning total period ``T`` (ms).

    args:
        a (float): Amplitude in Volts
        T (float): Total period in ms (each half is one full semicircle cycle)
    """
    single = semicircle(a, T / 2)
    return np.concatenate((single, single))
def standard_bipolar(a, T):
    """Return a standard bipolar triangle wave, amplitude ``a`` (V), period ``T`` (ms).

    Unit shape rises 0 -> +1 over the first quarter, falls to -1 through the
    middle half, then climbs back toward 0; scaled to DAC codes centred on 2047.

    args:
        a (float): Amplitude in Volts
        T (float): Period in ms (must be >= 0.01)
    """
    if T < 0.01:
        raise ValueError("limit of Ferroelectric Tester")
    n_points = int(T * 1000)  # one sample per microsecond
    amp = int(2047 * a / 10)  # volts -> DAC-code amplitude
    step = 4 / n_points  # slope magnitude per sample at unit amplitude
    ramp = []
    for i in range(n_points):
        if i <= n_points / 4:
            value = i * step                # rising edge from zero
        elif i <= 3 * n_points / 4:
            value = ramp[-1] - step         # falling edge through zero to -1
        else:
            value = ramp[-1] + step         # final rising edge back toward zero
        ramp.append(value)
    return np.array([amp * v + 2047 for v in ramp])


def double_bipolar(a, T):
    """Return two consecutive triangle cycles spanning total period ``T`` (ms).

    args:
        a (float): Amplitude in Volts
        T (float): Total period in ms
    """
    single = standard_bipolar(a, T / 2)
    return np.concatenate((single, single))
def standard_bipolar_sine(a, T):
    """Return one period of a bipolar sine wave, amplitude ``a`` (V), period ``T`` (ms).

    args:
        a (float): Amplitude in Volts
        T (float): Period in ms (must be >= 0.01)
    """
    if T < 0.01:
        raise ValueError("limit of Ferroelectric Tester")
    n_points = int(T * 1000)  # one sample per microsecond
    amp = int(2047 * a / 10)  # volts -> DAC-code amplitude
    samples = []
    for i in range(n_points):
        # One full sine period mapped onto n_points samples, centred on 2047.
        samples.append(amp * np.sin(2 * np.pi * i / n_points) + 2047)
    return np.array(samples)


def double_bipolar_sine(a, T):
    """Return two sine periods (doubled frequency) within duration ``T`` (ms).

    args:
        a (float): Amplitude in Volts
        T (float): Duration in ms (must be >= 0.01)
    """
    if T < 0.01:
        raise ValueError("limit of Ferroelectric Tester")
    n_points = int(T * 1000)
    amp = int(2047 * a / 10)
    samples = []
    for i in range(n_points):
        # 4*pi instead of 2*pi: two full periods over the same duration.
        samples.append(amp * np.sin(4 * np.pi * i / n_points) + 2047)
    return np.array(samples)
| 23.546512
| 101
| 0.536049
|
d0f98a3a2c3fd048d824ad74c18868bd24ec85c0
| 1,686
|
py
|
Python
|
inference.py
|
KirtoXX/Object_track
|
92b7d3308ab12d9211b04d18f825bf9a488c46a2
|
[
"Apache-2.0"
] | null | null | null |
inference.py
|
KirtoXX/Object_track
|
92b7d3308ab12d9211b04d18f825bf9a488c46a2
|
[
"Apache-2.0"
] | null | null | null |
inference.py
|
KirtoXX/Object_track
|
92b7d3308ab12d9211b04d18f825bf9a488c46a2
|
[
"Apache-2.0"
] | null | null | null |
from keras import layers
import tensorflow as tf
from Resnet import ResNet50
import keras
from keras.models import Input,Model
| 31.811321
| 72
| 0.645907
|
d0f9bc7af6fe73617d028c362d0710385e92b83d
| 1,807
|
py
|
Python
|
src/visualizations/Visualize.py
|
chpatola/election_nlp
|
6463edb2eacca09ff828029c69d11be7985ceeb0
|
[
"MIT"
] | 1
|
2020-04-11T12:00:09.000Z
|
2020-04-11T12:00:09.000Z
|
src/visualizations/Visualize.py
|
chpatola/election_nlp
|
6463edb2eacca09ff828029c69d11be7985ceeb0
|
[
"MIT"
] | null | null | null |
src/visualizations/Visualize.py
|
chpatola/election_nlp
|
6463edb2eacca09ff828029c69d11be7985ceeb0
|
[
"MIT"
] | null | null | null |
"""Visualizations for NLP analysis"""
import pandas as pd
import numpy as np
from sklearn.metrics import classification_report
from sklearn.metrics import confusion_matrix
import matplotlib.pyplot as plt
import seaborn as sns
| 35.431373
| 78
| 0.598229
|
d0f9d9924cd74ed348272e7fd7ebf2c3d8c9e835
| 2,844
|
py
|
Python
|
sqlpuzzle/_queries/selectoptions.py
|
Dundee/python-sqlpuzzle
|
260524922a0645c9bf94a9779195f93ef2c78cba
|
[
"MIT"
] | 8
|
2015-03-19T11:25:32.000Z
|
2020-09-02T11:30:10.000Z
|
sqlpuzzle/_queries/selectoptions.py
|
Dundee/python-sqlpuzzle
|
260524922a0645c9bf94a9779195f93ef2c78cba
|
[
"MIT"
] | 7
|
2015-03-23T14:34:28.000Z
|
2022-02-21T12:36:01.000Z
|
sqlpuzzle/_queries/selectoptions.py
|
Dundee/python-sqlpuzzle
|
260524922a0645c9bf94a9779195f93ef2c78cba
|
[
"MIT"
] | 4
|
2018-11-28T21:59:27.000Z
|
2020-01-05T01:50:08.000Z
|
from sqlpuzzle._common import Object
from sqlpuzzle._queries.options import Options
# Nothing exported via `from ... import *`; module contents are internal.
__all__ = ()
| 27.085714
| 71
| 0.54782
|
d0fa3582daacb7e7b72dfa9ecf2ba23f58d16510
| 1,199
|
py
|
Python
|
Code/RowVariable.py
|
akankshadiwedy/t2wml
|
02f60611eec19d10a92fd2cb06f07339cd2cb269
|
[
"MIT"
] | null | null | null |
Code/RowVariable.py
|
akankshadiwedy/t2wml
|
02f60611eec19d10a92fd2cb06f07339cd2cb269
|
[
"MIT"
] | null | null | null |
Code/RowVariable.py
|
akankshadiwedy/t2wml
|
02f60611eec19d10a92fd2cb06f07339cd2cb269
|
[
"MIT"
] | null | null | null |
from Code.utility_functions import get_excel_row_index
| 29.243902
| 98
| 0.574646
|
d0fc29ac4209ca758dbc0af3c328c5e20828a2e9
| 9,189
|
py
|
Python
|
cvprac_abstraction/cvpChangeControl.py
|
titom73/arista-cvp-scripts
|
64f7ffa28d2483b3dd357e9b6c671725a51661b4
|
[
"BSD-3-Clause"
] | 2
|
2019-08-20T07:35:08.000Z
|
2019-10-01T00:52:14.000Z
|
cvprac_abstraction/cvpChangeControl.py
|
inetsix/arista-cvp-scripts
|
64f7ffa28d2483b3dd357e9b6c671725a51661b4
|
[
"BSD-3-Clause"
] | 2
|
2019-05-07T14:36:38.000Z
|
2019-07-26T05:56:51.000Z
|
cvprac_abstraction/cvpChangeControl.py
|
titom73/arista-cvp-scripts
|
64f7ffa28d2483b3dd357e9b6c671725a51661b4
|
[
"BSD-3-Clause"
] | 1
|
2021-05-08T20:15:36.000Z
|
2021-05-08T20:15:36.000Z
|
import logging
from datetime import datetime
from datetime import timedelta
from cvprac.cvp_client_errors import CvpApiError
| 34.939163
| 114
| 0.575144
|
d0fd59da3a0fd9fe3acc37b2d63f3055243a7e1f
| 1,289
|
py
|
Python
|
app/controllers/stores/update.py
|
Brunoro811/api_dangels
|
21c064eaa4f5009412dddc9676044d6cc08a5b65
|
[
"MIT"
] | null | null | null |
app/controllers/stores/update.py
|
Brunoro811/api_dangels
|
21c064eaa4f5009412dddc9676044d6cc08a5b65
|
[
"MIT"
] | null | null | null |
app/controllers/stores/update.py
|
Brunoro811/api_dangels
|
21c064eaa4f5009412dddc9676044d6cc08a5b65
|
[
"MIT"
] | null | null | null |
from flask import current_app, request
from http import HTTPStatus
from sqlalchemy.orm.exc import NoResultFound
from sqlalchemy.orm import Session
from app.models.stores.store_model import StoreModel
from app.decorators import verify_payload, validator
| 25.78
| 68
| 0.59969
|
190383e67ecd4d49f6a52f77fa42e3e6a18c204f
| 4,197
|
py
|
Python
|
misc/openstack-dev.py
|
tnoff/OpenDerp
|
44f1e5c2027a2949b785941044a8503a34423228
|
[
"BSD-2-Clause-FreeBSD"
] | null | null | null |
misc/openstack-dev.py
|
tnoff/OpenDerp
|
44f1e5c2027a2949b785941044a8503a34423228
|
[
"BSD-2-Clause-FreeBSD"
] | null | null | null |
misc/openstack-dev.py
|
tnoff/OpenDerp
|
44f1e5c2027a2949b785941044a8503a34423228
|
[
"BSD-2-Clause-FreeBSD"
] | null | null | null |
#!/usr/bin/env python
import argparse
import boto
from boto.s3 import connection as s3_connection
from cinderclient.v1 import client as cinder_v1
import code
from novaclient.v1_1 import client as nova_v1
from novaclient.shell import OpenStackComputeShell as open_shell
from glanceclient import Client as glance_client
from keystoneclient.v2_0 import client as key_v2
from neutronclient.v2_0 import client as neutron_v2
import os
import swiftclient
import sys
from urlparse import urlparse
if __name__ == '__main__':
    # NOTE(review): main() is not defined or imported in the lines visible
    # here — confirm it exists in the full module.
    main()
| 41.97
| 84
| 0.571122
|
190468c6a6a1d847c7290e48226df8d86d0b3049
| 330
|
py
|
Python
|
src/area51/nowd/decorator.py
|
thatcr/knowed
|
025b66e4b660ed4339d3156ebca19065bb65b630
|
[
"MIT"
] | null | null | null |
src/area51/nowd/decorator.py
|
thatcr/knowed
|
025b66e4b660ed4339d3156ebca19065bb65b630
|
[
"MIT"
] | null | null | null |
src/area51/nowd/decorator.py
|
thatcr/knowed
|
025b66e4b660ed4339d3156ebca19065bb65b630
|
[
"MIT"
] | null | null | null |
import inspect
from .descriptors import Descriptor
from .arguments import ArgsDescriptor
| 27.5
| 74
| 0.718182
|
1904779111ad03b3fcf85f5ea88241c74cfe55ac
| 200
|
py
|
Python
|
tushare/bond/bonds.py
|
li-yong/tushare
|
26da8129fb770e26128b9c2cebc7ef72c9491243
|
[
"BSD-3-Clause"
] | 12,490
|
2015-01-11T09:49:07.000Z
|
2022-03-31T15:03:16.000Z
|
tushare/bond/bonds.py
|
li-yong/tushare
|
26da8129fb770e26128b9c2cebc7ef72c9491243
|
[
"BSD-3-Clause"
] | 1,532
|
2015-02-05T11:20:59.000Z
|
2022-03-29T13:30:26.000Z
|
tushare/bond/bonds.py
|
li-yong/tushare
|
26da8129fb770e26128b9c2cebc7ef72c9491243
|
[
"BSD-3-Clause"
] | 4,867
|
2015-01-07T08:18:09.000Z
|
2022-03-31T07:03:53.000Z
|
# -*- coding:utf-8 -*-
"""
Created on 2017/10/01
@author: Jimmy Liu
@group : waditu
@contact: jimmysoa@sina.cn
"""
if __name__ == '__main__':
    # No CLI behavior; this module currently defines nothing to run.
    pass
| 11.764706
| 26
| 0.635
|