Dataset columns (name, type, observed min-max size from the viewer):

- commit: string, 40 characters (git SHA-1)
- subject: string, 1-1.49k characters (commit message subject)
- old_file: string, 4-311 characters (file path before the change)
- new_file: string, 4-311 characters (file path after the change)
- new_contents: string, 1-29.8k characters (file contents after the change)
- old_contents: string, 0-9.9k characters (file contents before the change)
- lang: string, 3 classes (detected language)
- proba: float64, range 0-1
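
A minimal sketch of reading such a dataset with the `datasets` library, assuming it is hosted on the Hugging Face Hub; the repository id `example/commit-diffs` below is a placeholder, not the dataset's real name:

```python
# A minimal loading sketch. The repository id is a placeholder assumption,
# not the dataset's actual name on the Hub.
from datasets import load_dataset

ds = load_dataset("example/commit-diffs", split="train")

# Each record pairs a commit subject with the file contents before
# ("old_contents") and after ("new_contents") the change.
for row in ds.filter(lambda r: r["lang"] == "Python").select(range(3)):
    print(row["commit"][:8], row["subject"], row["new_file"])
```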
---
commit: 912b1e33eff873a07ca089c69fef51bf05e79051
subject: Add User and Group to admin custom site
old_file: ideas/admin.py
new_file: ideas/admin.py
new_contents:

```python
from .models import Idea, Outstanding
from django.contrib import admin
from django.contrib.admin import AdminSite
from django.contrib.auth.models import User, Group

class MyAdminSite(AdminSite):
    site_header = "Hackatrix Backend"
    site_title = "Hackatrix Backend"
    index_title = "Administrator"

class IdeaAdmin(admin.ModelAdmin):
    list_display = ('name', 'votes', 'description', 'register', 'is_active')

    def save_model(self, request, obj, form, change):
        if getattr(obj, 'register', None) is None:
            obj.register = request.user
        obj.save()

class OutstandingAdmin(admin.ModelAdmin):
    list_display = ('name', 'email', 'comment', 'register')

    def save_model(self, request, obj, form, change):
        if getattr(obj, 'register', None) is None:
            obj.register = request.user
        obj.save()

admin_site = MyAdminSite(name='myadmin')
admin_site.register(User)
admin_site.register(Group)
admin_site.register(Idea, IdeaAdmin)
admin_site.register(Outstanding, OutstandingAdmin)
```

old_contents: identical to new_contents except that it lacks the `from django.contrib.auth.models import User, Group` import and the `admin_site.register(User)` and `admin_site.register(Group)` calls.
lang: Python
proba: 0
---
commit: c0fc14f3f9f33e20650113803f8a0a81dd49f3ec
subject: generate result.json
old_file: example_config.py
new_file: example_config.py
new_contents:

```python
import os
import logging
from apscheduler.triggers.cron import CronTrigger

if __name__ == "__main__":
    raise SystemExit("Not meant to be run directly!")

def _rsync_cmd(dest):
    cmd = ("rsync --delete-delay --recursive --times --stats "
           "'{output}/' '{dest}'")
    return cmd.format(dest=dest, output="{output}")

# configure the logger
logging.basicConfig(level=logging.DEBUG, format='%(asctime)s %(message)s')

# make sure git does not block giving pw prompts, git 2.3+ only
os.environ["GIT_TERMINAL_PROMPT"] = "0"

# needs to be a byte like object
GITHUB_SECRET = b"changetosomethingrandomlong"

RUNNERS = {
    # unique name of the runner, avoid spaces and other obscure characters
    "website_master": {
        # directory where building takes place, will be created if not there
        # multiple runners may point to the same one
        "working_directory": "/tmp/test",

        # upstream url of the repository which contains the website
        # use https://git::@github.com... to avoid pw prompts and instead fail
        # (e.g. if github gives errornously 401 temporarily, git would block)
        # os.environ["GIT_TERMINAL_PROMPT"] = "0" does the same but git 2.3+only
        "clone_url": "https://git::@github.com/IEEE-SB-Passau/pelican-ieee-passau.git",

        # branch which will be built
        "git_branch": "master",

        # command which installs the generated directory tree to it's final
        # destination (the wwwroot) e.g. rsync. {output} will be replaced by
        # the path to the generator output
        "final_install_command": _rsync_cmd("/tmp/testroot"),

        # command which builds the website
        # important: specify {output} as output path of the generator
        "build_command": ('tox -e pelican --result-json "{output}/result.json" '
                          '--recreate -- -d --output "{output}"'),

        # will be added to env when running build_command
        "build_env": {"PELICAN_SITEURL": "//apu:800"}
    }
}

# define crojobs as sequence of (runner, trigger) pairs, for cron triggers see
# http://apscheduler.readthedocs.io/en/latest/modules/triggers/cron.html
SCHEDULED_BUILD_JOBS = [
    ("website_master", CronTrigger(minute="*/5"))
]
```

old_contents: identical except that "build_command" was the plain string `'tox -e pelican --recreate -- -d --output "{output}"'`, without the `--result-json "{output}/result.json"` option.
lang: Python
proba: 0.998579
---
commit: 1db8627731a2e23693cd9fe38a455956b783c0cd
subject: Update NoticiasTecnologicas.py
old_file: 03-RSSTelegram/NoticiasTecnologicas.py
new_file: 03-RSSTelegram/NoticiasTecnologicas.py
new_contents:

```python
#!/usr/bin/env python3
# -*- coding: iso-8859-1 -*-
"""
    Ejemplo: Leer Noticias RSS en Telegram (II)
    Libreria: pyTelegramBotAPI 1.4.2 [ok]
    Libreria: pyTelegramBotAPI 2.0 [ok]
    Python:   3.5.1
"""
import telebot
import sys
import feedparser

url = "http://blog.bricogeek.com/noticias/arduino/rss/"
rss = feedparser.parse(url)
servicio="Servicio del Bot de Telegram"
inicio_servicio="Iniciando..."+servicio
print (inicio_servicio),

TOKEN = 'AQUÍ EL NUMERO DE VUESTRO TOKEN' #Ponemos nuestro TOKEN generado con el @BotFather
telegram = telebot.TeleBot(TOKEN) # Combinamos la declaración del Token con la función de la API

def listener(messages):
    for m in messages:
        chatID = m.chat.id
        if m.content_type == 'text':
            for noticia in rss.entries:
                evento=noticia.title+"\n"+noticia.link
                telegram.send_message(chatID, evento)

try:
    telegram.get_me() # Comprobar el API. Devuelve un objeto
    print ("-> OK")
    print ("Token: "+TOKEN)
    print ("- Presionar Ctrl+C para parar el servicio...")
    telegram.set_update_listener(listener)
except Exception as e:
    print ("-> ERROR")
    print (e)
    sys.exit(0)

telegram.polling(none_stop=False)
# Interval setup. Sleep 3 secs between request new message.
telegram.polling(interval=3)
telegram.polling()

try:
    while True:
        pass
except KeyboardInterrupt:
    print ("Programa Finalizado...")
    sys.exit(0)
```

old_contents: identical to new_contents except that the docstring title reads "Leer Noticias RSS en Telegram (III)" instead of "(II)".
lang: Python
proba: 0
---
commit: f68a10fec5d4dbc743c5d84f8b26d122e81b26e4
subject: Use standard urlencode() for encoding URLs
old_file: derpibooru/request.py
new_file: derpibooru/request.py
new_contents:

```python
# Copyright (c) 2014, Joshua Stone
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
#   list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
#   this list of conditions and the following disclaimer in the documentation
#   and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

from requests import get, codes
from sys import version_info
from .image import Image

if version_info < (3, 0):
    from urllib import urlencode
else:
    from urllib.parse import urlencode

def url(parameters):
    p = {}
    for key, value in parameters.items():
        if key == "key":
            if value:
                p["key"] = value
        elif key == "q":
            p["q"] = ",".join(value) if value else "*"
        else:
            p[key] = value
    url = "https://derpiboo.ru/search.json?{}".format(urlencode(p))
    return url

def request(parameters):
    p = parameters
    p.update({"page": 1, "perpage": 50})
    request = get(url(p))
    while request.status_code == codes.ok:
        for image in request.json()["search"]:
            yield Image(image)
        parameters["page"] += 1
        request = get(url(p))
    yield None
```

old_contents: the same license header, imports, and request() function, but it imported quote_plus instead of urlencode and built the query string by hand:

```python
if version_info < (3, 0):
    from urllib import quote_plus
else:
    from urllib.parse import quote_plus

def join_tags(tags):
    q = quote_plus(",".join(tags))
    return q

def join_parameters(parameters):
    p = ["{}={}".format(k, v) for k, v in parameters.items()]
    return p

def url(parameters):
    url, p = "https://derpiboo.ru/search.json?", {}
    for key, value in parameters.items():
        if key == "key":
            if value:
                p["key"] = value
        elif key == "q":
            p["q"] = join_tags(value) if value else "*"
        else:
            p[key] = value
    url += "&".join(join_parameters(p))
    return url
```

lang: Python
proba: 0.000006
---
commit: 8188008cf1bd41c1cbe0452ff635dd0319dfecd9
subject: Add trailing slash to url
old_file: derrida/books/urls.py
new_file: derrida/books/urls.py
new_contents:

```python
from django.conf.urls import url
from django.contrib.admin.views.decorators import staff_member_required
from derrida.books.views import (
    PublisherAutocomplete, LanguageAutocomplete, InstanceDetailView,
    InstanceListView
)

urlpatterns = [
    # TODO: come up with cleaner url patterns/names for autocomplete views
    url(r'^publishers/autocomplete/$',
        staff_member_required(PublisherAutocomplete.as_view()),
        name='publisher-autocomplete'),
    url(r'^languages/autocomplete/$',
        staff_member_required(LanguageAutocomplete.as_view()),
        name='language-autocomplete'),
    url(r'^(?P<pk>\d+)/$', InstanceDetailView.as_view(), name='detail'),
    url(r'^$', InstanceListView.as_view(), name='list'),
]
```

old_contents: identical except the detail pattern lacked the trailing slash: `url(r'^(?P<pk>\d+)$', InstanceDetailView.as_view(), name='detail')`.
lang: Python
proba: 0.000004
---
commit: 66568b681307835aa36da291581eea7e29d22d54
subject: Fix BUG in backfill
old_file: backfill.py
new_file: backfill.py
new_contents:

```python
import titanic
import requests
import json
import time

'''
Status
    new
    updated
    building
    running
    done
'''

server = 'http://0.0.0.0:8314/'
auth = None
# auth = ('<username>@mozilla.com', '<password>')

def updateJob(jobID, branch, buildername, revision, delta=7):
    revList, buildList = titanic.runAnalysis(
        branch, buildername, revision, delta)
    print revList
    print buildList
    if not (revList or buildList):
        return 401
    buildRevs = ','.join(buildList)
    revs = ','.join(revList)
    data = {'id': jobID, 'buildrevs': buildRevs, 'analyzerevs': revs}
    headers = {'Content-type': 'application/json', 'Accept': 'text/plain'}
    r = requests.post(server + 'update', data=json.dumps(data), headers=headers)
    print r.status_code
    return r.status_code

def updateStatus(jobID, status):
    data = {'id': jobID, 'status': status}
    headers = {'Content-type': 'application/json', 'Accept': 'text/plain'}
    r = requests.post(server + 'update_status', data=json.dumps(data), headers=headers)

def processJob(job):
    if job['status'] == 'error':
        return
    if job['status'] == 'new':
        print 'New Job...'
        updateJob(job['id'], job['branch'], job['buildername'], job['revision'])
        updateStatus(job['id'], 'updated')
        print 'Updated Job...'
    if job['status'] == 'updated':
        if not (job['buildrevs'] == ''):
            buildList = job['buildrevs'].split(',')
            for rev in buildList:
                print rev
                titanic.triggerBuild(job['branch'], job['buildername'], rev, auth)
        updateStatus(job['id'], 'building')
        print 'Building for Job...'
    if job['status'] == 'building':
        print 'Builds are triggered!'
        buildFlag = 1
        revList = job['analyzerevs'].split(',')
        for rev in revList:
            if (titanic.isBuildPending(job['branch'], job['buildername'], rev, auth) \
                    or titanic.isBuildRunning(job['branch'], job['buildername'], rev, auth)):
                buildFlag = 0
                continue
            elif not titanic.isBuildSuccessful(job['branch'], job['buildername'], rev):
                print 'Error: For ' + rev + ' ' + job['buildername']
                updateStatus(job['id'], 'error')
                buildFlag = 0
                continue
        if buildFlag:
            print 'Builds are done!'
            for rev in revList:
                titanic.triggerJob(job['branch'], job['buildername'], rev, auth)
                if not (titanic.isJobPending(job['branch'], job['buildername'], rev, auth) \
                        or titanic.isJobRunning(job['branch'], job['buildername'], rev, auth)):
                    updateStatus(job['id'], 'error')
            updateStatus(job['id'], 'running')
            print 'Running Jobs...'
    if job['status'] == 'running':
        doneFlag = 1
        revList = job['analyzerevs'].split(',')
        for rev in revList:
            if (titanic.isJobPending(job['branch'], job['buildername'], rev, auth) \
                    or titanic.isJobRunning(job['branch'], job['buildername'], rev, auth)):
                doneFlag = 0
        if doneFlag:
            updateStatus(job['id'], 'done')
            print 'Done'

def processCron():
    jobsJSON = requests.get(server + 'active_jobs')
    jobs = json.loads(jobsJSON.text)
    for job in jobs['jobs']:
        processJob(job)

# Schedule backfill.py to run every few minutes!
if __name__ == '__main__':
    processCron()
```

old_contents: identical except isBuildSuccessful was called with an extra argument: `titanic.isBuildSuccessful(job['branch'], job['buildername'], rev, auth)`.
lang: Python
proba: 0.000073
---
commit: 76e73fd9502dbd6f179dcea2fb4fd4d3ef6c913c
subject: make fabfile working correctly
old_file: deploy/fabfile.py
new_file: deploy/fabfile.py
new_contents:

```python
from fabric.api import sudo, cd, task, prompt, run
from fabric.contrib import files
from fabtools import require, python, supervisor

# Variables
newebe_dir = "/home/newebe/newebe"
newebe_process = newebe_user = "newebe"
newebe_user_dir = "/home/newebe/"
python_exe = newebe_dir + "/virtualenv/bin/python"
newebe_exe = newebe_dir + "/newebe_server.py"

# Helpers
import random
import string

def random_string(n):
    """Create n length random string"""
    chars = string.letters + string.digits
    code = ''.join([random.choice(chars) for i in range(n)])
    return code

def newebedo(cmd):
    """Run a commande as a newebe user"""
    sudo(cmd, user=newebe_user)

def delete_if_exists(filename):
    """Delete given file if it already exists"""
    if files.exists(filename):
        newebedo("rm -rf %s" % filename)

# Install tasks

@task()
def setup():
    """Deploy the whole newebe stack"""
    install_deb_packages()
    create_user()
    get_source()
    install_python_dependencies()
    sync_db()
    build_configuration_file()
    build_certificates()
    setup_supervisord()
    set_supervisord_config()

@task()
def install_deb_packages():
    """Install required deb packages"""
    require.deb.packages([
        'python', 'python-setuptools', 'python-pip', 'python-pycurl',
        'python-imaging', 'couchdb', 'git', 'openssl'
    ])

@task()
def create_user():
    """Create newebe user"""
    require.user(newebe_user, newebe_user_dir)

@task()
def get_source():
    """Get source from master branch of official git repo"""
    with cd(newebe_user_dir):
        delete_if_exists("newebe")
        newebedo("git clone git://github.com/gelnior/newebe.git")

@task()
def install_python_dependencies():
    """Install Python dependencies."""
    require.python.virtualenv(newebe_dir + "/virtualenv",
                              use_sudo=True, user=newebe_user)
    with python.virtualenv(newebe_dir + "/virtualenv"):
        newebedo("pip install --use-mirrors -r %s/deploy/requirements.txt" % \
                 newebe_dir)

@task()
def sync_db():
    """Build required Couchdb views"""
    with python.virtualenv(newebe_dir + "/virtualenv"), cd(newebe_dir):
        newebedo("python syncdb.py")

@task()
def build_configuration_file():
    """Build default configuration file"""
    timezone = prompt("""
Which time zone do you want for your database
(default is Europe/Paris, Check Newebe wiki for timezone list) ?
""")
    if not timezone:
        timezone = "Europe/Paris"
    with cd(newebe_dir):
        delete_if_exists('local_settings.py')
        newebedo('echo "TORNADO_PORT = 8000" >> local_settings.py')
        newebedo('echo "DEBUG = False" >> local_settings.py')
        newebedo("echo 'COUCHDB_DB_NAME = \"newebe\"' >> local_settings.py")
        newebedo("echo 'TIMEZONE = \"%s\"' >> local_settings.py" % timezone)
        newebedo("echo 'COOKIE_KEY = \"%s\"' >> local_settings.py" % \
                 random_string(42))

@task()
def build_certificates():
    """Build HTTPS certificates"""
    with cd(newebe_dir):
        delete_if_exists('server.key')
        delete_if_exists('server.crt')
        newebedo("openssl genrsa -out ./server.key 1024")
        newebedo("openssl req -new -x509 -days 3650 -key ./server.key -out\
 ./server.crt")

@task()
def setup_supervisord():
    """Install python daemon manager, supervisord"""
    require.deb.package("python-meld3")
    python.install("meld3==0.6.7", use_sudo=True)

@task()
def set_supervisord_config():
    """Configure Newebe runner for supervisord"""
    require.supervisor.process(newebe_process,
        command='%s %s' % (python_exe, newebe_exe),
        directory=newebe_dir,
        user=newebe_user
    )
    supervisor.restart_process(newebe_process)

# Update tasks

@task()
def update():
    """Update source code, build require couchdb views then restart newebe"""
    update_source()
    sync_db()
    restart_newebe()

@task()
def update_source():
    """Simple git pull inside newebe directory"""
    with cd(newebe_dir):
        newebedo("git pull")

@task()
def restart_newebe():
    """Restart newebe surpervisord process"""
    supervisor.restart_process(newebe_process)
```

old_contents:

```python
from fabric.api import sudo, cd, task, settings
from fabtools import require, python, supervisor

newebe_dir = "/home/newebe/newebe"
newebe_process = newebe_user = "newebe"
newebe_user_dir = "/home/newebe/"

def newebedo(cmd):
    sudo(cmd, user=newebe_user)

@task
def setup(timezone="Europe/Paris"):
    install_deb_packages()
    create_user()
    get_source()
    install_python_dependencies()
    sync_db()
    build_configuration_file(timezone)
    set_supervisord_config()

def install_deb_packages():
    require.deb.packages([
        'python', 'python-setuptools', 'python-pip', 'python-pycurl',
        'python-imaging', 'couchdb', 'git', 'openssl'
    ])

def create_user():
    require.user(newebe_user, newebe_user_dir)

def get_source():
    with cd(newebe_user_dir):
        newebedo("git clone git://github.com/gelnior/newebe.git")

def install_python_dependencies():
    require.python.virtualenv(newebe_dir + "/virtualenv",
                              use_sudo=True, user=newebe_user)
    with python.virtualenv(newebe_dir + "/virtualenv"):
        newebedo("pip install --use-mirrors -r %s/deploy/requirements.txt" % \
                 newebe_dir)

@task
def sync_db():
    with python.virtualenv(newebe_dir + "/virtualenv"), cd(newebe_dir):
        newebedo("python syncdb.py")

def build_configuration_file(timezone="Europe/Paris"):
    with cd(newebe_dir):
        newebedo('echo "TIMEZONE = \"%s\"" >> local_settings.py' % timezone)
        cmd = 'echo "COOKIE_KEY = \"`< /dev/urandom tr -dc A-Za-z0-9_ | head'
        cmd += ' -c50`\"" >> local_settings.py'
        newebedo(cmd)

@task
def set_supervisord_config():
    require.deb.package("python-meld3")
    python.install("meld3==0.6.7", use_sudo=True)
    python_exe = newebe_dir + "/virtualenv/bin/python"
    require.supervisor.process(newebe_process,
        command='%s /home/newebe/newebe/newebe_server.py' % python_exe,
        directory=newebe_dir,
        user=newebe_user
    )
    supervisor.restart_process(newebe_process)

@task
def update():
    update_source()
    sync_db()
    restart_newebe()

def update_source():
    with cd(newebe_dir):
        newebedo("git pull")

@task
def restart_newebe():
    supervisor.start_process(newebe_process)
```

lang: Python
proba: 0.000001
---
commit: c4ea3ce306d4464ac0bc80286a60689972c7bc63
subject: Test isolation.
old_file: agon/tests.py
new_file: agon/tests.py
new_contents:

```python
from threading import Thread

from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.test import TestCase
from django.contrib.auth.models import User

from agon.models import award_points, points_awarded

class PointsTestCase(TestCase):
    def setUp(self):
        self.users = [
            User.objects.create_user("user_%d" % i, "user_%d@example.com" % i, str(i))
            for i in xrange(1)
        ]

    def tearDown(self):
        if hasattr(settings, "AGON_POINT_VALUES"):
            del settings.AGON_POINT_VALUES

    def setup_points(self, value):
        settings.AGON_POINT_VALUES = value

    def test_improperly_configured(self):
        user = self.users[0]
        try:
            award_points(user, "JOINED_SITE")
        except ImproperlyConfigured, e:
            self.assertEqual(str(e), "You must define 'AGON_POINT_VALUES' in settings")
        self.setup_points({})
        try:
            award_points(user, "JOINED_SITE")
        except ImproperlyConfigured, e:
            self.assertEqual(str(e), "You must define a point value for 'JOINED_SITE'")

    def test_simple_user_point_award(self):
        self.setup_points({
            "JOINED_SITE": 1,
        })
        user = self.users[0]
        award_points(user, "JOINED_SITE")
        self.assertEqual(points_awarded(user), 1)

    def test_concurrent_award(self):
        user = self.users[0]
        self.setup_points({
            "TEST_1": 10,
        })
        return
        def run():
            award_points(user, "TEST_1")
        threads = []
        for i in xrange(5):
            t = Thread(target=run)
            threads.append(t)
            t.start()
        for t in threads:
            t.join()
        self.assertEqual(points_awarded(user), 50)
```

old_contents: identical except it had no tearDown method and test_concurrent_award had no self.setup_points call before the `return`.
lang: Python
proba: 0
---
commit: ac084c574b58771bd240af3fa4b4a000fc742229
subject: update to handle different kinds of files
old_file: projects/allan_cont/showlog_long.py
new_file: projects/allan_cont/showlog_long.py
new_contents:

```python
import numpy as np
import pylab as pl
from ourgui import openFile

def plotline(maxx, minx=0, value=0, style="k-", plotfunc=pl.plot):
    plotfunc([minx, maxx], [value, value], style)

def quickplot(filename):
    alldata = np.loadtxt(filename, comments="#", delimiter=",")
    datashape = np.shape(alldata)
    try:
        col = np.shape(alldata)[1]
        data = alldata[:, col-1]
    except (IndexError):
        data = alldata
    maxdata, mindata, stddata, meandata = np.max(data), np.min(data), np.std(data), np.mean(data)
    n = len(data)

    pl.subplot(211)
    pl.plot(data, 'k.')
    plotline(n, value=maxdata, style="g-")
    plotline(n, value=mindata, style="r-")
    plotline(n, value=meandata, style="k-")
    plotline(n, value=(meandata+stddata), style="b-")
    plotline(n, value=(meandata-stddata), style="b-")
    pl.xlabel('data points')
    pl.ylabel('Frequency (Hz)')
    pl.title("Frequency: %f (+- %f) Hz" % (meandata, stddata))

    pl.subplot(212)
    n, bins, patches = pl.hist(data-meandata, 100, normed=1, facecolor='green', alpha=0.75)
    pl.xlabel('Frequency deviation from mean (Hz)')
    pl.ylabel('distribution')

    pl.show()

filename = openFile("log")
if filename:
    quickplot(filename)
```

old_contents: identical except quickplot simply did `data = np.loadtxt(filename, comments="#")` with no delimiter argument and no try/except handling of multi-column files.
lang: Python
proba: 0
---
commit: c206936120519912762f30eb269f1733b5593bf8
subject: fix window edges
old_file: contrib/spryte/balls.py
new_file: contrib/spryte/balls.py
new_contents:

```python
import random

from pyglet import window, clock, gl, event
from pyglet.window import key

import spryte

win = window.Window(vsync=False)
fps = clock.ClockDisplay(color=(1, 1, 1, 1))

layer = spryte.Layer()
balls = []
for i in range(200):
    balls.append(spryte.Sprite('ball.png', layer,
        (win.width - 64) * random.random(), (win.height - 64) * random.random(),
        dx=-50 + 100*random.random(), dy=-50 + 100*random.random(),
        dead=False))

def animate(dt):
    for ball in balls:
        ball.x += ball.dx * dt
        ball.y += ball.dy * dt
        if ball.x + ball.width > win.width or ball.x < 0:
            ball.dx *= -1
        if ball.y + ball.height > win.height or ball.y < 0:
            ball.dy *= -1
clock.schedule(animate)

layer2 = spryte.Layer()
car = spryte.Sprite('car.png', layer2, win.width/2, win.height/2)

keyboard = key.KeyStateHandler()
win.push_handlers(keyboard)

def animate(dt):
    car.x += (keyboard[key.RIGHT] - keyboard[key.LEFT]) * 200 * dt
    car.y += (keyboard[key.UP] - keyboard[key.DOWN]) * 200 * dt
    for i, ball in enumerate(balls):
        if ball.intersects(car):
            if ball.width > ball.image.width * 2:
                # pop!
                balls[i].delete()
                balls[i] = spryte.Sprite('ball.png', layer,
                    win.width * random.random(), win.height * random.random(),
                    dx=-50 + 100*random.random(), dy=-50 + 100*random.random())
            else:
                ball.width += 1
                ball.height += 1
clock.schedule(animate)

while not win.has_exit:
    clock.tick()
    win.dispatch_events()
    win.clear()

    gl.glPushAttrib(gl.GL_ENABLE_BIT)
    gl.glEnable(gl.GL_BLEND)
    gl.glBlendFunc(gl.GL_SRC_ALPHA, gl.GL_ONE_MINUS_SRC_ALPHA)
    layer.draw()
    layer2.draw()
    gl.glPopAttrib()

    fps.draw()
    win.flip()
```

old_contents: identical except the initial balls were placed at `win.width * random.random(), win.height * random.random()` and the bounce checks ignored sprite size: `if ball.x > win.width or ball.x < 0:` and `if ball.y > win.height or ball.y < 0:`.
lang: Python
proba: 0.000001
---
commit: b77cb1ac7524e76fd1f29ee6c8e214d12d04226f
subject: Improve variable names.
old_file: scripts/gen_regex.py
new_file: scripts/gen_regex.py
new_contents:

```python
import unicodedata
from ftfy import chardata
import pathlib
from pkg_resources import resource_filename

CATEGORIES = [unicodedata.category(chr(i)) for i in range(0x110000)]
DATA_PATH = pathlib.Path(resource_filename('wordfreq', 'data'))

def func_to_regex(accept_func):
    """
    Given a function that returns True or False for a numerical codepoint,
    return a regex character class accepting the characters resulting in True.
    Ranges separated only by unassigned characters are merged for efficiency.
    """
    # Where the last range would end if it also included unassigned codepoints.
    # If we need to add a codepoint right after this point, we extend the
    # range; otherwise we start a new one.
    tentative_end = None
    ranges = []
    for codepoint, category in enumerate(CATEGORIES):
        if accept_func(codepoint):
            if tentative_end == codepoint - 1:
                ranges[-1][1] = codepoint
            else:
                ranges.append([codepoint, codepoint])
            tentative_end = codepoint
        elif category == 'Cn' and tentative_end == codepoint - 1:
            tentative_end = codepoint
    return '[%s]' % ''.join(chr(r[0]) + '-' + chr(r[1]) for r in ranges)

def cache_regex_from_func(filename, func):
    """
    Generates a regex from a function that accepts a single unicode character,
    and caches it in the data path at filename.
    """
    with (DATA_PATH / filename).open(mode='w') as file:
        file.write(func_to_regex(func))

def _is_emoji_codepoint(i):
    """
    Report whether a numerical codepoint is (likely) an emoji: a Unicode 'So'
    character (as future-proofed by the ftfy chardata module) but excluding
    symbols like © and ™ below U+2600 and the replacement character U+FFFD.
    """
    return chardata.CHAR_CLASS_STRING[i] == '3' and i >= 0x2600 and i != 0xfffd

def _is_non_punct_codepoint(i):
    """
    Report whether a numerical codepoint is not one of the following classes:
    - P: punctuation
    - S: symbols
    - Z: separators
    - C: control characters
    This will classify symbols, including emoji, as punctuation; users that
    want to accept emoji should add them separately.
    """
    return CATEGORIES[i][0] not in 'PSZC'

def _is_combining_mark_codepoint(i):
    """
    Report whether a numerical codepoint is a combining mark (Unicode 'M').
    """
    return CATEGORIES[i][0] == 'M'

if __name__ == '__main__':
    cache_regex_from_func('emoji.txt', _is_emoji_codepoint)
    cache_regex_from_func('non_punct.txt', _is_non_punct_codepoint)
    cache_regex_from_func('combining_mark.txt', _is_combining_mark_codepoint)
```

old_contents: identical except func_to_regex took a parameter named `func` and its loop used the short names `i` and `cat`: `for i, cat in enumerate(CATEGORIES):` with `func(i)` and `cat == 'Cn'` inside.
lang: Python
proba: 0.999999
---
commit: 960618782d81035dd9671c973ad6d95c55ff6534
subject: Use the firefox capabilities if wires exist
old_file: tests/functional_tests_gerrit.py
new_file: tests/functional_tests_gerrit.py
new_contents:

```python
#!/bin/env python
import unittest
import os
import yaml
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions
from selenium.webdriver.common.desired_capabilities import DesiredCapabilities
from gerrit import Gerrit

MAX_WAIT = 10

class TestGerrit(unittest.TestCase):
    @classmethod
    def setUpClass(cls):
        try:
            with open('test-config.yaml', 'r') as ymlfile:
                cls.config = yaml.load(ymlfile)
        except IOError:
            # No config file, set defaults
            cls.config = {'webdriver': 'firefox',
                          'url': 'http://localhost:8080/',
                          'username': 'felix',
                          'password': '<password>',
                          }

        if cls.config.get('webdriver') == 'firefox':
            if os.path.isfile('./wires'):
                firefox_capabilities = DesiredCapabilities.FIREFOX
                firefox_capabilities['marionette'] = True
                firefox_capabilities['binary'] = os.environ.get('firefox_path', '/usr/bin/firefox')
                cls._browser = webdriver.Firefox(capabilities=firefox_capabilities)
            else:
                cls._browser = webdriver.Firefox()
        elif cls.config.get('webdriver') == 'chrome':
            cls._browser = webdriver.Chrome()
        else:
            raise Exception('Webdriver not supported')

        cls._url = cls.config.get('url')
        cls._username = cls.config.get('username')
        cls._password = cls.config.get('password')

        # Log in once to make user admin
        cls._browser.get('%slogin' % cls._url)
        cls._browser.implicitly_wait(MAX_WAIT)
        elem = cls._browser.find_element_by_id('f_user')
        elem.send_keys(cls._username)
        elem = cls._browser.find_element_by_id('f_pass')
        elem.send_keys(cls._password + Keys.RETURN)
        element = WebDriverWait(cls._browser, MAX_WAIT).until(
            expected_conditions.title_contains('My Reviews')
        )

    @classmethod
    def tearDownClass(cls):
        cls._browser.close()

    def test_add_project(self):
        # Felix wants to add a project, he uses the gerrit module to do this
        gerrit = Gerrit(
            url=self._url,
            auth_type='http',
            auth_id=self._username,
            auth_pw=self._password,
        )
        project = gerrit.create_project('my project')

        # Felix can now access his project in the web interface
        self._browser.get('%s#/admin/projects/my+project' % self._url)
        element = WebDriverWait(self._browser, MAX_WAIT).until(
            expected_conditions.title_contains('Project my project')
        )
        self.assertIn('Project my project', self._browser.title)
```

old_contents: identical except that after the capabilities block the browser was created as plain `webdriver.Firefox()` without passing the capabilities (and with no `else` branch), and tearDownClass called `cls._browser.quit()` instead of `cls._browser.close()`.
lang: Python
proba: 0.000001
---
commit: 5ff27451b55cdd03fa7913aee9e0762297341e29
subject: make image printer thingy iterable, and optimize output data
old_file: fabulous/image.py
new_file: fabulous/image.py
new_contents:

```python
"""Print Images to a 256-Color Terminal
"""

import sys
import fcntl
import struct
import termios
import itertools

from PIL import Image as Pills
from grapefruit import Color

from fabulous.xterm256 import rgb_to_xterm

class Image(object):
    def __init__(self, path, width=None, bgcolor='black'):
        self.bgcolor = Color.NewFromHtml(bgcolor)
        self.img = Pills.open(path)
        self.resize(width)

    def __str__(self):
        return "\n".join(self)

    def __iter__(self):
        return self.reduce(self.convert())

    def resize(self, width=None):
        (iw, ih) = self.img.size
        if width is None:
            width = _term_width()
        if iw * 2 <= width:
            return
        width //= 2
        height = int(float(ih) * (float(width) / float(iw)))
        self.img = self.img.resize((width, height))

    def reduce(self, colors):
        need_reset = False
        line = []
        for color, items in itertools.groupby(colors):
            if color is None:
                if need_reset:
                    line.append("\x1b[49m")
                    need_reset = False
                line.append(' ' * len(list(items)))
            elif color == "EOL":
                if need_reset:
                    line.append("\x1b[49m")
                    need_reset = False
                    yield "".join(line)
                else:
                    line.pop()
                    yield "".join(line)
                line = []
            else:
                need_reset = True
                line.append("\x1b[48;5;%dm%s" % (color, ' ' * len(list(items))))

    def convert(self):
        (width, height) = self.img.size
        pix = self.img.load()
        for y in xrange(height):
            for x in xrange(width):
                rgba = pix[x, y]
                if len(rgba) == 4 and rgba[3] == 0:
                    yield None
                elif len(rgba) == 3 or rgba[3] == 255:
                    yield rgb_to_xterm(rgba[:3])
                else:
                    color = Color.NewFromRgb(*[c / 255.0 for c in rgba])
                    yield rgb_to_xterm(color.AlphaBlend(self.bgcolor))
            yield "EOL"

def _term_width():
    call = fcntl.ioctl(0, termios.TIOCGWINSZ, "\000" * 8)
    height, width = struct.unpack("hhhh", call)[:2]
    return width
```

old_contents:

```python
import sys

from PIL import Image

# from fabulous.ansi import fg
from fabulous.test_xterm256 import fg

def image(path, resize=None, resize_antialias=None):
    im = Image.open(path)
    if resize:
        im = im.resize(resize)
    elif resize_antialias:
        im = im.resize(resize, Image.ANTIALIAS)
    pix = im.load()
    (width, height) = im.size
    for y in xrange(height):
        for x in xrange(width):
            color = pix[x, y]
            if len(color) == 4 and color[3] <= 0.001:
                s = sys.stdout.write(' ')
            else:
                sys.stdout.write(unicode(fg(color, u"\u2588")).encode('utf8'))
        sys.stdout.write("\n")
```

lang: Python
proba: 0.000001
---
commit: 439b977b14b12d42ee886a432f3a4af555d8de10
subject: add storage stuctures
old_file: minMaxCalc.py
new_file: minMaxCalc.py
new_contents:

```python
import pandas as pd

# read in dataset
xl = pd.ExcelFile("data/130N_Cycles_1-47.xlsx")
df = xl.parse("Specimen_RawData_1")
df

"""
This is what the dataset currently looks like - it has 170,101 rows and two
columns. The dataset contains data from 47 cycles following an experiment.
The output of these experiments form the two columns:<br>
- time (seconds)
- load (exerted force, in Newtons)

My task is to find the local maxima and minima in the dataset, and mark these
values in a database. Initially, the database will consist of four columns:
time, load, max, and min. It can be modified or condensed later on to fit
further requirements.

This is the criteria I will use to find the maxima:
- write each row in the db to a cache
- initialize a flag value to false
- if the force in the previous row is smaller than the force in the next row,
  write the new row to the cache (leave the flag as false)
- if the force in the previous row is bigger than the force in the next row,
  write the new row to cache and mark it as a max cycle (change the flag to true)

This is the criteria I will use to find the minima:
- write each row in the db to a cache
- initialize a flag value to false
- if the force in the previous row is bigger than the force in the next row,
  write the new row to the cache (leave the flag as false)
- if the force in the previous row is smaller than the force in the next row,
  write the new row to the cache and mark it as a min cycle (change the flag to true)
"""

# append data from time column to list
time = []
for item in df.index:
    time.append(df["Time"][item])

# append data from load column to list
load = []
for item in df.index:
    load.append(df["Load"][item])

# create list of tuples for time and load
data = []
for i, j in zip(time, load):
    data.append((i, j))

# apply algorithm for finding maxima in data
max_data = []
for idx, item in enumerate(data):
    prev = data[idx-1][1]
    curr = item[1]
    if prev > curr:
        max_data.append(item + ("max",))
    else:
        max_data.append(item + ("",))

# apply algorithm for finding minima in data
min_data = []
for idx, item in enumerate(max_data):
    prev = max_data[idx-1][1]
    curr = item[1]
    if prev < curr:
        min_data.append(item + ("min",))
    else:
        min_data.append(item + ("",))

all_data = min_data

# count maxima number
max_count = 0
for item in all_data:
    if item[2] == "max":
        max_count += 1
print(max_count)

# count minima number
min_count = 0
for item in all_data:
    if item[3] == "min":
        min_count += 1
print(min_count)

# create db model
db = []

# create cache store
cache = []
```

old_contents: identical except it ended with `df = pd.DataFrame(data, columns=['Time', 'Load', 'Max', 'Min'])` followed by `df` instead of the db and cache lists.
lang: Python
proba: 0
---
commit: 642cd34041a579fa37ea3790143d79842c7141f3
subject: add implementation for all makers
old_file: ismrmrdpy/backend/acquisition.py
new_file: ismrmrdpy/backend/acquisition.py
new_contents:

```python
# -*- coding: utf-8 -*-
#
# Copyright (c) 2014-2015, Ghislain Antony Vaillant
# All rights reserved.
#
# This file is distributed under the BSD License, see the LICENSE file or
# checkout the license terms at http://opensource.org/licenses/BSD-2-Clause).

from __future__ import absolute_import, division, print_function

from .constants import Constants, AcquisitionFlags, DataTypes
from .constants import acquisition_header_dtype, ismrmrd_to_numpy_dtypes
import numpy

def make_header(version=Constants.version, *args, **kwargs):
    header = numpy.zeros((), dtype=acquisition_header_dtype)
    header['version'] = version
    for key in kwargs:
        if key in acquisition_header_dtype.fields:
            header[key] = kwargs[key]
    return header

def make_dtype(header):
    data_dtype = ismrmrd_to_numpy_dtypes[DataTypes.cxfloat]
    data_shape = (header['active_channels'],
                  header['number_of_samples'])
    traj_dtype = ismrmrd_to_numpy_dtypes[DataTypes.float]
    traj_shape = (header['number_of_samples'],
                  header['trajectory_dimensions'])
    return numpy.dtype([
        ('head', acquisition_header_dtype),
        ('traj', (traj_dtype, traj_shape)),
        ('data', (data_dtype, data_shape)),
    ])

def make_array(header=None, *args, **kwargs):
    header = header or make_header(**kwargs)
    trajectory = None
    data = None
    dtype = make_dtype(header)
    array = numpy.zeros((), dtype=dtype)
    array['head'] = header
    if trajectory is not None:
        array['traj'] = trajectory
    if data is not None:
        array['data'] = data

def frombytes(bytestring):
    pass

def set_flags(header, flags=None):
    pass

def clear_flags(header, flags=None):
    pass

def is_flag_set(header, flag):
    pass

def _verify_flags(flags):
    pass

def set_channels(header, channels=None):
    pass

def clear_channels(header, channels=None):
    pass

def is_channel_set(header, channel):
    pass

def _verify_channels(flags):
    pass
```

old_contents: the same module, but make_header (taking only `*args, **kwargs`), make_dtype, and make_array were bare `pass` stubs like the rest, the numpy import was absent, and the only constants import was `from .constants import Constants, AcquisitionFlags, acquisition_header_dtype`.
lang: Python
proba: 0
---
commit: 72067069138ce9568c06140d23bd07cc6741a30e
subject: Test case can't throw away windows, it needs shared context space to continue. XXX fix this in pyglet, ideally.
old_file: tests/resource/RES_LOAD_IMAGE.py
new_file: tests/resource/RES_LOAD_IMAGE.py
new_contents:

```python
#!/usr/bin/python
# $Id:$

import os
import sys
import unittest

from pyglet.gl import *
from pyglet import image
from pyglet import resource
from pyglet import window

__noninteractive = True

# Test image is laid out
#  M R
#  B G
# In this test the image is sampled at four points from top-right clockwise:
# R G B M (red, green, blue, magenta)

class TestCase(unittest.TestCase):
    def setUp(self):
        self.w = window.Window(width=10, height=10)
        self.w.dispatch_events()
        resource.path.append('@' + __name__)
        resource.reindex()

    def tearDown(self):
        #self.w.close()
        pass

    def check(self, img, colors):
        glClear(GL_COLOR_BUFFER_BIT)
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST)
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST)
        img.blit(0, 0)
        buffer = image.get_buffer_manager().get_color_buffer().image_data
        buffer.format = 'RGBA'
        buffer.pitch = len(buffer.format) * buffer.width

        bytes = buffer.data

        def sample(x, y):
            i = y * buffer.pitch + x * len(buffer.format)
            r, g, b, _ = bytes[i:i+len(buffer.format)]
            r, g, b = map(ord, (r, g, b))
            return {
                (255, 0, 0): 'r',
                (0, 255, 0): 'g',
                (0, 0, 255): 'b',
                (255, 0, 255): 'm'}.get((r, g, b), 'x')

        samples = ''.join([
            sample(3, 3), sample(3, 0), sample(0, 0), sample(0, 3)])
        self.assertTrue(samples == colors, samples)

    def test0(self):
        self.check(resource.image('rgbm.png'), 'rgbm')

    def test1(self):
        self.check(resource.image('rgbm.png', pad=1), 'rgbm')

    def test1a(self):
        self.check(resource.image('rgbm.png', pad=2), 'rgbm')

    def test1b(self):
        self.check(resource.image('rgbm.png', pad=4), 'rgbm')

    def test2(self):
        self.check(resource.image('rgbm.png', flip_x=True), 'mbgr')

    def test3(self):
        self.check(resource.image('rgbm.png', flip_y=True), 'grmb')

    def test4(self):
        self.check(resource.image('rgbm.png', flip_x=True, flip_y=True), 'bmrg')

    def test5(self):
        self.check(resource.image('rgbm.png', rotate=90), 'mrgb')

    def test5a(self):
        self.check(resource.image('rgbm.png', rotate=-270), 'mrgb')

    def test6(self):
        self.check(resource.image('rgbm.png', rotate=180), 'bmrg')

    def test6a(self):
        self.check(resource.image('rgbm.png', rotate=-180), 'bmrg')

    def test7(self):
        self.check(resource.image('rgbm.png', rotate=270), 'gbmr')

    def test7a(self):
        self.check(resource.image('rgbm.png', rotate=-90), 'gbmr')

if __name__ == '__main__':
    unittest.main()
```

old_contents: identical except tearDown was simply `self.w.close()` (not commented out, no `pass`).
lang: Python
proba: 0.000001
a80e063a4afb65018a8b137f1909956839f42767
Test default search context
tests/sentry/interfaces/tests.py
tests/sentry/interfaces/tests.py
# -*- coding: utf-8 -*- from __future__ import absolute_import import pickle from sentry.interfaces import Interface, Message, Query, Stacktrace from sentry.models import Event from sentry.testutils import TestCase, fixture class InterfaceBase(TestCase): @fixture def event(self): return Event( id=1, ) class InterfaceTest(InterfaceBase): @fixture def interface(self): return Interface(foo=1) def test_init_sets_attrs(self): assert self.interface.attrs == ['foo'] def test_setstate_sets_attrs(self): data = pickle.dumps(self.interface) obj = pickle.loads(data) assert obj.attrs == ['foo'] def test_to_html_default(self): assert self.interface.to_html(self.event) == '' def test_to_string_default(self): assert self.interface.to_string(self.event) == '' def test_get_search_context_default(self): assert self.interface.get_search_context(self.event) == {} class MessageTest(InterfaceBase): @fixture def interface(self): return Message(message='Hello there %s!', params=('world',)) def test_serialize_behavior(self): assert self.interface.serialize() == { 'message': self.interface.message, 'params': self.interface.params, } def test_get_hash_uses_message(self): assert self.interface.get_hash() == [self.interface.message] def test_get_search_context_with_params_as_list(self): interface = self.interface interface.params = ['world'] assert interface.get_search_context(self.event) == { 'text': [interface.message] + list(interface.params) } def test_get_search_context_with_params_as_tuple(self): assert self.interface.get_search_context(self.event) == { 'text': [self.interface.message] + list(self.interface.params) } def test_get_search_context_with_params_as_dict(self): interface = self.interface interface.params = {'who': 'world'} interface.message = 'Hello there %(who)s!' assert self.interface.get_search_context(self.event) == { 'text': [interface.message] + interface.params.values() } class QueryTest(InterfaceBase): @fixture def interface(self): return Query(query='SELECT 1', engine='psycopg2') def test_serialize_behavior(self): assert self.interface.serialize() == { 'query': self.interface.query, 'engine': self.interface.engine, } def test_get_hash_uses_query(self): assert self.interface.get_hash() == [self.interface.query] def test_get_search_context(self): assert self.interface.get_search_context(self.event) == { 'text': [self.interface.query], }
# -*- coding: utf-8 -*- from __future__ import absolute_import import pickle from sentry.interfaces import Interface, Message, Query, Stacktrace from sentry.models import Event from sentry.testutils import TestCase, fixture class InterfaceBase(TestCase): @fixture def event(self): return Event( id=1, ) class InterfaceTest(InterfaceBase): @fixture def interface(self): return Interface(foo=1) def test_init_sets_attrs(self): assert self.interface.attrs == ['foo'] def test_setstate_sets_attrs(self): data = pickle.dumps(self.interface) obj = pickle.loads(data) assert obj.attrs == ['foo'] def test_to_html_default(self): assert self.interface.to_html(self.event) == '' def test_to_string_default(self): assert self.interface.to_string(self.event) == '' class MessageTest(InterfaceBase): @fixture def interface(self): return Message(message='Hello there %s!', params=('world',)) def test_serialize_behavior(self): assert self.interface.serialize() == { 'message': self.interface.message, 'params': self.interface.params, } def test_get_hash_uses_message(self): assert self.interface.get_hash() == [self.interface.message] def test_get_search_context_with_params_as_list(self): interface = self.interface interface.params = ['world'] assert interface.get_search_context(self.event) == { 'text': [interface.message] + list(interface.params) } def test_get_search_context_with_params_as_tuple(self): assert self.interface.get_search_context(self.event) == { 'text': [self.interface.message] + list(self.interface.params) } def test_get_search_context_with_params_as_dict(self): interface = self.interface interface.params = {'who': 'world'} interface.message = 'Hello there %(who)s!' assert self.interface.get_search_context(self.event) == { 'text': [interface.message] + interface.params.values() } class QueryTest(InterfaceBase): @fixture def interface(self): return Query(query='SELECT 1', engine='psycopg2') def test_serialize_behavior(self): assert self.interface.serialize() == { 'query': self.interface.query, 'engine': self.interface.engine, } def test_get_hash_uses_query(self): assert self.interface.get_hash() == [self.interface.query] def test_get_search_context(self): assert self.interface.get_search_context(self.event) == { 'text': [self.interface.query], }
Python
0.000001
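The new test in this record pins down a default-versus-override contract: the base interface returns an empty search context, and concrete interfaces override it. A dependency-free toy illustrating that contract, with names mirroring the test but not Sentry's actual implementation:

class Interface(object):
    def get_search_context(self, event):
        # Default: nothing searchable.
        return {}

class Query(Interface):
    def __init__(self, query):
        self.query = query

    def get_search_context(self, event):
        # Concrete interfaces expose their searchable text.
        return {'text': [self.query]}

assert Interface().get_search_context(event=None) == {}
assert Query('SELECT 1').get_search_context(event=None) == {'text': ['SELECT 1']}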
59ba038f117744ca0c5fe8c24b97b64830f8e7ec
Put bulk data into db
court_bulk_collector.py
court_bulk_collector.py
from courtreader import readers from courtutils.logger import get_logger from datetime import datetime, timedelta import pymongo import os import sys import time # configure logging log = get_logger() log.info('Worker running') def get_db_connection(): return pymongo.MongoClient(os.environ['MONGO_DB'])['va_court_search'] # Fill in cases court_reader = None current_court_fips = None db = get_db_connection() court_fips = '013' case_type = 'criminal' year = 2015 reader = readers.CircuitCourtReader() reader.connect() def get_cases_on_date(dateStr): log.info('Getting cases on ' + dateStr) cases = reader.get_cases_by_date(court_fips, case_type, dateStr) for case in cases: case['details'] = reader.get_case_details_by_number( \ court_fips, \ case_type, \ case['case_number']) case['details_fetched'] = datetime.utcnow() case['court_fips'] = court_fips print case['case_number'], case['defendant'], case['details']['Filed'] db.circuit_court_detailed_cases.find_one_and_replace({ 'court_fips': case['court_fips'], 'case_number': case['case_number'] }, case, upsert=True) date = datetime(year, 12, 31) while date.year == year: date_search = { 'court_fips': court_fips, 'case_type': case_type, 'date': date } dateStr = date.strftime('%m/%d/%Y') if db.circuit_court_dates_collected.find_one(date_search) != None: log.info(dateStr + ' already searched') else: get_cases_on_date(dateStr) db.circuit_court_dates_collected.insert_one(date_search) date += timedelta(days=-1) reader.log_off()
from courtreader import readers from courtutils.logger import get_logger from datetime import datetime, timedelta import pymongo import os import sys import time # configure logging log = get_logger() log.info('Worker running') def get_db_connection(): return pymongo.MongoClient(os.environ['MONGO_DB'])['va_court_search'] # Fill in cases court_reader = None current_court_fips = None db = get_db_connection() court_fips = '013' case_type = 'R' year = 2015 reader = readers.CircuitCourtReader() reader.connect() date = datetime(year, 12, 31) while date.year == year: dateStr = date.strftime('%m/%d/%Y') log.info('Getting cases on ' + dateStr) cases = reader.get_cases_by_date(court_fips, case_type, dateStr) for case in cases: case['details'] = reader.get_case_details_by_number( \ court_fips, \ case_type, \ case['case_number']) case['details_fetched'] = datetime.utcnow() print case['case_number'], case['defendant'], case['details']['Filed'] break date += timedelta(days=-1) reader.log_off()
Python
0.000004
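The collector above walks a whole year backwards one day at a time and skips dates it has already recorded in Mongo. A minimal dependency-free sketch of that loop, with a plain set standing in for the circuit_court_dates_collected collection and a stub in place of the scraper:

from datetime import datetime, timedelta

collected = set()   # stand-in for the Mongo "dates collected" collection

def get_cases_on_date(date_str):
    print('fetching cases for', date_str)   # stub for the real scraper

year = 2015
date = datetime(year, 12, 31)
while date.year == year:
    date_str = date.strftime('%m/%d/%Y')
    if date_str in collected:
        print(date_str, 'already searched')
    else:
        get_cases_on_date(date_str)
        collected.add(date_str)
    date += timedelta(days=-1)   # walk backwards; loop exits at 12/31/2014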
8c0dc68c41137cd809d4403045834ab4f876294c
Add small test for parsing the Var datashape
tests/test_datashape_creation.py
tests/test_datashape_creation.py
import blaze from blaze import datashape import numpy as np import unittest class TestDatashapeCreation(unittest.TestCase): def test_raise_on_bad_input(self): # Make sure it raises exceptions on a few nonsense inputs self.assertRaises(TypeError, blaze.dshape, None) self.assertRaises(TypeError, blaze.dshape, lambda x: x+1) def test_atom_shapes(self): self.assertEqual(blaze.dshape('bool'), datashape.bool_) self.assertEqual(blaze.dshape('int8'), datashape.int8) self.assertEqual(blaze.dshape('int16'), datashape.int16) self.assertEqual(blaze.dshape('int32'), datashape.int32) self.assertEqual(blaze.dshape('int64'), datashape.int64) self.assertEqual(blaze.dshape('uint8'), datashape.uint8) self.assertEqual(blaze.dshape('uint16'), datashape.uint16) self.assertEqual(blaze.dshape('uint32'), datashape.uint32) self.assertEqual(blaze.dshape('uint64'), datashape.uint64) self.assertEqual(blaze.dshape('float32'), datashape.float32) self.assertEqual(blaze.dshape('float64'), datashape.float64) self.assertEqual(blaze.dshape('complex64'), datashape.complex64) self.assertEqual(blaze.dshape('complex128'), datashape.complex128) def test_atom_shape_errors(self): self.assertRaises(TypeError, blaze.dshape, 'boot') self.assertRaises(TypeError, blaze.dshape, 'int33') self.assertRaises(TypeError, blaze.dshape, '12') def test_type_decl(self): self.assertRaises(TypeError, blaze.dshape, 'type X T = 3, T') self.assertEqual(blaze.dshape('3, int32'), blaze.dshape('type X = 3, int32')) def test_string_atom(self): self.assertEqual(blaze.dshape('string'), blaze.dshape("string('U8')")) self.assertEqual(blaze.dshape("string('ascii')").encoding, 'A') self.assertEqual(blaze.dshape("string('A')").encoding, 'A') self.assertEqual(blaze.dshape("string('utf-8')").encoding, 'U8') self.assertEqual(blaze.dshape("string('U8')").encoding, 'U8') self.assertEqual(blaze.dshape("string('utf-16')").encoding, 'U16') self.assertEqual(blaze.dshape("string('U16')").encoding, 'U16') self.assertEqual(blaze.dshape("string('utf-32')").encoding, 'U32') self.assertEqual(blaze.dshape("string('U32')").encoding, 'U32') def test_struct_of_array(self): self.assertEqual(str(blaze.dshape('5, int32')), '5, int32') self.assertEqual(str(blaze.dshape('{field: 5, int32}')), '{ field : 5, int32 }') self.assertEqual(str(blaze.dshape('{field: M, int32}')), '{ field : M, int32 }') def test_ragged_array(self): self.assertTrue(isinstance(blaze.dshape('3, Var, int32')[1], datashape.Var)) if __name__ == '__main__': unittest.main()
import blaze from blaze import datashape import numpy as np import unittest class TestDatashapeCreation(unittest.TestCase): def test_raise_on_bad_input(self): # Make sure it raises exceptions on a few nonsense inputs self.assertRaises(TypeError, blaze.dshape, None) self.assertRaises(TypeError, blaze.dshape, lambda x: x+1) def test_atom_shapes(self): self.assertEqual(blaze.dshape('bool'), datashape.bool_) self.assertEqual(blaze.dshape('int8'), datashape.int8) self.assertEqual(blaze.dshape('int16'), datashape.int16) self.assertEqual(blaze.dshape('int32'), datashape.int32) self.assertEqual(blaze.dshape('int64'), datashape.int64) self.assertEqual(blaze.dshape('uint8'), datashape.uint8) self.assertEqual(blaze.dshape('uint16'), datashape.uint16) self.assertEqual(blaze.dshape('uint32'), datashape.uint32) self.assertEqual(blaze.dshape('uint64'), datashape.uint64) self.assertEqual(blaze.dshape('float32'), datashape.float32) self.assertEqual(blaze.dshape('float64'), datashape.float64) self.assertEqual(blaze.dshape('complex64'), datashape.complex64) self.assertEqual(blaze.dshape('complex128'), datashape.complex128) def test_atom_shape_errors(self): self.assertRaises(TypeError, blaze.dshape, 'boot') self.assertRaises(TypeError, blaze.dshape, 'int33') self.assertRaises(TypeError, blaze.dshape, '12') def test_type_decl(self): self.assertRaises(TypeError, blaze.dshape, 'type X T = 3, T') self.assertEqual(blaze.dshape('3, int32'), blaze.dshape('type X = 3, int32')) def test_string_atom(self): self.assertEqual(blaze.dshape('string'), blaze.dshape("string('U8')")) self.assertEqual(blaze.dshape("string('ascii')").encoding, 'A') self.assertEqual(blaze.dshape("string('A')").encoding, 'A') self.assertEqual(blaze.dshape("string('utf-8')").encoding, 'U8') self.assertEqual(blaze.dshape("string('U8')").encoding, 'U8') self.assertEqual(blaze.dshape("string('utf-16')").encoding, 'U16') self.assertEqual(blaze.dshape("string('U16')").encoding, 'U16') self.assertEqual(blaze.dshape("string('utf-32')").encoding, 'U32') self.assertEqual(blaze.dshape("string('U32')").encoding, 'U32') def test_struct_of_array(self): self.assertEqual(str(blaze.dshape('5, int32')), '5, int32') self.assertEqual(str(blaze.dshape('{field: 5, int32}')), '{ field : 5, int32 }') self.assertEqual(str(blaze.dshape('{field: M, int32}')), '{ field : M, int32 }') if __name__ == '__main__': unittest.main()
Python
0.000001
55bff70c3dabe5251ed23720c9f2491cc8bd1ed1
Add support for Django 1.8+
favit/managers.py
favit/managers.py
# -*- coding: utf-8 -*- from django.contrib.contenttypes.models import ContentType from django.db import models from django.db.models import get_model def _get_content_type_and_obj(obj, model=None): if isinstance(model, basestring): model = get_model(*model.split(".")) if isinstance(obj, (int, long)): obj = model.objects.get(pk=obj) return ContentType.objects.get_for_model(type(obj)), obj class FavoriteManager(models.Manager): """ A Manager for Favorite objects """ from django import VERSION if VERSION > (1,8): def get_query_set(self): return self.get_queryset() def for_user(self, user, model=None): """ Returns a Favorite objects queryset for a given user. If a model params is provided, it returns only the favorited objects of that model class Usage: Favorite.objects.for_user(user) Favorite.objects.for_user(user, model=Song) Favorite.objects.for_user(user, model="music.song") """ qs = self.get_query_set().filter(user=user) if model: if isinstance(model, basestring): model = get_model(*model.split(".")) content_type = ContentType.objects.get_for_model(model) qs = qs.filter(target_content_type=content_type) return qs.order_by("-timestamp") def for_model(self, model): """ Returns a Favorite objects queryset for a given model. `model` may be a django model class or an string representing a model in module-notation, ie: "auth.User" Usage: Favorite.objects.for_model(Song) Favorite.objects.for_model("music.Song") """ # if model is an app_label.model string make it a Model class if isinstance(model, basestring): model = get_model(*model.split(".")) content_type = ContentType.objects.get_for_model(model) qs = self.get_query_set().filter( target_content_type=content_type ) return qs.order_by("-timestamp") def for_object(self, obj, model=None): """ Returns a Favorite objects queryset for a given object Usage: Favorite.objects.for_object(1, "music.Song") Favorite.objects.for_object(1, Song) or given a music app with a Song model: song = Song.objects.get(pk=1) Favorite.objects.for_object(song) """ content_type, obj = _get_content_type_and_obj(obj, model) qs = self.get_query_set().filter( target_content_type=content_type, target_object_id=obj.pk ) return qs.order_by("-timestamp") def get_favorite(self, user, obj, model=None): """ Returns a Favorite instance if the `user` has favorited the given object `obj`. Otherwise returns None Usage: Favorite.objects.get_favorite(user, 1, "music.Song") Favorite.objects.get_favorite(user, 1, Song) or given a music app with a Song model: song = Song.objects.get(pk=1) Favorite.objects.get_favorite(user, song) """ content_type, obj = _get_content_type_and_obj(obj, model) try: return self.get_query_set().get( user=user, target_content_type=content_type, target_object_id=obj.id ) except self.model.DoesNotExist: return None def create(self, user, obj, model=None): """ Creates and returns a new Favorite obj for the given user and obj """ content_type, content_object = _get_content_type_and_obj(obj, model) fav = super(FavoriteManager, self).create( user=user, target_content_type=content_type, target_object_id=content_object.pk, target=content_object ) return fav
# -*- coding: utf-8 -*- from django.contrib.contenttypes.models import ContentType from django.db import models from django.db.models import get_model def _get_content_type_and_obj(obj, model=None): if isinstance(model, basestring): model = get_model(*model.split(".")) if isinstance(obj, (int, long)): obj = model.objects.get(pk=obj) return ContentType.objects.get_for_model(type(obj)), obj class FavoriteManager(models.Manager): """ A Manager for Favorite objects """ def for_user(self, user, model=None): """ Returns a Favorite objects queryset for a given user. If a model params is provided, it returns only the favorited objects of that model class Usage: Favorite.objects.for_user(user) Favorite.objects.for_user(user, model=Song) Favorite.objects.for_user(user, model="music.song") """ qs = self.get_query_set().filter(user=user) if model: if isinstance(model, basestring): model = get_model(*model.split(".")) content_type = ContentType.objects.get_for_model(model) qs = qs.filter(target_content_type=content_type) return qs.order_by("-timestamp") def for_model(self, model): """ Returns a Favorite objects queryset for a given model. `model` may be a django model class or an string representing a model in module-notation, ie: "auth.User" Usage: Favorite.objects.for_model(Song) Favorite.objects.for_model("music.Song") """ # if model is an app_label.model string make it a Model class if isinstance(model, basestring): model = get_model(*model.split(".")) content_type = ContentType.objects.get_for_model(model) qs = self.get_query_set().filter( target_content_type=content_type ) return qs.order_by("-timestamp") def for_object(self, obj, model=None): """ Returns a Favorite objects queryset for a given object Usage: Favorite.objects.for_object(1, "music.Song") Favorite.objects.for_object(1, Song) or given a music app with a Song model: song = Song.objects.get(pk=1) Favorite.objects.for_object(song) """ content_type, obj = _get_content_type_and_obj(obj, model) qs = self.get_query_set().filter( target_content_type=content_type, target_object_id=obj.pk ) return qs.order_by("-timestamp") def get_favorite(self, user, obj, model=None): """ Returns a Favorite instance if the `user` has favorited the given object `obj`. Otherwise returns None Usage: Favorite.objects.get_favorite(user, 1, "music.Song") Favorite.objects.get_favorite(user, 1, Song) or given a music app with a Song model: song = Song.objects.get(pk=1) Favorite.objects.get_favorite(user, song) """ content_type, obj = _get_content_type_and_obj(obj, model) try: return self.get_query_set().get( user=user, target_content_type=content_type, target_object_id=obj.id ) except self.model.DoesNotExist: return None def create(self, user, obj, model=None): """ Creates and returns a new Favorite obj for the given user and obj """ content_type, content_object = _get_content_type_and_obj(obj, model) fav = super(FavoriteManager, self).create( user=user, target_content_type=content_type, target_object_id=content_object.pk, target=content_object ) return fav
Python
0.000007
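The compatibility shim in this record aliases the old get_query_set name to the new get_queryset inside the class body when the Django version is new enough. A framework-free sketch of that class-body version gate; the VERSION tuple here is a local stand-in rather than an import, and it uses >= so that 1.8 itself is included (the record's strict > would exclude exactly 1.8, despite the subject saying "1.8+"):

VERSION = (1, 9)   # stand-in for "from django import VERSION"

class Manager(object):
    def get_queryset(self):
        return ['queryset']

    # Methods defined under an if at class body level are perfectly legal:
    # the alias only exists when the version check passes.
    if VERSION >= (1, 8):
        def get_query_set(self):
            return self.get_queryset()

assert Manager().get_query_set() == ['queryset']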
e3a9db58f03eb73635a94ed6249e3c2a308f4ad0
Fix some typos found in staging.
fedmsg_genacls.py
fedmsg_genacls.py
# -*- coding: utf-8 -*- """ A fedmsg consumer that listens to pkgdb messages to update gitosis acls Authors: Janez Nemanič <janez.nemanic@gmail.com> Ralph Bean <rbean@redhat.com> """ import pprint import subprocess import os import fedmsg.consumers import moksha.hub.reactor class GenACLsConsumer(fedmsg.consumers.FedmsgConsumer): # Really, we want to use this specific topic to listen to. topic = 'org.fedoraproject.prod.pkgdb.acl.update' # But for testing, we'll just listen to all topics with this: #topic = '*' config_key = 'genacls.consumer.enabled' def __init__(self, hub): super(GenACLsConsumer, self).__init__(hub) # This is required. It is the number of seconds that we should wait # until we ultimately act on a pkgdb message. self.delay = self.hub.config['genacls.consumer.delay'] # We use this to manage our state self.queued_messages = [] def consume(self, msg): msg = msg['body'] self.log.info("Got a message %r" % msg['topic']) def delayed_consume(): if self.queued_messages: try: self.action(self.queued_messages) finally: # Empty our list at the end of the day. self.queued_messages = [] else: self.log.debug("Woke up, but there were no messages.") self.queued_messages.append(msg) moksha.hub.reactor.reactor.callLater(self.delay, delayed_consume) def action(self, messages): self.log.debug("Acting on %s" % pprint.pformat(messages)) # This script and the UID/GID are found in our puppet repo. # The fedmsg user must be given passwordless sudo as the gen-acls user # for this to work correctly. command = '/usr/local/bin/genacls.sh' genacls_UID = 417 genacls_GID = 417 def change_subprocess_id(): os.setuid(genacls_UID) os.setgid(genacls_GID) return_code = subprocess.Popen( args=command, preexec_fn=change_subprocess_id) if return_code == 0: self.log.info("%r successful" % command) else: self.log.error("%r exited with %r" % (command, return_code))
# -*- coding: utf-8 -*- """ A fedmsg consumer that listens to pkgdb messages to update gitosis acls Authors: Janez Nemanič <janez.nemanic@gmail.com> Ralph Bean <rbean@redhat.com> """ import pprint import subprocess import os import fedmsg.consumers import moksha.hub.reactor class GenACLsConsumer(fedmsg.consumers.FedmsgConsumer): # Really, we want to use this specific topic to listen to. topic = 'org.fedoraproject.prod.pkgdb.acl.update' # But for testing, we'll just listen to all topics with this: #topic = '*' config_key = 'genacls.consumer.enabled' def __init__(self, hub): super(GenACLsConsumer, self).__init__(hub) # This is required. It is the number of seconds that we should wait # until we ultimately act on a pkgdb message. self.delay = self.hub.config['genacls.consumer.delay'] # We use this to manage our state self.queued_messages = [] def consume(self, msg): msg = msg['body'] self.log.info("Got a message %r" % msg['topic']) def delayed_consume(): if self.queued_messages: try: self.action(self.queued_messages) finally: # Empty our list at the end of the day. self.queued_messages = [] else: self.log.debug("Woke up, but there were no messages.") self.queued_messages.append(msg) moksha.hub.reactor.reactor.callLater(self.delay, delayed_consume) def action(self, messages): self.log.debug("Acting on %r" % pprint.pformat(messages)) command = '/usr/local/bin/genacls.sh' genacls_UID = 417 genacls_GID = 417 def change_subprocess_id(): os.setuid(user_UID) os.setgid(user_GID) return_code = subprocess.Popen( args=command, preexec_fn=change_subprocess_id) if return_code == 0: self.log.info("%r successful" % command) else: self.log.error("%r exited with %r" % (command, return_code))
Python
0.000004
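Two things about the handler above are worth isolating: subprocess.Popen returns a process object, not an exit status, so comparing it to 0 never succeeds; and privileges should be dropped group-first, since after setuid the process may no longer be allowed to setgid. A hedged POSIX-only sketch of the intended pattern; the command, UID, and GID are placeholders:

import os
import subprocess

COMMAND = ['/bin/true']   # placeholder for the real genacls script
TARGET_UID = 417
TARGET_GID = 417

def drop_privileges():
    os.setgid(TARGET_GID)   # group first, while we still have permission
    os.setuid(TARGET_UID)

# Only attempt the privilege drop when actually running as root.
preexec = drop_privileges if os.getuid() == 0 else None
proc = subprocess.Popen(args=COMMAND, preexec_fn=preexec)
return_code = proc.wait()   # the actual exit status
print('exit status:', return_code)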
d5e418e24990c2b7294f3fd6fd8ef94819ddfe66
Allow regular users to view any feedback issue that is public.
feedback/views.py
feedback/views.py
from django.contrib.auth.decorators import login_required from django.core.urlresolvers import reverse from django.views.generic import ListView, DetailView, CreateView, UpdateView from django.db.models import Q from .forms import IssueForm, IssueUpdateStatusForm, CommentForm from .models import Issue, Discussion class LoginRequiredMixin(object): @classmethod def as_view(cls, **initkwargs): view = super(LoginRequiredMixin, cls).as_view(**initkwargs) return login_required(view) class ProfileNameMixin(object): def get_context_data(self, **kwargs): context = super(ProfileNameMixin, self).get_context_data(**kwargs) context['profile_name'] = self.request.user.username return context class IssueList(LoginRequiredMixin, ProfileNameMixin, ListView): model = Issue def get_queryset(self): if self.request.user.is_superuser: return Issue.objects.all() else: return Issue.objects.filter(Q(user=self.request.user) | Q(public=True)) class IssueCreate(LoginRequiredMixin, ProfileNameMixin, CreateView): model = Issue form_class = IssueForm def form_valid(self, form): form.instance.user = self.request.user form.instance.status = Issue.STATUS_UNREVIEWED return super(IssueCreate, self).form_valid(form) class IssueDetail(LoginRequiredMixin, ProfileNameMixin, DetailView): model = Issue def get_context_data(self, **kwargs): context = super(IssueDetail, self).get_context_data(**kwargs) comment_form = CommentForm() comment_form.helper.form_action = reverse('feedback:comment_add', kwargs={'issue_pk': self.kwargs['pk']}) context['comment_form'] = comment_form if self.request.user.is_superuser: status_form = IssueUpdateStatusForm(instance=self.object) status_form.helper.form_action = reverse('feedback:issue_status_update', kwargs={'pk': self.kwargs['pk']}) context['status_form'] = status_form return context class IssueUpdateStatus(LoginRequiredMixin, ProfileNameMixin, UpdateView): model = Issue form_class = IssueUpdateStatusForm def get_success_url(self): print 'success hurr durr' return reverse('feedback:issue_detail', kwargs={'pk': self.kwargs['pk']}) class CommentCreate(LoginRequiredMixin, ProfileNameMixin, CreateView): model = Discussion form_class = CommentForm def form_valid(self, form): form.instance.user = self.request.user form.instance.feedback = Issue.objects.get(pk=self.kwargs['issue_pk']) return super(CommentCreate, self).form_valid(form) def get_success_url(self): return reverse('feedback:issue_detail', kwargs={'pk': self.kwargs['issue_pk']})
from django.contrib.auth.decorators import login_required from django.core.urlresolvers import reverse from django.views.generic import ListView, DetailView, CreateView, UpdateView from .forms import IssueForm, IssueUpdateStatusForm, CommentForm from .models import Issue, Discussion class LoginRequiredMixin(object): @classmethod def as_view(cls, **initkwargs): view = super(LoginRequiredMixin, cls).as_view(**initkwargs) return login_required(view) class ProfileNameMixin(object): def get_context_data(self, **kwargs): context = super(ProfileNameMixin, self).get_context_data(**kwargs) context['profile_name'] = self.request.user.username return context class IssueList(LoginRequiredMixin, ProfileNameMixin, ListView): model = Issue def get_queryset(self): if self.request.user.is_superuser: return Issue.objects.all() else: return Issue.objects.filter(user=self.request.user) class IssueCreate(LoginRequiredMixin, ProfileNameMixin, CreateView): model = Issue form_class = IssueForm def form_valid(self, form): form.instance.user = self.request.user form.instance.status = Issue.STATUS_UNREVIEWED return super(IssueCreate, self).form_valid(form) class IssueDetail(LoginRequiredMixin, ProfileNameMixin, DetailView): model = Issue def get_context_data(self, **kwargs): context = super(IssueDetail, self).get_context_data(**kwargs) comment_form = CommentForm() comment_form.helper.form_action = reverse('feedback:comment_add', kwargs={'issue_pk': self.kwargs['pk']}) context['comment_form'] = comment_form if self.request.user.is_superuser: status_form = IssueUpdateStatusForm(instance=self.object) status_form.helper.form_action = reverse('feedback:issue_status_update', kwargs={'pk': self.kwargs['pk']}) context['status_form'] = status_form return context class IssueUpdateStatus(LoginRequiredMixin, ProfileNameMixin, UpdateView): model = Issue form_class = IssueUpdateStatusForm def get_success_url(self): print 'success hurr durr' return reverse('feedback:issue_detail', kwargs={'pk': self.kwargs['pk']}) class CommentCreate(LoginRequiredMixin, ProfileNameMixin, CreateView): model = Discussion form_class = CommentForm def form_valid(self, form): form.instance.user = self.request.user form.instance.feedback = Issue.objects.get(pk=self.kwargs['issue_pk']) return super(CommentCreate, self).form_valid(form) def get_success_url(self): return reverse('feedback:issue_detail', kwargs={'pk': self.kwargs['issue_pk']})
Python
0
d6342967598ae7fa822592b42e0f85de2beaf916
Use constants
tests/twisted/test-self-alias.py
tests/twisted/test-self-alias.py
# # Test alias setting for the self handle # from sofiatest import exec_test import constants as cs import dbus def test(q, bus, conn, sip_proxy): conn.Connect() q.expect('dbus-signal', signal='StatusChanged', args=[0, 1]) self_handle = conn.GetSelfHandle() default_alias = conn.Aliasing.GetAliases([self_handle])[self_handle] conn.Aliasing.SetAliases({self_handle: 'foo@bar.baz'}) event = q.expect('dbus-signal', signal='AliasesChanged', args=[[(self_handle, u'foo@bar.baz')]]) handle = conn.RequestHandles(1, ['sip:user@somewhere.com'])[0] assert cs.CONN_IFACE_ALIASING in \ conn.Properties.Get(cs.CONN_IFACE_CONTACTS, "ContactAttributeInterfaces") attrs = conn.Contacts.GetContactAttributes([self_handle, handle], [cs.CONN_IFACE_ALIASING], False) assert cs.CONN_IFACE_ALIASING + "/alias" in attrs[self_handle] assert attrs[self_handle][cs.CONN_IFACE_ALIASING + "/alias"] == u'foo@bar.baz' conn.RequestChannel(cs.CHANNEL_TYPE_TEXT, 1, handle, True) event = q.expect('dbus-signal', signal='NewChannel') text_iface = dbus.Interface(bus.get_object(conn.bus_name, event.args[0]), cs.CHANNEL_TYPE_TEXT) text_iface.Send(0, 'Check the display name in From') event = q.expect('sip-message') self_uri = conn.InspectHandles(1, [self_handle])[0] from_header = event.sip_message.headers['from'][0] assert from_header.startswith('"foo@bar.baz" <' + self_uri + '>'), from_header # Test setting of the default alias conn.Aliasing.SetAliases({self_handle: default_alias}) text_iface.Send(0, 'The display name should be missing in From') event = q.expect('sip-message') from_header = event.sip_message.headers['from'][0] assert from_header.startswith('<' + self_uri + '>'), from_header # Test if escaping and whitespace normalization works conn.Aliasing.SetAliases({self_handle: 'foo " bar \\\r\n baz\t'}) text_iface.Send(0, 'Check display name escaping in From') event = q.expect('sip-message') from_header = event.sip_message.headers['from'][0] assert from_header.startswith(r'"foo \" bar \\ baz " <' + self_uri + '>'), from_header if __name__ == '__main__': exec_test(test)
# # Test alias setting for the self handle # from sofiatest import exec_test from servicetest import tp_name_prefix import dbus TEXT_TYPE = tp_name_prefix + '.Channel.Type.Text' ALIASING_INTERFACE = tp_name_prefix + '.Connection.Interface.Aliasing' CONTACTS_INTERFACE = tp_name_prefix + '.Connection.Interface.Contacts' def test(q, bus, conn, sip_proxy): conn.Connect() q.expect('dbus-signal', signal='StatusChanged', args=[0, 1]) self_handle = conn.GetSelfHandle() default_alias = conn.Aliasing.GetAliases([self_handle])[self_handle] conn.Aliasing.SetAliases({self_handle: 'foo@bar.baz'}) event = q.expect('dbus-signal', signal='AliasesChanged', args=[[(self_handle, u'foo@bar.baz')]]) handle = conn.RequestHandles(1, ['sip:user@somewhere.com'])[0] assert ALIASING_INTERFACE in \ conn.Properties.Get(CONTACTS_INTERFACE, "ContactAttributeInterfaces") attrs = conn.Contacts.GetContactAttributes([self_handle, handle], [ALIASING_INTERFACE], False) assert ALIASING_INTERFACE + "/alias" in attrs[self_handle] assert attrs[self_handle][ALIASING_INTERFACE + "/alias"] == u'foo@bar.baz' conn.RequestChannel(TEXT_TYPE, 1, handle, True) event = q.expect('dbus-signal', signal='NewChannel') text_iface = dbus.Interface(bus.get_object(conn.bus_name, event.args[0]), TEXT_TYPE) text_iface.Send(0, 'Check the display name in From') event = q.expect('sip-message') self_uri = conn.InspectHandles(1, [self_handle])[0] from_header = event.sip_message.headers['from'][0] assert from_header.startswith('"foo@bar.baz" <' + self_uri + '>'), from_header # Test setting of the default alias conn.Aliasing.SetAliases({self_handle: default_alias}) text_iface.Send(0, 'The display name should be missing in From') event = q.expect('sip-message') from_header = event.sip_message.headers['from'][0] assert from_header.startswith('<' + self_uri + '>'), from_header # Test if escaping and whitespace normalization works conn.Aliasing.SetAliases({self_handle: 'foo " bar \\\r\n baz\t'}) text_iface.Send(0, 'Check display name escaping in From') event = q.expect('sip-message') from_header = event.sip_message.headers['from'][0] assert from_header.startswith(r'"foo \" bar \\ baz " <' + self_uri + '>'), from_header if __name__ == '__main__': exec_test(test)
Python
0.00001
16abb3720d9c41b130ea83a4b678ec99521567eb
Fix Grid unit test
tests/unit/analysis/test_grid.py
tests/unit/analysis/test_grid.py
# """Unit tests for cartoframes.analysis.grid""" import os import pytest import numpy as np from pandas import read_csv from geopandas import GeoDataFrame from shapely.geometry import box, shape from cartoframes.utils import set_geometry from cartoframes.analysis.grid import QuadGrid from geopandas.testing import assert_geodataframe_equal # DATA FRAME SRC BBOX pol_1 = box(1, 1, 2, 2) pol_2 = box(3, 3, 4, 4) GDF_BOX = GeoDataFrame({'id': [1, 2], 'geom': [pol_1, pol_2]}, columns=['id', 'geom'], geometry='geom') pol_geojson = { 'type': 'Polygon', 'coordinates': [ [ [ -5.899658203125, 38.436379603 ], [ -6.690673828125, 37.67512527892127 ], [ -6.15234375, 37.43997405227057 ], [ -5.8447265625, 37.70120736474139 ], [ -6.13037109375, 37.82280243352756 ], [ -5.877685546874999, 38.02213147353745 ], [ -6.009521484375, 38.12591462924157 ], [ -5.5810546875, 38.1777509666256 ], [ -5.899658203125, 38.436379603 ] ] ] } GDF_IRREGULAR = GeoDataFrame({'id': [1], 'geom': [shape(pol_geojson)]}, columns=['id', 'geom'], geometry='geom') BASE_FOLDER = os.path.dirname(os.path.abspath(__file__)) class TestGrid(object): def _load_test_gdf(self, fname): fname = os.path.join(BASE_FOLDER, fname) df = read_csv(fname, dtype={'id': np.int64, 'geom': object, 'quadkey': object}) gdf = GeoDataFrame(df, crs='epsg:4326') set_geometry(gdf, 'geom', inplace=True) return gdf @pytest.mark.skip() def test_quadgrid_polyfill_box(self, mocker): """cartoframes.analysis.grid.QuadGrid.polyfill""" gdf = QuadGrid().polyfill(GDF_BOX, 12) assert isinstance(gdf, GeoDataFrame) # Check both dataframes are equals gdf_test = self._load_test_gdf('grid_quadkey_bbox.csv') assert_geodataframe_equal(gdf, gdf_test, check_less_precise=True) @pytest.mark.skip() def test_quadgrid_polyfill_pol(self, mocker): """cartoframes.analysis.grid.QuadGrid.polyfill""" gdf = QuadGrid().polyfill(GDF_IRREGULAR, 12) assert isinstance(gdf, GeoDataFrame) # Check both dataframes are equals gdf_test = self._load_test_gdf('grid_quadkey_pol.csv') assert_geodataframe_equal(gdf, gdf_test, check_less_precise=True)
# """Unit tests for cartoframes.analysis.grid""" import os import pytest import numpy as np from pandas import read_csv from geopandas import GeoDataFrame from shapely.geometry import box, shape from cartoframes.utils import decode_geometry from cartoframes.analysis.grid import QuadGrid from geopandas.testing import assert_geodataframe_equal # DATA FRAME SRC BBOX pol_1 = box(1, 1, 2, 2) pol_2 = box(3, 3, 4, 4) GDF_BOX = GeoDataFrame({'id': [1, 2], 'geom': [pol_1, pol_2]}, columns=['id', 'geom'], geometry='geom') pol_geojson = { 'type': 'Polygon', 'coordinates': [ [ [ -5.899658203125, 38.436379603 ], [ -6.690673828125, 37.67512527892127 ], [ -6.15234375, 37.43997405227057 ], [ -5.8447265625, 37.70120736474139 ], [ -6.13037109375, 37.82280243352756 ], [ -5.877685546874999, 38.02213147353745 ], [ -6.009521484375, 38.12591462924157 ], [ -5.5810546875, 38.1777509666256 ], [ -5.899658203125, 38.436379603 ] ] ] } GDF_IRREGULAR = GeoDataFrame({'id': [1], 'geom': [shape(pol_geojson)]}, columns=['id', 'geom'], geometry='geom') BASE_FOLDER = os.path.dirname(os.path.abspath(__file__)) class TestGrid(object): def _load_test_gdf(self, fname): fname = os.path.join(BASE_FOLDER, fname) df = read_csv(fname, dtype={'id': np.int64, 'geom': object, 'quadkey': object}) gdf_test = GeoDataFrame(df, geometry=decode_geometry(df['geom']), crs='epsg:4326') return gdf_test @pytest.mark.skip() def test_quadgrid_polyfill_box(self, mocker): """cartoframes.analysis.grid.QuadGrid.polyfill""" gdf = QuadGrid().polyfill(GDF_BOX, 12) assert isinstance(gdf, GeoDataFrame) # Check both dataframes are equals gdf_test = self._load_test_gdf('grid_quadkey_bbox.csv') assert_geodataframe_equal(gdf, gdf_test, check_less_precise=True) @pytest.mark.skip() def test_quadgrid_polyfill_pol(self, mocker): """cartoframes.analysis.grid.QuadGrid.polyfill""" gdf = QuadGrid().polyfill(GDF_IRREGULAR, 12) assert isinstance(gdf, GeoDataFrame) # Check both dataframes are equals gdf_test = self._load_test_gdf('grid_quadkey_pol.csv') assert_geodataframe_equal(gdf, gdf_test, check_less_precise=True)
Python
0
ae896f3c8eaa7fa9863a862f0679065348a7b971
Remove obsolete argument from workflow CLI
src/tmlib/tmaps/args.py
src/tmlib/tmaps/args.py
from ..args import Args class TmapsSubmitArgs(Args): def __init__(self, **kwargs): ''' Initialize an instance of class TmapsSubmitArgs. Parameters ---------- **kwargs: dict arguments as key-value pairs ''' self.stage = self._stage_params['default'] self.step = self._step_params['default'] self.backup = self._backup_params['default'] super(TmapsSubmitArgs, self).__init__(**kwargs) @property def _required_args(self): return set() @property def _persistent_attrs(self): return {'stage', 'step', 'backup'} @property def stage(self): ''' Returns ------- str name of the stage from where workflow should be started (default: ``None``) ''' return self._stage @stage.setter def stage(self, value): if not(isinstance(value, self._stage_params['type']) or value is None): raise TypeError('Attribute "stage" must have type %s' % self._stage_params['type']) self._stage = value @property def _stage_params(self): return { 'type': str, 'default': None, 'help': ''' name of the stage from where workflow should be started ''' } @property def step(self): ''' Returns ------- str name of the step from where workflow should be started (default: ``None``) ''' return self._step @step.setter def step(self, value): if not(isinstance(value, self._step_params['type']) or value is None): raise TypeError('Attribute "step" must have type %s' % self._step_params['type']) self._step = value @property def _step_params(self): return { 'type': str, 'default': None, 'help': ''' name of the step from where workflow should be started ''' }
from ..args import Args class TmapsSubmitArgs(Args): def __init__(self, **kwargs): ''' Initialize an instance of class TmapsSubmitArgs. Parameters ---------- **kwargs: dict arguments as key-value pairs ''' self.stage = self._stage_params['default'] self.step = self._step_params['default'] self.backup = self._backup_params['default'] super(TmapsSubmitArgs, self).__init__(**kwargs) @property def _required_args(self): return set() @property def _persistent_attrs(self): return {'stage', 'step', 'backup'} @property def stage(self): ''' Returns ------- str name of the stage from where workflow should be started (default: ``None``) ''' return self._stage @stage.setter def stage(self, value): if not(isinstance(value, self._stage_params['type']) or value is None): raise TypeError('Attribute "stage" must have type %s' % self._stage_params['type']) self._stage = value @property def _stage_params(self): return { 'type': str, 'default': None, 'help': ''' name of the stage from where workflow should be started ''' } @property def step(self): ''' Returns ------- str name of the step from where workflow should be started (default: ``None``) ''' return self._step @step.setter def step(self, value): if not(isinstance(value, self._step_params['type']) or value is None): raise TypeError('Attribute "step" must have type %s' % self._step_params['type']) self._step = value @property def _step_params(self): return { 'type': str, 'default': None, 'help': ''' name of the step from where workflow should be started ''' } @property def backup(self): ''' Returns ------- bool indicator that an existing session should be overwritten (default: ``False``) ''' return self._backup @backup.setter def backup(self, value): if not isinstance(value, self._backup_params['type']): raise TypeError('Attribute "backup" must have type %s.' % self._backup_params['type']) self._backup = value @property def _backup_params(self): return { 'type': bool, 'default': False, 'help': ''' backup an existing session ''' }
Python
0.000003
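The Args subclass above repeats a property/setter/params triple per attribute to get typed, self-documenting workflow arguments. A compact standalone sketch of that validation pattern, reduced to a single attribute:

class Args(object):
    def __init__(self, **kwargs):
        self._stage = None
        for key, value in kwargs.items():
            setattr(self, key, value)   # routes through the setters below

    @property
    def stage(self):
        return self._stage

    @stage.setter
    def stage(self, value):
        if not (isinstance(value, str) or value is None):
            raise TypeError('Attribute "stage" must have type %s' % str)
        self._stage = value

args = Args(stage='upload')
assert args.stage == 'upload'
try:
    args.stage = 42
except TypeError as err:
    print(err)   # the setter rejects non-string values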
82921fb53db2b6e7fdd731f23addd413a6f87673
Add function to sign SSH key
misc/sshca.py
misc/sshca.py
#!/usr/bin/python import confluent.collective.manager as collective import eventlet.green.subprocess as subprocess import glob import os import shutil import tempfile def normalize_uid(): curruid = os.getuid() neededuid = os.stat('/etc/confluent').st_uid if curruid != neededuid: os.setuid(neededuid) if os.getuid() != neededuid: raise Exception('Need to run as root or owner of /etc/confluent') def initialize_ca(): normalize_uid() try: os.makedirs('/etc/confluent/ssh', mode=0o700) except OSError as e: if e.errno != 17: raise caname = '{0} SSH CA'.format(collective.get_myname()) subprocess.check_call(['ssh-keygen', '-C', caname, '-t', 'ed25519', '-f', '/etc/confluent/ssh/ca', '-N', '']) try: os.makedirs('/var/lib/confluent/ssh', mode=0o755) except OSError as e: if e.errno != 17: raise currknownhosts = [] try: with open('/var/lib/confluent/ssh/ssh_known_hosts', 'r') as skh: for ent in skh: descr = ent.split(' ', 4)[-1].strip() if descr != caname: currknownhosts.append(ent) except OSError as e: if e.errno != 2: raise with open('/etc/confluent/ssh/ca.pub', 'r') as capub: newent = '@cert-authority * ' + capub.read() currknownhosts.append(newent) with open('/var/lib/confluent/ssh/ssh_known_hosts', 'w') as skh: for ckh in currknownhosts: skh.write(ckh) def sign_host_key(pubkey, nodename): tmpdir = tempfile.mkdtemp() try: pkeyname = os.path.join(tmpdir, 'hostkey.pub') with open(pkeyname, 'w') as pubfile: pubfile.write(pubkey) subprocess.check_call( ['ssh-keygen', '-s', '/etc/confluent/ssh/ca', '-I', nodename, '-n', nodename, '-h', pkeyname]) certname = pkeyname.replace('.pub', '-cert.pub') with open(certname) as cert: return cert.read() finally: shutil.rmtree(tmpdir) def initialize_root_key(): authorized = [] for currkey in glob.glob('/root/.ssh/*.pub'): authorized.append(open(currkey).read()) if not authorized: subprocess.check_call(['ssh-keygen', '-t', 'ed25519', '-f', '/root/.ssh/id_ed25519', '-N', '']) for currkey in glob.glob('/root/.ssh/*.pub'): authorized.append(open(currkey).read()) try: os.makedirs('/var/lib/confluent/ssh', mode=0o755) neededuid = os.stat('/etc/confluent').st_uid os.chown('/var/lib/confluent/ssh', neededuid, -1) except OSError as e: if e.errno != 17: raise for auth in authorized: if 'PRIVATE' in auth: continue currcomment = auth.split(' ', 2)[-1].strip() curralgo = auth.split(' ', 1)[0] authed = [] try: with open('/var/lib/confluent/ssh/authorized_keys', 'r') as ak: for keyline in ak: comment = keyline.split(' ', 2)[-1].strip() algo = keyline.split(' ', 1)[0] if currcomment != comment or algo != curralgo: authed.append(keyline) except OSError as e: if e.errno != 2: raise authed.append(auth) with open('/var/lib/confluent/ssh/authorized_keys', 'w') as ak: for auth in authed: ak.write(auth) def ca_exists(): return os.path.exists('/etc/confluent/ssh/ca') if __name__ == '__main__': initialize_root_key() if not ca_exists(): initialize_ca() print(repr(sign_host_key(open('/etc/ssh/ssh_host_ed25519_key.pub').read(), collective.get_myname())))
#!/usr/bin/python import confluent.collective.manager as collective import eventlet.green.subprocess as subprocess import glob import os def normalize_uid(): curruid = os.getuid() neededuid = os.stat('/etc/confluent').st_uid if curruid != neededuid: os.setuid(neededuid) if os.getuid() != neededuid: raise Exception('Need to run as root or owner of /etc/confluent') def initialize_ca(): normalize_uid() try: os.makedirs('/etc/confluent/ssh', mode=0o700) except OSError as e: if e.errno != 17: raise caname = '{0} SSH CA'.format(collective.get_myname()) subprocess.check_call(['ssh-keygen', '-C', caname, '-t', 'ed25519', '-f', '/etc/confluent/ssh/ca', '-N', '']) try: os.makedirs('/var/lib/confluent/ssh', mode=0o755) except OSError as e: if e.errno != 17: raise currknownhosts = [] try: with open('/var/lib/confluent/ssh/ssh_known_hosts', 'r') as skh: for ent in skh: descr = ent.split(' ', 4)[-1].strip() if descr != caname: currknownhosts.append(ent) except OSError as e: if e.errno != 2: raise with open('/etc/confluent/ssh/ca.pub', 'r') as capub: newent = '@cert-authority * ' + capub.read() currknownhosts.append(newent) with open('/var/lib/confluent/ssh/ssh_known_hosts', 'w') as skh: for ckh in currknownhosts: skh.write(ckh) def initialize_root_key(): authorized = [] for currkey in glob.glob('/root/.ssh/*.pub'): authorized.append(open(currkey).read()) if not authorized: subprocess.check_call(['ssh-keygen', '-t', 'ed25519', '-f', '/root/.ssh/id_ed25519', '-N', '']) for currkey in glob.glob('/root/.ssh/*.pub'): authorized.append(open(currkey).read()) try: os.makedirs('/var/lib/confluent/ssh', mode=0o755) neededuid = os.stat('/etc/confluent').st_uid os.chown('/var/lib/confluent/ssh', neededuid, -1) except OSError as e: if e.errno != 17: raise for auth in authorized: if 'PRIVATE' in auth: continue currcomment = auth.split(' ', 2)[-1].strip() curralgo = auth.split(' ', 1)[0] authed = [] try: with open('/var/lib/confluent/ssh/authorized_keys', 'r') as ak: for keyline in ak: comment = keyline.split(' ', 2)[-1].strip() algo = keyline.split(' ', 1)[0] if currcomment != comment or algo != curralgo: authed.append(keyline) except OSError as e: if e.errno != 2: raise authed.append(auth) with open('/var/lib/confluent/ssh/authorized_keys', 'w') as ak: for auth in authed: ak.write(auth) def ca_exists(): return os.path.exists('/etc/confluent/ssh/ca') if __name__ == '__main__': initialize_root_key() if not ca_exists(): initialize_ca()
Python
0
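The new sign_host_key in this record writes the public key into a throwaway directory, runs ssh-keygen -s against it, and removes the directory in a finally block. A minimal sketch of that tempdir lifecycle; the shell step is simulated with a file write, since ssh-keygen and a CA key may not be present on the machine running this:

import os
import shutil
import tempfile

def sign_host_key(pubkey, nodename):
    tmpdir = tempfile.mkdtemp()
    try:
        pkeyname = os.path.join(tmpdir, 'hostkey.pub')
        with open(pkeyname, 'w') as pubfile:
            pubfile.write(pubkey)
        # Real code would shell out here:
        # subprocess.check_call(['ssh-keygen', '-s', ca_key, '-I', nodename, ...])
        certname = pkeyname.replace('.pub', '-cert.pub')
        with open(certname, 'w') as cert:
            cert.write('signed %s for %s\n' % (pubkey.strip(), nodename))
        with open(certname) as cert:
            return cert.read()
    finally:
        shutil.rmtree(tmpdir)   # always clean up, even if signing fails

print(sign_host_key('ssh-ed25519 AAAA... host', 'node1'))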
0a57bcc2faca88d0527bb1f14dae2b0b9b5168f2
Bump filer version to 0.9pbs.54
filer/__init__.py
filer/__init__.py
#-*- coding: utf-8 -*- # version string following pep-0396 and pep-0386 __version__ = '0.9pbs.54' # pragma: nocover
#-*- coding: utf-8 -*- # version string following pep-0396 and pep-0386 __version__ = '0.9pbs.53' # pragma: nocover
Python
0
006cbb88f2a06cd1411f88126ccf4a43121aa858
Update app startup process with new servicemanager and websocket communication.
app/main.py
app/main.py
""" The main module for HomePiServer. Initializes SocketIO, ServiceManager, NavigationChannel, View Manager. """ import signal from threading import Thread from gevent import monkey from flask import Flask from flask_socketio import SocketIO from .controllers import CONTROLLERS from .core.logger import configure_logging from .core.websocket_manager import WebSocketManager from .core.servicemanager import ServiceManager from .services import SERVICES monkey.patch_all() class HomePiServer(object): """ Encapsulates the entire server. """ def __init__(self, config): params = { "template_folder": "../templates", "static_folder": "../static" } self.flask_app = Flask(__name__, **params) self.flask_app.config.from_object(config) self.register_blueprints(self.flask_app, CONTROLLERS) self.app = SocketIO(self.flask_app) self.socket_manager = WebSocketManager(self.app) self.service_manager = ServiceManager(SERVICES, self.socket_manager) configure_logging(self.flask_app) self.start_services() def start_services(self): """Starts self.service_manager.start() on a new thread.""" self.service_thread = Thread(target=self.service_manager.start).start() @staticmethod def register_blueprints(app, params): """ Registers all the blueprints in controllers list. Args: app: Flask app to register the blueprint with. controllers: List like: [(prefix, blueprint), ...] """ for prefix, controller in params: app.register_blueprint(controller, url_prefix=prefix) def shutdown(self): pass def setup_signals(app): """ Listen for SIGTERM and SIGINIT and calls app.shutdown()""" def make_new_handler(prev_handler_func): def new_handler(var1, var2): app.shutdown() if prev_handler_func: prev_handler_func(var1, var2) return new_handler for sig in (signal.SIGTERM, signal.SIGINT): prev_handler = signal.getsignal(sig) signal.signal(sig, make_new_handler(prev_handler)) def create_app(config=None): """ Returns a new instance of HomePiServer.""" if config is None: import app.config config = app.config app = HomePiServer(config) setup_signals(app) return app.flask_app, app.app
""" The main module for HomePiServer. Initializes SocketIO, ServiceManager, NavigationChannel, View Manager. """ import signal from threading import Thread from gevent import monkey from flask import Flask from flask_socketio import SocketIO from .controllers import CONTROLLERS from .core.socketchannel import NavigationChannel from .core.logger import configure_logging from .services import ServiceManager, SERVICES from .views import ViewManager monkey.patch_all() class HomePiServer(object): """ Encapsulates the entire server. """ def __init__(self, config): params = { "template_folder": "../templates", "static_folder": "../static" } self.flask_app = Flask(__name__, **params) self.flask_app.config.from_object(config) self.register_blueprints(self.flask_app, CONTROLLERS) self.app = SocketIO(self.flask_app) self.nav_channel = NavigationChannel("/navigation", self.app) self.app.on_namespace(self.nav_channel) self.view_manager = ViewManager(self.nav_channel) self.nav_channel.display = self.view_manager self.service_manager = ServiceManager(SERVICES, self.view_manager) configure_logging(self.flask_app) self.start_services() def start_services(self): """Starts self.service_manager.start() on a new thread.""" self.service_thread = Thread(target=self.service_manager.start).start() @staticmethod def register_blueprints(app, params): """ Registers all the blueprints in controllers list. Args: app: Flask app to register the blueprint with. controllers: List like: [(prefix, blueprint), ...] """ for prefix, controller in params: app.register_blueprint(controller, url_prefix=prefix) def shutdown(self): pass def setup_signals(app): """ Listen for SIGTERM and SIGINIT and calls app.shutdown()""" def make_new_handler(prev_handler_func): def new_handler(var1, var2): app.shutdown() if prev_handler_func: prev_handler_func(var1, var2) return new_handler for sig in (signal.SIGTERM, signal.SIGINT): prev_handler = signal.getsignal(sig) signal.signal(sig, make_new_handler(prev_handler)) def create_app(config=None): """ Returns a new instance of HomePiServer.""" if config is None: import app.config config = app.config app = HomePiServer(config) setup_signals(app) return app.flask_app, app.app
Python
0
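setup_signals above wraps any pre-existing SIGTERM/SIGINT handler in a closure so both the app shutdown and the old handler run. A standalone sketch of that handler-chaining idea; it adds a callable() guard the record omits, because signal.getsignal can return SIG_DFL or SIG_IGN (plain constants), which would raise if called directly:

import signal

def setup_signals(shutdown):
    def make_new_handler(prev_handler):
        def new_handler(signum, frame):
            shutdown()
            if callable(prev_handler):   # skip SIG_DFL / SIG_IGN constants
                prev_handler(signum, frame)
        return new_handler

    for sig in (signal.SIGTERM, signal.SIGINT):
        prev = signal.getsignal(sig)
        signal.signal(sig, make_new_handler(prev))

setup_signals(lambda: print('shutting down'))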
a0a92e237ca91dc8f0318a27dfeec9b9c8e95de5
Add utility to guess livelock file for an owner
lib/utils/livelock.py
lib/utils/livelock.py
# # # Copyright (C) 2014 Google Inc. # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, but # WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA # 02110-1301, USA. """Lockfiles to prove liveliness When requesting resources, like locks, from wconfd, requesters have to provide the name of a file they own an exclusive lock on, to prove that they are still alive. Provide methods to obtain such a file. """ import fcntl import os import struct import time from ganeti.utils.algo import NiceSort from ganeti import pathutils class LiveLock(object): """Utility for a lockfile needed to request resources from WconfD. """ def __init__(self, name=None): if name is None: name = "pid%d_" % os.getpid() # to avoid reusing existing lock files, extend name # by the current time name = "%s_%d" % (name, int(time.time())) fname = os.path.join(pathutils.LIVELOCK_DIR, name) self.lockfile = open(fname, 'w') fcntl.fcntl(self.lockfile, fcntl.F_SETLKW, struct.pack('hhllhh', fcntl.F_WRLCK, 0, 0, 0, 0, 0)) def close(self): """Close the lockfile and clean it up. """ self.lockfile.close() os.remove(self.lockfile.name) def GuessLockfileFor(name): """For a given name, take the latest file matching. @return: the file with the latest name matching the given prefix in LIVELOCK_DIR, or the plain name, if none exists. """ lockfiles = filter(lambda n: n.startswith(name), os.listdir(pathutils.LIVELOCK_DIR)) if len(lockfiles) > 0: lockfile = NiceSort(lockfiles)[-1] else: lockfile = name return os.path.join(pathutils.LIVELOCK_DIR, lockfile)
# # # Copyright (C) 2014 Google Inc. # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, but # WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA # 02110-1301, USA. """Lockfiles to prove liveliness When requesting resources, like locks, from wconfd, requesters have to provide the name of a file they own an exclusive lock on, to prove that they are still alive. Provide methods to obtain such a file. """ import fcntl import os import struct import time from ganeti import pathutils class LiveLock(object): """Utility for a lockfile needed to request resources from WconfD. """ def __init__(self, name=None): if name is None: name = "pid%d_" % os.getpid() # to avoid reusing existing lock files, extend name # by the current time name = "%s_%d" % (name, int(time.time())) fname = os.path.join(pathutils.LIVELOCK_DIR, name) self.lockfile = open(fname, 'w') fcntl.fcntl(self.lockfile, fcntl.F_SETLKW, struct.pack('hhllhh', fcntl.F_WRLCK, 0, 0, 0, 0, 0)) def close(self): """Close the lockfile and clean it up. """ self.lockfile.close() os.remove(self.lockfile.name)
Python
0.000116
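The LiveLock above proves liveness by holding an exclusive fcntl write lock on a per-process file. A minimal POSIX-only sketch of the same idea using the higher-level fcntl.lockf wrapper instead of a hand-packed struct flock; the path is a temp-dir stand-in for LIVELOCK_DIR:

import fcntl
import os
import tempfile

path = os.path.join(tempfile.gettempdir(), 'livelock_pid%d' % os.getpid())
lockfile = open(path, 'w')
fcntl.lockf(lockfile, fcntl.LOCK_EX)   # blocks until we own the lock
print('holding exclusive lock on', path)
lockfile.close()                       # closing the fd releases the lock
os.remove(path)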
f431f2408ca1e1a38479a9ac1224bd608df1c0d4
Test build.source rather than legacy attributes
changes/listeners/green_build.py
changes/listeners/green_build.py
import logging import requests from datetime import datetime from flask import current_app from changes.config import db from changes.constants import Result from changes.db.utils import create_or_update from changes.models import ( Build, Event, EventType, ProjectOption, RepositoryBackend ) from changes.utils.http import build_uri from changes.utils.locking import lock logger = logging.getLogger('green_build') def get_options(project_id): return dict( db.session.query( ProjectOption.name, ProjectOption.value ).filter( ProjectOption.project_id == project_id, ProjectOption.name.in_([ 'green-build.notify', 'green-build.project', ]) ) ) @lock def build_finished_handler(build_id, **kwargs): build = Build.query.get(build_id) if build is None: return if build.result != Result.passed: return url = current_app.config.get('GREEN_BUILD_URL') if not url: logger.info('GREEN_BUILD_URL not set') return auth = current_app.config['GREEN_BUILD_AUTH'] if not auth: logger.info('GREEN_BUILD_AUTH not set') return # we only want to identify stable revisions if build.source.patch_id or not build.source.revision_sha: logger.debug('Ignoring build due to non-commit: %s', build.id) return options = get_options(build.project_id) if options.get('green-build.notify', '1') != '1': logger.info('green-build.notify disabled for project: %s', build.project_id) return if build.repository.backend != RepositoryBackend.hg: logger.info('Repository backend is not supported: %s', build.repository.id) return vcs = build.repository.get_vcs() if vcs is None: logger.info('Repository has no VCS set: %s', build.repository.id) return # ensure we have the latest changes vcs.update() release_id = vcs.run(['log', '-r %s' % (build.revision_sha,), '--limit=1', '--template={rev}:{node|short}']) project = options.get('green-build.project') or build.project.slug try: requests.post(url, auth=auth, data={ 'project': project, 'id': release_id, 'build_url': build_uri('/projects/{0}/builds/{1}/'.format( build.project.slug, build.id.hex)), 'build_server': 'changes', }) except Exception: logger.exception('Failed to report green build') status = 'fail' else: status = 'success' create_or_update(Event, where={ 'type': EventType.green_build, 'item_id': build.id, }, values={ 'data': { 'status': status, }, 'date_modified': datetime.utcnow(), })
import logging
import requests

from datetime import datetime
from flask import current_app

from changes.config import db
from changes.constants import Result
from changes.db.utils import create_or_update
from changes.models import (
    Build, Event, EventType, ProjectOption, RepositoryBackend
)
from changes.utils.http import build_uri
from changes.utils.locking import lock


logger = logging.getLogger('green_build')


def get_options(project_id):
    return dict(
        db.session.query(
            ProjectOption.name, ProjectOption.value
        ).filter(
            ProjectOption.project_id == project_id,
            ProjectOption.name.in_([
                'green-build.notify',
                'green-build.project',
            ])
        )
    )


@lock
def build_finished_handler(build_id, **kwargs):
    build = Build.query.get(build_id)
    if build is None:
        return

    if build.result != Result.passed:
        return

    url = current_app.config.get('GREEN_BUILD_URL')
    if not url:
        logger.info('GREEN_BUILD_URL not set')
        return

    auth = current_app.config['GREEN_BUILD_AUTH']
    if not auth:
        logger.info('GREEN_BUILD_AUTH not set')
        return

    # we only want to identify stable revisions
    if build.patch_id or not build.revision_sha:
        logger.debug('Ignoring build due to non-commit: %s', build.id)
        return

    options = get_options(build.project_id)

    if options.get('green-build.notify', '1') != '1':
        logger.info('green-build.notify disabled for project: %s', build.project_id)
        return

    if build.repository.backend != RepositoryBackend.hg:
        logger.info('Repository backend is not supported: %s', build.repository.id)
        return

    vcs = build.repository.get_vcs()
    if vcs is None:
        logger.info('Repository has no VCS set: %s', build.repository.id)
        return

    # ensure we have the latest changes
    vcs.update()

    release_id = vcs.run(['log', '-r %s' % (build.revision_sha,), '--limit=1',
                          '--template={rev}:{node|short}'])

    project = options.get('green-build.project') or build.project.slug

    try:
        requests.post(url, auth=auth, data={
            'project': project,
            'id': release_id,
            'build_url': build_uri('/projects/{0}/builds/{1}/'.format(
                build.project.slug, build.id.hex)),
            'build_server': 'changes',
        })
    except Exception:
        logger.exception('Failed to report green build')
        status = 'fail'
    else:
        status = 'success'

    create_or_update(Event, where={
        'type': EventType.green_build,
        'item_id': build.id,
    }, values={
        'data': {
            'status': status,
        },
        'date_modified': datetime.utcnow(),
    })
Python
0
a281fd3c49b86012fd370ae82df19525af89ff1c
Disable swift test
parsl/tests/test_swift.py
parsl/tests/test_swift.py
import pytest

import parsl
from parsl import *

parsl.set_stream_logger()

from parsl.executors.swift_t import *


def foo(x, y):
    return x * y


def slow_foo(x, y):
    import time
    time.sleep(x)
    return x * y


def bad_foo(x, y):
    time.sleep(x)
    return x * y


@pytest.mark.skip('fails intermittently')
@pytest.mark.local
def test_simple():
    print("Start")
    tex = TurbineExecutor()
    x = tex.submit(foo, 5, 10)
    print("Got: ", x)
    print("X result: ", x.result())
    assert x.result() == 50, "X != 50"
    print("done")


@pytest.mark.local
@pytest.mark.skip('fails intermittently')
def test_slow():
    futs = {}
    tex = TurbineExecutor()
    for i in range(0, 3):
        futs[i] = tex.submit(slow_foo, 1, 2)

    total = sum([futs[i].result(timeout=10) for i in futs])
    assert total == 6, "expected 6, got {}".format(total)


@pytest.mark.local
@pytest.mark.skip('fails intermittently')
def test_except():
    with pytest.raises(NameError):
        tex = TurbineExecutor()
        x = tex.submit(bad_foo, 5, 10)
        x.result()


if __name__ == "__main__":
    # test_simple()
    # test_slow()
    test_except()
    print("Done")
import pytest

import parsl
from parsl import *

parsl.set_stream_logger()

from parsl.executors.swift_t import *


def foo(x, y):
    return x * y


def slow_foo(x, y):
    import time
    time.sleep(x)
    return x * y


def bad_foo(x, y):
    time.sleep(x)
    return x * y


@pytest.mark.local
def test_simple():
    print("Start")
    tex = TurbineExecutor()
    x = tex.submit(foo, 5, 10)
    print("Got: ", x)
    print("X result: ", x.result())
    assert x.result() == 50, "X != 50"
    print("done")


@pytest.mark.local
def test_slow():
    futs = {}
    tex = TurbineExecutor()
    for i in range(0, 3):
        futs[i] = tex.submit(slow_foo, 1, 2)

    total = sum([futs[i].result(timeout=10) for i in futs])
    assert total == 6, "expected 6, got {}".format(total)


@pytest.mark.local
def test_except():
    with pytest.raises(NameError):
        tex = TurbineExecutor()
        x = tex.submit(bad_foo, 5, 10)
        x.result()


if __name__ == "__main__":
    # test_simple()
    # test_slow()
    test_except()
    print("Done")
Python
0.000004
e9980d7498c0889ecd795a4d2977c1893e0ad7e3
comment on md5 usage
app/util.py
app/util.py
import bcrypt
import md5


def hash_pwd(password):
    return bcrypt.hashpw(password, bcrypt.gensalt())


def check_pwd(password, hashed):
    return bcrypt.hashpw(password, hashed) == hashed


def validate_time(time):
    return True


# XXX md5 module deprecated, use hashlib
def gravatar_html(email):
    h = md5.md5(email.lower()).hexdigest()
    html = '<img src="http://www.gravatar.com/avatar/%s.jpg?s=15" />' % h
    return html
import bcrypt
import md5


def hash_pwd(password):
    return bcrypt.hashpw(password, bcrypt.gensalt())


def check_pwd(password, hashed):
    return bcrypt.hashpw(password, hashed) == hashed


def validate_time(time):
    return True


def gravatar_html(email):
    h = md5.md5(email.lower()).hexdigest()
    html = '<img src="http://www.gravatar.com/avatar/%s.jpg?s=15" />' % h
    return html
Python
0
1d443973e8db6265268dd2afe6b6ad7748526335
Add _read_test_file() function.
ipymd/utils.py
ipymd/utils.py
# -*- coding: utf-8 -*-

"""Utils"""

#------------------------------------------------------------------------------
# Imports
#------------------------------------------------------------------------------

import os
import os.path as op
import difflib

from .six import exec_


#------------------------------------------------------------------------------
# Utils
#------------------------------------------------------------------------------

def _script_dir():
    return op.dirname(op.realpath(__file__))


def _test_file_path(filename):
    """Return the full path to an example filename in the 'examples'
    directory."""
    return op.realpath(op.join(_script_dir(), '../examples', filename))


def _exec_test_file(filename):
    """Return the 'output' object defined in a Python file."""
    path = _test_file_path(filename)
    with open(path, 'r') as f:
        contents = f.read()
    ns = {}
    exec_(contents, ns)
    return ns.get('output', None)


def _read_test_file(filename):
    """Read a test file."""
    path = _test_file_path(filename)
    with open(path, 'r') as f:
        return f.read()


def _diff_removed_lines(diff):
    return ''.join(x[2:] for x in diff if x.startswith('- '))


def _diff(text_0, text_1):
    """Return a diff between two strings."""
    diff = difflib.ndiff(text_0.splitlines(), text_1.splitlines())
    return _diff_removed_lines(diff)
# -*- coding: utf-8 -*-

"""Utils"""

#------------------------------------------------------------------------------
# Imports
#------------------------------------------------------------------------------

import os
import os.path as op
import difflib

from .six import exec_


#------------------------------------------------------------------------------
# Utils
#------------------------------------------------------------------------------

def _script_dir():
    return op.dirname(op.realpath(__file__))


def _test_file_path(filename):
    """Return the full path to an example filename in the 'examples'
    directory."""
    return op.realpath(op.join(_script_dir(), '../examples', filename))


def _exec_test_file(filename):
    """Return the 'output' object defined in a Python file."""
    path = _test_file_path(filename)
    with open(path, 'r') as f:
        contents = f.read()
    ns = {}
    exec_(contents, ns)
    return ns.get('output', None)


def _diff_removed_lines(diff):
    return ''.join(x[2:] for x in diff if x.startswith('- '))


def _diff(text_0, text_1):
    """Return a diff between two strings."""
    diff = difflib.ndiff(text_0.splitlines(), text_1.splitlines())
    return _diff_removed_lines(diff)
Python
0.000001
95ad2c65fb1b4aacea668c8d9474183b4f107d56
Test with multi args
paver/tests/test_shell.py
paver/tests/test_shell.py
import sys

from paver.deps.six import b
from mock import patch, Mock
from paver import easy
from subprocess import PIPE, STDOUT


@patch('subprocess.Popen')
def test_sh_raises_BuildFailure(popen):
    popen.return_value.returncode = 1
    popen.return_value.communicate.return_value = [b('some stderr')]

    try:
        easy.sh('foo')
    except easy.BuildFailure:
        e = sys.exc_info()[1]
        args = e.args
        assert args == ('Subprocess return code: 1', )
    else:
        assert False, 'Failed to raise BuildFailure'

    assert popen.called
    assert popen.call_args[0][0] == 'foo'
    assert popen.call_args[1]['shell'] == True
    assert 'stdout' not in popen.call_args[1]


@patch('paver.shell.error')
@patch('subprocess.Popen')
def test_sh_with_capture_raises_BuildFailure(popen, error):
    popen.return_value.returncode = 1
    popen.return_value.communicate.return_value = [b('some stderr')]

    try:
        easy.sh('foo', capture=True)
    except easy.BuildFailure:
        e = sys.exc_info()[1]
        args = e.args
        assert args == ('Subprocess return code: 1', )
    else:
        assert False, 'Failed to raise BuildFailure'

    assert popen.called
    assert popen.call_args[0][0] == 'foo'
    assert popen.call_args[1]['shell'] == True
    assert popen.call_args[1]['stdout'] == PIPE
    assert popen.call_args[1]['stderr'] == STDOUT

    assert error.called
    assert error.call_args == (('some stderr', ), {})


@patch('subprocess.Popen')
def test_sh_ignores_error(popen):
    popen.return_value.communicate.return_value = [b('some stderr')]
    popen.return_value.returncode = 1
    easy.sh('foo', ignore_error=True)

    assert popen.called
    assert popen.call_args[0][0] == 'foo'
    assert popen.call_args[1]['shell'] == True
    assert 'stdout' not in popen.call_args[1]


@patch('subprocess.Popen')
def test_sh_ignores_error_with_capture(popen):
    popen.return_value.returncode = 1
    popen.return_value.communicate.return_value = [b('some stderr')]
    easy.sh('foo', capture=True, ignore_error=True)

    assert popen.called
    assert popen.call_args[0][0] == 'foo'
    assert popen.call_args[1]['shell'] == True
    assert popen.call_args[1]['stdout'] == PIPE
    assert popen.call_args[1]['stderr'] == STDOUT


@patch('subprocess.Popen')
def test_sh_with_multi_command(popen):
    popen.return_value.returncode = 0

    easy.sh(['foo', ' bar', 'fi"zz'])

    assert popen.called
    assert popen.call_args[0][0] == "foo ' bar' 'fi\"zz'"
    assert popen.call_args[1]['shell'] == True
import sys

from paver.deps.six import b
from mock import patch, Mock
from paver import easy
from subprocess import PIPE, STDOUT


@patch('subprocess.Popen')
def test_sh_raises_BuildFailure(popen):
    popen.return_value.returncode = 1
    popen.return_value.communicate.return_value = [b('some stderr')]

    try:
        easy.sh('foo')
    except easy.BuildFailure:
        e = sys.exc_info()[1]
        args = e.args
        assert args == ('Subprocess return code: 1', )
    else:
        assert False, 'Failed to raise BuildFailure'

    assert popen.called
    assert popen.call_args[0][0] == 'foo'
    assert popen.call_args[1]['shell'] == True
    assert 'stdout' not in popen.call_args[1]


@patch('paver.easy.error')
@patch('subprocess.Popen')
def test_sh_with_capture_raises_BuildFailure(popen, error):
    popen.return_value.returncode = 1
    popen.return_value.communicate.return_value = [b('some stderr')]

    try:
        easy.sh('foo', capture=True)
    except easy.BuildFailure:
        e = sys.exc_info()[1]
        args = e.args
        assert args == ('Subprocess return code: 1', )
    else:
        assert False, 'Failed to raise BuildFailure'

    assert popen.called
    assert popen.call_args[0][0] == 'foo'
    assert popen.call_args[1]['shell'] == True
    assert popen.call_args[1]['stdout'] == PIPE
    assert popen.call_args[1]['stderr'] == STDOUT

    assert error.called
    assert error.call_args == (('some stderr', ), {})


@patch('subprocess.Popen')
def test_sh_ignores_error(popen):
    popen.return_value.communicate.return_value = [b('some stderr')]
    popen.return_value.returncode = 1
    easy.sh('foo', ignore_error=True)

    assert popen.called
    assert popen.call_args[0][0] == 'foo'
    assert popen.call_args[1]['shell'] == True
    assert 'stdout' not in popen.call_args[1]


@patch('subprocess.Popen')
def test_sh_ignores_error_with_capture(popen):
    popen.return_value.returncode = 1
    popen.return_value.communicate.return_value = [b('some stderr')]
    easy.sh('foo', capture=True, ignore_error=True)

    assert popen.called
    assert popen.call_args[0][0] == 'foo'
    assert popen.call_args[1]['shell'] == True
    assert popen.call_args[1]['stdout'] == PIPE
    assert popen.call_args[1]['stderr'] == STDOUT
Python
0
d329787dc6f862e749ca6f490a155186b48553a7
Fix one more bug; interpreter still broken
bfinterp.py
bfinterp.py
import sys
import collections

import getch

from parser import parse, optimize
from parser import OUTPUT, INPUT, LOOPSTART, LOOPEND, MOVE
from parser import ADD, SET, MULCOPY, SCAN

BUFSIZE = 8192


def interp(code):
    tokens = parse(code)
    tokens = optimize(tokens)

    i = 0
    loops = []
    mem = bytearray(BUFSIZE)
    cur = int(BUFSIZE/2)
    skiploop = False
    while i < len(tokens)-1:
        #print("%d:%s cur:%d mem[cur]:%d" % (i, code[i], cur, mem[cur]))
        #print(loops)
        token, value = tokens[i]
        if skiploop:
            if token == LOOPEND:
                skiploop = False
            i += 1
            continue
        if token == OUTPUT:
            print(chr(mem[cur]), end='')
        elif token == INPUT:
            mem[cur] == ord(getch.getch())
        elif token == MOVE:
            cur += value
        elif token == ADD:
            offset, add = value
            newval = mem[cur+offset] + add
            newval %= 256
            mem[cur+offset] = newval
        elif token == SET:
            offset, val = value
            mem[cur+offset] = val
        elif token == MULCOPY:
            src, dest, mul = value
            newval = mem[cur+dest] + mem[cur+src] * mul
            newval %= 256
            mem[cur+dest] = newval
        elif token == SCAN:
            while mem[cur] != 0:
                cur += value
        elif token == LOOPSTART:
            if mem[cur]:
                loops.append(i)
            else:
                skiploop = True
        elif token == LOOPEND:
            if mem[cur] == 0:
                loops.pop()
            else:
                i = loops[-1]
        else:
            raise ValueError('Token not handled')
        i += 1


if __name__ == '__main__':
    with open(sys.argv[1]) as bffile:
        interp(bffile.read())
import sys
import collections

import getch

from parser import parse, optimize
from parser import OUTPUT, INPUT, LOOPSTART, LOOPEND, MOVE
from parser import ADD, SET, MULCOPY, SCAN

BUFSIZE = 8192


def interp(code):
    tokens = parse(code)
    tokens = optimize(tokens)

    i = 0
    loops = []
    mem = bytearray(BUFSIZE)
    cur = int(BUFSIZE/2)
    skiploop = False
    while i < len(tokens)-1:
        #print("%d:%s cur:%d mem[cur]:%d" % (i, code[i], cur, mem[cur]))
        #print(loops)
        token, value = tokens[i]
        if skiploop:
            if token == LOOPEND:
                skiploop = False
            continue
        if token == OUTPUT:
            print(chr(mem[cur]), end='')
        elif token == INPUT:
            mem[cur] == ord(getch.getch())
        elif token == MOVE:
            cur += value
        elif token == ADD:
            offset, add = value
            newval = mem[cur+offset] + add
            newval %= 256
            mem[cur+offset] = newval
        elif token == SET:
            offset, val = value
            mem[cur+offset] = val
        elif token == MULCOPY:
            src, dest, mul = value
            newval = mem[cur+dest] + mem[cur+src] * mul
            newval %= 256
            mem[cur+dest] = newval
        elif token == SCAN:
            while mem[cur] != 0:
                cur += value
        elif token == LOOPSTART:
            if mem[cur]:
                loops.append(i)
            else:
                skiploop = True
        elif token == LOOPEND:
            if mem[cur] == 0:
                loops.pop()
            else:
                i = loops[-1]
        else:
            raise ValueError('Token not handled')
        i += 1


if __name__ == '__main__':
    with open(sys.argv[1]) as bffile:
        interp(bffile.read())
Python
0
4cd44a177147569767a8f53aed67cbee0f759667
bump version to 3.0.0-alpha
pyani/__init__.py
pyani/__init__.py
# python package version
# should match r"^__version__ = '(?P<version>[^']+)'$" for setup.py
"""Module with main code for pyani application/package."""
__version__ = '0.3.0-alpha'
# python package version
# should match r"^__version__ = '(?P<version>[^']+)'$" for setup.py
"""Module with main code for pyani application/package."""
__version__ = '0.3.0.dev'
Python
0.000001
3fda8faef7dccaefc29bb9c4a84fce4819141118
update some comments and names for readability
src/watchdog/observers/inotify_buffer.py
src/watchdog/observers/inotify_buffer.py
# -*- coding: utf-8 -*-
#
# Copyright 2014 Thomas Amland <thomas.amland@gmail.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import time
import logging
import threading
from collections import deque
from watchdog.utils import DaemonThread
from .inotify_c import Inotify

__all__ = ['InotifyBuffer']

STOP_EVENT = object()


class Worker(DaemonThread):
    """
    Thread that reads events from `inotify` and writes to an InotifyBuffer.
    """

    def __init__(self, inotify, buffer):
        DaemonThread.__init__(self)
        self._read_events = inotify.read_events
        self._buffer = buffer

    def run(self):
        while self.should_keep_running():
            inotify_events = self._read_events()
            for inotify_event in inotify_events:
                logging.debug("worker: in event %s", inotify_event)
                if inotify_event.is_moved_to:
                    from_event = self._buffer._catch(inotify_event.cookie)
                    if from_event:
                        self._buffer._put((from_event, inotify_event))
                    else:
                        logging.debug("worker: could not find maching move_from event")
                        self._buffer._put(inotify_event)
                else:
                    self._buffer._put(inotify_event)


class InotifyBuffer(object):
    """
    A wrapper for `Inotify` that keeps events in memory for `delay` seconds.
    IN_MOVED_FROM and IN_MOVED_TO events are paired during this time.
    """

    def __init__(self, path, recursive=False):
        self.delay = 0.5
        self._lock = threading.Lock()
        self._not_empty = threading.Condition(self._lock)
        self._queue = deque()
        self._inotify = Inotify(path, recursive)
        self._worker = Worker(self._inotify, self)
        self._worker.start()

    def read_event(self):
        """
        Returns a single event or a tuple of from/to events in case of a
        paired move event.
        """
        while True:
            # wait for queue
            self._not_empty.acquire()
            while len(self._queue) == 0:
                self._not_empty.wait()
            head, insert_time = self._queue[0]
            self._not_empty.release()

            # wait for delay
            time_left = insert_time + self.delay - time.time()
            while time_left > 0:
                time.sleep(time_left)
                time_left = insert_time + self.delay - time.time()

            # return if event is still here
            self._lock.acquire()
            try:
                if len(self._queue) > 0 and self._queue[0][0] is head:
                    self._queue.popleft()
                    return head
            finally:
                self._lock.release()

    def close(self):
        self._worker.stop()
        self._inotify.close()
        self._worker.join()
        # Interrupt thread calling `self.read_event`
        self._put(STOP_EVENT)

    def _put(self, event):
        self._lock.acquire()
        self._queue.append((event, time.time()))
        self._not_empty.notify()
        self._lock.release()

    def _catch(self, cookie):
        """
        Remove and return the MOVE_FROM event matching `cookie`.
        """
        self._lock.acquire()
        ret = None
        for i, elem in enumerate(self._queue):
            event, _ = elem
            try:
                if event.is_moved_from and event.cookie == cookie:
                    ret = event
                    del self._queue[i]
                    break
            except AttributeError:
                pass
        self._lock.release()
        return ret
# -*- coding: utf-8 -*-
#
# Copyright 2014 Thomas Amland <thomas.amland@gmail.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import time
import logging
import threading
from collections import deque
from watchdog.utils import DaemonThread
from .inotify_c import Inotify

STOP_EVENT = object()


class _Worker(DaemonThread):
    """
    Thread that reads events from `inotify` and writes to `queue`.
    """

    def __init__(self, inotify, queue):
        DaemonThread.__init__(self)
        self._read_events = inotify.read_events
        self._queue = queue

    def run(self):
        while self.should_keep_running():
            inotify_events = self._read_events()
            for inotify_event in inotify_events:
                logging.debug("worker: in event %s", inotify_event)
                if inotify_event.is_moved_to:
                    from_event = self._queue._catch(inotify_event.cookie)
                    if from_event:
                        self._queue._put((from_event, inotify_event))
                    else:
                        logging.debug("worker: could not find maching move_from event")
                        self._queue._put(inotify_event)
                else:
                    self._queue._put(inotify_event)


class InotifyBuffer(object):
    """
    A wrapper for `Inotify` that keeps events in memory for `delay` seconds.
    IN_MOVED_FROM and IN_MOVED_TO events are paired during this time.
    """

    def __init__(self, path, recursive=False):
        self.delay = 0.5
        self._lock = threading.Lock()
        self._not_empty = threading.Condition(self._lock)
        self._queue = deque()
        self._inotify = Inotify(path, recursive)
        self._worker = _Worker(self._inotify, self)
        self._worker.start()

    def read_event(self):
        """
        Returns a single event or a tuple of from/to events in case of a
        paired move event.
        """
        while True:
            # wait for queue
            self._not_empty.acquire()
            while len(self._queue) == 0:
                self._not_empty.wait()
            head, insert_time = self._queue[0]
            self._not_empty.release()

            # wait for delay
            time_left = insert_time + self.delay - time.time()
            while time_left > 0:
                time.sleep(time_left)
                time_left = insert_time + self.delay - time.time()

            # return if event is still here
            self._lock.acquire()
            try:
                if len(self._queue) > 0 and self._queue[0][0] is head:
                    self._queue.popleft()
                    return head
            finally:
                self._lock.release()

    def close(self):
        self._worker.stop()
        self._inotify.close()
        self._worker.join()
        # Add the stop event to unblock the read_event which waits for
        # events in the queue... even after inotify buffer is closed.
        self._put(STOP_EVENT)

    def _put(self, elem):
        self._lock.acquire()
        self._queue.append((elem, time.time()))
        self._not_empty.notify()
        self._lock.release()

    def _catch(self, cookie):
        self._lock.acquire()
        ret = None
        for i, elem in enumerate(self._queue):
            event, _ = elem
            try:
                if event.is_moved_from and event.cookie == cookie:
                    ret = event
                    del self._queue[i]
                    break
            except AttributeError:
                pass
        self._lock.release()
        return ret
Python
0
f52bec382965e166b86821938da07c9dbc80c9de
Switch to Roster implementation with DB backend
pyfire/contact.py
pyfire/contact.py
""" pyfire.contact ~~~~~~~~~~ Handles Contact ("roster item") interpretation as per RFC-6121 :copyright: 2011 by the pyfire Team, see AUTHORS for more details. :license: BSD, see LICENSE for more details. """ import xml.etree.ElementTree as ET from sqlalchemy import Table, Column, Boolean, Integer, String, Enum, ForeignKey from sqlalchemy.orm import relationship, backref from pyfire.jid import JID from pyfire.storage import Base class Roster(Base): """List of contacts for a given jid""" __tablename__ = 'rosters' id = Column(Integer, primary_key=True) jid = Column(String(3072), nullable=False) contacts_groups = Table('contacts_groups', Base.metadata, Column('contact_id', Integer, ForeignKey('contacts.id')), Column('group_id', Integer, ForeignKey('groups.id')) ) class Group(Base): """Simple group, only providing a name for now""" __tablename__ = 'groups' id = Column(Integer, primary_key=True) name = Column(String(255)) class Contact(Base): """Jabber Contact, aka roster item. It has some really strict attribute setting mechanism as it leads to all kinds of fantastic crashes with clients which should be avoided in any case. """ __tablename__ = 'contacts' id = Column(Integer, primary_key=True) approved = Column(Boolean) ask = Column(Enum(['subscribe'])) jid = Column(String(3072), nullable=False) name = Column(String(255)) subscription = Column(Enum(["none", "from", "to", "remove", "both"])) groups = relationship(Group, secondary=contacts_groups) roster = relationship(Roster, backref=backref('contacts')) roster_id = Column(Integer, ForeignKey('rosters.id'), nullable=False) def __init__(self, jid, **kwds): super(Contact, self).__init__() # required if isinstance(jid, basestring): self.jid = JID(jid) elif isinstance(jid, JID): self.jid = jid self.jid.validate(raise_error=True) else: raise AttributeError("Needs valid jid either as string or JID instance") # optional self.approved = False self.ask = None self.name = None self.subscription = "none" self.group = [] for k, v in kwds.iteritems(): setattr(self, k, v) def to_element(self): """Formats contact as `class`:ET.Element object""" element = ET.Element("item") if self.approved is not None: element.set("approved", 'true' if self.approved else 'false') if self.ask is not None: element.set("ask", self.ask) element.set("jid", str(self.jid)) if self.name is not None: element.set("name", self.name) if self.subscription is not None: element.set("subscription", self.subscription) for group in self.group: group_element = ET.SubElement(element, "group") group_element.text = group return element @staticmethod def from_element(element): """Creates contact instance from `class`:ET.Element""" if element.tag != "item": raise ValueError("Invalid element with tag %s" % element.tag) cont = Contact(element.get('jid')) cont.ask = element.get('ask') cont.subscription = element.get('subscription') approved = element.get('approved') if approved == 'true': cont.approved = True elif approved == 'false': cont.approved = False else: cont.approved = approved for group in list(element): if group.tag == "group": cont.group.append(group.text) return cont
""" pyfire.contact ~~~~~~~~~~ Handles Contact ("roster item") interpretation as per RFC-6121 :copyright: 2011 by the pyfire Team, see AUTHORS for more details. :license: BSD, see LICENSE for more details. """ from pyfire.jid import JID import xml.etree.ElementTree as ET class Contact(object): """Jabber Contact, aka roster item. It has some really strict attribute setting mechanism as it leads to all kinds of fantastic crashes with clients which should be avoided really. """ __slots__ = ('__approved', '__ask', 'jid', 'name', '__subscription', 'group') allowed_approved = frozenset([None, True, False]) allowed_ask = frozenset([None, "subscribe"]) allowed_subscription = frozenset([None, "none", "from", "to", "remove", "both"]) def __init__(self, jid, **kwds): super(Contact, self).__init__() # required if isinstance(jid, basestring): self.jid = JID(jid) else: self.jid = jid self.jid.validate(raise_error=True) # optional self.approved = False self.ask = None self.name = None self.subscription = "none" self.group = [] for name, value in kwds.iteritems(): setattr(self, name, value) def __setattr__(self, name, value): hidden_name = "__%s" % name really_hidden_name = "_%s__%s" % (self.__class__.__name__, name) if hasattr(self, hidden_name) or hidden_name in self.__slots__: range_var = "allowed_%s" % name if value in getattr(self, range_var): object.__setattr__(self, really_hidden_name, value) else: raise ValueError("'%s' not in allowed set of values" % value) elif name in self.__slots__: object.__setattr__(self, name, value) else: raise AttributeError("'%s' object has no attribute '%s'" % (self.__class__.__name__, name)) @property def ask(self): return self.__ask @property def approved(self): return self.__approved @property def subscription(self): return self.__subscription def to_element(self): """Formats contact as `class`:ET.Element object""" element = ET.Element("item") if self.approved is not None: element.set("approved", 'true' if self.approved else 'false') if self.ask is not None: element.set("ask", self.ask) element.set("jid", str(self.jid)) if self.name is not None: element.set("name", self.name) if self.subscription is not None: element.set("subscription", self.subscription) for group in self.group: group_element = ET.SubElement(element, "group") group_element.text = group return element @staticmethod def from_element(element): """Creates contact instance from `class`:ET.Element""" if element.tag != "item": raise ValueError("Invalid element with tag %s" % element.tag) cont = Contact(element.get('jid')) cont.ask = element.get('ask') cont.subscription = element.get('subscription') approved = element.get('approved') if approved == 'true': cont.approved = True elif approved == 'false': cont.approved = False else: cont.approved = approved for group in list(element): if group.tag == "group": cont.group.append(group.text) return cont
Python
0
3066837091621720be0b0338d12ed66fd24a86b1
bump version
pyiso/__init__.py
pyiso/__init__.py
import imp
import os.path


__version__ = '0.2.7'


BALANCING_AUTHORITIES = {
    'BPA': {'module': 'bpa', 'class': 'BPAClient'},
    'CAISO': {'module': 'caiso', 'class': 'CAISOClient'},
    'ERCOT': {'module': 'ercot', 'class': 'ERCOTClient'},
    'ISONE': {'module': 'isone', 'class': 'ISONEClient'},
    'MISO': {'module': 'miso', 'class': 'MISOClient'},
    'NEVP': {'module': 'nvenergy', 'class': 'NVEnergyClient'},
    'NYISO': {'module': 'nyiso', 'class': 'NYISOClient'},
    'PJM': {'module': 'pjm', 'class': 'PJMClient'},
    'SPPC': {'module': 'nvenergy', 'class': 'NVEnergyClient'},
    'SPP': {'module': 'spp', 'class': 'SPPClient'},
}


def client_factory(client_name, **kwargs):
    """Return a client for an external data set"""
    # set up
    dir_name = os.path.dirname(os.path.abspath(__file__))
    error_msg = 'No client found for name %s' % client_name
    client_key = client_name.upper()

    # find client
    try:
        client_vals = BALANCING_AUTHORITIES[client_key]
        module_name = client_vals['module']
        class_name = client_vals['class']
    except KeyError:
        raise ValueError(error_msg)

    # find module
    try:
        fp, pathname, description = imp.find_module(module_name, [dir_name])
    except ImportError:
        raise ValueError(error_msg)

    # load
    try:
        mod = imp.load_module(module_name, fp, pathname, description)
    finally:
        # Since we may exit via an exception, close fp explicitly.
        if fp:
            fp.close()

    # instantiate class
    try:
        client_inst = getattr(mod, class_name)(**kwargs)
    except AttributeError:
        raise ValueError(error_msg)

    # set name
    client_inst.NAME = client_name

    return client_inst
import imp
import os.path


__version__ = '0.2.6'


BALANCING_AUTHORITIES = {
    'BPA': {'module': 'bpa', 'class': 'BPAClient'},
    'CAISO': {'module': 'caiso', 'class': 'CAISOClient'},
    'ERCOT': {'module': 'ercot', 'class': 'ERCOTClient'},
    'ISONE': {'module': 'isone', 'class': 'ISONEClient'},
    'MISO': {'module': 'miso', 'class': 'MISOClient'},
    'NEVP': {'module': 'nvenergy', 'class': 'NVEnergyClient'},
    'NYISO': {'module': 'nyiso', 'class': 'NYISOClient'},
    'PJM': {'module': 'pjm', 'class': 'PJMClient'},
    'SPPC': {'module': 'nvenergy', 'class': 'NVEnergyClient'},
    'SPP': {'module': 'spp', 'class': 'SPPClient'},
}


def client_factory(client_name, **kwargs):
    """Return a client for an external data set"""
    # set up
    dir_name = os.path.dirname(os.path.abspath(__file__))
    error_msg = 'No client found for name %s' % client_name
    client_key = client_name.upper()

    # find client
    try:
        client_vals = BALANCING_AUTHORITIES[client_key]
        module_name = client_vals['module']
        class_name = client_vals['class']
    except KeyError:
        raise ValueError(error_msg)

    # find module
    try:
        fp, pathname, description = imp.find_module(module_name, [dir_name])
    except ImportError:
        raise ValueError(error_msg)

    # load
    try:
        mod = imp.load_module(module_name, fp, pathname, description)
    finally:
        # Since we may exit via an exception, close fp explicitly.
        if fp:
            fp.close()

    # instantiate class
    try:
        client_inst = getattr(mod, class_name)(**kwargs)
    except AttributeError:
        raise ValueError(error_msg)

    # set name
    client_inst.NAME = client_name

    return client_inst
Python
0
76c8096b3aed79391614b32608ab446613c42034
Add LOG_LEVEL global set by DEBUG=True in environment
pyiso/__init__.py
pyiso/__init__.py
import imp
import os.path
from os import environ
from logging import DEBUG, INFO

#########################################
# For Testing Purposes
# Add caching to unittesting
# Print every time the testing hits the cache successfully
import requests
import requests_cache
requests_cache.install_cache(expire_after=60*10)

__version__ = '0.2.11'

log_dict = {'True': DEBUG, False: INFO}
LOG_LEVEL = log_dict[environ.get('DEBUG', False)]

BALANCING_AUTHORITIES = {
    'AZPS': {'module': 'sveri', 'class': 'SVERIClient'},
    'BPA': {'module': 'bpa', 'class': 'BPAClient'},
    'CAISO': {'module': 'caiso', 'class': 'CAISOClient'},
    'DEAA': {'module': 'sveri', 'class': 'SVERIClient'},
    'ELE': {'module': 'sveri', 'class': 'SVERIClient'},
    'ERCOT': {'module': 'ercot', 'class': 'ERCOTClient'},
    'HGMA': {'module': 'sveri', 'class': 'SVERIClient'},
    'IID': {'module': 'sveri', 'class': 'SVERIClient'},
    'ISONE': {'module': 'isone', 'class': 'ISONEClient'},
    'GRIF': {'module': 'sveri', 'class': 'SVERIClient'},
    'MISO': {'module': 'miso', 'class': 'MISOClient'},
    'NEVP': {'module': 'nvenergy', 'class': 'NVEnergyClient'},
    'NYISO': {'module': 'nyiso', 'class': 'NYISOClient'},
    'PJM': {'module': 'pjm', 'class': 'PJMClient'},
    'PNM': {'module': 'sveri', 'class': 'SVERIClient'},
    'SPPC': {'module': 'nvenergy', 'class': 'NVEnergyClient'},
    'SPP': {'module': 'spp', 'class': 'SPPClient'},
    'SRP': {'module': 'sveri', 'class': 'SVERIClient'},
    'TEPC': {'module': 'sveri', 'class': 'SVERIClient'},
    'WALC': {'module': 'sveri', 'class': 'SVERIClient'},
    'EU': {'module': 'eu', 'class': 'EUClient'},
}


def client_factory(client_name, **kwargs):
    """Return a client for an external data set"""
    # set up
    dir_name = os.path.dirname(os.path.abspath(__file__))
    error_msg = 'No client found for name %s' % client_name
    client_key = client_name.upper()

    # find client
    try:
        client_vals = BALANCING_AUTHORITIES[client_key]
        module_name = client_vals['module']
        class_name = client_vals['class']
    except KeyError:
        raise ValueError(error_msg)

    # find module
    try:
        fp, pathname, description = imp.find_module(module_name, [dir_name])
    except ImportError:
        raise ValueError(error_msg)

    # load
    try:
        mod = imp.load_module(module_name, fp, pathname, description)
    finally:
        # Since we may exit via an exception, close fp explicitly.
        if fp:
            fp.close()

    # instantiate class
    try:
        client_inst = getattr(mod, class_name)(**kwargs)
    except AttributeError:
        raise ValueError(error_msg)

    # set name
    client_inst.NAME = client_name

    return client_inst
import imp
import os.path


__version__ = '0.2.11'


BALANCING_AUTHORITIES = {
    'AZPS': {'module': 'sveri', 'class': 'SVERIClient'},
    'BPA': {'module': 'bpa', 'class': 'BPAClient'},
    'CAISO': {'module': 'caiso', 'class': 'CAISOClient'},
    'DEAA': {'module': 'sveri', 'class': 'SVERIClient'},
    'ELE': {'module': 'sveri', 'class': 'SVERIClient'},
    'ERCOT': {'module': 'ercot', 'class': 'ERCOTClient'},
    'HGMA': {'module': 'sveri', 'class': 'SVERIClient'},
    'IID': {'module': 'sveri', 'class': 'SVERIClient'},
    'ISONE': {'module': 'isone', 'class': 'ISONEClient'},
    'GRIF': {'module': 'sveri', 'class': 'SVERIClient'},
    'MISO': {'module': 'miso', 'class': 'MISOClient'},
    'NEVP': {'module': 'nvenergy', 'class': 'NVEnergyClient'},
    'NYISO': {'module': 'nyiso', 'class': 'NYISOClient'},
    'PJM': {'module': 'pjm', 'class': 'PJMClient'},
    'PNM': {'module': 'sveri', 'class': 'SVERIClient'},
    'SPPC': {'module': 'nvenergy', 'class': 'NVEnergyClient'},
    'SPP': {'module': 'spp', 'class': 'SPPClient'},
    'SRP': {'module': 'sveri', 'class': 'SVERIClient'},
    'TEPC': {'module': 'sveri', 'class': 'SVERIClient'},
    'WALC': {'module': 'sveri', 'class': 'SVERIClient'},
    'EU': {'module': 'eu', 'class': 'EUClient'},
}


def client_factory(client_name, **kwargs):
    """Return a client for an external data set"""
    # set up
    dir_name = os.path.dirname(os.path.abspath(__file__))
    error_msg = 'No client found for name %s' % client_name
    client_key = client_name.upper()

    # find client
    try:
        client_vals = BALANCING_AUTHORITIES[client_key]
        module_name = client_vals['module']
        class_name = client_vals['class']
    except KeyError:
        raise ValueError(error_msg)

    # find module
    try:
        fp, pathname, description = imp.find_module(module_name, [dir_name])
    except ImportError:
        raise ValueError(error_msg)

    # load
    try:
        mod = imp.load_module(module_name, fp, pathname, description)
    finally:
        # Since we may exit via an exception, close fp explicitly.
        if fp:
            fp.close()

    # instantiate class
    try:
        client_inst = getattr(mod, class_name)(**kwargs)
    except AttributeError:
        raise ValueError(error_msg)

    # set name
    client_inst.NAME = client_name

    return client_inst
Python
0.000064
42ea9fef4203d5acd73e732dbe0e4d8672e81d17
bump version for pypi
jax/version.py
jax/version.py
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

__version__ = "0.1.34"
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

__version__ = "0.1.33"
Python
0
bcc8164f2e6ed4401dc5ecb74a28ebe8554f7b82
Add Windows support.
binding.gyp
binding.gyp
{
  'targets': [{
    'target_name': 'robotjs',
    'include_dirs': [
      'node_modules/nan/'
    ],
    'cflags': [
      '-Wall',
      '-Wparentheses',
      '-Winline',
      '-Wbad-function-cast',
      '-Wdisabled-optimization'
    ],
    'conditions': [
      ['OS == "mac"', {
        'include_dirs': [
          'System/Library/Frameworks/CoreFoundation.Framework/Headers',
          'System/Library/Frameworks/Carbon.Framework/Headers',
          'System/Library/Frameworks/ApplicationServices.framework/Headers',
          'System/Library/Frameworks/OpenGL.framework/Headers',
        ],
        'link_settings': {
          'libraries': [
            '-framework Carbon',
            '-framework CoreFoundation',
            '-framework ApplicationServices',
            '-framework OpenGL'
          ]
        }
      }],
      ['OS == "linux"', {
        'link_settings': {
          'libraries': [
            '-lpng',
            '-lz',
            '-lX11',
            '-lXtst'
          ]
        },
        'sources': [
          'src/xdisplay.c'
        ]
      }],
      ["OS=='win'", {
        'defines': [
          'IS_WINDOWS']
      }]
    ],
    'sources': [
      'src/robotjs.cc',
      'src/deadbeef_rand.c',
      'src/mouse.c',
      'src/keypress.c',
      'src/keycode.c',
      'src/screen.c',
      'src/screengrab.c',
      'src/snprintf.c',
      'src/MMBitmap.c'
    ]
  }]
}
{
  'targets': [{
    'target_name': 'robotjs',
    'include_dirs': [
      '<!(node -e \'require("nan")\')'
    ],
    'cflags': [
      '-Wall',
      '-Wparentheses',
      '-Winline',
      '-Wbad-function-cast',
      '-Wdisabled-optimization'
    ],
    'conditions': [
      ['OS == "mac"', {
        'include_dirs': [
          'System/Library/Frameworks/CoreFoundation.Framework/Headers',
          'System/Library/Frameworks/Carbon.Framework/Headers',
          'System/Library/Frameworks/ApplicationServices.framework/Headers',
          'System/Library/Frameworks/OpenGL.framework/Headers',
        ],
        'link_settings': {
          'libraries': [
            '-framework Carbon',
            '-framework CoreFoundation',
            '-framework ApplicationServices',
            '-framework OpenGL'
          ]
        }
      }],
      ['OS == "linux"', {
        'link_settings': {
          'libraries': [
            '-lpng',
            '-lz',
            '-lX11',
            '-lXtst'
          ]
        },
        'sources': [
          'src/xdisplay.c'
        ]
      }]
    ],
    'sources': [
      'src/robotjs.cc',
      'src/deadbeef_rand.c',
      'src/mouse.c',
      'src/keypress.c',
      'src/keycode.c',
      'src/screen.c',
      'src/screengrab.c',
      'src/MMBitmap.c'
    ]
  }]
}
Python
0
c5da75e3acb4ba4c69204ff1ad3e7e89d6710001
Add whitespace in tests
client/tests/framework_test.py
client/tests/framework_test.py
#!/usr/bin/python3

import unittest

import ok


class TestProtocol(ok.Protocol):
    name = "test"

    def __init__(self, args, src_files):
        ok.Protocol.__init__(args, src_files)
        self.called_start = 0
        self.called_interact = 0

    def on_start(self, buf):
        self.called_start += 1

    def on_interact(self, buf):
        self.called_interact += 1


class OkTest(unittest.TestCase):

    def setUp(self):
        self.hw1 = './demo_assignments/hw1.py'
        self.hw1_tests = './demo_assignments/hw1_tests.py'

    def test_parse_input(self):
        _ = ok.parse_input()  # Does not crash and returns a value.

    def test_is_src_file(self):
        self.assertTrue(ok.is_src_file('hw1.py'))
        self.assertFalse(ok.is_src_file('hw1_tests.py'))
        self.assertFalse(ok.is_src_file('hw1_tests'))
        self.assertFalse(ok.is_src_file('hw1.html'))
        self.assertFalse(ok.is_src_file('ok.py'))

    def test_get_assignment(self):
        self.assertTrue(ok.get_assignment(self.hw1) == 'hw1')
        self.assertFalse(ok.get_assignment(self.hw1_tests))

    def test_group_by_assignment(self):
        paths = [self.hw1, self.hw1_tests]
        groups = ok.group_by_assignment(paths)
        self.assertIn('hw1', groups)
        self.assertEqual(groups['hw1'], paths[0:1])

    def test_find_assignment(self):
        assignment, src_files = ok.find_assignment(None, '.')
        self.assertEqual(assignment, 'hw1')
        self.assertEqual(src_files, [self.hw1])
        self.assertRaises(Exception, ok.find_assignment, [None, 'tests'])
        self.assertRaises(Exception, ok.find_assignment, ['hw2', '.'])
#!/usr/bin/python3

import unittest

import ok


class TestProtocol(ok.Protocol):
    name = "test"

    def __init__(self, args, src_files):
        ok.Protocol.__init__(args, src_files)
        self.called_start = 0
        self.called_interact = 0

    def on_start(self, buf):
        self.called_start += 1

    def on_interact(self, buf):
        self.called_interact += 1


class OkTest(unittest.TestCase):
    def setUp(self):
        self.hw1 = './demo_assignments/hw1.py'
        self.hw1_tests = './demo_assignments/hw1_tests.py'

    def test_parse_input(self):
        _ = ok.parse_input()  # Does not crash and returns a value.

    def test_is_src_file(self):
        self.assertTrue(ok.is_src_file('hw1.py'))
        self.assertFalse(ok.is_src_file('hw1_tests.py'))
        self.assertFalse(ok.is_src_file('hw1_tests'))
        self.assertFalse(ok.is_src_file('hw1.html'))
        self.assertFalse(ok.is_src_file('ok.py'))

    def test_get_assignment(self):
        self.assertTrue(ok.get_assignment(self.hw1) == 'hw1')
        self.assertFalse(ok.get_assignment(self.hw1_tests))

    def test_group_by_assignment(self):
        paths = [self.hw1, self.hw1_tests]
        groups = ok.group_by_assignment(paths)
        self.assertIn('hw1', groups)
        self.assertEqual(groups['hw1'], paths[0:1])

    def test_find_assignment(self):
        assignment, src_files = ok.find_assignment(None, '.')
        self.assertEqual(assignment, 'hw1')
        self.assertEqual(src_files, [self.hw1])
        self.assertRaises(Exception, ok.find_assignment, [None, 'tests'])
        self.assertRaises(Exception, ok.find_assignment, ['hw2', '.'])
Python
0.999029
c552dc428b78fae168d59d3ff5af1818cf56f0e2
use DNSServiceGetAddrInfo(…) on Mac OS
binding.gyp
binding.gyp
{
  'targets': [
    { 'target_name': 'dns_sd_bindings'
    , 'sources': [ 'src/dns_sd.cpp'
                 , 'src/dns_service_browse.cpp'
                 , 'src/dns_service_enumerate_domains.cpp'
                 , 'src/dns_service_get_addr_info.cpp'
                 , 'src/dns_service_process_result.cpp'
                 , 'src/dns_service_ref.cpp'
                 , 'src/dns_service_ref_deallocate.cpp'
                 , 'src/dns_service_ref_sock_fd.cpp'
                 , 'src/dns_service_register.cpp'
                 , 'src/dns_service_resolve.cpp'
                 , 'src/mdns_utils.cpp'
                 , 'src/txt_record_ref.cpp'
                 , 'src/txt_record_create.cpp'
                 , 'src/txt_record_deallocate.cpp'
                 , 'src/txt_record_set_value.cpp'
                 , 'src/txt_record_get_length.cpp'
                 , 'src/txt_record_buffer_to_object.cpp'
                 , 'src/socket_watcher.cpp'
                 ]
    , 'conditions': [
        [ 'OS!="mac" and OS!="win"', {
          'libraries': [ '-ldns_sd' ]
        }]
      , [ 'OS=="mac"', {
          'defines': [ 'HAVE_DNSSERVICEGETADDRINFO' ]
        }]
      , ['OS=="win"', {
          'include_dirs': [ '$(BONJOUR_SDK_HOME)Include' ]
        , 'defines': [ 'HAVE_DNSSERVICEGETADDRINFO' ]
        , 'libraries': [ '-l$(BONJOUR_SDK_HOME)Lib/$(Platform)/dnssd.lib'
                       , '-lws2_32.lib'
                       ]
        }]
      ]
      # The following breaks the debug build, so just ignore the warning for now.
      #, 'msbuild_settings': {
      #    'ClCompile': { 'ExceptionHandling': 'Sync' }
      #  , 'Link'     : { 'IgnoreSpecificDefaultLibraries': [ 'LIBCMT' ] }
      #  }
    , 'configurations': {
        'Release': {
          'xcode_settings': { 'GCC_OPTIMIZATION_LEVEL': 3 }
        , 'cflags': [ '-O3' ]
        , 'ldflags': [ '-O3' ]
        }
      , 'Debug': {
          'xcode_settings': { 'GCC_OPTIMIZATION_LEVEL': 0 }
        , 'cflags': [ '-g', '-O0', ]
        , 'ldflags': [ '-g', '-O0' ]
        }
      , 'Coverage': {
          'xcode_settings': {
            'GCC_OPTIMIZATION_LEVEL': 0
          , 'OTHER_LDFLAGS': ['--coverage']
          , 'OTHER_CFLAGS': ['--coverage']
          }
        , 'cflags': [ '-O0', '--coverage' ]
        , 'ldflags': [ '--coverage' ]
        }
      }
    }
  ]
}

# vim: filetype=python shiftwidth=2 softtabstop=2 :
{
  'targets': [
    { 'target_name': 'dns_sd_bindings'
    , 'sources': [ 'src/dns_sd.cpp'
                 , 'src/dns_service_browse.cpp'
                 , 'src/dns_service_enumerate_domains.cpp'
                 , 'src/dns_service_get_addr_info.cpp'
                 , 'src/dns_service_process_result.cpp'
                 , 'src/dns_service_ref.cpp'
                 , 'src/dns_service_ref_deallocate.cpp'
                 , 'src/dns_service_ref_sock_fd.cpp'
                 , 'src/dns_service_register.cpp'
                 , 'src/dns_service_resolve.cpp'
                 , 'src/mdns_utils.cpp'
                 , 'src/txt_record_ref.cpp'
                 , 'src/txt_record_create.cpp'
                 , 'src/txt_record_deallocate.cpp'
                 , 'src/txt_record_set_value.cpp'
                 , 'src/txt_record_get_length.cpp'
                 , 'src/txt_record_buffer_to_object.cpp'
                 , 'src/socket_watcher.cpp'
                 ]
    , 'conditions': [
        [ 'OS!="mac" and OS!="win"', {
          'libraries': [ '-ldns_sd' ]
        }]
      , ['OS=="win"', {
          'include_dirs': [ '$(BONJOUR_SDK_HOME)Include' ]
        , 'defines': [ 'HAVE_DNSSERVICEGETADDRINFO' ]
        , 'libraries' : [ '-l$(BONJOUR_SDK_HOME)Lib/$(Platform)/dnssd.lib'
                        , '-lws2_32.lib'
                        ]
        }]
      ]
      # The following breaks the debug build, so just ignore the warning for now.
      #, 'msbuild_settings': {
      #    'ClCompile': { 'ExceptionHandling': 'Sync' }
      #  , 'Link'     : { 'IgnoreSpecificDefaultLibraries': [ 'LIBCMT' ] }
      #  }
    , 'configurations': {
        'Release': {
          'xcode_settings': { 'GCC_OPTIMIZATION_LEVEL': 3 }
        , 'cflags': [ '-O3' ]
        , 'ldflags': [ '-O3' ]
        }
      , 'Debug': {
          'xcode_settings': { 'GCC_OPTIMIZATION_LEVEL': 0 }
        , 'cflags': [ '-g', '-O0', ]
        , 'ldflags': [ '-g', '-O0' ]
        }
      , 'Coverage': {
          'xcode_settings': {
            'GCC_OPTIMIZATION_LEVEL': 0
          , 'OTHER_LDFLAGS': ['--coverage']
          , 'OTHER_CFLAGS': ['--coverage']
          }
        , 'cflags': [ '-O0', '--coverage' ]
        , 'ldflags': [ '--coverage' ]
        }
      }
    }
  ]
}

# vim: filetype=python shiftwidth=2 softtabstop=2 :
Python
0
c7764ac8c1363701b4e7fab1d8ae0e3197853b48
Update __init__.py
pylsy/__init__.py
pylsy/__init__.py
#__init__.py
from .pylsy import PylsyTable

__version__="1.003"
#__init__.py
from .pylsy import PylsyTable

__version__="1.001"
Python
0.000072
8eae324c0030221a93b202a419db3f7301ad486c
read config only if file exists
pymzn/__init__.py
pymzn/__init__.py
""" PyMzn is a Python library that wraps and enhances the MiniZinc tools for CSP modelling and solving. It is built on top of the libminizinc library (version 2.0) and provides a number of off-the-shelf functions to readily solve problems encoded in MiniZinc and parse the solutions into Python objects. """ import ast import yaml import appdirs import logging from . import _utils from . import bin from . import _dzn from ._dzn import * from . import _mzn from ._mzn import * __version__ = '0.10.8' __all__ = ['debug', 'config', 'bin', 'gecode'] __all__.extend(_dzn.__all__) __all__.extend(_mzn.__all__) # TODO: update python2 branch # TODO: config solver function and default arguments to solver # TODO: mzn2doc # TODO: check the import of other files in minizinc # TODO: make it work on windows # TODO: check the ctrl+C thing which seems to not work anymore _debug_handler = None _pymzn_logger = logging.getLogger(__name__) _pymzn_logger.addHandler(logging.NullHandler()) def debug(dbg=True): global _debug_handler if dbg and _debug_handler is None: _debug_handler = logging.StreamHandler() _pymzn_logger.addHandler(_debug_handler) _pymzn_logger.setLevel(logging.DEBUG) elif not dbg and _debug_handler is not None: _pymzn_logger.removeHandler(_debug_handler) _debug_handler = None _pymzn_logger.setLevel(logging.WARNING) config = {} cfg_file = os.path.join(appdirs.user_config_dir(__name__), 'config.yml') if os.path.isfile(cfg_file): with open(cfg_file) as f: config = yaml.load(f) # Solvers gecode = Gecode(path=config.get('gecode')) def main(): import argparse desc = 'PyMzn is a wrapper for the MiniZinc tool pipeline.' p = argparse.ArgumentParser(description=desc) p.add_argument('--debug', action='store_true', help='display debug messages on standard output') p.add_argument('mzn', help='the mzn file to solve') p.add_argument('dzn_files', nargs='*', help='additional dzn files') p.add_argument('--data', type=ast.literal_eval, help='additional inline data') p.add_argument('-k', '--keep', action='store_true', help='whether to keep generated files') p.add_argument('-o', '--output-base', help='base name for generated files') p.add_argument('-G', '--mzn-globals-dir', help='directory of global files in the standard library') p.add_argument('-f', '--fzn-fn', help='name of proxy function for the solver') p.add_argument('--fzn-args', type=ast.literal_eval, default={}, help='arguments to pass to the solver') args = p.parse_args() if args.debug: debug() other_args = {**{'data': args.data, 'keep': args.keep, 'output_base': args.output_base, 'mzn_globals_dir': args.mzn_globals_dir, 'fzn_fn': args.fzn_fn}, **args.fzn_args} print(minizinc(args.mzn, *args.dzn_files, raw_output=True, **other_args))
""" PyMzn is a Python library that wraps and enhances the MiniZinc tools for CSP modelling and solving. It is built on top of the libminizinc library (version 2.0) and provides a number of off-the-shelf functions to readily solve problems encoded in MiniZinc and parse the solutions into Python objects. """ import ast import yaml import appdirs import logging from . import _utils from . import bin from . import _dzn from ._dzn import * from . import _mzn from ._mzn import * __version__ = '0.10.8' __all__ = ['debug', 'config', 'bin', 'gecode'] __all__.extend(_dzn.__all__) __all__.extend(_mzn.__all__) # TODO: update python2 branch # TODO: config solver function and default arguments to solver # TODO: mzn2doc # TODO: check the import of other files in minizinc # TODO: make it work on windows # TODO: check the ctrl+C thing which seems to not work anymore _debug_handler = None _pymzn_logger = logging.getLogger(__name__) _pymzn_logger.addHandler(logging.NullHandler()) def debug(dbg=True): global _debug_handler if dbg and _debug_handler is None: _debug_handler = logging.StreamHandler() _pymzn_logger.addHandler(_debug_handler) _pymzn_logger.setLevel(logging.DEBUG) elif not dbg and _debug_handler is not None: _pymzn_logger.removeHandler(_debug_handler) _debug_handler = None _pymzn_logger.setLevel(logging.WARNING) with open(os.path.join(appdirs.user_config_dir(__name__), 'config.yml')) as f: config = yaml.load(f) # Solvers gecode = Gecode(path=config.get('gecode')) def main(): import argparse desc = 'PyMzn is a wrapper for the MiniZinc tool pipeline.' p = argparse.ArgumentParser(description=desc) p.add_argument('--debug', action='store_true', help='display debug messages on standard output') p.add_argument('mzn', help='the mzn file to solve') p.add_argument('dzn_files', nargs='*', help='additional dzn files') p.add_argument('--data', type=ast.literal_eval, help='additional inline data') p.add_argument('-k', '--keep', action='store_true', help='whether to keep generated files') p.add_argument('-o', '--output-base', help='base name for generated files') p.add_argument('-G', '--mzn-globals-dir', help='directory of global files in the standard library') p.add_argument('-f', '--fzn-fn', help='name of proxy function for the solver') p.add_argument('--fzn-args', type=ast.literal_eval, default={}, help='arguments to pass to the solver') args = p.parse_args() if args.debug: debug() other_args = {**{'data': args.data, 'keep': args.keep, 'output_base': args.output_base, 'mzn_globals_dir': args.mzn_globals_dir, 'fzn_fn': args.fzn_fn}, **args.fzn_args} print(minizinc(args.mzn, *args.dzn_files, raw_output=True, **other_args))
Python
0.000001
45141fe7f34e0522b2270047af796644406213dc
Add user help text to error output of do_fish_indent
do_fish_indent.py
do_fish_indent.py
import sublime, sublime_plugin
import os.path
import subprocess

# Only a TextCommand can use replace()
class DoFishIndentCommand(sublime_plugin.TextCommand):
    def is_enabled(self):
        # We are very incompatible with ST1 and probably ST4 one day
        return 2 <= int(sublime.version()[0]) <= 3

    def is_visible(self):
        return 'source.shell.fish' in self.view.scope_name(self.view.sel()[0].begin())

    def description(self):
        return 'Indent and Prettify'

    def run(self, edit):
        versionAPI = int(sublime.version()[0])

        # Check for executable
        exe = 'fish_indent'
        pathToDir = self.view.settings().get('fish_indent_directory')
        if pathToDir:
            exe = os.path.join(pathToDir, exe)

        # Select the entire contents of the file
        fileRegion = sublime.Region(0, self.view.size())
        fileContent = self.view.substr(fileRegion)

        # Note the file encoding, converting to lowercase as expected by Python
        # However, fish_indent assumes UTF-8 encoding so the user may get unexpected results if this file's encoding is different
        enc = self.view.encoding().lower()
        if enc == 'undefined':  # ie, temp file
            enc = 'utf-8'
        print('Running {0} on file with encoding {1}'.format(exe, enc))

        # Run the program, which is searched for on PATH if necessary
        try:
            # Pipe the file content into fish_indent and catch the outputs
            p = subprocess.Popen(exe, stdin = subprocess.PIPE, stdout = subprocess.PIPE, stderr = subprocess.PIPE)
            out, err = p.communicate(input = fileContent.encode(enc))
        except OSError:  # Soft compatibility with Python 2
            msg = "Couldn't find {0}".format(exe)
            if not pathToDir:
                msg += ". Specify a nonstandard install location in Preferences > Package Settings > friendly interactive shell > Settings"
            sublime.error_message(msg)
            return
        if err:
            sublime.message_dialog(err.decode('utf-8'))

        # Create a copy of all current cursor positions
        pos = list( self.view.sel() );

        # Replace the entire contents of the file with the output of fish_indent
        self.view.replace(edit, fileRegion, out.decode(enc))

        # Note the user's current settings for this buffer
        indentUsingSpaces = self.view.settings().get('translate_tabs_to_spaces')
        tabSize = self.view.settings().get('tab_size')

        # Convert the format to the user's preferred format
        if indentUsingSpaces and tabSize == 4:
            # Do nothing as this is the format produced by fish_indent
            pass
        else:
            # Convert sets of 4 spaces to tabs
            # Note that running unexpand_tabs will set translate_tabs_to_spaces to False
            self.view.settings().set('tab_size', 4)
            self.view.run_command('unexpand_tabs')

            if not indentUsingSpaces:
                # User prefers tabs
                if tabSize == 4:
                    # Conversion finished
                    pass
                else:
                    # Resize
                    self.view.settings().set('tab_size', tabSize)
            else:
                # User prefers spaces, so reset to True
                self.view.settings().set('translate_tabs_to_spaces', True)

                # Resize tabs, then convert back into spaces
                self.view.settings().set('tab_size', tabSize)
                self.view.run_command('expand_tabs')

        # Revert back to the old cursor positions and centre on the first one
        self.view.sel().clear()
        if versionAPI == 3:
            self.view.sel().add_all(pos)
        elif versionAPI == 2:
            map(self.view.sel().add, pos)
        self.view.show_at_center(pos[0])
import sublime, sublime_plugin
import os.path
import subprocess

# Only a TextCommand can use replace()
class DoFishIndentCommand(sublime_plugin.TextCommand):
    def is_enabled(self):
        # We are very incompatible with ST1 and probably ST4 one day
        return 2 <= int(sublime.version()[0]) <= 3

    def is_visible(self):
        return 'source.shell.fish' in self.view.scope_name(self.view.sel()[0].begin())

    def description(self):
        return 'Indent and Prettify'

    def run(self, edit):
        versionAPI = int(sublime.version()[0])

        # Check for executable
        exe = 'fish_indent'
        pathToDir = self.view.settings().get('fish_indent_directory')
        if pathToDir:
            exe = os.path.join(pathToDir, exe)

        # Select the entire contents of the file
        fileRegion = sublime.Region(0, self.view.size())
        fileContent = self.view.substr(fileRegion)

        # Note the file encoding, converting to lowercase as expected by Python
        # However, fish_indent assumes UTF-8 encoding so the user may get unexpected results if this file's encoding is different
        enc = self.view.encoding().lower()
        if enc == 'undefined':  # ie, temp file
            enc = 'utf-8'
        print('Running {0} on file with encoding {1}'.format(exe, enc))

        # Run the program, which is searched for on PATH if necessary
        try:
            # Pipe the file content into fish_indent and catch the outputs
            p = subprocess.Popen(exe, stdin = subprocess.PIPE, stdout = subprocess.PIPE, stderr = subprocess.PIPE)
            out, err = p.communicate(input = fileContent.encode(enc))
        except OSError:  # Soft compatibility with Python 2
            msg = "Couldn't find {0}".format(exe)
            sublime.error_message(msg)
            return
        if err:
            sublime.message_dialog(err.decode('utf-8'))

        # Create a copy of all current cursor positions
        pos = list( self.view.sel() );

        # Replace the entire contents of the file with the output of fish_indent
        self.view.replace(edit, fileRegion, out.decode(enc))

        # Note the user's current settings for this buffer
        indentUsingSpaces = self.view.settings().get('translate_tabs_to_spaces')
        tabSize = self.view.settings().get('tab_size')

        # Convert the format to the user's preferred format
        if indentUsingSpaces and tabSize == 4:
            # Do nothing as this is the format produced by fish_indent
            pass
        else:
            # Convert sets of 4 spaces to tabs
            # Note that running unexpand_tabs will set translate_tabs_to_spaces to False
            self.view.settings().set('tab_size', 4)
            self.view.run_command('unexpand_tabs')

            if not indentUsingSpaces:
                # User prefers tabs
                if tabSize == 4:
                    # Conversion finished
                    pass
                else:
                    # Resize
                    self.view.settings().set('tab_size', tabSize)
            else:
                # User prefers spaces, so reset to True
                self.view.settings().set('translate_tabs_to_spaces', True)

                # Resize tabs, then convert back into spaces
                self.view.settings().set('tab_size', tabSize)
                self.view.run_command('expand_tabs')

        # Revert back to the old cursor positions and centre on the first one
        self.view.sel().clear()
        if versionAPI == 3:
            self.view.sel().add_all(pos)
        elif versionAPI == 2:
            map(self.view.sel().add, pos)
        self.view.show_at_center(pos[0])
Python
0.000009
eaa17491581cbb52242fbe543dd09929f537a8bc
Add option to ignore static.
mysettings.py
mysettings.py
from src.markdown.makrdown import jinja_aware_markdown

PREFERRED_URL_SCHEME = 'http'
SERVER_NAME = 'localhost:5000'

FLATPAGES_EXTENSION = '.md'
FLATPAGES_HTML_RENDERER = jinja_aware_markdown
FREEZER_IGNORE_404_NOT_FOUND = True
FLATPAGES_AUTO_RELOAD = True
FREEZER_STATIC_IGNORE = ["*"]

GITHUB_URL = 'https://github.com/JetBrains/kotlin'
TWITTER_URL = 'https://twitter.com/kotlin'
EDIT_ON_GITHUB_URL = 'https://github.com/JetBrains/kotlin-web-site/edit/master/'
PDF_URL = '/docs/kotlin-docs.pdf'
FORUM_URL = 'http://devnet.jetbrains.com/community/kotlin'
SITE_GITHUB_URL = 'http://github.com/JetBrains/kotlin-web-site'
CODE_URL = 'https://github.com/JetBrains/kotlin-examples/tree/master'
TEXT_USING_GRADLE = "In this tutorial we're going to be using Gradle but the same can be accomplished using either IntelliJ IDEA project structure or Maven. For details on setting up Gradle to work with Kotlin, see [Using Gradle](/docs/reference/using-gradle.html)."
from src.markdown.makrdown import jinja_aware_markdown

PREFERRED_URL_SCHEME = 'http'
SERVER_NAME = 'localhost:5000'

FLATPAGES_EXTENSION = '.md'
FLATPAGES_HTML_RENDERER = jinja_aware_markdown
FREEZER_IGNORE_404_NOT_FOUND = True
FLATPAGES_AUTO_RELOAD = True

GITHUB_URL = 'https://github.com/JetBrains/kotlin'
TWITTER_URL = 'https://twitter.com/kotlin'
EDIT_ON_GITHUB_URL = 'https://github.com/JetBrains/kotlin-web-site/edit/master/'
PDF_URL = '/docs/kotlin-docs.pdf'
FORUM_URL = 'http://devnet.jetbrains.com/community/kotlin'
SITE_GITHUB_URL = 'http://github.com/JetBrains/kotlin-web-site'
CODE_URL = 'https://github.com/JetBrains/kotlin-examples/tree/master'
TEXT_USING_GRADLE = "In this tutorial we're going to be using Gradle but the same can be accomplished using either IntelliJ IDEA project structure or Maven. For details on setting up Gradle to work with Kotlin, see [Using Gradle](/docs/reference/using-gradle.html)."
Python
0
9a40bd0d82c5215a8978a7d1c95f2910ee8f7f09
add UserToken model
api/models.py
api/models.py
from django.db import models
from django.db.models import Q
from django.utils import timezone
from django.contrib.auth.models import User


class MaintenanceRecord(models.Model):
    start_date = models.DateTimeField()
    end_date = models.DateTimeField(blank=True, null=True)
    title = models.CharField(max_length=256)
    message = models.TextField()
    disable_login = models.BooleanField(default=True)
    created_date = models.DateTimeField(auto_now_add=True)
    modified_date = models.DateTimeField(auto_now=True)

    @classmethod
    def active(cls, provider=None):
        """
        Return records that are active
        """
        now = timezone.now()
        records = MaintenanceRecord.objects.filter(
            Q(start_date__lt=now),
            Q(end_date__gt=now) | Q(end_date__isnull=True))
        return records.all()

    @classmethod
    def disable_login_access(cls, request):
        """
        Return true if any active record wants login disabled
        """
        disable_login = False
        records = MaintenanceRecord.active()
        for record in records:
            if record.disable_login:
                disable_login = True
        return disable_login


class UserToken(models.Model):
    token = models.CharField(max_length=128)
    user = models.ForeignKey(User)
    created_date = models.DateTimeField(auto_now_add=True)
    modified_date = models.DateTimeField(auto_now=True)
from django.db import models
from django.db.models import Q
from django.utils import timezone


class MaintenanceRecord(models.Model):
    start_date = models.DateTimeField()
    end_date = models.DateTimeField(blank=True, null=True)
    title = models.CharField(max_length=256)
    message = models.TextField()
    disable_login = models.BooleanField(default=True)
    created_date = models.DateTimeField(auto_now_add=True)
    modified_date = models.DateTimeField(auto_now=True)

    @classmethod
    def active(cls, provider=None):
        """
        Return records that are active
        """
        now = timezone.now()
        records = MaintenanceRecord.objects.filter(
            Q(start_date__lt=now),
            Q(end_date__gt=now) | Q(end_date__isnull=True))
        return records.all()

    @classmethod
    def disable_login_access(cls, request):
        """
        Return true if any active record wants login disabled
        """
        disable_login = False
        records = MaintenanceRecord.active()
        for record in records:
            if record.disable_login:
                disable_login = True
        return disable_login
Python
0
379068d31623662c0b349f26d1cd610612963b82
add re module to be more reliable
joinstsfile.py
joinstsfile.py
#!/usr/bin/env python3
import os, re

path=r'/home/ruan/git/stm/'
namespace={}
data=[]
for file in os.listdir(path):
    if re.match('A\d{6}\.\d{6}\.L\d{4}\.VERT', file):
        namespace[int(file.split('.')[2][2:])]=file
keys=sorted([x for x in namespace.keys()])
with open(os.path.join(path, namespace[keys[0]]), 'rb') as fo:
    for line in fo.readlines()[526:]:
        data.append([line.decode('ascii').split('\t')[1], ])
for i in keys:
    with open(os.path.join(path, namespace[i]), 'rb') as fo:
        j=0
        for line in fo.readlines()[526:]:
            data[j].append(line.decode('ascii').split('\t')[5])
            j+=1
with open(os.path.join(path, 'final.txt'), 'w') as fout:
    for line in data:
        for num in line:
            fout.write(num+'\t')
        fout.write('\n')
#!/usr/bin/env python3
import os

path='/home/ruan/git/stm/'
# path is the directory containing the data files; on Windows e.g. 'D:\\data\\'.
# The source files are overwritten in place, so keep a copy of the raw data.
for file in os.listdir(path):
    os.rename(os.path.join(path, file), os.path.join(path, file.split('.')[2][2:]))
filenu = len(os.listdir(path)) + 1
data=[]
with open(os.path.join(path, '001'), 'rb') as fo:
    for line in fo.readlines()[526:]:
        data.append([line.decode('ascii').split('\t')[1], line.decode('ascii').split('\t')[5]])
j=2
while j<filenu :
    with open(os.path.join(path, str(j).zfill(3)), 'rb') as fo:
        i=0
        for line in fo.readlines()[526:]:
            data[i].append(line.decode('ascii').split('\t')[5])
            i+=1
    j+=1
with open(os.path.join(path, 'final.txt'), 'w') as fout:
    i=len(data)
    j=len(data[0])
    k=0
    while k<i:
        l=0
        while l<j:
            fout.write(data[k][l])
            fout.write('\t')
            l+=1
        fout.write('\n')
        k=k+1
Python
0
85d5712fa1dde952783cbc8d78f904e08cfc9b50
Remove duplicated dependency
server/setup.py
server/setup.py
from pathlib import Path

from setuptools import Command, find_packages, setup


class GenerateCommand(Command):
    description = "generates manticore_server server protobuf + grpc code from protobuf specification file"
    user_options = []

    def initialize_options(self):
        pass

    def finalize_options(self):
        pass

    def run(self):
        from grpc.tools import protoc

        protoc.main(
            [
                "grpc_tools.protoc",
                "-I.",
                "--python_out=.",
                "--grpc_python_out=.",
                "--mypy_out=.",
                "./manticore_server/ManticoreServer.proto",
            ]
        )


setup(
    name="manticore_server",
    version="0.0.1",
    packages=find_packages(exclude=["tests", "tests.*"]),
    python_requires=">=3.7",
    install_requires=[
        f"manticore[native] @ file://{Path(__file__).parent.resolve()}/..",
        "protobuf~=3.20",
        "grpcio~=1.46",
        "crytic-compile>=0.2.2",
    ],
    extras_require={
        "dev": [
            "grpcio-tools",
            "mypy-protobuf",
            "shiv~=1.0.1",
            "types-setuptools",
            "black~=22.0",
            "isort==5.10.1",
            "mypy==0.942",
        ]
    },
    entry_points={
        "console_scripts": [
            "manticore_server=manticore_server.manticore_server:main",
        ],
        "distutils.commands": ["generate = GenerateCommand"],
    },
    cmdclass={
        "generate": GenerateCommand,
    },
)
from pathlib import Path

from setuptools import Command, find_packages, setup


class GenerateCommand(Command):
    description = "generates manticore_server server protobuf + grpc code from protobuf specification file"
    user_options = []

    def initialize_options(self):
        pass

    def finalize_options(self):
        pass

    def run(self):
        from grpc.tools import protoc

        protoc.main(
            [
                "grpc_tools.protoc",
                "-I.",
                "--python_out=.",
                "--grpc_python_out=.",
                "--mypy_out=.",
                "./manticore_server/ManticoreServer.proto",
            ]
        )


setup(
    name="manticore_server",
    version="0.0.1",
    packages=find_packages(exclude=["tests", "tests.*"]),
    python_requires=">=3.7",
    install_requires=[
        f"manticore[native] @ file://{Path(__file__).parent.resolve()}/..",
        "protobuf~=3.20",
        "grpcio~=1.46",
        "crytic-compile>=0.2.2",
    ],
    extras_require={
        "dev": [
            "grpcio-tools",
            "mypy-protobuf",
            "shiv~=1.0.1",
            "types-setuptools",
            "mypy-protobuf",
            "black~=22.0",
            "isort==5.10.1",
            "mypy==0.942",
        ]
    },
    entry_points={
        "console_scripts": [
            "manticore_server=manticore_server.manticore_server:main",
        ],
        "distutils.commands": ["generate = GenerateCommand"],
    },
    cmdclass={
        "generate": GenerateCommand,
    },
)
Python
0
ba43de958266a2906f3ee4cad23b20361db2637a
Add arguments to job
scripts/submitJob.py
scripts/submitJob.py
#!/usr/bin/env python
# SIM-CITY client
#
# Copyright 2015 Netherlands eScience Center
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

'''
Combines createTask and startJob, to create a task from a command and then
start a job.
'''
from __future__ import print_function
import simcity
import argparse
import sys
import json

if __name__ == '__main__':
    parser = argparse.ArgumentParser(description="start a job")
    parser.add_argument('host', help="host to run pilot job on")
    parser.add_argument('command', help="command to run")
    parser.add_argument('args', nargs='*', help="command arguments")
    parser.add_argument(
        '-m', '--max',
        help="only run if there are less than MAX jobs running", default=2)
    parser.add_argument(
        '-c', '--config', help="configuration file", default=None)
    parser.add_argument(
        '-i', '--input', help="JSON parameter file", default=None)
    args = parser.parse_args()

    simcity.init(config=args.config)

    try:
        properties = {
            'command': args.command,
            'arguments': args.args,
        }
        try:
            with open(args.input) as f:
                properties['input'] = json.load(f)
        except TypeError:
            pass

        task, job = simcity.run_task(properties, args.host, int(args.max))
    except Exception as ex:
        print("Task could not be added to the database: %s" % str(ex),
              file=sys.stderr)
        sys.exit(1)

    print("Task %s added to the database" % task.id)

    if job is None:
        print("Let task be processed by existing pilot-job scripts")
    else:
        print("Job %s (ID: %s) will process task" % (job['batch_id'], job.id))
#!/usr/bin/env python
# SIM-CITY client
#
# Copyright 2015 Netherlands eScience Center
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

'''
Combines createTask and startJob, to create a task from a command and then
start a job.
'''
from __future__ import print_function
import simcity
import argparse
import sys
import json

if __name__ == '__main__':
    parser = argparse.ArgumentParser(description="start a job")
    parser.add_argument('command', help="command to run")
    parser.add_argument('host', help="host to run pilot job on")
    parser.add_argument(
        '-m', '--max',
        help="only run if there are less than MAX jobs running", default=2)
    parser.add_argument(
        '-c', '--config', help="configuration file", default=None)
    parser.add_argument(
        '-i', '--input', help="JSON parameter file", default=None)
    args = parser.parse_args()

    simcity.init(config=args.config)

    try:
        properties = {'command': args.command}
        try:
            with open(args.input) as f:
                properties['input'] = json.load(f)
        except TypeError:
            pass

        task, job = simcity.run_task(properties, args.host, int(args.max))
    except Exception as ex:
        print("Task could not be added to the database: %s" % str(ex),
              file=sys.stderr)
        sys.exit(1)

    print("Task %s added to the database" % task.id)

    if job is None:
        print("Let task be processed by existing pilot-job scripts")
    else:
        print("Job %s (ID: %s) will process task" % (job['batch_id'], job.id))
Python
0.00008
8d56a45d0b01dff3e8cd041e7ba09c882d7cbb30
add logging to file and stdout
phabricator-proxy/main.py
phabricator-proxy/main.py
from cmath import log
from flask.logging import default_handler
from urllib.parse import urlparse, parse_qs
import flask
import json
import logging
import logging.handlers
import os
import requests

buildkite_api_token = os.getenv("BUILDKITE_API_TOKEN", "")
app = flask.Flask(__name__)
app.config["DEBUG"] = False
formatter = logging.Formatter('%(asctime)s %(levelname)s %(message)s')

errHandler = logging.FileHandler('error.log', encoding='utf-8',)
errHandler.setLevel(logging.ERROR)
errHandler.setFormatter(formatter)
app.logger.addHandler(errHandler)

rotatingHandler = logging.handlers.TimedRotatingFileHandler('info.log', when='D', encoding='utf-8', backupCount=8)
rotatingHandler.setFormatter(formatter)
app.logger.addHandler(rotatingHandler)

app.logger.setLevel(logging.INFO)

stdoutLog = logging.StreamHandler()
stdoutLog.setFormatter(formatter)
app.logger.addHandler(stdoutLog)

app.logger.removeHandler(default_handler)


@app.route('/', methods=['GET'])
def home():
    return "Hi LLVM!"


@app.route('/build', methods=['POST', 'GET'])
def build():
    app.logger.info('request: %s %s', flask.request, flask.request.url)
    app.logger.info('headers: %s', flask.request.headers)
    if flask.request.method == 'POST':
        app.logger.info('data: %s', flask.request.data)
        app.logger.info('form: %s', flask.request.form)
        url = urlparse(flask.request.url)
        params = parse_qs(url.query)
        build_env = {}
        for k, v in params.items():
            if len(v) == 1:
                build_env['ph_' + k] = v[0]
        refspec = 'main'
        if 'ph_scripts_refspec' in build_env:
            refspec = build_env['ph_scripts_refspec']
        build_request = {
            'commit': 'HEAD',
            'branch': refspec,
            'env': build_env,
            'message': f'D{build_env["ph_buildable_revision"]}',
        }
        app.logger.info('buildkite request: %s', build_request)
        headers = {'Authorization': f'Bearer {buildkite_api_token}'}
        response = requests.post(
            'https://api.buildkite.com/v2/organizations/llvm-project'
            '/pipelines/diff-checks/builds',
            json=build_request,
            headers=headers)
        app.logger.info('buildkite response: %s %s', response.status_code, response.text)
        rjs = json.loads(response.text)
        return rjs['web_url']
    else:
        return "expected POST request"


if __name__ == '__main__':
    app.run(host='0.0.0.0:8080')
import flask
import requests
import os
from urllib.parse import urlparse, parse_qs
import json

app = flask.Flask(__name__)
app.config["DEBUG"] = False
buildkite_api_token = os.getenv("BUILDKITE_API_TOKEN", "")


@app.route('/', methods=['GET'])
def home():
    return "Hi LLVM!"


@app.route('/build', methods=['POST', 'GET'])
def build():
    app.logger.info('request: %s %s', flask.request, flask.request.url)
    app.logger.info('headers: %s', flask.request.headers)
    if flask.request.method == 'POST':
        app.logger.info('data: %s', flask.request.data)
        app.logger.info('form: %s', flask.request.form)
        url = urlparse(flask.request.url)
        params = parse_qs(url.query)
        build_env = {}
        for k, v in params.items():
            if len(v) == 1:
                build_env['ph_' + k] = v[0]
        refspec = 'main'
        if 'ph_scripts_refspec' in build_env:
            refspec = build_env['ph_scripts_refspec']
        build_request = {
            'commit': 'HEAD',
            'branch': refspec,
            'env': build_env,
            'message': f'D{build_env["ph_buildable_revision"]}',
        }
        app.logger.info('buildkite request: %s', build_request)
        headers = {'Authorization': f'Bearer {buildkite_api_token}'}
        response = requests.post(
            'https://api.buildkite.com/v2/organizations/llvm-project'
            '/pipelines/diff-checks/builds',
            json=build_request,
            headers=headers)
        app.logger.info('buildkite response: %s %s', response.status_code, response.text)
        rjs = json.loads(response.text)
        return rjs['web_url']
    else:
        return "expected POST request"


if __name__ == '__main__':
    app.run(host='0.0.0.0:8080')
Python
0
c90dbc5007b5627b264493c2d16af79cff9c2af0
Add better custom has_permission check.
joku/checks.py
joku/checks.py
""" Specific checks. """ from discord.ext.commands import CheckFailure, check def is_owner(ctx): if not ctx.bot.owner_id == ctx.message.author.id: raise CheckFailure(message="You are not the owner.") return True def has_permissions(**perms): def predicate(ctx): if ctx.bot.owner_id == ctx.message.author.id: return True msg = ctx.message ch = msg.channel permissions = ch.permissions_for(msg.author) if all(getattr(permissions, perm, None) == value for perm, value in perms.items()): return True # Raise a custom error message raise CheckFailure(message="You do not have any of the required permissions: {}".format( ', '.join([perm.upper() for perm in perms]) )) return check(predicate)
""" Specific checks. """ from discord.ext.commands import CheckFailure def is_owner(ctx): if not ctx.bot.owner_id == ctx.message.author.id: raise CheckFailure(message="You are not the owner.") return True
Python
0
f9a827b41ed925e22bf1e873e5989bdd327fabbf
Add RefugeeCamp name formatting
api/models.py
api/models.py
from django.db import models


class RefugeeCamp(models.Model):
    # Location
    city = models.CharField(max_length=64)
    postcode = models.CharField(max_length=16)
    street = models.CharField(max_length=128)
    streetnumber = models.CharField(max_length=32)

    def __str__(self):
        return "{0} {1}: {2} {3}".format(self.postcode, self.city,
                                         self.street, self.streetnumber)


class ObjectCategory(models.Model):
    title = models.CharField(max_length=64)

    def __str__(self):
        return self.title


class ObjectSubCategory(models.Model):
    title = models.CharField(max_length=64)
    parent = models.ForeignKey(ObjectCategory)

    def __str__(self):
        return "{0}/{1}".format(self.parent, self.title)


class SimpleOffer(models.Model):
    category = models.ForeignKey(ObjectCategory, null=True)
    title = models.CharField(max_length=64)
    description = models.CharField(max_length=4096)
    create_time = models.DateTimeField(auto_now_add=True)
    image = models.ImageField(upload_to='api.UploadedFile/bytes/filename/mimetype',
                              blank=True, null=True)

    # Owner's info
    city = models.CharField(max_length=64)
    telephone = models.CharField(max_length=64)
    email = models.CharField(max_length=128)


class HelpTimeSearch(models.Model):
    start_time = models.DateTimeField()
    end_time = models.DateTimeField()
    camp = models.ForeignKey(RefugeeCamp)


class UploadedFile(models.Model):
    bytes = models.TextField()
    filename = models.CharField(max_length=255)
    mimetype = models.CharField(max_length=50)
from django.db import models


class RefugeeCamp(models.Model):
    # Location
    city = models.CharField(max_length=64)
    postcode = models.CharField(max_length=16)
    street = models.CharField(max_length=128)
    streetnumber = models.CharField(max_length=32)


class ObjectCategory(models.Model):
    title = models.CharField(max_length=64)

    def __str__(self):
        return self.title


class ObjectSubCategory(models.Model):
    title = models.CharField(max_length=64)
    parent = models.ForeignKey(ObjectCategory)

    def __str__(self):
        return "{0}/{1}".format(self.parent, self.title)


class SimpleOffer(models.Model):
    category = models.ForeignKey(ObjectCategory, null=True)
    title = models.CharField(max_length=64)
    description = models.CharField(max_length=4096)
    create_time = models.DateTimeField(auto_now_add=True)
    image = models.ImageField(upload_to='api.UploadedFile/bytes/filename/mimetype',
                              blank=True, null=True)

    # Owner's info
    city = models.CharField(max_length=64)
    telephone = models.CharField(max_length=64)
    email = models.CharField(max_length=128)


class HelpTimeSearch(models.Model):
    start_time = models.DateTimeField()
    end_time = models.DateTimeField()
    camp = models.ForeignKey(RefugeeCamp)


class UploadedFile(models.Model):
    bytes = models.TextField()
    filename = models.CharField(max_length=255)
    mimetype = models.CharField(max_length=50)
Python
0.000001
c720f9c385a785b8905991465fb74c75fca42220
fix bug
cloudify_cloudinit/__init__.py
cloudify_cloudinit/__init__.py
# Copyright (c) 2017-2018 Cloudify Platform Ltd. All rights reserved
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import yaml
import base64

from cloudify import ctx


class CloudInit(object):

    def __init__(self, operation_inputs):
        """
        Sets the properties that all operations need.
        :param operation_inputs: The inputs from the operation.
        """
        self.config = self.get_config(operation_inputs)

    @staticmethod
    def get_external_resource(config):
        for f in config.get('write_files', []):
            if not isinstance(f, dict):
                break
            try:
                content = f.get('content')
                if isinstance(content, dict):
                    resource_type = content.get('resource_type', '')
                    resource_name = content.get('resource_name', '')
                    template_variables = content.get('template_variables', {})
                    if 'file_resource' == resource_type:
                        f['content'] = ctx.get_resource_and_render(
                            resource_name, template_variables)
            except ValueError:
                ctx.logger.debug('No external resource recognized.')
                pass
        return config

    def get_config(self, inputs):
        config = ctx.node.properties.get('resource_config', {})
        config.update(
            ctx.instance.runtime_properties.get('resource_config', {}))
        config.update(inputs.get('resource_config', {}))
        config.update(self.get_external_resource(config.copy()))
        return config

    @property
    def __str__(self):
        """Override the string implementation of object."""
        cloud_init = yaml.dump(self.config)
        cloud_init_string = str(cloud_init).replace('!!python/unicode ', '')
        header = ctx.node.properties.get('header')
        if header:
            cloud_init_string = \
                header + '\n' + cloud_init_string
        if ctx.node.properties.get('encode_base64'):
            cloud_init_string = \
                base64.encodestring(cloud_init_string)
        return cloud_init_string

    def update(self, **_):
        ctx.instance.runtime_properties['resource_config'] = self.config
        ctx.instance.runtime_properties['cloud_config'] = self.__str__

    def delete(self, **_):
        # cleanup runtime properties
        keys = ctx.instance.runtime_properties.keys()
        for key in keys:
            del ctx.instance.runtime_properties[key]
# Copyright (c) 2017-2018 Cloudify Platform Ltd. All rights reserved
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import yaml
import base64

from cloudify import ctx


class CloudInit(object):

    def __init__(self, operation_inputs):
        """
        Sets the properties that all operations need.
        :param operation_inputs: The inputs from the operation.
        """
        self.config = self.get_config(operation_inputs)

    @staticmethod
    def get_external_resource(config):
        for f in config.get('write_files', []):
            if not isinstance(f, dict):
                break
            try:
                if 'content' not in f:
                    continue
                resource_type = f['content'].get('resource_type', '')
                resource_name = f['content'].get('resource_name', '')
                template_variables = f['content'].get('template_variables', {})
                if 'file_resource' == resource_type:
                    f['content'] = ctx.get_resource_and_render(
                        resource_name, template_variables)
            except ValueError:
                ctx.logger.debug('No external resource recognized.')
                pass
        return config

    def get_config(self, inputs):
        config = ctx.node.properties.get('resource_config', {})
        config.update(
            ctx.instance.runtime_properties.get('resource_config', {}))
        config.update(inputs.get('resource_config', {}))
        config.update(self.get_external_resource(config.copy()))
        return config

    @property
    def __str__(self):
        """Override the string implementation of object."""
        cloud_init = yaml.dump(self.config)
        cloud_init_string = str(cloud_init).replace('!!python/unicode ', '')
        header = ctx.node.properties.get('header')
        if header:
            cloud_init_string = \
                header + '\n' + cloud_init_string
        if ctx.node.properties.get('encode_base64'):
            cloud_init_string = \
                base64.encodestring(cloud_init_string)
        return cloud_init_string

    def update(self, **_):
        ctx.instance.runtime_properties['resource_config'] = self.config
        ctx.instance.runtime_properties['cloud_config'] = self.__str__

    def delete(self, **_):
        # cleanup runtime properties
        keys = ctx.instance.runtime_properties.keys()
        for key in keys:
            del ctx.instance.runtime_properties[key]
Python
0.000001
ee1f958cb3611ecc3af0329deda7fde5d5281c32
remove obsolete model creation
core/models/__init__.py
core/models/__init__.py
# -*- coding: utf-8 -*-
# flake8: noqa
"""
Collection of models
"""
from core.models.allocation_strategy import Allocation, AllocationStrategy
from core.models.application import Application, ApplicationMembership,\
    ApplicationScore, ApplicationBookmark
from core.models.application_tag import ApplicationTag
from core.models.application_version import ApplicationVersion, ApplicationVersionMembership
from core.models.cloud_admin import CloudAdministrator
from core.models.credential import Credential, ProviderCredential
from core.models.export_request import ExportRequest
from core.models.group import Group, IdentityMembership,\
    InstanceMembership, Leadership
from core.models.identity import Identity
from core.models.instance_tag import InstanceTag
from core.models.profile import UserProfile
from core.models.project import Project
from core.models.project_instance import ProjectInstance
from core.models.project_volume import ProjectVolume
from core.models.provider import AccountProvider, ProviderType, PlatformType,\
    Provider, ProviderInstanceAction, ProviderDNSServerIP
from core.models.license import LicenseType, License, ApplicationVersionLicense
from core.models.machine import ProviderMachine, ProviderMachineMembership
from core.models.machine_request import MachineRequest
from core.models.match import PatternMatch, MatchType
from core.models.maintenance import MaintenanceRecord
from core.models.instance import Instance, InstanceStatusHistory,\
    InstanceStatus, InstanceAction, InstanceSource
from core.models.node import NodeController
from core.models.boot_script import ScriptType, BootScript, ApplicationVersionBootScript
from core.models.quota import Quota
from core.models.resource_request import ResourceRequest
from core.models.size import Size
from core.models.t import T
from core.models.tag import Tag
from core.models.user import AtmosphereUser
from core.models.volume import Volume
from core.models.allocation_strategy import Allocation, AllocationStrategy
from core.models.application import Application, ApplicationMembership,\
    ApplicationScore, ApplicationBookmark
from core.models.application_tag import ApplicationTag
from core.models.application_version import ApplicationVersion, ApplicationVersionMembership
from core.models.cloud_admin import CloudAdministrator
from core.models.credential import Credential, ProviderCredential
from core.models.export_request import ExportRequest
from core.models.group import Group, IdentityMembership,\
    InstanceMembership, Leadership
from core.models.identity import Identity
from core.models.instance_tag import InstanceTag
from core.models.profile import UserProfile
from core.models.project import Project
from core.models.project_instance import ProjectInstance
from core.models.project_volume import ProjectVolume
from core.models.provider import AccountProvider, ProviderType, PlatformType,\
    Provider, ProviderInstanceAction, ProviderDNSServerIP
from core.models.license import LicenseType, License, ApplicationVersionLicense
from core.models.machine import ProviderMachine, ProviderMachineMembership
from core.models.machine_request import MachineRequest
from core.models.match import PatternMatch, MatchType
from core.models.maintenance import MaintenanceRecord
from core.models.instance import Instance, InstanceStatusHistory,\
    InstanceStatus, InstanceAction, InstanceSource
from core.models.node import NodeController
from core.models.boot_script import ScriptType, BootScript, ApplicationVersionBootScript
from core.models.quota import Quota
from core.models.resource_request import ResourceRequest
from core.models.size import Size
from core.models.t import T
from core.models.tag import Tag
from core.models.user import AtmosphereUser
from core.models.volume import Volume


def get_or_create(Model, *args, **kwargs):
    return Model.objects.get_or_create(*args, **kwargs)[0]


def create_machine_model(name, provider, provider_alias,
                         created_by, description):
    name = _get_valid_name(name, provider_alias)
    new_machine = get_or_create(Application,
                                name=name,
                                description=description,
                                created_by=created_by)
    provider_machine = get_or_create(ProviderMachine,
                                     machine=new_machine,
                                     provider=provider,
                                     identifier=provider_alias)
    return (new_machine, provider_machine)


def get_or_create_instance_model(name, provider,
                                 provider_alias, image_alias,
                                 ip_address, created_by):
    name = _get_valid_name(name, provider_alias)
    provider_machine = _get_or_create_provider_machine(
        provider, image_alias, created_by
    )
    return get_or_create(Instance,
                         name=name,
                         provider_alias=provider_alias,
                         provider_machine=provider_machine,
                         ip_address=ip_address,
                         created_by=created_by)


def _get_valid_name(name, alias):
    """
    Make sure there is a good default name if no name exists.
    """
    if name is None or len(name) == 0:
        name = alias
    return name


def _get_or_create_provider_machine(provider, image_alias, created_by):
    """
    Get or create a ProviderMachine.
    If ProviderMachine does not already exist
    create a new Machine and related ProviderMachine.
    """
    provider_machine = None
    filtered_machines = ProviderMachine.objects.filter(identifier=image_alias)
    if filtered_machines:
        provider_machine = filtered_machines[0]
    else:
        (created, provider_machine) = create_machine_model(
            None,
            provider,
            image_alias,
            created_by,
            "Created to support instanceModel")
    return provider_machine
Python
0.000003
1c3e8def9f46ee0f21d1172287af0b4fadf67884
Add some more backwards compatibility: also add intent to outcome
src/wit_ros/wit_node.py
src/wit_ros/wit_node.py
#!/usr/bin/env python
"""ROS node for the Wit.ai API"""

global APIKEY
APIKEY = None

import rospy
import requests
import json

from wit import Wit

from wit_ros.srv import Interpret, InterpretResponse, ListenAndInterpret, ListenAndInterpretResponse
from wit_ros.msg import Outcome, Entity


class WitRos(object):
    def __init__(self, api_key):
        self.wit = Wit(api_key)
        self.pub = rospy.Publisher('stt', Outcome, queue_size=1)

    def start(self):
        rospy.Service('wit/interpret', Interpret, self.interpret)
        # rospy.Service('wit/listen_interpret', ListenAndInterpret, self.listen_and_interpret)

    def parse_response(self, response, klass):
        rospy.logdebug("Data: '{0}'".format(json.dumps(response, indent=4, separators=(',', ': '))))

        ros_entities = []

        if "WARNING" in response:
            rospy.logwarn("Response contains a warning: {warn}".format(warn=response["WARNING"]))

        outcome = None
        entities = []

        if "entities" in response:
            entities = response["entities"]
        elif "outcomes" in response:
            outcome = response["outcomes"][0]
            entities = outcome["entities"]

        for entity_name, entity_properties in entities.iteritems():
            entity_properties = entity_properties[0]
            rospy.logdebug("Entity '{name}' has properties{prop}".format(name=entity_name, prop=entity_properties))

            entity = Entity(name=str(entity_name))
            if 'type' in entity_properties:
                entity.type = str(entity_properties["type"])
            if 'value' in entity_properties:
                entity.value = str(entity_properties["value"])
            if 'unit' in entity_properties:
                entity.unit = str(entity_properties["unit"])
            if 'suggested' in entity_properties:
                entity.suggested = str(entity_properties["suggested"])
            if 'confidence' in entity_properties:
                entity.confidence = float(entity_properties["confidence"])
            rospy.logdebug("Adding {ent}".format(ent=entity))
            ros_entities += [entity]

        outcome = Outcome(entities = ros_entities,
                          intent = str(outcome["intent"]) if outcome else None,
                          text = str(response["_text"]))

        response = klass( msg_body = str(response),
                          msg_id = str(response["msg_id"]),
                          outcome = outcome)
        self.pub.publish(outcome)
        return response

    def interpret(self, rosrequest):
        sentence = rosrequest.sentence
        rospy.logdebug("Interpreting '{0}'".format(sentence))
        wit_response = self.wit.message(sentence)
        rospy.logdebug("WitResponse: {0}".format(wit_response))
        #response = json.loads(wit_response)
        #rospy.logdebug("Response: {0}".format(response))

        return self.parse_response(wit_response, InterpretResponse)

    # TODO: wit.voice_query_auto used to take care of oudio recording, now it needs an audio file or encoded audio byte
    # def listen_and_interpret(self, rosrequest):
    #     rospy.logdebug("About to record audio")
    #     response = json.loads(self.wit.voice_query_auto(APIKEY))
    #     rospy.logdebug("Response: {0}".format(response))
    #     if not response:
    #         return None
    #
    #     return self.parse_response(response, ListenAndInterpretResponse)


if __name__ == "__main__":
    rospy.init_node("wit_ros", log_level=rospy.INFO)
    if rospy.has_param('~api_key'):
        APIKEY = rospy.get_param("~api_key")
        wr = WitRos(APIKEY)
        wr.start()
        rospy.spin()
    else:
        rospy.logerr("No API key set (via parameter server). Please set one. " +
                     "API keys can be obtained via the http://www.wit.ai")
#!/usr/bin/env python
"""ROS node for the Wit.ai API"""

global APIKEY
APIKEY = None

import rospy
import requests
import json

from wit import Wit

from wit_ros.srv import Interpret, InterpretResponse, ListenAndInterpret, ListenAndInterpretResponse
from wit_ros.msg import Outcome, Entity


class WitRos(object):
    def __init__(self, api_key):
        self.wit = Wit(api_key)
        self.pub = rospy.Publisher('stt', Outcome, queue_size=1)

    def start(self):
        rospy.Service('wit/interpret', Interpret, self.interpret)
        # rospy.Service('wit/listen_interpret', ListenAndInterpret, self.listen_and_interpret)

    def parse_response(self, response, klass):
        rospy.logdebug("Data: '{0}'".format(json.dumps(response, indent=4, separators=(',', ': '))))

        ros_entities = []

        if "WARNING" in response:
            rospy.logwarn("Response contains a warning: {warn}".format(warn=response["WARNING"]))

        entities = []

        if "entities" in response:
            entities = response["entities"]
        elif "outcomes" in response:
            entities = response["outcomes"][0]["entities"]

        for entity_name, entity_properties in entities.iteritems():
            entity_properties = entity_properties[0]
            rospy.logdebug("Entity '{name}' has properties{prop}".format(name=entity_name, prop=entity_properties))

            entity = Entity(name=str(entity_name))
            if 'type' in entity_properties:
                entity.type = str(entity_properties["type"])
            if 'value' in entity_properties:
                entity.value = str(entity_properties["value"])
            if 'unit' in entity_properties:
                entity.unit = str(entity_properties["unit"])
            if 'suggested' in entity_properties:
                entity.suggested = str(entity_properties["suggested"])
            if 'confidence' in entity_properties:
                entity.confidence = float(entity_properties["confidence"])
            rospy.logdebug("Adding {ent}".format(ent=entity))
            ros_entities += [entity]

        outcome = Outcome(entities = ros_entities,
                          intent = str(response.get("intent", "")),
                          text = str(response["_text"]))

        response = klass( msg_body = str(response),
                          msg_id = str(response["msg_id"]),
                          outcome = outcome)
        self.pub.publish(outcome)
        return response

    def interpret(self, rosrequest):
        sentence = rosrequest.sentence
        rospy.logdebug("Interpreting '{0}'".format(sentence))
        wit_response = self.wit.message(sentence)
        rospy.logdebug("WitResponse: {0}".format(wit_response))
        #response = json.loads(wit_response)
        #rospy.logdebug("Response: {0}".format(response))

        return self.parse_response(wit_response, InterpretResponse)

    # TODO: wit.voice_query_auto used to take care of oudio recording, now it needs an audio file or encoded audio byte
    # def listen_and_interpret(self, rosrequest):
    #     rospy.logdebug("About to record audio")
    #     response = json.loads(self.wit.voice_query_auto(APIKEY))
    #     rospy.logdebug("Response: {0}".format(response))
    #     if not response:
    #         return None
    #
    #     return self.parse_response(response, ListenAndInterpretResponse)


if __name__ == "__main__":
    rospy.init_node("wit_ros", log_level=rospy.INFO)
    if rospy.has_param('~api_key'):
        APIKEY = rospy.get_param("~api_key")
        wr = WitRos(APIKEY)
        wr.start()
        rospy.spin()
    else:
        rospy.logerr("No API key set (via parameter server). Please set one. " +
                     "API keys can be obtained via the http://www.wit.ai")
Python
0
1b2f0be67a8372a652b786c8b183cd5edf1807cd
Swap back to Fuzzer, no monkey patching
config/fuzz_pox_mesh.py
config/fuzz_pox_mesh.py
from experiment_config_lib import ControllerConfig
from sts.topology import MeshTopology
from sts.control_flow import Fuzzer, Interactive
from sts.input_traces.input_logger import InputLogger
from sts.invariant_checker import InvariantChecker
from sts.simulation_state import SimulationConfig

# Use POX as our controller
command_line = ('''./pox.py --verbose openflow.debug '''
                #sts.syncproto.pox_syncer '''
                '''forwarding.l2_multi '''
                #'''sts.util.socket_mux.pox_monkeypatcher '''
                '''openflow.of_01 --address=__address__ --port=__port__''')
controllers = [ControllerConfig(command_line, cwd="betta")]
topology_class = MeshTopology
topology_params = "num_switches=2"
dataplane_trace = "dataplane_traces/ping_pong_same_subnet.trace"

simulation_config = SimulationConfig(controller_configs=controllers,
                                     topology_class=topology_class,
                                     topology_params=topology_params,
                                     dataplane_trace=dataplane_trace,
                                     multiplex_sockets=False)

control_flow = Fuzzer(simulation_config, check_interval=80,
                      halt_on_violation=False,
                      input_logger=InputLogger(),
                      invariant_check=InvariantChecker.check_connectivity)
#control_flow = Interactive(simulation_config, input_logger=InputLogger())
from experiment_config_lib import ControllerConfig
from sts.topology import MeshTopology
from sts.control_flow import Fuzzer
from sts.input_traces.input_logger import InputLogger
from sts.invariant_checker import InvariantChecker
from sts.simulation_state import SimulationConfig

# Use POX as our controller
command_line = ('''./pox.py --verbose --no-cli sts.syncproto.pox_syncer '''
                '''samples.topo forwarding.l2_multi '''
                '''sts.util.socket_mux.pox_monkeypatcher '''
                '''openflow.of_01 --address=../sts_socket_pipe''')
controllers = [ControllerConfig(command_line, address="sts_socket_pipe",
                                cwd="pox", sync="tcp:localhost:18899")]
topology_class = MeshTopology
topology_params = "num_switches=4"
dataplane_trace = "dataplane_traces/ping_pong_same_subnet_4_switches.trace"

simulation_config = SimulationConfig(controller_configs=controllers,
                                     topology_class=topology_class,
                                     topology_params=topology_params,
                                     dataplane_trace=dataplane_trace,
                                     multiplex_sockets=True)

control_flow = Fuzzer(simulation_config, check_interval=1,
                      halt_on_violation=True,
                      input_logger=InputLogger(),
                      invariant_check=InvariantChecker.check_liveness)
Python
0
ffd14af829bd3f7bf52cb0af5306550b51ab8712
Remove mox from tests/unit/compute/test_compute_xen.py
nova/tests/unit/compute/test_compute_xen.py
nova/tests/unit/compute/test_compute_xen.py
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

"""Tests for expectations of behaviour from the Xen driver."""

import mock
from oslo_utils import importutils

from nova.compute import power_state
import nova.conf
from nova import context
from nova import objects
from nova.objects import instance as instance_obj
from nova.tests.unit.compute import eventlet_utils
from nova.tests.unit import fake_instance
from nova.tests.unit.virt.xenapi import stubs
from nova.virt.xenapi import vm_utils

CONF = nova.conf.CONF
CONF.import_opt('compute_manager', 'nova.service')


class ComputeXenTestCase(stubs.XenAPITestBaseNoDB):
    def setUp(self):
        super(ComputeXenTestCase, self).setUp()
        self.flags(compute_driver='xenapi.XenAPIDriver')
        self.flags(connection_url='test_url',
                   connection_password='test_pass',
                   group='xenserver')

        stubs.stubout_session(self.stubs, stubs.FakeSessionForVMTests)
        self.compute = importutils.import_object(CONF.compute_manager)
        # execute power syncing synchronously for testing:
        self.compute._sync_power_pool = eventlet_utils.SyncPool()

    def test_sync_power_states_instance_not_found(self):
        db_instance = fake_instance.fake_db_instance()
        ctxt = context.get_admin_context()
        instance_list = instance_obj._make_instance_list(ctxt,
                objects.InstanceList(), [db_instance], None)
        instance = instance_list[0]

        @mock.patch.object(vm_utils, 'lookup')
        @mock.patch.object(objects.InstanceList, 'get_by_host')
        @mock.patch.object(self.compute.driver, 'get_num_instances')
        @mock.patch.object(self.compute, '_sync_instance_power_state')
        def do_test(mock_compute_sync_powerstate,
                    mock_compute_get_num_instances,
                    mock_instance_list_get_by_host,
                    mock_vm_utils_lookup):
            mock_instance_list_get_by_host.return_value = instance_list
            mock_compute_get_num_instances.return_value = 1
            mock_vm_utils_lookup.return_value = None

            self.compute._sync_power_states(ctxt)

            mock_instance_list_get_by_host.assert_called_once_with(
                ctxt, self.compute.host, expected_attrs=[], use_slave=True)
            mock_compute_get_num_instances.assert_called_once_with()
            mock_compute_sync_powerstate.assert_called_once_with(
                ctxt, instance, power_state.NOSTATE, use_slave=True)
            mock_vm_utils_lookup.assert_called_once_with(
                self.compute.driver._session, instance['name'], False)

        do_test()
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

"""Tests for expectations of behaviour from the Xen driver."""

from oslo_utils import importutils

from nova.compute import power_state
import nova.conf
from nova import context
from nova import objects
from nova.objects import instance as instance_obj
from nova.tests.unit.compute import eventlet_utils
from nova.tests.unit import fake_instance
from nova.tests.unit.virt.xenapi import stubs
from nova.virt.xenapi import vm_utils

CONF = nova.conf.CONF
CONF.import_opt('compute_manager', 'nova.service')


class ComputeXenTestCase(stubs.XenAPITestBaseNoDB):
    def setUp(self):
        super(ComputeXenTestCase, self).setUp()
        self.flags(compute_driver='xenapi.XenAPIDriver')
        self.flags(connection_url='test_url',
                   connection_password='test_pass',
                   group='xenserver')

        stubs.stubout_session(self.stubs, stubs.FakeSessionForVMTests)
        self.compute = importutils.import_object(CONF.compute_manager)
        # execute power syncing synchronously for testing:
        self.compute._sync_power_pool = eventlet_utils.SyncPool()

    def test_sync_power_states_instance_not_found(self):
        db_instance = fake_instance.fake_db_instance()
        ctxt = context.get_admin_context()
        instance_list = instance_obj._make_instance_list(ctxt,
                objects.InstanceList(), [db_instance], None)
        instance = instance_list[0]

        self.mox.StubOutWithMock(objects.InstanceList, 'get_by_host')
        self.mox.StubOutWithMock(self.compute.driver, 'get_num_instances')
        self.mox.StubOutWithMock(vm_utils, 'lookup')
        self.mox.StubOutWithMock(self.compute, '_sync_instance_power_state')

        objects.InstanceList.get_by_host(ctxt,
                self.compute.host, expected_attrs=[],
                use_slave=True).AndReturn(instance_list)
        self.compute.driver.get_num_instances().AndReturn(1)
        vm_utils.lookup(self.compute.driver._session, instance['name'],
                False).AndReturn(None)
        self.compute._sync_instance_power_state(ctxt, instance,
                power_state.NOSTATE)

        self.mox.ReplayAll()

        self.compute._sync_power_states(ctxt)
Python
0.000002
6eeadf2246c5aa09bbec6fd5b6bb0d9fde25d348
Remove dots from rendered maze
bin/maze.py
bin/maze.py
# Use case: A randomly generated maze won when the user reaches the end
# Example:
from vengeance.game import Direction
from vengeance.game import Game
from vengeance.game import Location
import random

width = 8
height = 8

north = Direction('north')
south = Direction('south')
north.opposite = south
east = Direction('east')
west = Direction('west')
east.opposite = west

def set_exits(x, y, location_grid, visited_locations):
    location = location_grid[x][y]
    allowed_location_coords = []
    if x in range(0, width-1) and not_visited(location_grid[x+1][y]):
        allowed_location_coords.append([x+1, y])
    if x in range(1, width) and not_visited(location_grid[x-1][y]):
        allowed_location_coords.append([x-1, y])
    if y in range(0, height-1) and not_visited(location_grid[x][y+1]):
        allowed_location_coords.append([x, y+1])
    if y in range(1, height) and not_visited(location_grid[x][y-1]):
        allowed_location_coords.append([x, y-1])

    count = len(allowed_location_coords)
    if count == 0:
        if len(visited_locations) != 0:
            previous_location = visited_locations.pop()
            for i in range(width):
                for j in range(height):
                    current_location = location_grid[i][j]
                    if previous_location.name == current_location.name:
                        set_exits(i, j, location_grid, visited_locations)
        return

    visited_locations.append(location)
    location_coords = allowed_location_coords[random.randrange(count)]
    new_x = location_coords[0]
    new_y = location_coords[1]
    new_location = location_grid[new_x][new_y]
    direction = None
    if new_x < x:
        direction = west
    elif new_x > x:
        direction = east
    elif new_y < y:
        direction = south
    else:
        direction = north
    location.add_exit(direction, new_location)

    set_exits(new_x, new_y, location_grid, visited_locations)

def not_visited(location):
    return not location.exits

def render_maze(location_grid):
    result = ' ' + width * '_ '
    result += '\n'
    for y in range(height-1, -1, -1):
        result += '|'
        for x in range(width):
            location = location_grid[x][y]
            if y == 0 or has_south_wall(location):
                result += '_'
            else:
                result += ' '
            if x == width-1 or has_east_wall(location):
                result += '|'
            else:
                result += ' '
        result += '\n'
    return result

def has_south_wall(location):
    for exit in location.exits:
        if exit.direction.name == south.name:
            return False
    return True

def has_east_wall(location):
    for exit in location.exits:
        if exit.direction.name == east.name:
            return False
    return True

def random_coords():
    return random.randrange(width), random.randrange(height)

# Create maze (a grid of locations)
location_grid = []
for x in range(width):
    locations_at_x = []
    location_grid.append(locations_at_x)
    for y in range(height):
        locations_at_x.append(Location('' + str(x) + ', ' + str(y)))

# Pick a random starting location
starting_x, starting_y = random_coords()

visited_locations = []
set_exits(starting_x, starting_y, location_grid, visited_locations)

print(render_maze(location_grid))

locations = []
for x in range(width):
    for y in range(height):
        locations.append(location_grid[x][y])

game = Game(locations)
game.run()
# Use case: A randomly generated maze won when the user reaches the end
# Example:
from vengeance.game import Direction
from vengeance.game import Game
from vengeance.game import Location
import random

width = 8
height = 8

north = Direction('north')
south = Direction('south')
north.opposite = south
east = Direction('east')
west = Direction('west')
east.opposite = west

def set_exits(x, y, location_grid, visited_locations):
    location = location_grid[x][y]
    allowed_location_coords = []
    if x in range(0, width-1) and not_visited(location_grid[x+1][y]):
        allowed_location_coords.append([x+1, y])
    if x in range(1, width) and not_visited(location_grid[x-1][y]):
        allowed_location_coords.append([x-1, y])
    if y in range(0, height-1) and not_visited(location_grid[x][y+1]):
        allowed_location_coords.append([x, y+1])
    if y in range(1, height) and not_visited(location_grid[x][y-1]):
        allowed_location_coords.append([x, y-1])

    count = len(allowed_location_coords)
    if count == 0:
        if len(visited_locations) != 0:
            previous_location = visited_locations.pop()
            for i in range(width):
                for j in range(height):
                    current_location = location_grid[i][j]
                    if previous_location.name == current_location.name:
                        set_exits(i, j, location_grid, visited_locations)
        return

    visited_locations.append(location)
    location_coords = allowed_location_coords[random.randrange(count)]
    new_x = location_coords[0]
    new_y = location_coords[1]
    new_location = location_grid[new_x][new_y]
    direction = None
    if new_x < x:
        direction = west
    elif new_x > x:
        direction = east
    elif new_y < y:
        direction = south
    else:
        direction = north
    location.add_exit(direction, new_location)

    set_exits(new_x, new_y, location_grid, visited_locations)

def not_visited(location):
    return not location.exits

def render_maze(location_grid):
    result = '.' + width * '_.'
    result += '\n'
    for y in range(height-1, -1, -1):
        result += '|'
        for x in range(width):
            location = location_grid[x][y]
            if y == 0 or has_south_wall(location):
                result += '_'
            else:
                result += ' '
            if x == width-1 or has_east_wall(location):
                result += '|'
            else:
                result += '.'
        result += '\n'
    return result

def has_south_wall(location):
    for exit in location.exits:
        if exit.direction.name == south.name:
            return False
    return True

def has_east_wall(location):
    for exit in location.exits:
        if exit.direction.name == east.name:
            return False
    return True

def random_coords():
    return random.randrange(width), random.randrange(height)

# Create maze (a grid of locations)
location_grid = []
for x in range(width):
    locations_at_x = []
    location_grid.append(locations_at_x)
    for y in range(height):
        locations_at_x.append(Location('' + str(x) + ', ' + str(y)))

# Pick a random starting location
starting_x, starting_y = random_coords()

visited_locations = []
set_exits(starting_x, starting_y, location_grid, visited_locations)

print(render_maze(location_grid))

locations = []
for x in range(width):
    for y in range(height):
        locations.append(location_grid[x][y])

game = Game(locations)
game.run()
Python
0.000002
d0ca9aa6cf39c4743e398f65e4c7f5bbc3c03d78
Clarify API sample
api_sample.py
api_sample.py
# Add ./lib to the path for importing nassl
import os
import sys
sys.path.insert(1, os.path.join(os.path.dirname(os.path.abspath(__file__)), 'lib'))

from sslyze.plugins_finder import PluginsFinder
from sslyze.plugins_process_pool import PluginsProcessPool
from sslyze.server_connectivity import ServerConnectivityInfo, ServerConnectivityError
from sslyze.ssl_settings import TlsWrappedProtocolEnum

# Setup the servers to scan and ensure they are reachable
hostname = 'smtp.gmail.com'
try:
    server_info = ServerConnectivityInfo(hostname=hostname, port=587,
                                         tls_wrapped_protocol=TlsWrappedProtocolEnum.STARTTLS_SMTP)
    server_info.test_connectivity_to_server()
except ServerConnectivityError as e:
    # Could not establish an SSL connection to the server
    raise RuntimeError('Error when connecting to {}: {}'.format(hostname, e.error_msg))

# Get the list of available plugins
sslyze_plugins = PluginsFinder()

# Create a process pool to run scanning commands concurrently
plugins_process_pool = PluginsProcessPool(sslyze_plugins)

# Queue some scan commands; the commands are same as what is described in the SSLyze CLI --help text.
print '\nQueuing some commands...'
plugins_process_pool.queue_plugin_task(server_info, 'sslv3')
plugins_process_pool.queue_plugin_task(server_info, 'reneg')
plugins_process_pool.queue_plugin_task(server_info, 'certinfo_basic')

# Process the results
reneg_result = None
print '\nProcessing results...'
for server_info, plugin_command, plugin_result in plugins_process_pool.get_results():
    # Each plugin result has attributes with the information you're looking for, specific to each plugin
    # All these attributes are documented within each plugin's module
    if plugin_result.plugin_command == 'sslv3':
        # Do something with the result
        print 'SSLV3 cipher suites'
        for cipher in plugin_result.accepted_cipher_list:
            print '    {}'.format(cipher.name)
    elif plugin_result.plugin_command == 'reneg':
        reneg_result = plugin_result
        print 'Client renegotiation: {}'.format(plugin_result.accepts_client_renegotiation)
        print 'Secure renegotiation: {}'.format(plugin_result.supports_secure_renegotiation)
    elif plugin_result.plugin_command == 'certinfo_basic':
        print 'Server Certificate CN: {}'.format(plugin_result.certificate_chain[0].as_dict['subject']['commonName'])

# All plugin results also always expose two APIs:
# What the SSLyze CLI would output to the console
print '\nSSLyze text output'
for line in reneg_result.as_text():
    print line

print '\nSSLyze XML node'
# The XML node for the SSLyze CLI XML output
print reneg_result.as_xml()

# You should use the process pool to make scans quick, but you can also call plugins directly
from sslyze.plugins.openssl_cipher_suites_plugin import OpenSslCipherSuitesPlugin
print '\nCalling a plugin directly...'
plugin = OpenSslCipherSuitesPlugin()
plugin_result = plugin.process_task(server_info, 'tlsv1')
for cipher in plugin_result.accepted_cipher_list:
    print '    {}'.format(cipher.name)
# Add ./lib to the path for importing nassl
import os
import sys
sys.path.insert(1, os.path.join(os.path.dirname(os.path.abspath(__file__)), 'lib'))

from sslyze.plugins_finder import PluginsFinder
from sslyze.plugins_process_pool import PluginsProcessPool
from sslyze.server_connectivity import ServerConnectivityInfo, ServerConnectivityError
from sslyze.ssl_settings import TlsWrappedProtocolEnum

# Setup the servers to scan and ensure they are reachable
hostname = 'smtp.gmail.com'
try:
    server_info = ServerConnectivityInfo(hostname=hostname, port=587,
                                         tls_wrapped_protocol=TlsWrappedProtocolEnum.STARTTLS_SMTP)
    server_info.test_connectivity_to_server()
except ServerConnectivityError as e:
    # Could not establish an SSL connection to the server
    raise RuntimeError('Error when connecting to {}: {}'.format(hostname, e.error_msg))

# Get the list of available plugins
sslyze_plugins = PluginsFinder()

# Create a process pool to run scanning commands concurrently
plugins_process_pool = PluginsProcessPool(sslyze_plugins)

# Queue some scan commands
print '\nQueuing some commands...'
plugins_process_pool.queue_plugin_task(server_info, 'sslv3')
plugins_process_pool.queue_plugin_task(server_info, 'reneg')
plugins_process_pool.queue_plugin_task(server_info, 'certinfo_basic')

# Process the results
reneg_result = None
print '\nProcessing results...'
for server_info, plugin_command, plugin_result in plugins_process_pool.get_results():
    # Each plugin result has attributes with the information you're looking for, specific to each plugin
    # All these attributes are documented within each plugin's module
    if plugin_result.plugin_command == 'sslv3':
        # Do something with the result
        print 'SSLV3 cipher suites'
        for cipher in plugin_result.accepted_cipher_list:
            print '    {}'.format(cipher.name)
    elif plugin_result.plugin_command == 'reneg':
        reneg_result = plugin_result
        print 'Client renegotiation: {}'.format(plugin_result.accepts_client_renegotiation)
        print 'Secure renegotiation: {}'.format(plugin_result.supports_secure_renegotiation)
    elif plugin_result.plugin_command == 'certinfo_basic':
        print 'Server Certificate CN: {}'.format(plugin_result.certificate_chain[0].as_dict['subject']['commonName'])

# All plugin results also always expose two APIs:
# What the SSLyze CLI would output to the console
print '\nSSLyze text output'
for line in reneg_result.as_text():
    print line

print '\nSSLyze XML node'
# The XML node for the SSLyze CLI XML output
print reneg_result.as_xml()

# You should use the process pool to make scans quick, but you can also call plugins directly
from sslyze.plugins.openssl_cipher_suites_plugin import OpenSslCipherSuitesPlugin
print '\nCalling a plugin directly...'
plugin = OpenSslCipherSuitesPlugin()
plugin_result = plugin.process_task(server_info, 'tlsv1')
for cipher in plugin_result.accepted_cipher_list:
    print '    {}'.format(cipher.name)
Python
0.000003
0e2548637d9726dc549b13abc3a6b38c51e300bd
not count , and . values in allele count
franklin/snv/readers.py
franklin/snv/readers.py
'''
Created on 2011 aza 21

@author: peio
'''

class VcfParser(object):
    'A vcf reader'
    def __init__(self, fpath):
        'Class initiator'
        self._fpath = fpath
        self.header = None
        self._get_header()
        self._index = None

    def _get_version(self):
        'version of the vcf'
        version_unformat = self.header['format']
        return version_unformat.split('v')[1]
    version = property(_get_version)

    def _get_header(self):
        'it returns the header'
        if self.header is not None:
            return self.header
        headers = {}
        for line in open(self._fpath):
            if not line.startswith('#'):
                break
            if line.startswith('##'):
                line = line.strip()
                line = line.lstrip('##')
                kind, value = line.split('=', 1)
                if kind == 'FILTER':
                    if kind not in headers:
                        headers[kind] = {}
                    filter_type, filter_info = value.split(',', 1)
                    headers[kind][filter_type] = filter_info.strip('"')
                elif kind in ('FORMAT', 'INFO'):
                    if kind not in headers:
                        headers[kind] = {}
                    name, example, type_, desc = value.split(',')
                    headers[kind][name] = {'type':type_, 'example':example,
                                           'description':desc}
                else:
                    headers[kind] = value
            else:
                line = line.lstrip('#')
                headers['colnames'] = line.split()
        self.header = headers

    def _get_vcfs(self):
        'vcf generator'
        colnames = self.header['colnames']
        for line in open(self._fpath):
            if line.startswith('#'):
                continue
            yield self._parse_vcf_line(line, colnames)
    vcfs = property(_get_vcfs)

    def _parse_vcf_line(self, line, colnames):
        '''It parses the cvf svn line'''
        vcf_items = line.split()
        vcf = dict(zip(colnames[:9], vcf_items[:9]))
        # reformat FILTER
        vcf['FILTER'] = vcf['FILTER'].split(';')
        # REformat INFO
        info = vcf['INFO']
        vcf['INFO'] = {}
        for info_ in info.split(';'):
            info_key, info_value = info_.split('=')
            vcf['INFO'][info_key] = info_value
        # reformat FORMAT
        format_string = vcf['FORMAT']
        vcf['FORMAT'] = {}
        for format_ in format_string.split(';'):
            format_key, format_value = format_.split(':')
            vcf['FORMAT'][format_key] = format_value
        vcf['samples'] = {}
        for samples in zip(colnames[9:], vcf_items[9:]):
            allele_count = {}
            alleles, values = samples[1].split(':')
            for index , allele in enumerate(alleles.split('|')):
                allele = vcf['REF'] if allele == 0 else vcf['ALT']
                try:
                    count_ = int(values.split(',')[index])
                except ValueError:
                    continue
                allele_count[allele] = count_
            vcf['samples'][samples[0]] = allele_count
        return vcf

    def _make_index(self):
        '''it makes an index of the vcf file. It takes the vcf position
        (chrom, position) as index'''
        if self._index is not None:
            return self._index
        index = {}
        fhand = open(self._fpath, 'rt')
        rawline = 'filled'
        while len(rawline) != 0:
            prior_tell = fhand.tell()
            rawline = fhand.readline()
            if rawline and rawline[0] == '#':
                continue
            index[tuple(rawline.split()[:2])] = prior_tell
        self._index = index

    def get_snv(self, position):
        'It returns an snv giving it position'
        colnames = self.header['colnames']
        if self._index is None:
            self._make_index()
        fhand = open(self._fpath)
        file_position = self._index[position]
        fhand.seek(file_position)
        return self._parse_vcf_line(fhand.readline(), colnames)
'''
Created on 2011 aza 21

@author: peio
'''

class VcfParser(object):
    'A vcf reader'
    def __init__(self, fpath):
        'Class initiator'
        self._fpath = fpath
        self.header = None
        self._get_header()
        self._index = None

    def _get_version(self):
        'version of the vcf'
        version_unformat = self.header['format']
        return version_unformat.split('v')[1]
    version = property(_get_version)

    def _get_header(self):
        'it returns the header'
        if self.header is not None:
            return self.header
        headers = {}
        for line in open(self._fpath):
            if not line.startswith('#'):
                break
            if line.startswith('##'):
                line = line.strip()
                line = line.lstrip('##')
                kind, value = line.split('=', 1)
                if kind == 'FILTER':
                    if kind not in headers:
                        headers[kind] = {}
                    filter_type, filter_info = value.split(',', 1)
                    headers[kind][filter_type] = filter_info.strip('"')
                elif kind in ('FORMAT', 'INFO'):
                    if kind not in headers:
                        headers[kind] = {}
                    name, example, type_, desc = value.split(',')
                    headers[kind][name] = {'type':type_, 'example':example,
                                           'description':desc}
                else:
                    headers[kind] = value
            else:
                line = line.lstrip('#')
                headers['colnames'] = line.split()
        self.header = headers

    def _get_vcfs(self):
        'vcf generator'
        colnames = self.header['colnames']
        for line in open(self._fpath):
            if line.startswith('#'):
                continue
            yield self._parse_vcf_line(line, colnames)
    vcfs = property(_get_vcfs)

    def _parse_vcf_line(self, line, colnames):
        '''It parses the cvf svn line'''
        vcf_items = line.split()
        vcf = dict(zip(colnames[:9], vcf_items[:9]))
        # reformat FILTER
        vcf['FILTER'] = vcf['FILTER'].split(';')
        # REformat INFO
        info = vcf['INFO']
        vcf['INFO'] = {}
        for info_ in info.split(';'):
            info_key, info_value = info_.split('=')
            vcf['INFO'][info_key] = info_value
        # reformat FORMAT
        format_string = vcf['FORMAT']
        vcf['FORMAT'] = {}
        for format_ in format_string.split(';'):
            format_key, format_value = format_.split(':')
            vcf['FORMAT'][format_key] = format_value
        vcf['samples'] = {}
        for samples in zip(colnames[9:], vcf_items[9:]):
            allele_count = {}
            alleles, values = samples[1].split(':')
            for index , allele in enumerate(alleles.split('|')):
                allele = vcf['REF'] if allele == 0 else vcf['ALT']
                try:
                    count_ = int(values[index])
                except ValueError:
                    continue
                allele_count[allele] = count_
            vcf['samples'][samples[0]] = allele_count
        return vcf

    def _make_index(self):
        '''it makes an index of the vcf file.

        It takes the vcf position (chrom, position) as index'''
        if self._index is not None:
            return self._index
        index = {}
        fhand = open(self._fpath, 'rt')
        rawline = 'filled'
        while len(rawline) != 0:
            prior_tell = fhand.tell()
            rawline = fhand.readline()
            if rawline and rawline[0] == '#':
                continue
            index[tuple(rawline.split()[:2])] = prior_tell
        self._index = index

    def get_snv(self, position):
        'It returns an snv giving it position'
        colnames = self.header['colnames']
        if self._index is None:
            self._make_index()
        fhand = open(self._fpath)
        file_position = self._index[position]
        fhand.seek(file_position)
        return self._parse_vcf_line(fhand.readline(), colnames)
Python
0.999987
b47143d38027a7bafc73376de01bd2fa2196ac60
Add test for file interface in put_attachment
couchdb/tests/client.py
couchdb/tests/client.py
# -*- coding: utf-8 -*-
#
# Copyright (C) 2007 Christopher Lenz
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution.

import doctest
import os
import unittest
import StringIO

from couchdb import client


class DatabaseTestCase(unittest.TestCase):

    def setUp(self):
        uri = os.environ.get('COUCHDB_URI', 'http://localhost:5984/')
        self.server = client.Server(uri)
        if 'python-tests' in self.server:
            del self.server['python-tests']
        self.db = self.server.create('python-tests')

    def tearDown(self):
        if 'python-tests' in self.server:
            del self.server['python-tests']

    def test_doc_id_quoting(self):
        self.db['foo/bar'] = {'foo': 'bar'}
        self.assertEqual('bar', self.db['foo/bar']['foo'])
        del self.db['foo/bar']
        self.assertEqual(None, self.db.get('foo/bar'))

    def test_unicode(self):
        self.db[u'føø'] = {u'bår': u'Iñtërnâtiônàlizætiøn', 'baz': 'ASCII'}
        self.assertEqual(u'Iñtërnâtiônàlizætiøn', self.db[u'føø'][u'bår'])
        self.assertEqual(u'ASCII', self.db[u'føø'][u'baz'])

    def test_doc_revs(self):
        doc = {'bar': 42}
        self.db['foo'] = doc
        old_rev = doc['_rev']
        doc['bar'] = 43
        self.db['foo'] = doc
        new_rev = doc['_rev']

        new_doc = self.db.get('foo')
        self.assertEqual(new_rev, new_doc['_rev'])
        new_doc = self.db.get('foo', rev=new_rev)
        self.assertEqual(new_rev, new_doc['_rev'])
        old_doc = self.db.get('foo', rev=old_rev)
        self.assertEqual(old_rev, old_doc['_rev'])

    def test_attachment_crud(self):
        doc = {'bar': 42}
        self.db['foo'] = doc
        old_rev = doc['_rev']

        self.db.put_attachment(doc, 'foo.txt', 'Foo bar', 'text/plain')
        self.assertNotEquals(old_rev, doc['_rev'])

        doc = self.db['foo']
        attachment = doc['_attachments']['foo.txt']
        self.assertEqual(len('Foo bar'), attachment['length'])
        self.assertEqual('text/plain', attachment['content_type'])

        self.assertEqual('Foo bar', self.db.get_attachment(doc, 'foo.txt'))
        self.assertEqual('Foo bar', self.db.get_attachment('foo', 'foo.txt'))

        old_rev = doc['_rev']
        self.db.delete_attachment(doc, 'foo.txt')
        self.assertNotEquals(old_rev, doc['_rev'])
        self.assertEqual(None, self.db['foo'].get('_attachments'))

    def test_attachment_crud_with_files(self):
        doc = {'bar': 42}
        self.db['foo'] = doc
        old_rev = doc['_rev']
        f = StringIO.StringIO('Foo bar baz')

        self.db.put_attachment(doc, 'foo.txt', f, 'text/plain')
        self.assertNotEquals(old_rev, doc['_rev'])

        doc = self.db['foo']
        attachment = doc['_attachments']['foo.txt']
        self.assertEqual(len('Foo bar baz'), attachment['length'])
        self.assertEqual('text/plain', attachment['content_type'])

        self.assertEqual('Foo bar baz', self.db.get_attachment(doc, 'foo.txt'))
        self.assertEqual('Foo bar baz', self.db.get_attachment('foo', 'foo.txt'))

        old_rev = doc['_rev']
        self.db.delete_attachment(doc, 'foo.txt')
        self.assertNotEquals(old_rev, doc['_rev'])
        self.assertEqual(None, self.db['foo'].get('_attachments'))


def suite():
    suite = unittest.TestSuite()
    suite.addTest(unittest.makeSuite(DatabaseTestCase, 'test'))
    suite.addTest(doctest.DocTestSuite(client))
    return suite


if __name__ == '__main__':
    unittest.main(defaultTest='suite')
# -*- coding: utf-8 -*-
#
# Copyright (C) 2007 Christopher Lenz
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution.

import doctest
import os
import unittest

from couchdb import client


class DatabaseTestCase(unittest.TestCase):

    def setUp(self):
        uri = os.environ.get('COUCHDB_URI', 'http://localhost:5984/')
        self.server = client.Server(uri)
        if 'python-tests' in self.server:
            del self.server['python-tests']
        self.db = self.server.create('python-tests')

    def tearDown(self):
        if 'python-tests' in self.server:
            del self.server['python-tests']

    def test_doc_id_quoting(self):
        self.db['foo/bar'] = {'foo': 'bar'}
        self.assertEqual('bar', self.db['foo/bar']['foo'])
        del self.db['foo/bar']
        self.assertEqual(None, self.db.get('foo/bar'))

    def test_unicode(self):
        self.db[u'føø'] = {u'bår': u'Iñtërnâtiônàlizætiøn', 'baz': 'ASCII'}
        self.assertEqual(u'Iñtërnâtiônàlizætiøn', self.db[u'føø'][u'bår'])
        self.assertEqual(u'ASCII', self.db[u'føø'][u'baz'])

    def test_doc_revs(self):
        doc = {'bar': 42}
        self.db['foo'] = doc
        old_rev = doc['_rev']
        doc['bar'] = 43
        self.db['foo'] = doc
        new_rev = doc['_rev']

        new_doc = self.db.get('foo')
        self.assertEqual(new_rev, new_doc['_rev'])
        new_doc = self.db.get('foo', rev=new_rev)
        self.assertEqual(new_rev, new_doc['_rev'])
        old_doc = self.db.get('foo', rev=old_rev)
        self.assertEqual(old_rev, old_doc['_rev'])

    def test_attachment_crud(self):
        doc = {'bar': 42}
        self.db['foo'] = doc
        old_rev = doc['_rev']

        self.db.put_attachment(doc, 'foo.txt', 'Foo bar', 'text/plain')
        self.assertNotEquals(old_rev, doc['_rev'])

        doc = self.db['foo']
        attachment = doc['_attachments']['foo.txt']
        self.assertEqual(len('Foo bar'), attachment['length'])
        self.assertEqual('text/plain', attachment['content_type'])

        self.assertEqual('Foo bar', self.db.get_attachment(doc, 'foo.txt'))
        self.assertEqual('Foo bar', self.db.get_attachment('foo', 'foo.txt'))

        old_rev = doc['_rev']
        self.db.delete_attachment(doc, 'foo.txt')
        self.assertNotEquals(old_rev, doc['_rev'])
        self.assertEqual(None, self.db['foo'].get('_attachments'))


def suite():
    suite = unittest.TestSuite()
    suite.addTest(unittest.makeSuite(DatabaseTestCase, 'test'))
    suite.addTest(doctest.DocTestSuite(client))
    return suite


if __name__ == '__main__':
    unittest.main(defaultTest='suite')
Python
0.000001
43f67a09d0e194ef3012bad97e0cb45db7c34d35
test travis
binding.gyp
binding.gyp
{ "targets": [ { "target_name": "addon", "sources": [ "src/addon.cc", "src/object.cc", "src/async.cc", "src/engine.cc", "src/results.cc" ], #"cflags": [ "-Werror", "-Wall", "-Wextra", "-Wpedantic", "-Wunused-parameter", "-funroll-loops", "-Ofast" ],#targets all files, c and c++ #"cflags_c": [ "-hello" ],# does this do anything? #"cflags_cc": [ "-Werror", "-Wall", "-Wextra", "-Wpedantic", "-Wunused-parameter", "-funroll-loops", "-Ofast" ],#target c++ only "cflags": [ "-O2", "-std=gnu++14"], "cflags!": [ "-fno-exceptions", "-Wno-unused-parameter", "-O3"], "cflags_cc!": [ "-fno-exceptions", "-Wno-unused-parameter" ], "include_dirs": ["<!@(node -p \"require('node-addon-api').include\")"], "dependencies": ["<!(node -p \"require('node-addon-api').gyp\")"], "defines": [ "NAPI_CPP_EXCEPTIONS", "NODE_ADDON_API_DISABLE_DEPRECATED" ], "conditions": [ ["OS==\"win\"", { "msvs_settings": { "VCCLCompilerTool": { "ExceptionHandling": 1 } } }], ["OS==\"mac\"", { "xcode_settings": { "CLANG_CXX_LANGUAGE_STANDARD": 'c++14', "CLANG_CXX_LIBRARY": "libc++", "GCC_ENABLE_CPP_EXCEPTIONS": "YES", "MACOSX_DEPLOYMENT_TARGET": "10.7", "GCC_ENABLE_CPP_RTTI" : "YES", # options to test: "fast", "3", "2", "1", "0", "", "s" #"GCC_OPTIMIZATION_LEVEL": "1", # only passed to C files "OTHER_CFLAGS" : [], # only passed to C++ files "OTHER_CPLUSPLUSFLAGS": [ "-Werror", "-Wextra", "-Wpedantic", "-Wunused-parameter", #"-Weverything" #"-fdiagnostics-show-hotness", #"-fsave-optimization-record" ], } }] ] } ] }
{ "targets": [ { "target_name": "addon", "sources": [ "src/addon.cc", "src/object.cc", "src/async.cc", "src/engine.cc", "src/results.cc" ], #"cflags": [ "-Werror", "-Wall", "-Wextra", "-Wpedantic", "-Wunused-parameter", "-funroll-loops", "-Ofast" ],#targets all files, c and c++ #"cflags_c": [ "-hello" ],# does this do anything? #"cflags_cc": [ "-Werror", "-Wall", "-Wextra", "-Wpedantic", "-Wunused-parameter", "-funroll-loops", "-Ofast" ],#target c++ only "cflags": [ "-O2", ], "cflags!": [ "-fno-exceptions", "-Wno-unused-parameter", "-O3", "-std=gnu++1y"], "cflags_cc!": [ "-fno-exceptions", "-Wno-unused-parameter" ], "include_dirs": ["<!@(node -p \"require('node-addon-api').include\")"], "dependencies": ["<!(node -p \"require('node-addon-api').gyp\")"], "defines": [ "NAPI_CPP_EXCEPTIONS", "NODE_ADDON_API_DISABLE_DEPRECATED" ], "conditions": [ ["OS==\"win\"", { "msvs_settings": { "VCCLCompilerTool": { "ExceptionHandling": 1 } } }], ["OS==\"mac\"", { "xcode_settings": { "CLANG_CXX_LANGUAGE_STANDARD": 'c++14', "CLANG_CXX_LIBRARY": "libc++", "GCC_ENABLE_CPP_EXCEPTIONS": "YES", "MACOSX_DEPLOYMENT_TARGET": "10.7", "GCC_ENABLE_CPP_RTTI" : "YES", # options to test: "fast", "3", "2", "1", "0", "", "s" #"GCC_OPTIMIZATION_LEVEL": "1", # only passed to C files "OTHER_CFLAGS" : [], # only passed to C++ files "OTHER_CPLUSPLUSFLAGS": [ "-Werror", "-Wextra", "-Wpedantic", "-Wunused-parameter", #"-Weverything" #"-fdiagnostics-show-hotness", #"-fsave-optimization-record" ], } }] ] } ] }
Python
0.000002
ff9822c7776cdef1e14e80a2cc56700bbc4f24f2
Fix Mac OS build warnings on old node versions
binding.gyp
binding.gyp
{ "targets": [ { "target_name": "anitomy-js", "sources": [ "lib/anitomy/anitomy/anitomy.cpp", "lib/anitomy/anitomy/anitomy.h", "lib/anitomy/anitomy/element.cpp", "lib/anitomy/anitomy/element.h", "lib/anitomy/anitomy/keyword.cpp", "lib/anitomy/anitomy/keyword.h", "lib/anitomy/anitomy/options.h", "lib/anitomy/anitomy/parser.cpp", "lib/anitomy/anitomy/parser.h", "lib/anitomy/anitomy/parser_helper.cpp", "lib/anitomy/anitomy/parser_number.cpp", "lib/anitomy/anitomy/string.cpp", "lib/anitomy/anitomy/string.h", "lib/anitomy/anitomy/token.cpp", "lib/anitomy/anitomy/token.h", "lib/anitomy/anitomy/tokenizer.cpp", "lib/anitomy/anitomy/tokenizer.h", "src/anitomy_js.h", "src/anitomy_js.cpp", "src/worker.h", "src/worker.cpp", "src/addon.cpp" ], "xcode_settings": { "CLANG_CXX_LANGUAGE_STANDARD": "c++14", "CLANG_CXX_LIBRARY": "libc++", "MACOSX_DEPLOYMENT_TARGET": "10.9", "GCC_ENABLE_CPP_EXCEPTIONS": "NO", "GCC_ENABLE_CPP_RTTI": "NO", "OTHER_CPLUSPLUSFLAGS": [ "-Wall", "-O3" ] }, "cflags_cc": [ "-std=c++14", "-Wall", "-O3" ], "cflags_cc!": [ "-std=gnu++0x" ], "include_dirs": [ "<!(node -e \"require('nan')\")", "lib/anitomy" ] } ] }
{ "targets": [ { "target_name": "anitomy-js", "sources": [ "lib/anitomy/anitomy/anitomy.cpp", "lib/anitomy/anitomy/anitomy.h", "lib/anitomy/anitomy/element.cpp", "lib/anitomy/anitomy/element.h", "lib/anitomy/anitomy/keyword.cpp", "lib/anitomy/anitomy/keyword.h", "lib/anitomy/anitomy/options.h", "lib/anitomy/anitomy/parser.cpp", "lib/anitomy/anitomy/parser.h", "lib/anitomy/anitomy/parser_helper.cpp", "lib/anitomy/anitomy/parser_number.cpp", "lib/anitomy/anitomy/string.cpp", "lib/anitomy/anitomy/string.h", "lib/anitomy/anitomy/token.cpp", "lib/anitomy/anitomy/token.h", "lib/anitomy/anitomy/tokenizer.cpp", "lib/anitomy/anitomy/tokenizer.h", "src/anitomy_js.h", "src/anitomy_js.cpp", "src/worker.h", "src/worker.cpp", "src/addon.cpp" ], "xcode_settings": { "OTHER_CFLAGS": [ "-mmacosx-version-min=10.7", "-stdlib=libc++", "-std=c++14" ] }, "cflags": [ "-std=c++14" ], "cflags_cc!": [ "-fno-rtti", "-fno-exceptions", "-std=gnu++0x" ], "include_dirs": [ "<!(node -e \"require('nan')\")", "lib/anitomy" ] } ] }
Python
0
a57e38233679bf6d95dad533d87ce1c69c00cc26
Include process name
docker-memusage.py
docker-memusage.py
#!/usr/bin/env python
from collections import OrderedDict
import os.path
import re


def parse_mem_file(filename):
    data = OrderedDict()
    with open(filename, 'rb') as f:
        for line in f:
            splittage = line.split(':')
            data[splittage[0]] = splittage[1].strip()
    return data


def get_system_mem_usage():
    """Return the information in /proc/meminfo as a dictionary."""
    return parse_mem_file('/proc/meminfo')


def get_process_mem_usage():
    re_pid = re.compile(r'^\d+$')
    re_mem = re.compile(r'^(\d+) .+$')
    pid2usage = {}
    for pid in [d for d in os.listdir('/proc') if re_pid.match(d)]:
        fpath = os.path.join('/proc', pid, 'status')
        try:
            data = parse_mem_file(fpath)
        except IOError:
            continue
        try:
            name = data['name']
            pid2usage[(pid, name)] = int(
                re_mem.match(data['VmHWM']).group(1)) / 1024.
        except KeyError:
            continue
    return OrderedDict(
        sorted(pid2usage.iteritems(), key=lambda x: x[1], reverse=True))


pid2usage = get_process_mem_usage()
total_usage = sum(pid2usage.values())
print('Total memory usage: {:.2f}'.format(total_usage))
for pid_etc, usage in pid2usage.iteritems():
    [pid, name] = pid
    print('{} ({}): {:.2f} MB'.format(name, pid, usage))
#!/usr/bin/env python
from collections import OrderedDict
from pprint import pprint
import os.path
import re
import sys


def parse_mem_file(filename):
    data = OrderedDict()
    with open(filename, 'rb') as f:
        for line in f:
            splittage = line.split(':')
            data[splittage[0]] = splittage[1].strip()
    return data


def get_system_mem_usage():
    """Return the information in /proc/meminfo as a dictionary."""
    return parse_mem_file('/proc/meminfo')


def get_process_mem_usage():
    re_pid = re.compile(r'^\d+$')
    re_mem = re.compile(r'^(\d+) .+$')
    pid2usage = {}
    for pid in [d for d in os.listdir('/proc') if re_pid.match(d)]:
        fpath = os.path.join('/proc', pid, 'status')
        try:
            data = parse_mem_file(fpath)
        except IOError:
            continue
        try:
            pid2usage[pid] = int(re_mem.match(data['VmHWM']).group(1)) / 1024.
        except KeyError:
            continue
    return OrderedDict(
        sorted(pid2usage.iteritems(), key=lambda x: x[1], reverse=True))


pid2usage = get_process_mem_usage()
total_usage = sum(pid2usage.values())
print('Total memory usage: {:.2f}'.format(total_usage))
for pid, usage in pid2usage.iteritems():
    print('{}: {:.2f} MB'.format(pid, usage))
Python
0.000002
57b707b7f7e7076f8c1f84e57ba3a3db45135340
Fix compilations for macos mountain lion
binding.gyp
binding.gyp
{ "targets": [ { "target_name": "protobuf_for_node", "include_dirs": ["protobuf/src"], "dependencies": ["protobuf/protobuf.gyp:protobuf_full_do_not_use"], "sources": [ "protobuf_for_node.cc", "addon.cc" ], 'conditions': [ [ 'OS =="mac"',{ 'xcode_settings':{ 'OTHER_CFLAGS' : [ '-mmacosx-version-min=10.7' ] } } ] ] } ] }
{ "targets": [ { "target_name": "protobuf_for_node", "include_dirs": ["protobuf/src"], "dependencies": ["protobuf/protobuf.gyp:protobuf_full_do_not_use"], "sources": [ "protobuf_for_node.cc", "addon.cc" ] } ] }
Python
0.000006
644fbef7030f0685be7dd056606ab23daaefdc72
Fix typo in error message variable
app/gitlab.py
app/gitlab.py
from __future__ import absolute_import
from __future__ import unicode_literals

from .webhooks import WebHook
from werkzeug.exceptions import BadRequest, NotImplemented

EVENTS = {
    'Push Hook': 'push',
    'Tag Push Hook': 'tag_push',
    'Issue Hook': 'issue',
    'Note Hook': 'note',
    'Merge Request Hook': 'merge_request'
}


class GitlabWebHook(WebHook):

    def event(self, request):
        gitlab_header = request.headers.get('X-Gitlab-Event', None)
        if not gitlab_header:
            raise BadRequest('Gitlab requests must provide a X-Gitlab-Event header')

        event = EVENTS.get(gitlab_header, None)
        if not event:
            raise NotImplemented('Header not understood %s' % gitlab_header)

        if event == 'note':
            if 'commit' in request.json:
                event = 'commit_comment'
            elif 'merge_request' in request.json:
                event = 'merge_request_comment'
            elif 'issue' in request.json:
                event = 'issue_comment'
            elif 'snippet' in request.json:
                event = 'snippet_comment'

        return event
from __future__ import absolute_import
from __future__ import unicode_literals

from .webhooks import WebHook
from werkzeug.exceptions import BadRequest, NotImplemented

EVENTS = {
    'Push Hook': 'push',
    'Tag Push Hook': 'tag_push',
    'Issue Hook': 'issue',
    'Note Hook': 'note',
    'Merge Request Hook': 'merge_request'
}


class GitlabWebHook(WebHook):

    def event(self, request):
        gitlab_header = request.headers.get('X-Gitlab-Event', None)
        if not gitlab_header:
            raise BadRequest('Gitlab requests must provide a X-Gitlab-Event header')

        event = EVENTS.get(gitlab_header, None)
        if not event:
            raise NotImplemented('Header not understood %s' % githab_header)

        if event == 'note':
            if 'commit' in request.json:
                event = 'commit_comment'
            elif 'merge_request' in request.json:
                event = 'merge_request_comment'
            elif 'issue' in request.json:
                event = 'issue_comment'
            elif 'snippet' in request.json:
                event = 'snippet_comment'

        return event
Python
0.000178
893b9947ef8d884ff67c84a60ea2c251b408a6d0
update build_db.py script
build_db.py
build_db.py
import json
import os
import sqlite3

WEEKDAYS = 0x1
SATURDAY = 0x2
SUNDAY = 0x3


def setup(conn):
    cursor = conn.cursor()
    cursor.execute(
        '''
        CREATE TABLE IF NOT EXISTS visit (
            stop_num text,
            visit_day_type integer,
            route_num integer,
            hour integer,
            minute integer
        )
        '''
    )
    cursor.execute('CREATE INDEX visit_stop_num_idx ON visit (stop_num);')
    cursor.execute(
        'CREATE TABLE "android_metadata" ("locale" TEXT DEFAULT "en_US");'
    )
    cursor.execute(
        'INSERT INTO "android_metadata" VALUES ("en_US")'
    )


def dump_data(data, conn):
    cursor = conn.cursor()
    for stop_num, day_types in data.items():
        types = zip([WEEKDAYS, SATURDAY, SUNDAY], day_types)
        for day_type_num, day_type in types:
            for visit in day_type:
                hour, minute = map(int, visit[1].split(':'))
                cursor.execute(
                    'INSERT INTO visit VALUES (?, ?, ?, ?, ?)',
                    (
                        str(stop_num),
                        day_type_num,
                        visit[0],
                        hour,
                        minute
                    )
                )


def main():
    db = 'assets/transperthcache.db'
    if os.path.exists(db):
        os.unlink(db)

    conn = sqlite3.connect(db)
    setup(conn)

    with open('transperthcache.json') as fh:
        dump_data(
            json.load(fh),
            conn
        )
    conn.commit()
    conn.close()


if __name__ == '__main__':
    main()
import json
import os
import sqlite3

WEEKDAYS = 0x1
SATURDAY = 0x2
SUNDAY = 0x3


def setup(conn):
    cursor = conn.cursor()
    cursor.execute(
        '''
        CREATE TABLE IF NOT EXISTS visit (
            stop_num text,
            visit_day_type integer,
            route_num integer,
            hour integer,
            minute integer
        )
        '''
    )
    cursor.execute('CREATE INDEX visit_stop_num_idx ON visit (stop_num);')


def dump_data(data, conn):
    cursor = conn.cursor()
    for stop_num, day_types in data.items():
        types = zip([WEEKDAYS, SATURDAY, SUNDAY], day_types)
        for day_type_num, day_type in types:
            for visit in day_type:
                hour, minute = map(int, visit[1].split(':'))
                cursor.execute(
                    'INSERT INTO visit VALUES (?, ?, ?, ?, ?)',
                    (
                        str(stop_num),
                        day_type_num,
                        visit[0],
                        hour,
                        minute
                    )
                )


def main():
    db = 'Assets/transperthcache.db'
    if os.path.exists(db):
        os.unlink(db)

    conn = sqlite3.connect(db)
    setup(conn)

    with open('transperthcache.json') as fh:
        dump_data(
            json.load(fh),
            conn
        )
    conn.commit()


if __name__ == '__main__':
    main()
Python
0.000001
7f01aa6deaa9a13ca388fb4c84849bce53d34d5f
Make sure C++11 is used under Mac OS
binding.gyp
binding.gyp
{ "targets": [{ "target_name": "mmap-io", "sources": [ "src/mmap-io.cc" ], "include_dirs": [ "<!(node -e \"require('nan')\")" ], "cflags_cc": [ "-std=c++11" ], "conditions": [ [ 'OS=="mac"', { "xcode_settings": { 'OTHER_CPLUSPLUSFLAGS' : ['-std=c++11','-stdlib=libc++'], 'OTHER_LDFLAGS': ['-stdlib=libc++'], 'MACOSX_DEPLOYMENT_TARGET': '10.7' }} ] ] }] }
{ "targets": [{ "target_name": "mmap-io", "sources": [ "src/mmap-io.cc" ], "include_dirs": [ "<!(node -e \"require('nan')\")" ], "cflags_cc": [ "-std=c++11" ] }] }
Python
0
6b6948b4dcf7400eefcfb2a499c0180d03052550
Remove unnecessary string formatting
sympy/matrices/expressions/dotproduct.py
sympy/matrices/expressions/dotproduct.py
from __future__ import print_function, division

from sympy.core import Basic
from sympy.core.sympify import _sympify
from sympy.matrices.expressions.transpose import transpose
from sympy.matrices.expressions.matexpr import MatrixExpr


class DotProduct(MatrixExpr):
    """
    Dot Product of vector matrices
    """

    def __new__(cls, arg1, arg2):
        arg1, arg2 = _sympify((arg1, arg2))

        if not arg1.is_Matrix:
            raise TypeError("Argument 1 of DotProduct is not a matrix")
        if not arg2.is_Matrix:
            raise TypeError("Argument 2 of DotProduct is not a matrix")
        if not (1 in arg1.shape):
            raise TypeError("Argument 1 of DotProduct is not a vector")
        if not (1 in arg2.shape):
            raise TypeError("Argument 2 of DotProduct is not a vector")
        if arg1.shape != arg2.shape:
            raise TypeError("Input to Dot Product, %s and %s, are not of same dimensions" % (str(arg1), str(arg2)))

        return Basic.__new__(cls, arg1, arg2)

    def doit(self, expand=False):
        try:
            if self.args[0].shape[0] == 1:
                return (self.args[0]*transpose(self.args[1])).doit()[0]
            else:
                return (transpose(self.args[0])*self.args[1]).doit()[0]
        except (AttributeError, NotImplementedError):
            return self
from __future__ import print_function, division

from sympy.core import Basic
from sympy.core.sympify import _sympify
from sympy.matrices.expressions.transpose import transpose
from sympy.matrices.expressions.matexpr import MatrixExpr


class DotProduct(MatrixExpr):
    """
    Dot Product of vector matrices
    """

    def __new__(cls, arg1, arg2):
        arg1, arg2 = _sympify((arg1, arg2))

        if not arg1.is_Matrix:
            raise TypeError("Argument 1 of DotProduct is not a matrix" % str(arg1))
        if not arg2.is_Matrix:
            raise TypeError("Argument 2 of DotProduct is not a matrix" % str(arg2))
        if not (1 in arg1.shape):
            raise TypeError("Argument 1 of DotProduct is not a vector" % str(arg1))
        if not (1 in arg2.shape):
            raise TypeError("Argument 2 of DotProduct is not a vector" % str(arg1))
        if arg1.shape != arg2.shape:
            raise TypeError("Input to Dot Product, %s and %s, are not of same dimensions" % (str(arg1), str(arg2)))

        return Basic.__new__(cls, arg1, arg2)

    def doit(self, expand=False):
        try:
            if self.args[0].shape[0] == 1:
                return (self.args[0]*transpose(self.args[1])).doit()[0]
            else:
                return (transpose(self.args[0])*self.args[1]).doit()[0]
        except (AttributeError, NotImplementedError):
            return self
Python
0.005099
a6dff532d75d0a63c59db0cbf800587845d587a1
add compiler flag
binding.gyp
binding.gyp
{ "targets": [ { "target_name": "addon", "sources": [ "src/addon.cc", "src/object.cc", "src/async.cc", "src/engine.cc", "src/results.cc" ], "cflags": [ "-O2", "-Wendif-labels", "-Werror", "-Wpedantic", "-Wunused-parameter", "-finline-functions", "-funswitch-loops", "-fpredictive-commoning", "-fgcse-after-reload", "-ftree-vectorize", "-fvect-cost-model", "-ftree-partial-pre", "-fipa-cp-clone" ], "cflags!": [ "-Wno-unused-parameter", "-O3" ], "cflags_cc": [ "-std=gnu++11" ], "cflags_cc!": [ "-fno-exceptions", "-fno-rtti", "-std=gnu++1y", "-std=gnu++0x" ], "include_dirs": ["<!@(node -p \"require('node-addon-api').include\")"], "dependencies": ["<!(node -p \"require('node-addon-api').gyp\")"], "defines": [ "NAPI_CPP_EXCEPTIONS", "NODE_ADDON_API_DISABLE_DEPRECATED" ], "conditions": [ ["OS==\"win\"", { "msvs_settings": { "VCCLCompilerTool": { "ExceptionHandling": 1 } } }], ["OS==\"mac\"", { "xcode_settings": { "CLANG_CXX_LANGUAGE_STANDARD": 'c++11', "CLANG_CXX_LIBRARY": "libc++", "GCC_ENABLE_CPP_EXCEPTIONS": "YES", "MACOSX_DEPLOYMENT_TARGET": "10.7", "GCC_ENABLE_CPP_RTTI" : "YES", # "fast", "3", "2", "1", "0", "", "s" "GCC_OPTIMIZATION_LEVEL": "2", # only passed to C files "OTHER_CFLAGS" : [], # remove defaults passed to C files "OTHER_CFLAGS!": [ "-fno-strict-aliasing" ], # only passed to C++ files "OTHER_CPLUSPLUSFLAGS": [], # remove defaults passed to C++ files "OTHER_CPLUSPLUSFLAGS!": [], # passed to both C/C++ files "WARNING_CFLAGS": [ "-Werror", "-Wextra", "-Wpedantic", "-Wunused-parameter" ], # remove defaults passed to both C/C++ files "WARNING_CFLAGS!": [ "-Wno-unused-parameter", "-W" ] } }] ] } ] } # several options for flags, cflags_cc(appends), cflags_cc+(prepends), cflags_cc!(negates) # if -O3 and -O2 are both passed, whichever is passed last wins the race # see https://github.com/nodejs/node/blob/master/common.gypi
{ "targets": [ { "target_name": "addon", "sources": [ "src/addon.cc", "src/object.cc", "src/async.cc", "src/engine.cc", "src/results.cc" ], "cflags": [ "-O2", "-Wendif-labels", "-Werror", "-Wpedantic", "-Wunused-parameter", "-finline-functions", "-funswitch-loops", "-fpredictive-commoning", "-fgcse-after-reload", "-ftree-vectorize", "-fvect-cost-model", "-ftree-partial-pre" ], "cflags!": [ "-Wno-unused-parameter", "-O3" ], "cflags_cc": [ "-std=gnu++11" ], "cflags_cc!": [ "-fno-exceptions", "-fno-rtti", "-std=gnu++1y", "-std=gnu++0x" ], "include_dirs": ["<!@(node -p \"require('node-addon-api').include\")"], "dependencies": ["<!(node -p \"require('node-addon-api').gyp\")"], "defines": [ "NAPI_CPP_EXCEPTIONS", "NODE_ADDON_API_DISABLE_DEPRECATED" ], "conditions": [ ["OS==\"win\"", { "msvs_settings": { "VCCLCompilerTool": { "ExceptionHandling": 1 } } }], ["OS==\"mac\"", { "xcode_settings": { "CLANG_CXX_LANGUAGE_STANDARD": 'c++11', "CLANG_CXX_LIBRARY": "libc++", "GCC_ENABLE_CPP_EXCEPTIONS": "YES", "MACOSX_DEPLOYMENT_TARGET": "10.7", "GCC_ENABLE_CPP_RTTI" : "YES", # "fast", "3", "2", "1", "0", "", "s" "GCC_OPTIMIZATION_LEVEL": "2", # only passed to C files "OTHER_CFLAGS" : [], # remove defaults passed to C files "OTHER_CFLAGS!": [ "-fno-strict-aliasing" ], # only passed to C++ files "OTHER_CPLUSPLUSFLAGS": [], # remove defaults passed to C++ files "OTHER_CPLUSPLUSFLAGS!": [], # passed to both C/C++ files "WARNING_CFLAGS": [ "-Werror", "-Wextra", "-Wpedantic", "-Wunused-parameter" ], # remove defaults passed to both C/C++ files "WARNING_CFLAGS!": [ "-Wno-unused-parameter", "-W" ] } }] ] } ] } # several options for flags, cflags_cc(appends), cflags_cc+(prepends), cflags_cc!(negates) # if -O3 and -O2 are both passed, whichever is passed last wins the race # see https://github.com/nodejs/node/blob/master/common.gypi
Python
0.000002
5e2ef9885a65d61edcdffaef9e4f8a960bef567e
Refactor CAS tests.
fridge/test/test_cas.py
fridge/test/test_cas.py
import pytest

from fridge.cas import ContentAddressableStorage
from fridge.fstest import (
    assert_file_content_equal, assert_open_raises, write_file)
from fridge.memoryfs import MemoryFS


@pytest.fixture
def fs():
    return MemoryFS()


@pytest.fixture
def cas(fs):
    return ContentAddressableStorage('cas', fs)


class TestContentAddressableStorage(object):
    def has_root_property(self, fs):
        cas = ContentAddressableStorage(root='cas_root', fs=fs)
        assert cas.root == 'cas_root'

    def test_allows_to_store_and_retrieve_files(self, fs):
        write_file(fs, 'testfile', u'dummy content')
        cas = ContentAddressableStorage('cas', fs=fs)
        key = cas.store('testfile')

        # Close and reopen
        del cas
        cas = ContentAddressableStorage('cas', fs=fs)

        with fs.open(cas.get_path(key), 'r') as f:
            content = f.read()
        assert content == u'dummy content'

    def test_file_removed_after_store(self, fs, cas):
        with fs.open('testfile', 'w') as f:
            f.write(u'dummy content')
        cas.store('testfile')
        assert not fs.exists('testfile')

    def test_writing_original_files_keeps_stored_file_unchanged(self, fs):
        write_file(fs, 'testfile', u'dummy content')
        cas = ContentAddressableStorage('cas', fs=fs)
        key = cas.store('testfile')
        del cas  # Close
        write_file(fs, 'testfile', u'replaced content')
        cas = ContentAddressableStorage('cas', fs=fs)
        assert_file_content_equal(fs, cas.get_path(key), u'dummy content')
import pytest

from fridge.cas import ContentAddressableStorage
from fridge.memoryfs import MemoryFS


class TestContentAddressableStorage(object):
    def create_cas(self, fs=None, path='cas'):
        if fs is None:
            fs = MemoryFS()
        return ContentAddressableStorage(path, fs)

    def has_root_property(self):
        cas = self.create_cas(path='cas_root')
        assert cas.root == 'cas_root'

    def test_allows_to_store_and_retrieve_files(self):
        fs = MemoryFS()
        cas = self.create_cas(fs)
        with fs.open('testfile', 'w') as f:
            f.write(u'dummy content')
        key = cas.store('testfile')

        # Close and reopen
        del cas
        cas = self.create_cas(fs)

        with fs.open(cas.get_path(key), 'r') as f:
            content = f.read()
        assert content == u'dummy content'

    def test_file_removed_after_store(self):
        fs = MemoryFS()
        cas = self.create_cas(fs)
        with fs.open('testfile', 'w') as f:
            f.write(u'dummy content')
        cas.store('testfile')
        assert not fs.exists('testfile')

    def test_writing_original_files_keeps_stored_file_unchanged(self):
        fs = MemoryFS()
        cas = self.create_cas(fs)
        with fs.open('testfile', 'w') as f:
            f.write(u'dummy content')
        key = cas.store('testfile')
        del cas  # Close
        with fs.open('testfile', 'w') as f:
            f.write(u'replaced content')
        cas = self.create_cas(fs)
        with fs.open(cas.get_path(key), 'r') as f:
            content = f.read()
        assert content == u'dummy content'

    def test_stores_blobs_write_protected(self):
        fs = MemoryFS()
        cas = self.create_cas(fs)
        with fs.open('testfile', 'w') as f:
            f.write(u'dummy content')
        key = cas.store('testfile')
        with pytest.raises(OSError):
            with fs.open(cas.get_path(key), 'w'):
                pass
Python
0
167101baa4d57d22bc6a40d7ff8afd3688e23580
fix ControlText focusout bug
pyforms/gui/Controls/ControlText.py
pyforms/gui/Controls/ControlText.py
#!/usr/bin/python
# -*- coding: utf-8 -*-

'''
@author: Ricardo Ribeiro
@credits: Ricardo Ribeiro
@license: MIT
@version: 0.0
@maintainer: Ricardo Ribeiro
@email: ricardojvr@gmail.com
@status: Development
@lastEditedBy: Carlos Mão de Ferro (carlos.maodeferro@neuro.fchampalimaud.org)
'''

from pyforms.gui.Controls.ControlBase import ControlBase
import pyforms.Utils.tools as tools
from PyQt4 import uic


class ControlText(ControlBase):

    def initForm(self):
        control_path = tools.getFileInSameDirectory(__file__, "textInput.ui")
        self._form = uic.loadUi(control_path)
        self.form.label.setText(self._label)
        self.form.lineEdit.setText(self._value)
        self.form.setToolTip(self.help)
        super(ControlText, self).initForm()
        self.form.lineEdit.editingFinished.connect(self.finishEditing)

    def finishEditing(self):
        """Function called when the lineEdit widget is edited"""
        self.changed()

    ###################################################################
    ############ Properties ###########################################
    ###################################################################

    @property
    def value(self):
        self._value = str(self._form.lineEdit.text())
        return self._value

    @value.setter
    def value(self, value):
        self._form.lineEdit.setText(value)
        ControlBase.value.fset(self, value)

    @property
    def label(self):
        return self.form.label.text()

    @label.setter
    def label(self, value):
        self.form.label.setText(value)
        ControlBase.label.fset(self, value)
#!/usr/bin/python
# -*- coding: utf-8 -*-

'''
@author: Ricardo Ribeiro
@credits: Ricardo Ribeiro
@license: MIT
@version: 0.0
@maintainer: Ricardo Ribeiro
@email: ricardojvr@gmail.com
@status: Development
@lastEditedBy: Carlos Mão de Ferro (carlos.maodeferro@neuro.fchampalimaud.org)
'''

from pyforms.gui.Controls.ControlBase import ControlBase
import pyforms.Utils.tools as tools
from PyQt4 import uic


class ControlText(ControlBase):

    def initForm(self):
        control_path = tools.getFileInSameDirectory(__file__, "textInput.ui")
        self._form = uic.loadUi(control_path)
        self.form.label.setText(self._label)
        self.form.lineEdit.setText(self._value)
        self.form.setToolTip(self.help)
        super(ControlText, self).initForm()
        self.form.lineEdit.editingFinished.connect(self.finishEditing)

    def finishEditing(self):
        """Function called when the lineEdit widget is edited"""
        self.changed()
        self.form.lineEdit.focusNextChild()

    ###################################################################
    ############ Properties ###########################################
    ###################################################################

    @property
    def value(self):
        self._value = str(self._form.lineEdit.text())
        return self._value

    @value.setter
    def value(self, value):
        self._form.lineEdit.setText(value)
        ControlBase.value.fset(self, value)

    @property
    def label(self):
        return self.form.label.text()

    @label.setter
    def label(self, value):
        self.form.label.setText(value)
        ControlBase.label.fset(self, value)
Python
0
22cf663731bc556ef625695ab3213e87432ed4f9
fix docs link
pyvex/__init__.py
pyvex/__init__.py
""" PyVEX provides an interface that translates binary code into the VEX intermediate represenation (IR). For an introduction to VEX, take a look here: https://docs.angr.io/advanced-topics/ir """ __version__ = (8, 19, 4, 5) if bytes is str: raise Exception("This module is designed for python 3 only. Please install an older version to use python 2.") import os import sys import cffi import pkg_resources from .vex_ffi import ffi_str as _ffi_str ffi = cffi.FFI() import logging logging.getLogger("pyvex").addHandler(logging.NullHandler()) def _find_c_lib(): # Load the c library for calling into VEX if sys.platform in ('win32', 'cygwin'): library_file = 'pyvex.dll' elif sys.platform == 'darwin': library_file = "libpyvex.dylib" else: library_file = "libpyvex.so" pyvex_path = pkg_resources.resource_filename(__name__, os.path.join('lib', library_file)) ffi.cdef(_ffi_str) # RTLD_GLOBAL used for sim_unicorn.so lib = ffi.dlopen(pyvex_path) lib.vex_init() # this looks up all the definitions (wtf) dir(lib) return lib pvc = _find_c_lib() # pylint: disable=wildcard-import from .enums import * from . import stmt, expr, const from .block import IRSB, IRTypeEnv from .expr import get_op_retty from .const import tag_to_const_class, get_type_size, get_type_spec_size from .lifting import lift, lifters from .errors import PyVEXError # aliases.... IRStmt = stmt IRExpr = expr IRConst = const
""" PyVEX provides an interface that translates binary code into the VEX intermediate represenation (IR). For an introduction to VEX, take a look here: https://docs.angr.io/docs/ir.html """ __version__ = (8, 19, 4, 5) if bytes is str: raise Exception("This module is designed for python 3 only. Please install an older version to use python 2.") import os import sys import cffi import pkg_resources from .vex_ffi import ffi_str as _ffi_str ffi = cffi.FFI() import logging logging.getLogger("pyvex").addHandler(logging.NullHandler()) def _find_c_lib(): # Load the c library for calling into VEX if sys.platform in ('win32', 'cygwin'): library_file = 'pyvex.dll' elif sys.platform == 'darwin': library_file = "libpyvex.dylib" else: library_file = "libpyvex.so" pyvex_path = pkg_resources.resource_filename(__name__, os.path.join('lib', library_file)) ffi.cdef(_ffi_str) # RTLD_GLOBAL used for sim_unicorn.so lib = ffi.dlopen(pyvex_path) lib.vex_init() # this looks up all the definitions (wtf) dir(lib) return lib pvc = _find_c_lib() # pylint: disable=wildcard-import from .enums import * from . import stmt, expr, const from .block import IRSB, IRTypeEnv from .expr import get_op_retty from .const import tag_to_const_class, get_type_size, get_type_spec_size from .lifting import lift, lifters from .errors import PyVEXError # aliases.... IRStmt = stmt IRExpr = expr IRConst = const
Python
0
d9a034e74bf03a5a9837201d2e358d51e759f112
add dc_aware_policy
binding.gyp
binding.gyp
{ "targets": [ { "target_name": "cassandra-native", "sources": [ "cpp-driver/src/address.cpp", "cpp-driver/src/auth.cpp", "cpp-driver/src/auth_requests.cpp", "cpp-driver/src/auth_responses.cpp", "cpp-driver/src/batch_request.cpp", "cpp-driver/src/buffer.cpp", "cpp-driver/src/buffer_collection.cpp", "cpp-driver/src/cluster.cpp", "cpp-driver/src/cluster_metadata.cpp", "cpp-driver/src/collection_iterator.cpp", "cpp-driver/src/common.cpp", "cpp-driver/src/connection.cpp", "cpp-driver/src/control_connection.cpp", "cpp-driver/src/dc_aware_policy.cpp", "cpp-driver/src/error_response.cpp", "cpp-driver/src/event_response.cpp", "cpp-driver/src/execute_request.cpp", "cpp-driver/src/future.cpp", "cpp-driver/src/get_time.cpp", "cpp-driver/src/handler.cpp", "cpp-driver/src/io_worker.cpp", "cpp-driver/src/iterator.cpp", "cpp-driver/src/logger.cpp", "cpp-driver/src/map_iterator.cpp", "cpp-driver/src/md5.cpp", "cpp-driver/src/multiple_request_handler.cpp", "cpp-driver/src/murmur3.cpp", "cpp-driver/src/pool.cpp", "cpp-driver/src/prepare_handler.cpp", "cpp-driver/src/prepare_request.cpp", "cpp-driver/src/prepared.cpp", "cpp-driver/src/query_request.cpp", "cpp-driver/src/register_request.cpp", "cpp-driver/src/replication_strategy.cpp", "cpp-driver/src/request_handler.cpp", "cpp-driver/src/response.cpp", "cpp-driver/src/result_metadata.cpp", "cpp-driver/src/result_response.cpp", "cpp-driver/src/ring_buffer.cpp", "cpp-driver/src/row.cpp", "cpp-driver/src/schema_change_handler.cpp", "cpp-driver/src/schema_metadata.cpp", "cpp-driver/src/session.cpp", "cpp-driver/src/set_keyspace_handler.cpp", "cpp-driver/src/ssl.cpp", "cpp-driver/src/startup_request.cpp", "cpp-driver/src/statement.cpp", "cpp-driver/src/supported_response.cpp", "cpp-driver/src/testing.cpp", "cpp-driver/src/token_aware_policy.cpp", "cpp-driver/src/token_map.cpp", "cpp-driver/src/type_parser.cpp", "cpp-driver/src/types.cpp", "cpp-driver/src/uuids.cpp", "cpp-driver/src/value.cpp" ], "include_dirs": [ "<!(node -e \"require('nan')\")", "cpp-driver/include", "cpp-driver/src/third_party/boost", "cpp-driver/src/third_party/rapidjson" ], "defines": [ "DISABLE_UUID_GENERATION" ] } ] }
{ "targets": [ { "target_name": "cassandra-native", "sources": [ "cpp-driver/src/address.cpp", "cpp-driver/src/auth.cpp", "cpp-driver/src/auth_requests.cpp", "cpp-driver/src/auth_responses.cpp", "cpp-driver/src/batch_request.cpp", "cpp-driver/src/buffer.cpp", "cpp-driver/src/buffer_collection.cpp", "cpp-driver/src/cluster.cpp", "cpp-driver/src/cluster_metadata.cpp", "cpp-driver/src/collection_iterator.cpp", "cpp-driver/src/common.cpp", "cpp-driver/src/connection.cpp", "cpp-driver/src/control_connection.cpp", "cpp-driver/src/error_response.cpp", "cpp-driver/src/event_response.cpp", "cpp-driver/src/execute_request.cpp", "cpp-driver/src/future.cpp", "cpp-driver/src/get_time.cpp", "cpp-driver/src/handler.cpp", "cpp-driver/src/io_worker.cpp", "cpp-driver/src/iterator.cpp", "cpp-driver/src/logger.cpp", "cpp-driver/src/map_iterator.cpp", "cpp-driver/src/md5.cpp", "cpp-driver/src/multiple_request_handler.cpp", "cpp-driver/src/murmur3.cpp", "cpp-driver/src/pool.cpp", "cpp-driver/src/prepare_handler.cpp", "cpp-driver/src/prepare_request.cpp", "cpp-driver/src/prepared.cpp", "cpp-driver/src/query_request.cpp", "cpp-driver/src/register_request.cpp", "cpp-driver/src/replication_strategy.cpp", "cpp-driver/src/request_handler.cpp", "cpp-driver/src/response.cpp", "cpp-driver/src/result_metadata.cpp", "cpp-driver/src/result_response.cpp", "cpp-driver/src/ring_buffer.cpp", "cpp-driver/src/row.cpp", "cpp-driver/src/schema_change_handler.cpp", "cpp-driver/src/schema_metadata.cpp", "cpp-driver/src/session.cpp", "cpp-driver/src/set_keyspace_handler.cpp", "cpp-driver/src/ssl.cpp", "cpp-driver/src/startup_request.cpp", "cpp-driver/src/statement.cpp", "cpp-driver/src/supported_response.cpp", "cpp-driver/src/testing.cpp", "cpp-driver/src/token_aware_policy.cpp", "cpp-driver/src/token_map.cpp", "cpp-driver/src/type_parser.cpp", "cpp-driver/src/types.cpp", "cpp-driver/src/uuids.cpp", "cpp-driver/src/value.cpp" ], "include_dirs": [ "<!(node -e \"require('nan')\")", "cpp-driver/include", "cpp-driver/src/third_party/boost", "cpp-driver/src/third_party/rapidjson" ], "defines": [ "DISABLE_UUID_GENERATION" ] } ] }
Python
0.000039
73b66a32763b7efe36612db7f3a3b4566d8e44a2
set uid=197610(OIdiot) gid=197610 groups=197610 as primary_key instead of
app/models.py
app/models.py
from django.db import models

# Create your models here.


class Person(models.Model):
    id = models.AutoField(verbose_name = '索引', primary_key = True, unique = True)
    student_number = models.CharField(verbose_name = '学号', max_length = 12, unique = True)
    name = models.CharField(verbose_name = '姓名', max_length = 10)
    pinyin = models.CharField(verbose_name = '拼音', max_length = 25)
    gender = models.CharField(verbose_name = '性别', choices = (('F', 'Female'), ('M', 'Male')), max_length = 2)
    native_province = models.CharField(verbose_name = '籍贯', max_length = 10, blank = True)
    dormitory = models.CharField(verbose_name = '寝室', blank = True, max_length = 7)
    birthday = models.DateField(verbose_name = '生日', blank = True)
    phone_number = models.CharField(verbose_name = '手机号码', max_length = 11, blank = True)
    position = models.CharField(verbose_name = '职务', max_length = 20, blank = True)
    participation = models.PositiveSmallIntegerField(verbose_name = '活动参与分', default = 0)

    def __unicode__(self):
        return self.name

    def __str__(self):
        return self.name


class Activity(models.Model):
    id = models.AutoField(verbose_name = '索引', primary_key = True, unique = True)
    name = models.CharField(verbose_name = '活动名称', max_length = 15)
    date = models.DateField(verbose_name = '日期', blank = True)
    time = models.TimeField(verbose_name = '开始时间', blank = True)
    place = models.CharField(verbose_name = '地点', max_length = 15, blank = True)
    content = models.TextField(verbose_name = '内容', blank = True)
    participation = models.SmallIntegerField(verbose_name = '参与得分', default = 0)
    participator = models.TextField(verbose_name = '参与者学号', blank = True)
    images = models.TextField(verbose_name = '相关图片urls', blank = True)

    def __unicode__(self):
        return self.name

    def __str__(self):
        return self.name
from django.db import models

# Create your models here.


class Person(models.Model):
    student_number = models.CharField(verbose_name = '学号', max_length = 12, unique = True, primary_key = True)
    name = models.CharField(verbose_name = '姓名', max_length = 10)
    pinyin = models.CharField(verbose_name = '拼音', max_length = 25)
    gender = models.CharField(verbose_name = '性别', choices = (('F', 'Female'), ('M', 'Male')), max_length = 2)
    native_province = models.CharField(verbose_name = '籍贯', max_length = 10, blank = True)
    dormitory = models.CharField(verbose_name = '寝室', blank = True, max_length = 7)
    birthday = models.DateField(verbose_name = '生日', blank = True)
    phone_number = models.CharField(verbose_name = '手机号码', max_length = 11, blank = True)
    position = models.CharField(verbose_name = '职务', max_length = 20, blank = True)
    participation = models.PositiveSmallIntegerField(verbose_name = '活动参与分', default = 0)

    def __unicode__(self):
        return self.name

    def __str__(self):
        return self.name


class Activity(models.Model):
    id = models.AutoField(verbose_name = '索引', primary_key = True, unique = True)
    name = models.CharField(verbose_name = '活动名称', max_length = 15)
    date = models.DateField(verbose_name = '日期', blank = True)
    time = models.TimeField(verbose_name = '开始时间', blank = True)
    place = models.CharField(verbose_name = '地点', max_length = 15, blank = True)
    content = models.TextField(verbose_name = '内容', blank = True)
    participation = models.SmallIntegerField(verbose_name = '参与得分', default = 0)
    participator = models.TextField(verbose_name = '参与者学号', blank = True)
    images = models.TextField(verbose_name = '相关图片urls', blank = True)

    def __unicode__(self):
        return self.name

    def __str__(self):
        return self.name
Python
0.000001
68170652d104873ea4fa210daaedb05ba9bf3b44
Wrong syntax
config/gunicorn_conf.py
config/gunicorn_conf.py
import os
import psutil
import math

GIGS_OF_MEMORY = psutil.virtual_memory().total/1024/1024/1024.
NUM_CPUS = psutil.cpu_count()

bind = "0.0.0.0:8000"
pidfile = "/srv/newsblur/logs/gunicorn.pid"
logfile = "/srv/newsblur/logs/production.log"
accesslog = "/srv/newsblur/logs/production.log"
errorlog = "/srv/newsblur/logs/errors.log"
loglevel = "info"
name = "newsblur"
timeout = 120
max_requests = 1000
x_forwarded_for_header = "X-FORWARDED-FOR"
forwarded_allow_ips = "*"
limit_request_line = 16000
limit_request_fields = 1000
worker_tmp_dir = "/dev/shm"

if GIGS_OF_MEMORY > NUM_CPUS:
    workers = NUM_CPUS
else:
    workers = int(NUM_CPUS / 2)

if workers <= 4:
    workers = max(int(math.floor(GIGS_OF_MEMORY * 1000 / 512)), 4)

if workers > 8:
    workers = 8

if os.environ.get('DOCKERBUILD', False):
    workers = 1
import os
import psutil
import math

GIGS_OF_MEMORY = psutil.virtual_memory().total/1024/1024/1024.
NUM_CPUS = psutil.cpu_count()

bind = "0.0.0.0:8000"
pidfile = "/srv/newsblur/logs/gunicorn.pid"
logfile = "/srv/newsblur/logs/production.log"
accesslog = "/srv/newsblur/logs/production.log"
errorlog = "/srv/newsblur/logs/errors.log"
loglevel = "info"
name = "newsblur"
timeout = 120
max_requests = 1000
x_forwarded_for_header = "X-FORWARDED-FOR"
forwarded_allow_ips = "*"
limit_request_line = 16000
limit_request_fields = 1000
worker_tmp_dir = /dev/shm

if GIGS_OF_MEMORY > NUM_CPUS:
    workers = NUM_CPUS
else:
    workers = int(NUM_CPUS / 2)

if workers <= 4:
    workers = max(int(math.floor(GIGS_OF_MEMORY * 1000 / 512)), 4)

if workers > 8:
    workers = 8

if os.environ.get('DOCKERBUILD', False):
    workers = 1
Python
0.930983
c908db488f3e1d7aab0993780b38baaf4c995eb1
add docstrings
Lib/fontelemetry/datastructures/source.py
Lib/fontelemetry/datastructures/source.py
# Copyright 2019 Fontelemetry Authors and Contributors

# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at

#     http://www.apache.org/licenses/LICENSE-2.0

# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Library version
from fontelemetry import __version__


# -----------------------
# Base classes
# -----------------------
class Source(object):
    """A source specification specific object that holds source data.

    The Source object is instantiated with an external library object
    that is instantiated on source read and used to manipulate source
    file data along with object attributes that maintain the original
    source file path and define a retrievable calling code defined
    unique ID field.

    Attributes:
        obj: (instance-specific) A source file object that is instantiated with an external library
        path: (string) source file or directory path
        id: (string) unique ID for an instantiated Source object

    For glyphs source, the object is a glyphsLib.GSFont object.
    For UFO source, the object is a fontTools.ufoLib.glifLib.GlyphSet object
    """
    def __init__(self, source_object, path=None, source_id=None):
        """Inits Source object with source file read data from external libraries.

        Args:
            source_object: (instance-specific) A source file object that is instantiated with an external library
            path: (string) path to file or directory used to instantiate source_object
            source_id: (string) unique ID value for this object
        """
        self.obj = source_object
        self.path = path
        self.id = source_id

    def __repr__(self):
        return "({} v{} is defined as: {})".format(self.__class__, __version__, self.__dict__)

    def __str__(self):
        return "{}".format(self.__dict__)

    def get_source_path(self):
        """Returns source path attribute string."""
        return self.path

    def yield_ordered_glyphobj(self):
        """Generator that yields ordered external library glyph-level objects"""
        raise NotImplementedError


# ------------------------------------
# Inherited classes
# ------------------------------------
class GlyphsSource(Source):
    """See base class."""
    def __init__(self, source_object, path=None, source_id=None):
        Source.__init__(self, source_object, path=path, source_id=source_id)

    def yield_ordered_glyphobj(self):
        for glyph in self.obj.glyphs:
            yield glyph


class UFOSource(Source):
    """See base class."""
    def __init__(self, source_object, path=None, source_id=None):
        Source.__init__(self, source_object, path=path, source_id=source_id)

    def yield_ordered_glyphobj(self):
        # TODO
        pass
# Copyright 2019 Fontelemetry Authors and Contributors

# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at

#     http://www.apache.org/licenses/LICENSE-2.0

# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Library version
from fontelemetry import __version__


# -----------------------
# Base classes
# -----------------------
class Source(object):
    def __init__(self, source_object, path=None, source_id=None):
        self.obj = source_object
        self.path = path
        self.id = source_id

    def __repr__(self):
        return "({} v{} is defined as: {})".format(self.__class__, __version__, self.__dict__)

    def __str__(self):
        return "{}".format(self.__dict__)

    def get_source_path(self):
        return self.path

    def yield_ordered_glyphobj(self):
        raise NotImplementedError


# ------------------------------------
# Inherited classes
# ------------------------------------
class GlyphsSource(Source):
    def __init__(self, source_object, path=None, source_id=None):
        Source.__init__(self, source_object, path=path, source_id=source_id)

    def yield_ordered_glyphobj(self):
        for glyph in self.obj.glyphs:
            yield glyph


class UFOSource(Source):
    def __init__(self, source_object, path=None, source_id=None):
        Source.__init__(self, source_object, path=path, source_id=source_id)

    def yield_ordered_glyphobj(self):
        # TODO
        pass
Python
0
0e913b3fc20e69a6ff77bafcc144e00175f8ed83
Put new classes to submodule level import
indra/assemblers/english/__init__.py
indra/assemblers/english/__init__.py
from .assembler import EnglishAssembler, AgentWithCoordinates, SentenceBuilder
from .assembler import EnglishAssembler
Python
0.000001
6566ef14ff19640c238ba935ff21643d554b4654
Fix breakage when celery is running
indico/core/celery/__init__.py
indico/core/celery/__init__.py
# This file is part of Indico.
# Copyright (C) 2002 - 2018 European Organization for Nuclear Research (CERN).
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# Indico is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Indico; if not, see <http://www.gnu.org/licenses/>.

from __future__ import unicode_literals

from datetime import timedelta

from celery.schedules import crontab
from celery.signals import beat_init, import_modules
from flask import session

import indico
from indico.core import signals
from indico.core.celery.core import IndicoCelery
from indico.core.config import config
from indico.core.db import db
from indico.core.settings import SettingsProxy
from indico.core.settings.converters import DatetimeConverter
from indico.util.date_time import now_utc
from indico.util.i18n import _
from indico.web.flask.templating import template_hook
from indico.web.flask.util import url_for
from indico.web.menu import SideMenuItem


__all__ = ('celery',)


#: The Celery instance for all Indico tasks
celery = IndicoCelery('indico')

celery_settings = SettingsProxy('celery', {
    'last_ping': None,
    'last_ping_version': None
}, converters={
    'last_ping': DatetimeConverter
})


@signals.app_created.connect
def _load_default_modules(app, **kwargs):
    celery.loader.import_default_modules()  # load all tasks


@import_modules.connect
def _import_modules(*args, **kwargs):
    import indico.core.emails
    import indico.util.tasks
    signals.import_tasks.send()


@beat_init.connect
def _send_initial_heartbeat(*args, **kwargs):
    heartbeat.delay(initial=True)


@signals.menu.items.connect_via('admin-sidemenu')
def _extend_admin_menu(sender, **kwargs):
    if session.user.is_admin:
        return SideMenuItem('celery', _("Tasks"), url_for('celery.index'), 20, icon='time')


@template_hook('global-announcement', priority=-100, markup=False)
def _inject_announcement_header(**kwargs):
    if not session.user or not session.user.is_admin or config.DISABLE_CELERY_CHECK:
        return
    last_ping = celery_settings.get('last_ping')
    last_ping_version = celery_settings.get('last_ping_version')
    down = not last_ping or (now_utc() - last_ping) > timedelta(hours=1)
    mismatch = last_ping_version and last_ping_version != indico.__version__
    if down:
        text = _("The Celery task scheduler does not seem to be running. This means that email sending and periodic "
                 "tasks such as event reminders do not work.")
    elif mismatch:
        text = _("The Celery task scheduler is running a different Indico version.")
    else:
        return
    return 'warning', text, True


@celery.periodic_task(name='heartbeat', run_every=crontab(minute='*/30'))
def heartbeat(initial=False):
    celery_settings.set('last_ping', now_utc())
    if initial:
        celery_settings.set('last_ping_version', indico.__version__)
    db.session.commit()
# This file is part of Indico.
# Copyright (C) 2002 - 2018 European Organization for Nuclear Research (CERN).
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# Indico is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Indico; if not, see <http://www.gnu.org/licenses/>.

from __future__ import unicode_literals

from datetime import timedelta

from celery.schedules import crontab
from celery.signals import beat_init, import_modules
from flask import session

import indico
from indico.core import signals
from indico.core.celery.core import IndicoCelery
from indico.core.config import config
from indico.core.db import db
from indico.core.settings import SettingsProxy
from indico.core.settings.converters import DatetimeConverter
from indico.util.date_time import now_utc
from indico.util.i18n import _
from indico.web.flask.templating import template_hook
from indico.web.flask.util import url_for
from indico.web.menu import SideMenuItem


__all__ = ('celery',)


#: The Celery instance for all Indico tasks
celery = IndicoCelery('indico')

celery_settings = SettingsProxy('celery', {
    'last_ping': None,
    'last_ping_version': None
}, converters={
    'last_ping': DatetimeConverter
})


@signals.app_created.connect
def _load_default_modules(app, **kwargs):
    celery.loader.import_default_modules()  # load all tasks


@import_modules.connect
def _import_modules(*args, **kwargs):
    import indico.core.emails
    import indico.util.tasks
    signals.import_tasks.send()


@beat_init.connect
def _send_initial_heartbeat(*args, **kwargs):
    heartbeat.delay(initial=True)


@signals.menu.items.connect_via('admin-sidemenu')
def _extend_admin_menu(sender, **kwargs):
    if session.user.is_admin:
        return SideMenuItem('celery', _("Tasks"), url_for('celery.index'), 20, icon='time')


@template_hook('global-announcement', priority=-100, markup=False)
def _inject_announcement_header(**kwargs):
    if not session.user or not session.user.is_admin or config.DISABLE_CELERY_CHECK:
        return
    last_ping = celery_settings.get('last_ping')
    last_ping_version = celery_settings.get('last_ping_version')
    down = not last_ping or (now_utc() - last_ping) > timedelta(hours=1)
    mismatch = last_ping_version and last_ping_version != indico.__version__
    if down:
        text = _("The Celery task scheduler does not seem to be running. This means that email sending and periodic "
                 "tasks such as event reminders do not work.")
    elif mismatch:
        text = _("The Celery task scheduler is running a different Indico version.")
    return ('warning', text, True)


@celery.periodic_task(name='heartbeat', run_every=crontab(minute='*/30'))
def heartbeat(initial=False):
    celery_settings.set('last_ping', now_utc())
    if initial:
        celery_settings.set('last_ping_version', indico.__version__)
    db.session.commit()
Python
0.000172
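The commit above wires up a heartbeat liveness check: a periodic task stamps the time it last ran, and a request-time hook treats a stale stamp as "scheduler down". A minimal, framework-free sketch of that pattern, with a plain dict as a hypothetical stand-in for Indico's SettingsProxy:

import datetime

store = {'last_ping': None}

def heartbeat():
    # Periodic job: record the time we were last alive.
    store['last_ping'] = datetime.datetime.utcnow()

def scheduler_is_down(max_age=datetime.timedelta(hours=1)):
    # Request-time check: no ping, or a stale ping, means the worker is down.
    last = store['last_ping']
    return last is None or (datetime.datetime.utcnow() - last) > max_age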
a8e3a0b7896403d5d9de9edf147693befc90493d
Use SSL.
securelayer/views.py
securelayer/views.py
# -*- coding: utf-8 -*-
# (c) 2010-2011 Ruslan Popov <ruslan.popov@gmail.com>

from django.conf import settings
from django import forms
from django.http import Http404
from django.utils import simplejson
from django.utils.translation import ugettext_lazy as _
from django.shortcuts import redirect

from securelayer.http import Http

import gnupg

class NextStep(forms.Form):
    """ This form is used to redirect a client to SecureLayer site."""
    data = forms.CharField(widget=forms.HiddenInput)

def sign_this(data):
    """ Converts the data into GPG signed JSON. """
    jsonified = simplejson.dumps(data)
    gpg = gnupg.GPG(gnupghome=settings.GPG_HOMEDIR)
    signed = gpg.sign(jsonified, passphrase=settings.GPG_PASSPHRASE)
    return signed.data.decode('utf-8')

def secured_request(url, params={}, session_key=None):
    """ Realizes data transfer through SSL. Sends params to URL. Uses Cookies."""
    http = Http(settings.SECURELAYER_HOST, settings.SECURELAYER_PORT, 'https')
    if session_key:
        http.session_id = session_key
    if http.request(url, 'POST', {'data': sign_this(params)}):
        response = http.parse()
        if response.get('status', None) == 200:
            return (True, response, http.session_id)
    else:
        response = {'status': 598, 'desc': _('Request error.')}
    return (False, response, None)

def use_secured_form(request, form, context, caption, desc):
    """ Processes client's data through SecureLayer site."""
    if request.method == 'GET':
        session_key = request.GET.get('ss', None)
        if session_key:
            ready, response, cookie = secured_request(
                '/api/', {'service': 'data'}, session_key)
            form.import_json(response.get('data', None))
            return form
        else:
            context.update({
                'action': 'https://%s:%s/show/' % (
                    settings.SECURELAYER_HOST,
                    settings.SECURELAYER_PORT),
                'button_list': [{'title': _(u'Redirect'),
                                 'name': 'redirect',
                                 'type': 'submit'}, ],
                'body': _(u'You will be redirected on SecureLayer '
                          'for secure data entering.')})
            params = {
                'return_to': request.build_absolute_uri(),
                'form': form.as_json(caption=caption, desc=desc)
            }
            return NextStep(initial={'data': sign_this(params)})
    else:
        # a POST request can never reach this form
        raise Http404

def form(local_form, caption=None, desc=None):
    """ SecureLayer's Decorator. """
    def renderer(view):
        def wrapper(request, *args, **kwargs):
            context = {
                'action': '.',
                'body': _(u'The data would be transferred by open channel.'),
            }
            check = ready, status, session_key = \
                secured_request('/api/', {'service': 'check'})
            if not ready:
                form = local_form(request.POST or None, *args, **kwargs)
            else:
                form = use_secured_form(request, local_form(), context, caption, desc)
            form.request = request
            return view(request, form, context, check, *args, **kwargs)
        return wrapper
    return renderer
# -*- coding: utf-8 -*-
# (c) 2010-2011 Ruslan Popov <ruslan.popov@gmail.com>

from django.conf import settings
from django import forms
from django.http import Http404
from django.utils import simplejson
from django.utils.translation import ugettext_lazy as _
from django.shortcuts import redirect

from securelayer.http import Http

import gnupg

class NextStep(forms.Form):
    """ This form is used to redirect a client to SecureLayer site."""
    data = forms.CharField(widget=forms.HiddenInput)

def sign_this(data):
    """ Converts the data into GPG signed JSON. """
    jsonified = simplejson.dumps(data)
    gpg = gnupg.GPG(gnupghome=settings.GPG_HOMEDIR)
    signed = gpg.sign(jsonified, passphrase=settings.GPG_PASSPHRASE)
    return signed.data.decode('utf-8')

def secured_request(url, params={}, session_key=None):
    """ Realizes data transfer through SSL. Sends params to URL. Uses Cookies."""
    http = Http(settings.SECURELAYER_HOST, settings.SECURELAYER_PORT)
    if session_key:
        http.session_id = session_key
    if http.request(url, 'POST', {'data': sign_this(params)}):
        response = http.parse()
        if response.get('status', None) == 200:
            return (True, response, http.session_id)
    else:
        response = {'status': 598, 'desc': _('Request error.')}
    return (False, response, None)

def use_secured_form(request, form, context, caption, desc):
    """ Processes client's data through SecureLayer site."""
    if request.method == 'GET':
        session_key = request.GET.get('ss', None)
        if session_key:
            ready, response, cookie = secured_request(
                '/api/', {'service': 'data'}, session_key)
            form.import_json(response.get('data', None))
            return form
        else:
            context.update({
                'action': 'http://%s:%s/show/' % (
                    settings.SECURELAYER_HOST,
                    settings.SECURELAYER_PORT),
                'button_list': [{'title': _(u'Redirect'),
                                 'name': 'redirect',
                                 'type': 'submit'}, ],
                'body': _(u'You will be redirected on SecureLayer '
                          'for secure data entering.')})
            params = {
                'return_to': request.build_absolute_uri(),
                'form': form.as_json(caption=caption, desc=desc)
            }
            return NextStep(initial={'data': sign_this(params)})
    else:
        # a POST request can never reach this form
        raise Http404

def form(local_form, caption=None, desc=None):
    """ SecureLayer's Decorator. """
    def renderer(view):
        def wrapper(request, *args, **kwargs):
            context = {
                'action': '.',
                'body': _(u'The data would be transferred by open channel.'),
            }
            check = ready, status, session_key = \
                secured_request('/api/', {'service': 'check'})
            if not ready:
                form = local_form(request.POST or None, *args, **kwargs)
            else:
                form = use_secured_form(request, local_form(), context, caption, desc)
            form.request = request
            return view(request, form, context, check, *args, **kwargs)
        return wrapper
    return renderer
Python
0
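The record above signs a JSON payload with GPG before sending it over the wire. A condensed sketch of that sign-then-send idea using python-gnupg, as the file itself does; the keyring path and passphrase here are placeholders, not values from the record:

import json
import gnupg

def sign_payload(payload, gnupg_home='/path/to/keyring', passphrase='secret'):
    gpg = gnupg.GPG(gnupghome=gnupg_home)
    signed = gpg.sign(json.dumps(payload), passphrase=passphrase)
    # Clear-signed text: safe to POST as an ordinary form field.
    return signed.data.decode('utf-8')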
36608c6bd0035e4a78da2cd30d9fcca2c660ec3a
Add prepare in rpc client
common/numeter/queue/client.py
common/numeter/queue/client.py
from oslo import messaging
from oslo.config import cfg

import logging
LOG = logging.getLogger(__name__)

class BaseAPIClient(messaging.RPCClient):
    def __init__(self, transport):
        target = messaging.Target(topic='default_topic')
        super(BaseAPIClient, self).__init__(transport, target)

    def ping(self, context, topic, args=None):
        print 'Launch ping topic=%s' % topic
        cctxt = self.prepare(topic=topic)
        #return cctxt.call(context,'ping', args=args)
        return cctxt.cast(context,'ping', args=args)

    def poller_msg(self, context, topic, args=None):
        LOG.info('Send message %s context %s' % (topic, context))
        cctxt = self.prepare(topic=topic)
        return cctxt.cast(context,'poller_msg', args=args)

def get_rpc_client(hosts=[]):
    conf = cfg.CONF
    conf.transport_url = 'rabbit://'
    conf.rabbit_max_retries = 1
    conf.rabbit_hosts = hosts
    transport = messaging.get_transport(conf)
    return BaseAPIClient(transport)
from oslo import messaging
from oslo.config import cfg

import logging
LOG = logging.getLogger(__name__)

class BaseAPIClient(messaging.RPCClient):
    def __init__(self, transport):
        target = messaging.Target(topic='default_topic')
        super(BaseAPIClient, self).__init__(transport, target)

    def ping(self, context, topic, args=None):
        print 'Launch ping topic=%s' % topic
        cctxt = self.prepare(topic=topic)
        #return cctxt.call(context,'ping', args=args)
        return cctxt.cast(context,'ping', args=args)

    def poller_msg(self, context, topic, args=None):
        LOG.info('Send message %s context %s' % (topic, context))
        args['topic'] = topic
        return self.cast(context,'poller_msg', args=args)

def get_rpc_client(hosts=[]):
    conf = cfg.CONF
    conf.transport_url = 'rabbit://'
    conf.rabbit_max_retries = 1
    conf.rabbit_hosts = hosts
    transport = messaging.get_transport(conf)
    return BaseAPIClient(transport)
Python
0
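The change above swaps topic-smuggling through the arguments for RPCClient.prepare(), which returns a per-call copy of the client with an overridden target. A sketch of that shape, mirroring the record's oslo.messaging usage; broker configuration and the method name are taken from the record, not verified here:

from oslo import messaging

class Client(messaging.RPCClient):
    def __init__(self, transport):
        super(Client, self).__init__(transport, messaging.Target(topic='default_topic'))

    def notify_host(self, context, topic, payload):
        cctxt = self.prepare(topic=topic)  # per-call copy with its own topic
        return cctxt.cast(context, 'poller_msg', args=payload)  # fire-and-forget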
bead9f9051ca1ca9b1823547732e847dd86e1ea1
Add verbose
pysteps/advection/semilagrangian.py
pysteps/advection/semilagrangian.py
"""Implementation of the semi-Lagrangian method of Germann et al (2002). """ import numpy as np import scipy.ndimage.interpolation as ip import time def extrapolate(R, V, num_timesteps, outval=np.nan, **kwargs): """Apply semi-Lagrangian extrapolation to a two-dimensional precipitation field. Parameters ---------- R : array-like Array of shape (m,n) containing the input precipitation field. V : array-like Array of shape (2,m,n) containing the x- and y-components of the m*n advection field. num_timesteps : int Number of time steps to extrapolate. outval : float Optional argument for specifying the value for pixels advected from outside the domain. If outval is set to 'min', the value is taken as the minimum value of R. Default : np.nan Optional kwargs: --------------- D_prev : array-like Optional initial displacement vector field of shape (2,m,n) for the extrapolation. Default : None n_iter : int Number of inner iterations in the semi-Lagrangian scheme. Default : 3 inverse : bool If True, the extrapolation trajectory is computed backward along the flow (default), forward otherwise. Default : True return_displacement : bool If True, return the total advection velocity (displacement) between the initial input field and the advected one integrated along the trajectory. Default : False Returns ------- out : array or tuple If return_displacement=False, return a time series extrapolated fields of shape (num_timesteps,m,n). Otherwise, return a tuple containing the extrapolated fields and the total displacement along the advection trajectory. """ if len(R.shape) != 2: raise ValueError("R must be a two-dimensional array") if len(V.shape) != 3: raise ValueError("V must be a three-dimensional array") # defaults verbose = kwargs.get("verbose", False) D_prev = kwargs.get("D_prev", None) n_iter = kwargs.get("n_iter", 3) inverse = kwargs.get("inverse", True) return_displacement = kwargs.get("return_displacement", False) if verbose: print("Computing the advection with the semi-lagrangian scheme.") t0 = time.time() if outval == "min": outval = np.nanmin(R) coeff = 1.0 if not inverse else -1.0 X,Y = np.meshgrid(np.arange(V.shape[2]), np.arange(V.shape[1])) XY = np.stack([X, Y]) R_e = [] if D_prev is None: D = np.zeros((2, V.shape[1], V.shape[2])) else: D = D_prev.copy() for t in range(num_timesteps): V_inc = np.zeros(D.shape) for k in range(n_iter): if t > 0 or k > 0 or D_prev is not None: XYW = XY + D - V_inc / 2.0 XYW = [XYW[1, :, :], XYW[0, :, :]] VWX = ip.map_coordinates(V[0, :, :], XYW, mode="nearest", order=0, prefilter=False) VWY = ip.map_coordinates(V[1, :, :], XYW, mode="nearest", order=0, prefilter=False) else: VWX = V[0, :, :] VWY = V[1, :, :] V_inc[0, :, :] = VWX / n_iter V_inc[1, :, :] = VWY / n_iter D += coeff * V_inc XYW = XY + D XYW = [XYW[1, :, :], XYW[0, :, :]] IW = ip.map_coordinates(R, XYW, mode="constant", cval=outval, order=0, prefilter=False) R_e.append(np.reshape(IW, R.shape)) if verbose: print("--- %s seconds ---" % (time.time() - t0)) if not return_displacement: return np.stack(R_e) else: return np.stack(R_e), D
"""Implementation of the semi-Lagrangian method of Germann et al (2002). """ import numpy as np import scipy.ndimage.interpolation as ip def extrapolate(R, V, num_timesteps, outval=np.nan, **kwargs): """Apply semi-Lagrangian extrapolation to a two-dimensional precipitation field. Parameters ---------- R : array-like Array of shape (m,n) containing the input precipitation field. V : array-like Array of shape (2,m,n) containing the x- and y-components of the m*n advection field. num_timesteps : int Number of time steps to extrapolate. outval : float Optional argument for specifying the value for pixels advected from outside the domain. If outval is set to 'min', the value is taken as the minimum value of R. Default : np.nan Optional kwargs: --------------- D_prev : array-like Optional initial displacement vector field of shape (2,m,n) for the extrapolation. Default : None n_iter : int Number of inner iterations in the semi-Lagrangian scheme. Default : 3 inverse : bool If True, the extrapolation trajectory is computed backward along the flow (default), forward otherwise. Default : True return_displacement : bool If True, return the total advection velocity (displacement) between the initial input field and the advected one integrated along the trajectory. Default : False Returns ------- out : array or tuple If return_displacement=False, return a time series extrapolated fields of shape (num_timesteps,m,n). Otherwise, return a tuple containing the extrapolated fields and the total displacement along the advection trajectory. """ if len(R.shape) != 2: raise ValueError("R must be a two-dimensional array") if len(V.shape) != 3: raise ValueError("V must be a three-dimensional array") # defaults D_prev = kwargs.get("D_prev", None) n_iter = kwargs.get("n_iter", 3) inverse = kwargs.get("inverse", True) return_displacement = kwargs.get("return_displacement", False) if outval == "min": outval = np.nanmin(R) coeff = 1.0 if not inverse else -1.0 X,Y = np.meshgrid(np.arange(V.shape[2]), np.arange(V.shape[1])) XY = np.stack([X, Y]) R_e = [] if D_prev is None: D = np.zeros((2, V.shape[1], V.shape[2])) else: D = D_prev.copy() for t in range(num_timesteps): V_inc = np.zeros(D.shape) for k in range(n_iter): if t > 0 or k > 0 or D_prev is not None: XYW = XY + D - V_inc / 2.0 XYW = [XYW[1, :, :], XYW[0, :, :]] VWX = ip.map_coordinates(V[0, :, :], XYW, mode="nearest", order=0, prefilter=False) VWY = ip.map_coordinates(V[1, :, :], XYW, mode="nearest", order=0, prefilter=False) else: VWX = V[0, :, :] VWY = V[1, :, :] V_inc[0, :, :] = VWX / n_iter V_inc[1, :, :] = VWY / n_iter D += coeff * V_inc XYW = XY + D XYW = [XYW[1, :, :], XYW[0, :, :]] IW = ip.map_coordinates(R, XYW, mode="constant", cval=outval, order=0, prefilter=False) R_e.append(np.reshape(IW, R.shape)) if not return_displacement: return np.stack(R_e) else: return np.stack(R_e), D
Python
0.999999
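The "Add verbose" commit above follows a common opt-in diagnostics pattern: the timer and prints only exist when the caller asks for them. The pattern reduced to its core, with a stand-in workload:

import time

def compute(data, verbose=False, **kwargs):
    if verbose:
        print("Starting computation.")
        t0 = time.time()
    result = [x * 2 for x in data]  # stand-in for the real work
    if verbose:
        print("--- %s seconds ---" % (time.time() - t0))
    return result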
34a811429e2025f396f8997aeb628253487537fb
Change Sparser call pattern along with actual exec
indra/sources/sparser/sparser_api.py
indra/sources/sparser/sparser_api.py
from __future__ import absolute_import, print_function, unicode_literals
from builtins import dict, str
import os
import json
import logging
import subprocess
import xml.etree.ElementTree as ET
from indra.util import UnicodeXMLTreeBuilder as UTB
from .processor import SparserXMLProcessor, SparserJSONProcessor

logger = logging.getLogger('sparser')

sparser_path_var = 'SPARSERPATH'
sparser_path = os.environ.get(sparser_path_var)

def process_xml(xml_str):
    try:
        tree = ET.XML(xml_str, parser=UTB())
    except ET.ParseError as e:
        logger.error('Could not parse XML string')
        logger.error(e)
        return None
    sp = _process_elementtree(tree)
    return sp

def process_nxml(fname, output_format='json'):
    if not sparser_path or not os.path.exists(sparser_path):
        logger.error('Sparser executable not set in %s' % sparser_path_var)
        return None
    if output_format == 'xml':
        format_flag = '-x'
        suffix = '.xml'
    elif output_format == 'json':
        format_flag = '-j'
        suffix = '.json'
    else:
        logger.error('Unknown output format: %s' % output_format)
    sparser_exec_path = os.path.join(sparser_path, 'save-semantics.sh')
    subprocess.call([sparser_exec_path, format_flag, fname])
    output_fname = fname.split('.')[0] + '-semantics' + suffix
    with open(output_fname, 'rt') as fh:
        json_dict = json.load(fh)
    return process_json_dict(json_dict)

def process_json_dict(json_dict):
    sp = SparserJSONProcessor(json_dict)
    sp.get_statements()
    return sp

def _process_elementtree(tree):
    sp = SparserXMLProcessor(tree)
    sp.get_modifications()
    sp.get_activations()
    return sp
from __future__ import absolute_import, print_function, unicode_literals
from builtins import dict, str
import os
import logging
import subprocess
import xml.etree.ElementTree as ET
from indra.util import UnicodeXMLTreeBuilder as UTB
from .processor import SparserProcessor

logger = logging.getLogger('sparser')

sparser_path_var = 'SPARSERPATH'
sparser_path = os.environ.get(sparser_path_var)

def process_xml(xml_str):
    try:
        tree = ET.XML(xml_str, parser=UTB())
    except ET.ParseError as e:
        logger.error('Could not parse XML string')
        logger.error(e)
        return None
    sp = _process_elementtree(tree)
    return sp

def process_nxml(fname, output_format='json'):
    if not sparser_path or not os.path.exists(sparser_path):
        logger.error('Sparser executable not set in %s' % sparser_path_var)
        return None
    if output_format == 'xml':
        format_flag = '-x'
        suffix = '.xml'
    elif output_format == 'json':
        format_flag = '-j'
        suffix = '.json'
    else:
        logger.error('Unknown output format: %s' % output_format)
    subprocess.call([sparser_path, format_flag, fname])
    output_fname = fname.split('.')[0] + '-semantics' + suffix
    with open(output_fname, 'rb') as fh:
        json_dict = json.load(fh)
    return process_json_dict(json_dict)

def process_json_dict(json_dict):
    sp = SparserJSONProcessor(json_dict)
    sp.get_statements()
    return sp

def _process_elementtree(tree):
    sp = SparserXMLProcessor(tree)
    sp.get_modifications()
    sp.get_activations()
    return sp
Python
0
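The corrected call pattern above resolves an executable inside a configured directory, runs it, and reads the output file it produces. A sketch of that shape; the script name and the output-naming rule come from the record and are not verified beyond it:

import os
import json
import subprocess

def run_tool(tool_dir, input_fname):
    exec_path = os.path.join(tool_dir, 'save-semantics.sh')  # executable lives inside the directory
    subprocess.call([exec_path, '-j', input_fname])
    output_fname = input_fname.split('.')[0] + '-semantics.json'
    with open(output_fname, 'rt') as fh:  # text mode, since json.load expects str in Python 3
        return json.load(fh)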
9271eea8191a5be0fd74d9b3be72acf1f3d6a213
Store challenge, signature as str/unicode for JSON serialization
crossbar/router/auth.py
crossbar/router/auth.py
#####################################################################################
#
#  Copyright (C) Tavendo GmbH
#
#  Unless a separate license agreement exists between you and Tavendo GmbH (e.g. you
#  have purchased a commercial license), the license terms below apply.
#
#  Should you enter into a separate license agreement after having received a copy of
#  this software, then the terms of such license agreement replace the terms below at
#  the time at which such license agreement becomes effective.
#
#  In case a separate license agreement ends, and such agreement ends without being
#  replaced by another separate license agreement, the license terms below apply
#  from the time at which said agreement ends.
#
#  LICENSE TERMS
#
#  This program is free software: you can redistribute it and/or modify it under the
#  terms of the GNU Affero General Public License, version 3, as published by the
#  Free Software Foundation. This program is distributed in the hope that it will be
#  useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
#
#  See the GNU Affero General Public License Version 3 for more details.
#
#  You should have received a copy of the GNU Affero General Public license along
#  with this program. If not, see <http://www.gnu.org/licenses/agpl-3.0.en.html>.
#
#####################################################################################

from __future__ import absolute_import

import json

from autobahn import util
from autobahn.wamp import auth

__all__ = (
    'PendingAuth',
    'PendingAuthPersona',
    'PendingAuthWampCra',
    'PendingAuthTicket'
)

class PendingAuth:
    """
    Base class for pending WAMP authentications.
    """

class PendingAuthPersona(PendingAuth):
    """
    Pending Mozilla Persona authentication.
    """

    def __init__(self, provider, audience, role=None):
        self.authmethod = u"mozilla_persona"
        self.provider = provider
        self.audience = audience
        self.role = role

class PendingAuthWampCra(PendingAuth):
    """
    Pending WAMP-CRA authentication.
    """

    def __init__(self, session, authid, authrole, authprovider, secret):
        """
        :param session: The WAMP session ID of the session being authenticated.
        :type session: int
        :param authid: The authentication ID of the authenticating principal.
        :type authid: unicode
        :param authrole: The role under which the principal will be authenticated when
            the authentication succeeds.
        :type authrole: unicode
        :param authprovider: Optional authentication provider.
        :type authprovider: unicode or None
        :param secret: The secret of the principal being authenticated. Either a password
            or a salted password.
        :type secret: str
        """
        self.session = session
        self.authmethod = u"wampcra"
        self.authid = authid
        self.authrole = authrole
        self.authprovider = authprovider

        challenge_obj = {
            'authid': self.authid,
            'authrole': self.authrole,
            'authmethod': u'wampcra',
            'authprovider': self.authprovider,
            'session': self.session,
            'nonce': util.newid(),
            'timestamp': util.utcnow()
        }

        self.challenge = json.dumps(challenge_obj, ensure_ascii=False)
        self.signature = auth.compute_wcs(secret, self.challenge.encode('utf8')).decode('ascii')

class PendingAuthTicket(PendingAuth):
    """
    Pending Ticket-based authentication.
    """

    def __init__(self, realm, authid, authrole, authprovider, ticket):
        """
        :param authid: The authentication ID of the authenticating principal.
        :type authid: unicode
        :param authrole: The role under which the principal will be authenticated when
            the authentication succeeds.
        :type authrole: unicode
        :param authprovider: Optional authentication provider (URI of procedure to call).
        :type authprovider: unicode or None
        :param ticket: The secret/ticket the authenticating principal will need to provide
            (or `None` when using dynamic authenticator).
        :type ticket: bytes or None
        """
        self.authmethod = u"ticket"
        self.realm = realm
        self.authid = authid
        self.authrole = authrole
        self.authprovider = authprovider
        self.ticket = ticket
#####################################################################################
#
#  Copyright (C) Tavendo GmbH
#
#  Unless a separate license agreement exists between you and Tavendo GmbH (e.g. you
#  have purchased a commercial license), the license terms below apply.
#
#  Should you enter into a separate license agreement after having received a copy of
#  this software, then the terms of such license agreement replace the terms below at
#  the time at which such license agreement becomes effective.
#
#  In case a separate license agreement ends, and such agreement ends without being
#  replaced by another separate license agreement, the license terms below apply
#  from the time at which said agreement ends.
#
#  LICENSE TERMS
#
#  This program is free software: you can redistribute it and/or modify it under the
#  terms of the GNU Affero General Public License, version 3, as published by the
#  Free Software Foundation. This program is distributed in the hope that it will be
#  useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
#
#  See the GNU Affero General Public License Version 3 for more details.
#
#  You should have received a copy of the GNU Affero General Public license along
#  with this program. If not, see <http://www.gnu.org/licenses/agpl-3.0.en.html>.
#
#####################################################################################

from __future__ import absolute_import

import json

from autobahn import util
from autobahn.wamp import auth

__all__ = (
    'PendingAuth',
    'PendingAuthPersona',
    'PendingAuthWampCra',
    'PendingAuthTicket'
)

class PendingAuth:
    """
    Base class for pending WAMP authentications.
    """

class PendingAuthPersona(PendingAuth):
    """
    Pending Mozilla Persona authentication.
    """

    def __init__(self, provider, audience, role=None):
        self.authmethod = u"mozilla_persona"
        self.provider = provider
        self.audience = audience
        self.role = role

class PendingAuthWampCra(PendingAuth):
    """
    Pending WAMP-CRA authentication.
    """

    def __init__(self, session, authid, authrole, authprovider, secret):
        """
        :param session: The WAMP session ID of the session being authenticated.
        :type session: int
        :param authid: The authentication ID of the authenticating principal.
        :type authid: unicode
        :param authrole: The role under which the principal will be authenticated when
            the authentication succeeds.
        :type authrole: unicode
        :param authprovider: Optional authentication provider.
        :type authprovider: unicode or None
        :param secret: The secret of the principal being authenticated. Either a password
            or a salted password.
        :type secret: str
        """
        self.session = session
        self.authmethod = u"wampcra"
        self.authid = authid
        self.authrole = authrole
        self.authprovider = authprovider

        challenge_obj = {
            'authid': self.authid,
            'authrole': self.authrole,
            'authmethod': u'wampcra',
            'authprovider': self.authprovider,
            'session': self.session,
            'nonce': util.newid(),
            'timestamp': util.utcnow()
        }

        # challenge must be bytes
        self.challenge = json.dumps(challenge_obj, ensure_ascii=False).encode('utf8')
        self.signature = auth.compute_wcs(secret, self.challenge)

class PendingAuthTicket(PendingAuth):
    """
    Pending Ticket-based authentication.
    """

    def __init__(self, realm, authid, authrole, authprovider, ticket):
        """
        :param authid: The authentication ID of the authenticating principal.
        :type authid: unicode
        :param authrole: The role under which the principal will be authenticated when
            the authentication succeeds.
        :type authrole: unicode
        :param authprovider: Optional authentication provider (URI of procedure to call).
        :type authprovider: unicode or None
        :param ticket: The secret/ticket the authenticating principal will need to provide
            (or `None` when using dynamic authenticator).
        :type ticket: bytes or None
        """
        self.authmethod = u"ticket"
        self.realm = realm
        self.authid = authid
        self.authrole = authrole
        self.authprovider = authprovider
        self.ticket = ticket
Python
0
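The point of the commit above is that Python 3's json module refuses bytes, so values that must round-trip through JSON are kept as str and only encoded at the crypto boundary. A minimal demonstration of the same boundary, with hmac/base64 standing in for autobahn's compute_wcs (an assumption about its internals, not a claim):

import json
import hmac
import hashlib
import base64

challenge = json.dumps({'nonce': 'abc123'})  # str, JSON-safe
digest = hmac.new(b'secret', challenge.encode('utf8'), hashlib.sha256).digest()
signature = base64.b64encode(digest).decode('ascii')  # back to str
# Works; putting raw bytes values in here would raise TypeError.
json.dumps({'challenge': challenge, 'signature': signature})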
a9365aa4a32fbe358a6f74b5730a7a3a0a8b3cda
Convert journal to pickled extra
qualia/journal.py
qualia/journal.py
import base64
import datetime
import pickle
import sqlite3

class Journal:
    def __init__(self, filename):
        self.db = sqlite3.connect(
            filename,
            detect_types = sqlite3.PARSE_DECLTYPES
        )
        self.upgrade_if_needed()

        self.f = open(filename, 'ab')

    def upgrade_if_needed(self):
        version = self.db.execute('PRAGMA user_version').fetchone()[0]

        if version < 1:
            self.db.executescript("""
                CREATE TABLE journal (
                    serial INTEGER PRIMARY KEY,
                    timestamp TIMESTAMP,
                    source TEXT,
                    file TEXT,
                    op TEXT,
                    extra BLOB
                );
            """)

        self.db.execute("PRAGMA user_version = 1")

    def append(self, source, file, op, *args, time = None):
        cur = self.db.cursor()
        cur.execute('''
            INSERT INTO journal(timestamp, source, file, op, extra)
                VALUES(?, ?, ?, ?, ?)
        ''', (time or datetime.datetime.now(), source, file, op, pickle.dumps(args)))
        self.db.commit()
import datetime
import sqlite3

class Journal:
    def __init__(self, filename):
        self.db = sqlite3.connect(
            filename,
            detect_types = sqlite3.PARSE_DECLTYPES
        )
        self.upgrade_if_needed()

        self.f = open(filename, 'ab')

    def upgrade_if_needed(self):
        version = self.db.execute('PRAGMA user_version').fetchone()[0]

        if version < 1:
            self.db.executescript("""
                CREATE TABLE journal (
                    serial INTEGER PRIMARY KEY,
                    timestamp TIMESTAMP,
                    source TEXT,
                    file TEXT,
                    op TEXT,
                    extra BLOB
                );
            """)

        self.db.execute("PRAGMA user_version = 1")

    def append(self, source, file, op, *args, time = None):
        cur = self.db.cursor()
        cur.execute('''
            INSERT INTO journal(timestamp, source, file, op, extra)
                VALUES(?, ?, ?, ?, ?)
        ''', (time or datetime.datetime.now(), source, file, op,
              (b'\t'.join(str(arg).encode('unicode-escape') for arg in args))))
        self.db.commit()
Python
0.999999
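The gain the commit above buys by pickling: arbitrary Python values survive the trip into the BLOB column and back, where the old tab-joined string encoding was lossy. A self-contained round-trip against an in-memory database:

import pickle
import sqlite3

db = sqlite3.connect(':memory:')
db.execute('CREATE TABLE journal (op TEXT, extra BLOB)')
args = ('set-metadata', {'width': 2}, [1, 2, 3])  # mixed types, not just strings
db.execute('INSERT INTO journal VALUES (?, ?)', ('test', pickle.dumps(args)))
blob = db.execute('SELECT extra FROM journal').fetchone()[0]
assert pickle.loads(blob) == args  # values come back intact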
ce6e67890b5860d89e9c3ea6628a7a94ad9e10b3
Update Default_Settings.py
components/Default_Settings.py
components/Default_Settings.py
#Sequences of actual rotors used in WWII, format is name, sequences, turnover notch(es)
rotor_sequences = {
    'I': ('EKMFLGDQVZNTOWYHXUSPAIBRCJ', ('Q')),
    'II': ('AJDKSIRUXBLHWTMCQGZNPYFVOE', ('E')),
    'III': ('BDFHJLCPRTXVZNYEIWGAKMUSQO', ('V')),
    'IV': ('ESOVPZJAYQUIRHXLNFTGKDCMWB', ('J')),
    'V': ('VZBRGITYUPSDNHLXAWMJQOFECK', ('Z')),
    'VI': ('JPGVOUMFYQBENHZRDKASXLICTW', ('Z', 'M')),
    'VII': ('NZJHGRCXMYSWBOUFAIVLPEKQDT', ('Z', 'M')),
    'VIII': ('FKQHTLXOCBJSPDZRAMEWNIUYGV', ('Z', 'M')),
    'IC': ('DMTWSILRUYQNKFEJCAZBPGXOHV', ('Q')), #civilian
    'IIC': ('HQZGPJTMOBLNCIFDYAWVEUSRKX', ('Q')), #civilian
    'IIIC': ('UQNTLSZFMREHDPXKIBVYGJCWOA', ('Q')), #civilian
    'BETA': ('LEYJVCNIXWPBQMDRTAKZGFUHOS', None), #Position 4 Only
    'GAMMA': ('FSOKANUERHMBTIYCWLQPZXVGJD', None) #Position 4 Only
}

#Simple letter substitutions before the sequence is sent back through the rotors. Notably a letter cannot be encoded as itself here.
reflector_sequences = {
    'A': 'EJMZALYXVBWFCRQUONTSPIKHGD',
    'B': 'YRUHQSLDPXNGOKMIEBFZCWVJAT',
    'C': 'FVPJIAOYEDRZXWGCTKUQSBNMHL',
    'B Thin': 'ENKQAUYWJICOPBLMDXZVFTHRGS',
    'C Thin': 'RDOBJNTKVEHMLFCWZAXGYIPSUQ',
    'None': 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' #Early models had no reflector
}

#Entry wheel for Enigma I
ETW = {
    'Standard': 'ABCDEFGHIJKLMNOPQRSTUVWXYZ',
    'Navy': 'QWERTZUIOPASDFGHJKLYXCVBNM'
}
#Sequences of actual rotors used in WWII, format is name, sequences, turnover notch(es)
rotor_sequences = {
    'I': ('EKMFLGDQVZNTOWYHXUSPAIBRCJ', ('Q')),
    'II': ('AJDKSIRUXBLHWTMCQGZNPYFVOE', ('E')),
    'III': ('BDFHJLCPRTXVZNYEIWGAKMUSQO', ('V')),
    'IV': ('ESOVPZJAYQUIRHXLNFTGKDCMWB', ('J')),
    'V': ('VZBRGITYUPSDNHLXAWMJQOFECK', ('Z')),
    'VI': ('JPGVOUMFYQBENHZRDKASXLICTW', ('Z', 'M')),
    'VII': ('NZJHGRCXMYSWBOUFAIVLPEKQDT', ('Z', 'M')),
    'VIII': ('FKQHTLXOCBJSPDZRAMEWNIUYGV', ('Z', 'M')),
    'IC': ('DMTWSILRUYQNKFEJCAZBPGXOHV', ('Q')), #civilian
    'IIC': ('HQZGPJTMOBLNCIFDYAWVEUSRKX', ('Q')), #civilian
    'IIIC': ('UQNTLSZFMREHDPXKIBVYGJCWOA', ('Q')), #civilian
    'BETA': ('LEYJVCNIXWPBQMDRTAKZGFUHOS', None), #Position 4 Only
    'GAMMA': ('FSOKANUERHMBTIYCWLQPZXVGJD', None) #Position 4 Only
}

#Simple letter substitutions before the sequence is sent back through the rotors. Notably a letter cannot be encoded as itself here.
reflector_sequences = {
    'A': 'EJMZALYXVBWFCRQUONTSPIKHGD',
    'B': 'YRUHQSLDPXNGOKMIEBFZCWVJAT',
    'C': 'FVPJIAOYEDRZXWGCTKUQSBNMHL',
    'B Thin': 'ENKQAUYWJICOPBLMDXZVFTHRGS',
    'C Thin': 'RDOBJNTKVEHMLFCWZAXGYIPSUQ',
    'None': 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' #Early models had no reflector
}

#Entry wheel for Enigma I
ETW = {
    'Standard': 'ABCDEFGHIJKLMNOPQRSTUVWXYZ',
    'Navy': 'QWERTZIOPASDFGHJKLYXCVBNM'
}
Python
0.000001
64bd44d4338d57a68ff07527d1d2c3b37960c63b
call parent filter, cleanup
web/impact/impact/v1/views/mentor_program_office_hour_list_view.py
web/impact/impact/v1/views/mentor_program_office_hour_list_view.py
# MIT License
# Copyright (c) 2019 MassChallenge, Inc.

from django.db.models import Value as V
from django.db.models.functions import Concat

from impact.v1.views.base_list_view import BaseListView
from impact.v1.helpers import (
    MentorProgramOfficeHourHelper,
)

ID_FIELDS = ['mentor_id', 'finalist_id']
NAME_FIELDS = ['mentor_name', 'finalist_name']

class MentorProgramOfficeHourListView(BaseListView):
    view_name = "office_hour"
    helper_class = MentorProgramOfficeHourHelper

    def filter(self, qs):
        qs = super().filter(qs)
        if not self.request.query_params.keys():
            return qs
        if self._has_participant_filter(NAME_FIELDS):
            return self._filter_by_participant_name(qs)
        if self._has_participant_filter(ID_FIELDS):
            param_items = self.request.query_params.dict().items()
            return self._filter_by_participant_id(qs, param_items)

    def _filter_by_participant_name(self, qs):
        params = self.request.query_params
        mentor_name = params.get('mentor_name', None)
        finalist_name = params.get('finalist_name', None)
        if mentor_name:
            return self._generate_name_query(qs, 'mentor', mentor_name)
        if finalist_name:
            return self._generate_name_query(qs, 'finalist', finalist_name)
        return qs.none()

    def _generate_name_query(self, qs, user, name_value):
        first_name_field = '{}__first_name'.format(user)
        last_name_field = '{}__last_name'.format(user)
        result = qs.annotate(
            full_name=Concat(
                first_name_field, V(' '), last_name_field)).filter(
            full_name__icontains=name_value)
        return result

    def _filter_by_participant_id(self, qs, param_items):
        filter_values = {
            key: value for key, value in param_items
            if key in ID_FIELDS and value.isdigit()}
        if filter_values:
            return qs.filter(**filter_values)
        return qs.none()

    def _has_participant_filter(self, fields):
        return any(
            field in self.request.query_params.keys() for field in fields)
# MIT License
# Copyright (c) 2019 MassChallenge, Inc.

from django.db.models import Value as V
from django.db.models.functions import Concat

from impact.v1.views.base_list_view import BaseListView
from impact.v1.helpers import (
    MentorProgramOfficeHourHelper,
)

ID_FIELDS = ['mentor_id', 'finalist_id']
NAME_FIELDS = ['mentor_name', 'finalist_name']

class MentorProgramOfficeHourListView(BaseListView):
    view_name = "office_hour"
    helper_class = MentorProgramOfficeHourHelper

    def filter(self, qs):
        if not self.request.query_params.keys():
            return qs
        if self._has_participant_filter(NAME_FIELDS):
            return self._filter_by_participant_name(qs)
        if self._has_participant_filter(ID_FIELDS):
            param_items = self.request.query_params.dict().items()
            return self._filter_by_participant_id(qs, param_items)

    def _filter_by_participant_name(self, qs):
        params = self.request.query_params
        mentor_name = params.get('mentor_name', None)
        finalist_name = params.get('finalist_name', None)
        if mentor_name:
            return self._generate_name_query(qs, 'mentor', mentor_name)
        if finalist_name:
            return self._generate_name_query(qs, 'finalist', finalist_name)
        return qs.none()

    def _generate_name_query(self, qs, user, name_value):
        first_name_field = '{}__first_name'.format(user)
        last_name_field = '{}__last_name'.format(user)
        result = qs.annotate(
            full_name=Concat(
                first_name_field, V(' '), last_name_field)).filter(
            full_name__icontains=name_value)
        return result

    def _filter_by_participant_id(self, qs, param_items):
        filter_values = {
            key: value for key, value in param_items
            if key in ID_FIELDS and value.isdigit()}
        if filter_values:
            return qs.filter(**filter_values)
        return qs.none()

    def _has_participant_filter(self, fields):
        return any(
            key in self.request.query_params.keys() for key in fields)
Python
0
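The "call parent filter" fix above restores cooperative filtering: a subclass narrows the queryset the parent has already filtered instead of starting from the raw one. The shape in plain Python, with lists of dicts standing in for Django querysets:

class BaseListView:
    def filter(self, qs):
        return [x for x in qs if x is not None]  # base-level filtering

class OfficeHourListView(BaseListView):
    def filter(self, qs):
        qs = super().filter(qs)                    # keep the parent's work
        return [x for x in qs if x.get('mentor')]  # then apply our own criteria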
30e2ab7568dc00b9a8617c87269310691c19ed95
variable-length fields are initialized with a width of None
serial/core/_util.py
serial/core/_util.py
""" Private utility functions. """ from collections import namedtuple Field = namedtuple("Field", ("name", "pos", "dtype", "width")) def field_type(name, pos, dtype): """ Create a Field tuple. """ try: pos = slice(*pos) except TypeError: # pos is an int width = 1 else: try: width = pos.stop - pos.start except TypeError: # stop is None # Variable-width field; width is determined during encode/decode. width = None return Field(name, pos, dtype, width) def strftime(time, timefmt): """ Return a datetime-like object as a formatted string. This is a replacement for datetime.strftime that handles years before 1900. Only the most basic fields are supported, and it is not locale-aware. """ datetime = [] pos = 0 while pos < len(timefmt): char = timefmt[pos] if char == strftime._esc: pos += 1 try: fmt, get = strftime._fields[timefmt[pos]] except KeyError: raise ValueError("unknown strftime field: {0:s}".format(s)) except IndexError: raise ValueError("timefmt cannot end with escape character") char = format(get(time), fmt) datetime.append(char) pos += 1 return "".join(datetime) # Iniitialize these values once instead of with every function call. strftime._esc = "%" strftime._fields = { strftime._esc: ("s", lambda time: strftime._esc), "d": ("02d", lambda time: time.day), "f": ("06d", lambda time: time.microsecond), "H": ("02d", lambda time: time.hour), "I": ("02d", lambda time: time.hour%12), "M": ("02d", lambda time: time.minute), "m": ("02d", lambda time: time.month), "p": ("s", lambda time: "AM" if t.hour < 12 else "PM"), # no locale "S": ("02d", lambda time: time.second), "Y": ("04d", lambda time: time.year), "y": ("02d", lambda time: time.year%100)}
""" Private utility functions. """ from collections import namedtuple Field = namedtuple("Field", ("name", "pos", "dtype", "width")) def field_type(name, pos, dtype): """ Create a Field tuple. """ try: pos = slice(*pos) width = pos.stop - pos.start except TypeError: # pos is an int width = 1 return Field(name, pos, dtype, width) def strftime(time, timefmt): """ Return a datetime-like object as a formatted string. This is a replacement for datetime.strftime that handles years before 1900. Only the most basic fields are supported, and it is not locale-aware. """ datetime = [] pos = 0 while pos < len(timefmt): char = timefmt[pos] if char == strftime._esc: pos += 1 try: fmt, get = strftime._fields[timefmt[pos]] except KeyError: raise ValueError("unknown strftime field: {0:s}".format(s)) except IndexError: raise ValueError("timefmt cannot end with escape character") char = format(get(time), fmt) datetime.append(char) pos += 1 return "".join(datetime) # Iniitialize these values once instead of with every function call. strftime._esc = "%" strftime._fields = { strftime._esc: ("s", lambda time: strftime._esc), "d": ("02d", lambda time: time.day), "f": ("06d", lambda time: time.microsecond), "H": ("02d", lambda time: time.hour), "I": ("02d", lambda time: time.hour%12), "M": ("02d", lambda time: time.minute), "m": ("02d", lambda time: time.month), "p": ("s", lambda time: "AM" if t.hour < 12 else "PM"), # no locale "S": ("02d", lambda time: time.second), "Y": ("04d", lambda time: time.year), "y": ("02d", lambda time: time.year%100)}
Python
0.998252
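The case the commit above adds, isolated: a position like (2, None) becomes a slice whose stop is None, so `pos.stop - pos.start` raises TypeError and the width is recorded as None, signalling "variable width, resolve at encode/decode time". A self-checking sketch:

def field_width(pos):
    try:
        pos = slice(*pos)
    except TypeError:   # a bare int position is not unpackable
        return 1
    try:
        return pos.stop - pos.start
    except TypeError:   # stop is None: open-ended, variable-width
        return None

assert field_width(3) == 1
assert field_width((0, 4)) == 4
assert field_width((2, None)) is None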
abb00ac993154071776488b5dcaef32cc2982f4c
Fix broken functional tests on windows
test/functional/master/test_endpoints.py
test/functional/master/test_endpoints.py
import os
import tempfile

import yaml

from test.framework.functional.base_functional_test_case import BaseFunctionalTestCase
from test.functional.job_configs import BASIC_JOB

class TestMasterEndpoints(BaseFunctionalTestCase):

    def setUp(self):
        super().setUp()
        self._project_dir = tempfile.TemporaryDirectory()

    def _start_master_only_and_post_a_new_job(self):
        master = self.cluster.start_master()
        build_resp = master.post_new_build({
            'type': 'directory',
            'config': yaml.safe_load(BASIC_JOB.config[os.name])['BasicJob'],
            'project_directory': self._project_dir.name,
        })
        build_id = build_resp['build_id']
        return master, build_id

    def test_cancel_build(self):
        master, build_id = self._start_master_only_and_post_a_new_job()

        master.cancel_build(build_id)
        master.block_until_build_finished(build_id)

        self.assert_build_has_canceled_status(build_id=build_id)

    def test_get_artifact_before_it_is_ready(self):
        master, build_id = self._start_master_only_and_post_a_new_job()

        # Since we didn't start any slaves so the artifacts is actually not ready.
        _, status_code = master.get_build_artifacts(build_id)
        self.assertEqual(status_code, 202)

        # Cancel the started build just to speed up teardown (avoid teardown timeout waiting for empty queue)
        master.cancel_build(build_id)
import os

import yaml

from test.framework.functional.base_functional_test_case import BaseFunctionalTestCase
from test.functional.job_configs import BASIC_JOB

class TestMasterEndpoints(BaseFunctionalTestCase):

    def _start_master_only_and_post_a_new_job(self):
        master = self.cluster.start_master()
        build_resp = master.post_new_build({
            'type': 'directory',
            'config': yaml.safe_load(BASIC_JOB.config[os.name])['BasicJob'],
            'project_directory': '/tmp',
        })
        build_id = build_resp['build_id']
        return master, build_id

    def test_cancel_build(self):
        master, build_id = self._start_master_only_and_post_a_new_job()

        master.cancel_build(build_id)
        master.block_until_build_finished(build_id)

        self.assert_build_has_canceled_status(build_id=build_id)

    def test_get_artifact_before_it_is_ready(self):
        master, build_id = self._start_master_only_and_post_a_new_job()

        # Since we didn't start any slaves so the artifacts is actually not ready.
        _, status_code = master.get_build_artifacts(build_id)
        self.assertEqual(status_code, 202)

        # Cancel the started build just to speed up teardown (avoid teardown timeout waiting for empty queue)
        master.cancel_build(build_id)
Python
0.000002
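The Windows fix above swaps the hard-coded '/tmp' for tempfile.TemporaryDirectory, which resolves to a valid, writable path on every platform and cleans itself up. A quick illustration:

import tempfile

tmp = tempfile.TemporaryDirectory()
print(tmp.name)  # e.g. /tmp/tmpab12cd on Linux, C:\Users\...\Temp\tmpab12cd on Windows
tmp.cleanup()    # in a TestCase, addCleanup(tmp.cleanup) is a common alternative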
2a243c893ac8a4ddadd98f6fbb4ef5628a6d7607
Support single-ended slices on Tries
dispatch/util/trie.py
dispatch/util/trie.py
from ..constructs import Instruction

class Trie(object):
    BUCKET_LEN = 1
    BUCKET_MASK = (2**BUCKET_LEN)-1

    def __init__(self):
        self.children = [None for _ in range(2**Trie.BUCKET_LEN)]
        self.value = None

    def __setitem__(self, key, value):
        assert type(value) == Instruction
        node = self
        for bucket in [(key >> i) & Trie.BUCKET_MASK for \
                i in range(64, -1, -Trie.BUCKET_LEN)]:
            if not node.children[bucket]:
                node.children[bucket] = Trie()
            node = node.children[bucket]
        node.value = value

    def __getitem__(self, item):
        if type(item) in (int, long):
            node = self
            for bucket in [(item >> i) & Trie.BUCKET_MASK for \
                    i in range(64, -1, -Trie.BUCKET_LEN)]:
                if not node.children[bucket]:
                    raise KeyError()
                node = node.children[bucket]
            return node.value
        elif type(item) == slice:
            start = item.start
            stop = item.stop
            if start is None:
                start = 0
            if stop is None:
                # 128 bits max address. Seems big enough for practical purposes
                stop = 0xFFFFFFFFFFFFFFFF
            uncommon_bits = (stop ^ start).bit_length()

            node = self
            for bucket in [(start >> i) & Trie.BUCKET_MASK for \
                    i in range(64, uncommon_bits, -Trie.BUCKET_LEN)]:
                if not node.children[bucket]:
                    raise KeyError()
                node = node.children[bucket]

            return [v for v in iter(node) if start <= v.address < stop][::item.step]

    def __iter__(self):
        if self.value:
            yield self.value
        for child in filter(None, self.children):
            for v in child:
                yield v

    def __contains__(self, item):
        node = self
        for bucket in [(item >> i) & Trie.BUCKET_MASK for \
                i in range(64, -1, -Trie.BUCKET_LEN)]:
            if not node.children[bucket]:
                return False
            node = node.children[bucket]
        return True

    def __delitem__(self, key):
        node = self
        for bucket in [(key >> i) & Trie.BUCKET_MASK for \
                i in range(64, -1, -Trie.BUCKET_LEN)]:
            if not node.children[bucket]:
                raise KeyError()
            node = node.children[bucket]
        if node.value:
            del node.value
from ..constructs import Instruction

class Trie(object):
    BUCKET_LEN = 1
    BUCKET_MASK = (2**BUCKET_LEN)-1

    def __init__(self):
        self.children = [None for _ in range(2**Trie.BUCKET_LEN)]
        self.value = None

    def __setitem__(self, key, value):
        assert type(value) == Instruction
        node = self
        for bucket in [(key >> i) & Trie.BUCKET_MASK for \
                i in range(64, -1, -Trie.BUCKET_LEN)]:
            if not node.children[bucket]:
                node.children[bucket] = Trie()
            node = node.children[bucket]
        node.value = value

    def __getitem__(self, item):
        if type(item) in (int, long):
            node = self
            for bucket in [(item >> i) & Trie.BUCKET_MASK for \
                    i in range(64, -1, -Trie.BUCKET_LEN)]:
                if not node.children[bucket]:
                    raise KeyError()
                node = node.children[bucket]
            return node.value
        elif type(item) == slice:
            uncommon_bits = (item.stop ^ item.start).bit_length()

            node = self
            for bucket in [(item.start >> i) & Trie.BUCKET_MASK for \
                    i in range(64, uncommon_bits, -Trie.BUCKET_LEN)]:
                if not node.children[bucket]:
                    raise KeyError()
                node = node.children[bucket]

            return [v for v in iter(node) if item.start <= v.address < item.stop][::item.step]

    def __iter__(self):
        if self.value:
            yield self.value
        for child in filter(None, self.children):
            for v in child:
                yield v

    def __contains__(self, item):
        node = self
        for bucket in [(item >> i) & Trie.BUCKET_MASK for \
                i in range(64, -1, -Trie.BUCKET_LEN)]:
            if not node.children[bucket]:
                return False
            node = node.children[bucket]
        return True

    def __delitem__(self, key):
        node = self
        for bucket in [(key >> i) & Trie.BUCKET_MASK for \
                i in range(64, -1, -Trie.BUCKET_LEN)]:
            if not node.children[bucket]:
                raise KeyError()
            node = node.children[bucket]
        if node.value:
            del node.value
Python
0
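The single-ended-slice support above is just default normalization: a missing start becomes 0 and a missing stop becomes the top of the address space, after which the existing lookup logic runs unchanged. Isolated and self-checking:

def normalize(item, max_addr=0xFFFFFFFFFFFFFFFF):
    start = item.start if item.start is not None else 0
    stop = item.stop if item.stop is not None else max_addr
    return start, stop

assert normalize(slice(None, 100)) == (0, 100)              # trie[:100]
assert normalize(slice(0x400, None)) == (0x400, 0xFFFFFFFFFFFFFFFF)  # trie[0x400:]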
8d4c7c94dba6708758732d74228e1337bd9f0b83
raise version number
yam/__init__.py
yam/__init__.py
__version__ = '0.2.2-dev'

from yam.main import run
from yam.commands import read_dicts
__version__ = '0.2.1'

from yam.main import run
from yam.commands import read_dicts
Python
0.000006
ce6a23206271f4e9a0dfd54e7a2663789d5237de
update test
accelerator_abstract/tests/test_startup_progress.py
accelerator_abstract/tests/test_startup_progress.py
from django.test import TestCase

from accelerator.tests.factories import (
    BusinessPropositionFactory,
    StartupFactory
)
from accelerator.models import BusinessProposition
from accelerator_abstract.models.base_startup import (
    APPLICATION_READY,
    PROFILE_COMPLETE,
)
from accelerator_abstract.models import EXCLUDED_FIELDS

class TestStartupProgress(TestCase):
    def _business_proposition_data(self):
        fields = BusinessProposition._meta.get_fields(include_parents=False)
        characters = 'text input characters'
        data = {}
        for field in fields:
            if field.name not in EXCLUDED_FIELDS:
                data[field.name] = characters
        return data

    def test_application_ready_milestone_with_incomplete_data(self):
        startup = StartupFactory()
        BusinessPropositionFactory(startup=startup)
        progress = startup.profile_status()
        self.assertEqual(progress['milestone'], APPLICATION_READY)
        self.assertFalse(progress['bus-prop-complete'])
        self.assertFalse(progress['profile-complete'])
        self.assertGreater(1, progress['progress'])

    def test_business_prop_complete_startup_profile_incomplete(self):
        startup = StartupFactory()
        business_prop_data = self._business_proposition_data()
        BusinessPropositionFactory(startup=startup, **business_prop_data)
        progress = startup.profile_status()
        self.assertEqual(progress['milestone'], APPLICATION_READY)
        self.assertTrue(progress['bus-prop-complete'])
        self.assertFalse(progress['profile-complete'])
        self.assertGreater(1, progress['progress'])

    def test_profile_application_field_complete_business_prop_incomplete(self):
        startup = StartupFactory(video_elevator_pitch_url='https://video.com')
        BusinessPropositionFactory(startup=startup)
        progress = startup.profile_status()
        self.assertEqual(progress['milestone'], PROFILE_COMPLETE)
        self.assertFalse(progress['bus-prop-complete'])
        self.assertFalse(progress['profile-complete'])
        self.assertGreater(1, progress['progress'])

    def test_milestone_change_when_required_field_complete(self):
        business_proposition_data = self._business_proposition_data()
        startup = StartupFactory(video_elevator_pitch_url='https://video.com')
        BusinessPropositionFactory(startup=startup,
                                   **business_proposition_data)
        progress = startup.profile_status()
        self.assertEqual(progress['milestone'], PROFILE_COMPLETE)
        self.assertTrue(progress['bus-prop-complete'])
        self.assertFalse(progress['profile-complete'])
        self.assertGreater(1, progress['progress'])

    def test_business_prop_complete_startup_profile_complete(self):
        business_proposition_data = self._business_proposition_data()
        startup = StartupFactory(video_elevator_pitch_url='https://video.com',
                                 high_resolution_logo='logo.jpg')
        BusinessPropositionFactory(startup=startup,
                                   **business_proposition_data)
        progress = startup.profile_status()
        self.assertEqual(progress['milestone'], PROFILE_COMPLETE)
        self.assertTrue(progress['bus-prop-complete'])
        self.assertTrue(progress['profile-complete'])
        self.assertEqual(1, progress['progress'])
from django.test import TestCase

from accelerator.tests.factories import (
    BusinessPropositionFactory,
    StartupFactory
)
from accelerator.models import BusinessProposition
from accelerator_abstract.models.base_startup import (
    APPLICATION_READY,
    PROFILE_COMPLETE,
)
from accelerator_abstract.models import EXCLUDED_FIELDS

class TestStartupProgress(TestCase):
    def _business_proposition_data(self):
        fields = BusinessProposition._meta.get_fields(include_parents=False)
        characters = 'text input characters'
        data = {}
        for field in fields:
            if field.name not in EXCLUDED_FIELDS:
                data[field.name] = characters
        return data

    def test_application_ready_milestone_with_incomplete_data(self):
        startup = StartupFactory()
        BusinessPropositionFactory(startup=startup)
        progress = startup.profile_status()
        self.assertEqual(progress['milestone'], APPLICATION_READY)
        self.assertFalse(progress['bus-prop-complete'])
        self.assertFalse(progress['profile-complete'])
        self.assertGreater(1, progress['progress'])

    def test_business_prop_complete_startup_profile_incomplete(self):
        startup = StartupFactory()
        business_prop_data = self._business_proposition_data()
        BusinessPropositionFactory(startup=startup, **business_prop_data)
        progress = startup.profile_status()
        self.assertEqual(progress['milestone'], APPLICATION_READY)
        self.assertTrue(progress['bus-prop-complete'])
        self.assertFalse(progress['profile-complete'])
        self.assertGreater(1, progress['progress'])

    def test_startup_profile_complete_business_prop_incomplete_(self):
        startup = StartupFactory(video_elevator_pitch_url='https://video.com')
        BusinessPropositionFactory(startup=startup)
        progress = startup.profile_status()
        self.assertEqual(progress['milestone'], APPLICATION_READY)
        self.assertFalse(progress['bus-prop-complete'])
        self.assertTrue(progress['profile-complete'])
        self.assertGreater(1, progress['progress'])

    def test_milestone_change_when_required_field_complete(self):
        business_proposition_data = self._business_proposition_data()
        startup = StartupFactory(video_elevator_pitch_url='https://video.com')
        BusinessPropositionFactory(startup=startup,
                                   **business_proposition_data)
        progress = startup.profile_status()
        self.assertEqual(progress['milestone'], PROFILE_COMPLETE)
        self.assertTrue(progress['bus-prop-complete'])
        self.assertFalse(progress['profile-complete'])
        self.assertGreater(1, progress['progress'])

    def test_business_prop_complete_startup_profile_complete(self):
        business_proposition_data = self._business_proposition_data()
        startup = StartupFactory(video_elevator_pitch_url='https://video.com',
                                 high_resolution_logo='logo.jpg')
        BusinessPropositionFactory(startup=startup,
                                   **business_proposition_data)
        progress = startup.profile_status()
        self.assertEqual(progress['milestone'], PROFILE_COMPLETE)
        self.assertTrue(progress['bus-prop-complete'])
        self.assertTrue(progress['profile-complete'])
        self.assertEqual(1, progress['progress'])
Python
0
3ea84302368818133b045d56a0c8c798872eedd1
Add default logger and log exception
influxdb_metrics/middleware.py
influxdb_metrics/middleware.py
"""Middlewares for the influxdb_metrics app.""" from django import VERSION as DJANGO_VERSION import inspect import time import logging try: from urllib import parse except ImportError: import urlparse as parse from django.conf import settings try: from django.utils.deprecation import MiddlewareMixin except ImportError: class MiddlewareMixin(object): pass from tld import get_tld from tld.exceptions import TldBadUrl, TldDomainNotFound, TldIOError from .loader import write_points if DJANGO_VERSION < (1, 10): def is_user_authenticated(user): return user.is_authenticated() else: def is_user_authenticated(user): return user.is_authenticated logger = logging.getLogger(__name__) class InfluxDBRequestMiddleware(MiddlewareMixin): """ Measures request time and sends metric to InfluxDB. Credits go to: https://github.com/andymckay/django-statsd/blob/master/django_statsd/middleware.py#L24 # NOQA """ def process_view(self, request, view_func, view_args, view_kwargs): view = view_func if not inspect.isfunction(view_func): view = view.__class__ try: request._view_module = view.__module__ request._view_name = view.__name__ request._start_time = time.time() except AttributeError: # pragma: no cover pass def process_response(self, request, response): self._record_time(request) return response def process_exception(self, request, exception): self._record_time(request) def _record_time(self, request): if hasattr(request, '_start_time'): ms = int((time.time() - request._start_time) * 1000) if request.is_ajax(): is_ajax = True else: is_ajax = False is_authenticated = False is_staff = False is_superuser = False if is_user_authenticated(request.user): is_authenticated = True if request.user.is_staff: is_staff = True if request.user.is_superuser: is_superuser = True referer = request.META.get('HTTP_REFERER') referer_tld = None referer_tld_string = '' if referer: try: referer_tld = get_tld(referer, as_object=True) except (TldBadUrl, TldDomainNotFound, TldIOError): pass if referer_tld: referer_tld_string = referer_tld.tld url = request.get_full_path() url_query = parse.parse_qs(parse.urlparse(url).query) # This allows you to measure click rates for ad-campaigns, just # make sure that your ads have `?campaign=something` in the URL campaign_keyword = getattr( settings, 'INFLUXDB_METRICS_CAMPAIGN_KEYWORD', 'campaign') campaign = '' if campaign_keyword in url_query: campaign = url_query[campaign_keyword][0] data = [{ 'measurement': 'django_request', 'tags': { 'host': settings.INFLUXDB_TAGS_HOST, 'is_ajax': is_ajax, 'is_authenticated': is_authenticated, 'is_staff': is_staff, 'is_superuser': is_superuser, 'method': request.method, 'module': request._view_module, 'view': request._view_name, 'referer': referer, 'referer_tld': referer_tld_string, 'full_path': url, 'path': request.path, 'campaign': campaign, }, 'fields': {'value': ms, }, }] try: write_points(data) except Exception as err: logger.exception(err, extra={"request": request}) # sadly, when using celery, there can be issues with the connection to the MQ. Better to drop the data # than fail the request.
"""Middlewares for the influxdb_metrics app.""" from django import VERSION as DJANGO_VERSION import inspect import time try: from urllib import parse except ImportError: import urlparse as parse from django.conf import settings try: from django.utils.deprecation import MiddlewareMixin except ImportError: class MiddlewareMixin(object): pass from tld import get_tld from tld.exceptions import TldBadUrl, TldDomainNotFound, TldIOError from .loader import write_points if DJANGO_VERSION < (1, 10): def is_user_authenticated(user): return user.is_authenticated() else: def is_user_authenticated(user): return user.is_authenticated class InfluxDBRequestMiddleware(MiddlewareMixin): """ Measures request time and sends metric to InfluxDB. Credits go to: https://github.com/andymckay/django-statsd/blob/master/django_statsd/middleware.py#L24 # NOQA """ def process_view(self, request, view_func, view_args, view_kwargs): view = view_func if not inspect.isfunction(view_func): view = view.__class__ try: request._view_module = view.__module__ request._view_name = view.__name__ request._start_time = time.time() except AttributeError: # pragma: no cover pass def process_response(self, request, response): self._record_time(request) return response def process_exception(self, request, exception): self._record_time(request) def _record_time(self, request): if hasattr(request, '_start_time'): ms = int((time.time() - request._start_time) * 1000) if request.is_ajax(): is_ajax = True else: is_ajax = False is_authenticated = False is_staff = False is_superuser = False if is_user_authenticated(request.user): is_authenticated = True if request.user.is_staff: is_staff = True if request.user.is_superuser: is_superuser = True referer = request.META.get('HTTP_REFERER') referer_tld = None referer_tld_string = '' if referer: try: referer_tld = get_tld(referer, as_object=True) except (TldBadUrl, TldDomainNotFound, TldIOError): pass if referer_tld: referer_tld_string = referer_tld.tld url = request.get_full_path() url_query = parse.parse_qs(parse.urlparse(url).query) # This allows you to measure click rates for ad-campaigns, just # make sure that your ads have `?campaign=something` in the URL campaign_keyword = getattr( settings, 'INFLUXDB_METRICS_CAMPAIGN_KEYWORD', 'campaign') campaign = '' if campaign_keyword in url_query: campaign = url_query[campaign_keyword][0] data = [{ 'measurement': 'django_request', 'tags': { 'host': settings.INFLUXDB_TAGS_HOST, 'is_ajax': is_ajax, 'is_authenticated': is_authenticated, 'is_staff': is_staff, 'is_superuser': is_superuser, 'method': request.method, 'module': request._view_module, 'view': request._view_name, 'referer': referer, 'referer_tld': referer_tld_string, 'full_path': url, 'path': request.path, 'campaign': campaign, }, 'fields': {'value': ms, }, }] try: write_points(data) except Exception as err: pass # sadly, when using celery, there can be issues with the connection to the MQ. Better to drop the data # than fail the request.
Python
0
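The commit above replaces a bare `pass` with a logged-but-swallowed exception: the request still succeeds, but the traceback is kept. logging's exception method records the active exception automatically, and `extra` attaches context for handlers. A minimal sketch, with a raised ConnectionError standing in for the failing write:

import logging

logger = logging.getLogger(__name__)

def record(data, request=None):
    try:
        raise ConnectionError("MQ unreachable")  # stand-in for write_points(data)
    except Exception as err:
        logger.exception(err, extra={"request": request})  # logged, not re-raised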
71d0f02e1274829a302cdd6f716f2fc0680cce49
Update fab.py
ydcommon/fab.py
ydcommon/fab.py
from fabric.api import local, sudo, run
from fabric.operations import prompt
from fabric.colors import red
from fabric.contrib.console import confirm

def get_branch_name(on_local=True):
    cmd = "git branch --no-color 2> /dev/null | sed -e '/^[^*]/d'"
    if on_local:
        name = local(cmd, capture=True).replace("* ", "")
    else:
        name = run(cmd)
    return name.replace("* ", "").strip()

def switch(stage):
    """
        Switch to given stage (dev/qa/production) + pull
    """
    stage = stage.lower()
    local("git pull")
    if stage in ['dev', 'devel', 'develop']:
        branch_name = 'develop'
    elif stage in ['qa', 'release']:
        branches = local('git branch -r', capture=True)
        possible_branches = []
        for b in branches.split("\n"):
            b_parts = b.split('/')
            if b_parts[1] == 'release':
                possible_branches.append(b_parts[2])
        if len(possible_branches) == 0:
            raise Exception('No release branches found. Please create a new release first.')
        possible_branches = sorted(possible_branches, reverse=True)
        branch_name = 'release/%s' % possible_branches[0]
    elif stage in ['production', 'master']:
        branch_name = 'master'
    else:
        raise NotImplemented
    local("git checkout %s" % branch_name)
    local("git pull")

def release_qa():
    """
        Release code to QA server
    """
    name = prompt(red('Sprint name?'), default='Sprint 1').lower().replace(' ', "_")
    date = prompt(red('Sprint start date (Y-m-d)?'), default='2013-01-20').replace('-', '')
    release_name = '%s_%s' % (date, name)
    local('git flow release start %s' % release_name)
    local('git flow release publish %s' % release_name)
    print red('PLEASE DEPLOY CODE: fab deploy:all')

def update_qa():
    """
        Merge code from develop to qa
    """
    switch('dev')
    switch('qa')
    local('git merge --no-edit develop')
    local('git push')
    print red('PLEASE DEPLOY CODE: fab deploy:all')

def check_branch(environment, user):
    if environment == 'qa':
        local_branch = get_branch_name()
        remote_branch = get_branch_name(False)
        if local_branch != remote_branch:
            change = confirm(red('Branch on server is different, do you want to checkout %s ?' % local_branch), default=True)
            if change:
                sudo('git checkout %s' % local_branch, user=user)
from fabric.api import local, sudo, run
from fabric.operations import prompt
from fabric.colors import red
from fabric.contrib.console import confirm


def get_branch_name(on_local=True):
    cmd = "git branch --no-color 2> /dev/null | sed -e '/^[^*]/d'"
    if on_local:
        name = local(cmd, capture=True).replace("* ", "")
    else:
        name = run(cmd)
    return name.replace("* ", "").strip()


def switch(stage):
    """
    Switch to given stage (dev/qa/production) + pull
    """
    stage = stage.lower()
    local("git pull")
    if stage in ['dev', 'devel', 'develop']:
        branch_name = 'develop'
    elif stage in ['qa', 'release']:
        branches = local('git branch -r', capture=True)
        possible_branches = []
        for b in branches.split("\n"):
            b_parts = b.split('/')
            if b_parts[1] == 'release':
                possible_branches.append(b_parts[2])
        if len(possible_branches) == 0:
            raise Exception('No release branches found. Please create a new release first.')
        possible_branches = sorted(possible_branches, reverse=True)
        branch_name = 'release/%s' % possible_branches[0]
    elif stage in ['production', 'master']:
        branch_name = 'master'
    else:
        raise NotImplementedError
    local("git checkout %s" % branch_name)
    local("git pull")


def release_qa():
    """
    Release code to QA server
    """
    name = prompt(red('Sprint name?'), default='Sprint 1').lower().replace(' ', "_")
    date = prompt(red('Sprint start date (Y-m-d)?'), default='2013-01-20').replace('-', '')
    release_name = '%s_%s' % (date, name)
    local('git flow release start %s' % release_name)
    local('git flow release publish %s' % release_name)
    print red('PLEASE DEPLOY CODE: fab deploy:all')


def update_qa():
    """
    Merge code from develop to qa
    """
    switch('dev')
    switch('qa')
    local('git merge --no-edit develop')
    local('git push')
    print red('PLEASE DEPLOY CODE: fab deploy:all')


def check_branch(environment, user):
    if environment == 'qa':
        local_branch = get_branch_name()
        remote_branch = get_branch_name(False)
        if local_branch != remote_branch:
            change = confirm(red('Branch on server is different, do you want to replace your local branch with server version?'), default=True)
            if change:
                sudo('git checkout %s' % local_branch, user=user)
Python
0
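The QA flow in this record depends on release names sorting chronologically: release_qa() prefixes each release with a Ymd date, so the plain reverse string sort in switch('qa') picks the newest one. A small self-contained sketch of that selection logic, with hypothetical branch names:

# Hypothetical `git branch -r` output; only the selection logic is taken
# from the fabfile above.
branches = [
    '  origin/develop',
    '  origin/release/20130120_sprint_1',
    '  origin/release/20130203_sprint_2',
]
possible = [b.split('/')[2] for b in branches if b.split('/')[1] == 'release']
print('release/%s' % sorted(possible, reverse=True)[0])
# -> release/20130203_sprint_2 (the date prefix makes string order match time order)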
95aa4c210c735bd9ac74a65cdbef418d99beb319
Bump to v0.2.0
sii/__init__.py
sii/__init__.py
# -*- coding: utf-8 -*-
__LIBRARY_VERSION__ = '0.2.0'
__SII_VERSION__ = '0.7'
# -*- coding: utf-8 -*-
__LIBRARY_VERSION__ = '0.1.0alpha'
__SII_VERSION__ = '0.7'
Python
0.000001
c273fa5ba0ae43cc5979f1076349edf737a67710
Add reserved words to custom data field validation
corehq/apps/custom_data_fields/models.py
corehq/apps/custom_data_fields/models.py
from dimagi.ext.couchdbkit import (Document, StringProperty, BooleanProperty,
                                   SchemaListProperty, StringListProperty)
from dimagi.ext.jsonobject import JsonObject
from django.core.exceptions import ValidationError
from django.utils.translation import ugettext as _

from .dbaccessors import *


CUSTOM_DATA_FIELD_PREFIX = "data-field"
# This list is used to grandfather in existing data, any new fields should use
# the system prefix defined below
SYSTEM_FIELDS = ("commtrack-supply-point",)
SYSTEM_PREFIX = "commcare"
RESERVED_WORDS = ('name', 'type', 'owner_id', 'external_id', 'hq_user_id')


def _validate_reserved_words(slug, words=SYSTEM_FIELDS):
    if slug in words:
        return _('You may not use "{}" as a field name').format(slug)
    for prefix in [SYSTEM_PREFIX, 'xml']:
        if slug and slug.startswith(prefix):
            return _('Field names may not begin with "{}"').format(prefix)


def is_system_key(slug):
    return bool(_validate_reserved_words(slug))


def validate_reserved_words(slug):
    error = _validate_reserved_words(slug, SYSTEM_FIELDS + RESERVED_WORDS)
    if error is not None:
        raise ValidationError(error)


class CustomDataField(JsonObject):
    slug = StringProperty()
    is_required = BooleanProperty()
    label = StringProperty()
    choices = StringListProperty()
    is_multiple_choice = BooleanProperty(default=False)


class CustomDataFieldsDefinition(Document):
    """
    Per-project user-defined fields such as custom user data.
    """
    field_type = StringProperty()
    base_doc = "CustomDataFieldsDefinition"
    domain = StringProperty()
    fields = SchemaListProperty(CustomDataField)

    def get_fields(self, required_only=False):
        def _is_match(field):
            if required_only and not field.is_required:
                return False
            return True
        return filter(_is_match, self.fields)

    @classmethod
    def get_or_create(cls, domain, field_type):
        # todo: this overrides get_or_create from DocumentBase but with a
        # completely different signature. This method should probably be renamed.
        existing = get_by_domain_and_type(domain, field_type)
        if existing:
            return existing
        else:
            new = cls(domain=domain, field_type=field_type)
            new.save()
            return new

    # TODO use this in the CustomDataEditor too?
    def get_validator(self, data_field_class):
        """
        Returns a validator to be used in bulk import
        """
        def validate_choices(field, value):
            if field.choices and value and unicode(value) not in field.choices:
                return _(
                    "'{value}' is not a valid choice for {slug}, the available "
                    "options are: {options}."
                ).format(
                    value=value,
                    slug=field.slug,
                    options=', '.join(field.choices),
                )

        def validate_required(field, value):
            if field.is_required and not value:
                return _(
                    "Cannot create or update a {entity} without "
                    "the required field: {field}."
                ).format(
                    entity=data_field_class.entity_string,
                    field=field.slug
                )

        def validate_custom_fields(custom_fields):
            errors = []
            for field in self.fields:
                value = custom_fields.get(field.slug, None)
                errors.append(validate_required(field, value))
                errors.append(validate_choices(field, value))
            return ' '.join(filter(None, errors))

        return validate_custom_fields

    def get_model_and_uncategorized(self, data_dict):
        """
        Splits data_dict into two dictionaries:
        one for data which matches the model and one for data that doesn't
        """
        if not data_dict:
            return {}, {}
        model_data = {}
        uncategorized_data = {}
        slugs = [field.slug for field in self.fields]
        for k, v in data_dict.items():
            if k in slugs:
                model_data[k] = v
            else:
                uncategorized_data[k] = v

        return model_data, uncategorized_data
from dimagi.ext.couchdbkit import (Document, StringProperty, BooleanProperty,
                                   SchemaListProperty, StringListProperty)
from dimagi.ext.jsonobject import JsonObject
from django.core.exceptions import ValidationError
from django.utils.translation import ugettext as _

from .dbaccessors import *


CUSTOM_DATA_FIELD_PREFIX = "data-field"
# This list is used to grandfather in existing data, any new fields should use
# the system prefix defined below
SYSTEM_FIELDS = ["commtrack-supply-point"]
SYSTEM_PREFIX = "commcare"


def _validate_reserved_words(slug):
    if slug in SYSTEM_FIELDS:
        return _('You may not use "{}" as a field name').format(slug)
    for prefix in [SYSTEM_PREFIX, 'xml']:
        if slug and slug.startswith(prefix):
            return _('Field names may not begin with "{}"').format(prefix)


def is_system_key(slug):
    return bool(_validate_reserved_words(slug))


def validate_reserved_words(slug):
    error = _validate_reserved_words(slug)
    if error is not None:
        raise ValidationError(error)


class CustomDataField(JsonObject):
    slug = StringProperty()
    is_required = BooleanProperty()
    label = StringProperty()
    choices = StringListProperty()
    is_multiple_choice = BooleanProperty(default=False)


class CustomDataFieldsDefinition(Document):
    """
    Per-project user-defined fields such as custom user data.
    """
    field_type = StringProperty()
    base_doc = "CustomDataFieldsDefinition"
    domain = StringProperty()
    fields = SchemaListProperty(CustomDataField)

    def get_fields(self, required_only=False):
        def _is_match(field):
            if required_only and not field.is_required:
                return False
            return True
        return filter(_is_match, self.fields)

    @classmethod
    def get_or_create(cls, domain, field_type):
        # todo: this overrides get_or_create from DocumentBase but with a
        # completely different signature. This method should probably be renamed.
        existing = get_by_domain_and_type(domain, field_type)
        if existing:
            return existing
        else:
            new = cls(domain=domain, field_type=field_type)
            new.save()
            return new

    # TODO use this in the CustomDataEditor too?
    def get_validator(self, data_field_class):
        """
        Returns a validator to be used in bulk import
        """
        def validate_choices(field, value):
            if field.choices and value and unicode(value) not in field.choices:
                return _(
                    "'{value}' is not a valid choice for {slug}, the available "
                    "options are: {options}."
                ).format(
                    value=value,
                    slug=field.slug,
                    options=', '.join(field.choices),
                )

        def validate_required(field, value):
            if field.is_required and not value:
                return _(
                    "Cannot create or update a {entity} without "
                    "the required field: {field}."
                ).format(
                    entity=data_field_class.entity_string,
                    field=field.slug
                )

        def validate_custom_fields(custom_fields):
            errors = []
            for field in self.fields:
                value = custom_fields.get(field.slug, None)
                errors.append(validate_required(field, value))
                errors.append(validate_choices(field, value))
            return ' '.join(filter(None, errors))

        return validate_custom_fields

    def get_model_and_uncategorized(self, data_dict):
        """
        Splits data_dict into two dictionaries:
        one for data which matches the model and one for data that doesn't
        """
        if not data_dict:
            return {}, {}
        model_data = {}
        uncategorized_data = {}
        slugs = [field.slug for field in self.fields]
        for k, v in data_dict.items():
            if k in slugs:
                model_data[k] = v
            else:
                uncategorized_data[k] = v

        return model_data, uncategorized_data
Python
0
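The point of this commit is the asymmetry it introduces: is_system_key() still checks only SYSTEM_FIELDS, so existing data keyed by reserved words stays recognizable as user data, while validate_reserved_words() now also rejects the new RESERVED_WORDS for new fields. A sketch of that behaviour, assuming a configured Django settings module so the imports in models.py resolve:

# Illustrative sketch only; requires a configured Django environment.
from django.core.exceptions import ValidationError
from corehq.apps.custom_data_fields.models import (
    is_system_key, validate_reserved_words)

assert is_system_key('commtrack-supply-point')  # grandfathered system field
assert not is_system_key('owner_id')            # reserved word, not a system key

try:
    validate_reserved_words('owner_id')         # now rejected for new fields
except ValidationError:
    print('rejected: "owner_id" is a reserved word')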
510e04dfd68eeca2e940487eeca9e7474e7f2383
Fix methodcheck.py for the new API documentation style (split into subsections)
linode/methodcheck.py
linode/methodcheck.py
#!/usr/bin/python
"""
A quick script to verify that api.py is in sync with Linode's
published list of methods.

Copyright (c) 2010 Josh Wright <jshwright@gmail.com>
Copyright (c) 2009 Ryan Tucker <rtucker@gmail.com>

Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the "Software"),
to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense,
and/or sell copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included
in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""

# The list of subsections found in the API documentation. This should
# probably be discovered automatically in the future
api_subsections = ('linode', 'nodebalancer', 'stackscript', 'dns', 'utility')

import api
import re
import itertools
from HTMLParser import HTMLParser
from urllib import unquote
from urllib2 import urlopen


class SubsectionParser(HTMLParser):
    base_url = 'http://www.linode.com/api/'

    def __init__(self, subsection):
        HTMLParser.__init__(self)
        self.subsection_re = re.compile('/api/%s/(.*)$' % subsection)
        self.methods = []
        url = self.base_url + subsection
        req = urlopen(url)
        self.feed(req.read())

    def handle_starttag(self, tag, attrs):
        if tag == 'a' and attrs:
            attr_dict = dict(attrs)
            match = self.subsection_re.match(attr_dict.get('href', ''))
            if match:
                self.methods.append(unquote(match.group(1)).replace('.', '_'))


local_methods = api.Api.valid_commands()
remote_methods = list(itertools.chain(
    *[SubsectionParser(subsection).methods for subsection in api_subsections]))

# Cross-check!
for i in local_methods:
    if i not in remote_methods:
        print('REMOTE Missing: ' + i)
for i in remote_methods:
    if i not in local_methods:
        print('LOCAL Missing: ' + i)
#!/usr/bin/python
"""
A quick script to verify that api.py is in sync with Linode's
published list of methods.

Copyright (c) 2010 Josh Wright <jshwright@gmail.com>
Copyright (c) 2009 Ryan Tucker <rtucker@gmail.com>

Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the "Software"),
to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense,
and/or sell copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included
in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""

# URL of API documentation
apidocurl = 'http://www.linode.com/api/autodoc.cfm'

import api
import re
import urllib

tmpfile, httpheaders = urllib.urlretrieve(apidocurl)
tmpfd = open(tmpfile)

local_methods = api.Api.valid_commands()
remote_methods = []

# Read in the list of methods Linode has
rg = re.compile('.*?\\?method=((?:[a-z][a-z\\.\\d\\-]+)\\.(?:[a-z][a-z\\-]+))(?![\\w\\.])')
for i in tmpfd.readlines():
    m = rg.search(i)
    if m:
        remote_methods.append(m.group(1).replace('.', '_'))

# Cross-check!
for i in local_methods:
    if i not in remote_methods:
        print('REMOTE Missing: ' + i)
for i in remote_methods:
    if i not in local_methods:
        print('LOCAL Missing: ' + i)
Python
0
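The rewrite in this record replaces regex-scraping of a single autodoc page with parsing the anchor tags on each subsection page. The core href-to-method-name mapping can be checked in isolation; the example href below is hypothetical, but the transformation is exactly the one SubsectionParser performs:

# Python 2, matching the script above; the href is a made-up example.
import re
from urllib import unquote

subsection_re = re.compile('/api/%s/(.*)$' % 'linode')
match = subsection_re.match('/api/linode/linode.disk.create')
print(unquote(match.group(1)).replace('.', '_'))  # -> linode_disk_create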