Dataset columns (name, type, observed range):

    commit          stringlengths    40 to 40
    subject         stringlengths    1 to 3.25k
    old_file        stringlengths    4 to 311
    new_file        stringlengths    4 to 311
    old_contents    stringlengths    0 to 26.3k
    lang            stringclasses    3 values
    proba           float64          0 to 1
    diff            stringlengths    0 to 7.82k
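For orientation, a minimal sketch of iterating over records with this schema, assuming the rows are serialized as JSON lines; the file name rows.jsonl and the storage format are assumptions, not something the dump specifies:

import json

# rows.jsonl is a hypothetical file holding one JSON object per line with the
# eight fields listed above.
with open('rows.jsonl', 'r', encoding='utf-8') as handle:
    for line in handle:
        row = json.loads(line)
        # commit: 40-char hash; subject: commit message; old_file/new_file: paths;
        # old_contents: pre-change file text; lang: language label;
        # proba: classifier score in [0, 1]; diff: encoded patch hunks.
        print(row['commit'][:8], row['lang'], row['new_file'])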
0df292fbb34a66ee66fce919ea63b68a5f9eff1a
Set up data structures for parsing projects
app/data.py
app/data.py
import json
import os
from typing import Dict, List

from app.util import cached_function


class Projects():
    def __init__(self) -> None:
        self.data: Dict[str, Dict[str, Dict[str, str]]] = {}

    @staticmethod
    def load() -> 'Projects':
        current_directory = os.path.dirname(os.path.realpath(__file__))
        path = os.path.join(current_directory, 'data', 'projects.json')
        with open(path, 'r') as handle:
            project_data = handle.read()
        projects = Projects()
        projects.data = json.loads(project_data)
        return projects


@cached_function
def get_projects() -> Projects:
    loaded_projects = Projects.load()
    return loaded_projects


class Shelf():
    def __init__(self) -> None:
        self.data: Dict[str, List[Dict[str, str]]]

    @staticmethod
    def load() -> 'Shelf':
        current_directory = os.path.dirname(os.path.realpath(__file__))
        path = os.path.join(current_directory, 'data', 'shelf.json')
        with open(path, 'r') as handle:
            shelf_data = handle.read()
        shelf = Shelf()
        shelf.data = json.loads(shelf_data)
        return shelf


@cached_function
def get_shelf() -> Shelf:
    loaded_shelf = Shelf.load()
    return loaded_shelf
Python
0.000003
@@ -152,55 +152,38 @@ elf. -data: Dict%5Bstr, Dict%5Bstr, Dict%5Bstr, str%5D%5D +languages: List%5BLanguage %5D = -%7B%7D +%5B%5D %0A%0A @@ -206,24 +206,34 @@ def load +_from_file () -%3E 'Proje @@ -434,24 +434,16 @@ -project_ data = h @@ -445,32 +445,371 @@ a = handle.read( +)%0A parsed_data = json.loads(data)%0A return Projects.load(parsed_data)%0A%0A @staticmethod%0A def load(data: Dict%5Bstr, Dict%5Bstr, Dict%5Bstr, str%5D%5D%5D) -%3E 'Projects':%0A projects = Projects()%0A for key, value in data.items():%0A language = Language.load(key, value)%0A projects.languages.append(language )%0A projec @@ -798,32 +798,39 @@ ge)%0A +return projects = Projects( @@ -821,93 +821,1464 @@ ects - = Projects()%0A projects.data = json.loads(project_data)%0A return project +%0A%0A%0Aclass Language():%0A def __init__(self) -%3E None:%0A self.name: str = ''%0A self.projects: List%5BProject%5D = %5B%5D%0A%0A @staticmethod%0A def load(key: str, data: Dict%5Bstr, Dict%5Bstr, str%5D%5D) -%3E 'Language':%0A language = Language()%0A language.name = key%0A for key, value in data.items():%0A project = Project.load(key, value)%0A language.projects.append(project)%0A return language%0A%0A%0Aclass Project():%0A def __init__(self) -%3E None:%0A self.name: str = ''%0A self.description: str = ''%0A self.github: str = ''%0A self.rubygems: str = ''%0A self.pypi: str = ''%0A self.npm: str = ''%0A self.web: str = ''%0A%0A @staticmethod%0A def load(key: str, data: Dict%5Bstr, str%5D) -%3E 'Project':%0A project = Project()%0A project.name = key%0A project.description = data.get('description', '')%0A project.github = data.get('github', '')%0A project.rubygems = data.get('rubygems', '')%0A project.pypi = data.get('pypi', '')%0A project.npm = data.get('npm', '')%0A project.web = data.get('web', '')%0A return project%0A%0A def links(self) -%3E Dict%5Bstr, str%5D:%0A links: Dict%5Bstr, str%5D = %7B%0A 'github': self.github,%0A 'rubygems': self.rubygems,%0A 'pypi': self.pypi,%0A 'npm': self.npm,%0A 'web': self.web,%0A %7D%0A links = dict(%5B(k, v) for k, v in links.items() if v%5D)%0A return link s%0A%0A%0A @@ -2357,24 +2357,34 @@ rojects.load +_from_file ()%0A retur
75a93ae0e55e240a5f8595c0d58d15b1d846948a
Add support for spectate after starting the game
chillin_server/gui/protocol.py
chillin_server/gui/protocol.py
# -*- coding: utf-8 -*- # python imports from threading import Thread, Lock, Event import sys if sys.version_info > (3,): from queue import Queue else: from Queue import Queue # project imports from ..config import Config from .network import Network from .parser import Parser from .messages import Auth class Protocol: def __init__(self, authenticate_func, game_info): self._auth_func = authenticate_func self._game_info = game_info self._network = Network() self._parser = Parser() self._clients = set() self._lock = Lock() self._running = Event() self.send_queue = Queue() def _add_client(self, sock): self._send_msg(sock, self._game_info) self._lock.acquire() self._clients.add(sock) self._lock.release() def _remove_clients(self, socks): for sock in socks: self._network.close(sock) self._lock.acquire() self._clients.difference_update(socks) self._lock.release() def _accept(self): def auth(sock): token = self._network.recv_data(sock) if token and self._auth_func(token): self._send_msg(sock, Auth(authenticated=True)) self._add_client(sock) else: self._send_msg(sock, Auth(authenticated=False)) self._network.close(sock) while self._running.is_set(): sock = self._network.accept() if sock and self._running.is_set(): if Config.config['general']['offline_mode']: self._add_client(sock) else: Thread(target=auth, args=(sock,)).start() def _send_msg(self, sock, msg): data = self._parser.encode(msg) self._network.send_data(sock, data) def _broadcast_msg(self, msg): data = self._parser.encode(msg) disconnected_clients = [] for sock in self._clients: if not self._network.send_data(sock, data): disconnected_clients.append(sock) self._remove_clients(disconnected_clients) def _send_thread(self): while self._running.is_set(): msg = self.send_queue.get() if msg: self._broadcast_msg(msg) def start(self): self._network.start() self._running.set() t = Thread(target=self._accept) t.setDaemon(True) t.start() t = Thread(target=self._send_thread) t.setDaemon(True) t.start() def stop(self): for sock in self._clients: self._network.close(sock) self._running.clear() self.send_queue.put(None) self._network.stop()
Python
0
@@ -462,16 +462,53 @@ me_info%0A + self._all_messages_data = %5B%5D%0A @@ -770,16 +770,295 @@ e_info)%0A + i = 0%0A while i %3C len(self._all_messages_data):%0A if not self._send_data(sock, self._all_messages_data%5Bi%5D):%0A self._network.close(sock)%0A return%0A i += 1%0A%0A # TODO: some messages may not get delivered here ...%0A%0A @@ -1364,47 +1364,355 @@ ef _ -accept(self):%0A%0A def auth(sock):%0A +can_join(self, sock):%0A if len(self._clients) %3E= Config.config%5B'gui'%5D.get('max_spectators', 5):%0A return False%0A return True%0A%0A%0A def _accept(self):%0A%0A def init(sock):%0A authenticated = False%0A if Config.config%5B'general'%5D%5B'offline_mode'%5D:%0A authenticated = True%0A else:%0A @@ -1769,16 +1769,20 @@ + if token @@ -1810,16 +1810,61 @@ token):%0A + authenticated = True%0A @@ -1938,49 +1938,18 @@ -s el -f._add_client(sock)%0A el se:%0A + @@ -2016,32 +2016,36 @@ + self._network.cl @@ -2047,32 +2047,103 @@ rk.close(sock)%0A%0A + if authenticated:%0A self._add_client(sock)%0A%0A%0A while se @@ -2225,143 +2225,202 @@ if -sock and self._running.is_set():%0A if Config.config%5B'general'%5D%5B'offline_mode'%5D:%0A self._add_client +not self._can_join(sock):%0A self._network.close(sock)%0A continue%0A%0A if sock and self._running.is_set():%0A t = Thread(target=init, args= (sock +,) )%0A @@ -2437,74 +2437,141 @@ -else:%0A Thread(target=auth, args=(sock,)).start( +t.setDaemon(True)%0A t.start()%0A%0A%0A def _send_data(self, sock, data):%0A return self._network.send_data(sock, data )%0A%0A%0A @@ -2654,30 +2654,29 @@ -self._network. +return self._ send_dat @@ -2758,32 +2758,77 @@ ser.encode(msg)%0A + self._all_messages_data.append(data)%0A disconne @@ -2901,32 +2901,24 @@ f not self._ -network. send_data(so
880ed36c5014b1d59d51d128cac4b429db82f108
version 0.1.8
pyroomacoustics/version.py
pyroomacoustics/version.py
__version__ = '0.1.7'
Python
0.000001
@@ -16,7 +16,7 @@ 0.1. -7 +8 '%0A
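Decoded (the %0A in the hunk is a newline), this diff simply bumps the version string; pyroomacoustics/version.py after the change reads:

__version__ = '0.1.8'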
0f598361651d19401d261cc6294bdfc7d6d240bb
Update (in place) when constraint annotations change.
src/puzzle/puzzlepedia/problem_widget.py
src/puzzle/puzzlepedia/problem_widget.py
from typing import ContextManager from ipywidgets import widgets from puzzle.problems import problem from puzzle.puzzlepedia import _bind, _common, annotation_widget, \ debug_data_widget, meta_problem, table_widget from puzzle.puzzlepedia._bind import widget_observable from puzzle.steps import step _MAX_RESULTS = 30 def ProblemWidget(mp: meta_problem.MetaProblem): """Factory for IPython widgets, pretending to be real widget.""" capture = widgets.Output() items = [] options = {} for p in mp: # 'p' is instance of problem.Problem. options[p.kind] = p # Dropdown. dropdown = widgets.Dropdown(options=options) items.append(dropdown) dropdown_source = widget_observable(dropdown) # Interactive information appears between dropdown + solution and the # table of solutions. interactive_information = widgets.VBox([]) # Best solution. best_solution = widgets.Text() items.append(best_solution) def _on_problem_kind_change(p: problem.Problem) -> None: _update_solutions_for_problem(solutions_table, best_solution, p) _update_interactive_information_for_problem( interactive_information, p, capture) dropdown_source.subscribe(_on_problem_kind_change) best_solution_source = widget_observable(best_solution) def _on_best_solution_change(solution: str) -> None: mp.solution = solution best_solution_source.subscribe(_on_best_solution_change) solutions_table = table_widget.TableWidget() if mp.peek(): _update_solutions_for_problem( solutions_table, best_solution, mp.peek()) _update_interactive_information_for_problem( interactive_information, mp.peek(), capture) for p in mp: p.subscribe(_bind.callback_without_event( _update_solutions_for_problem, solutions_table, best_solution, p)) return widgets.VBox( [widgets.HBox(items), interactive_information, solutions_table, capture]) def _update_solutions_for_problem( table: table_widget.TableWidget, best_solution: widgets.Text, p: problem.Problem) -> None: solutions = p.solutions() if solutions.peek(): best_solution.value = solutions.peek() headers = ['score', 'solution', 'notes'] data = [] for i, (solution, score) in enumerate(solutions.items()): if i >= _MAX_RESULTS: break data.append([ round(score, 3), _common.preformat_html(solution), '<br />'.join(p.notes_for(solution)) ]) table.update_data(data, headers=headers) def _update_interactive_information_for_problem( interactive_information: widgets.VBox, p: problem.Problem, capture: ContextManager): accordion_children = [] steps = list(p.steps()) for s in steps: step_tabs_children = [] for group in s.constraints(): child_constraints = [] for key, value, annotation in group: child_constraints.append( annotation_widget.AnnotationWidget( annotation, group, key, value, capture)) step_tabs_children.append(widgets.VBox(child_constraints)) step_tabs = widgets.Tab(step_tabs_children) for i, group in enumerate(s.constraints()): step_tabs.set_title(i, _common.format_label(group.__class__.__name__)) debug_data_container = widgets.VBox([]) debug_data_accordion = widgets.Accordion([debug_data_container]) debug_data_accordion.set_title(0, 'debug data') debug_data_accordion.selected_index = None _update_debug_data_for_problem(debug_data_container, s) p.subscribe(_bind.callback_without_event( _update_debug_data_for_problem, debug_data_container, s)) s.subscribe(_bind.callback_without_event( _update_debug_data_for_problem, debug_data_container, s)) step_tabs = widgets.VBox([step_tabs, debug_data_accordion]) accordion_children.append(step_tabs) accordion = widgets.Accordion(children=accordion_children) for i, s in enumerate(steps): 
accordion.set_title(i, _common.format_label(str(s))) interactive_information.children = (accordion,) def _update_debug_data_for_problem( debug_data_container: widgets.VBox, s: step.Step ): # TODO: Diff. debug_widget = debug_data_widget.DebugDataWidget(s) debug_data_container.children = (debug_widget,)
Python
0
@@ -60,16 +60,59 @@ idgets%0A%0A +from puzzle.constraints import constraints%0A from puz @@ -185,16 +185,34 @@ _common, + _widget_util, %5C%0A annotat @@ -2841,133 +2841,179 @@ -for key, value, annotation in group:%0A child_constraints.append(%0A annotation_widget.AnnotationWidget(%0A +group_container = widgets.VBox(child_constraints)%0A _update_annotations_for_group(group_container, group, capture)%0A group.subscribe(_bind.callback_without_event(%0A @@ -3018,16 +3018,24 @@ +_update_ annotati @@ -3040,27 +3040,43 @@ tion -, +s_for_ group, -key, value +group_container, group , ca @@ -3119,39 +3119,23 @@ end( -widgets.VBox(child_constraints) +group_container )%0A @@ -4104,24 +4104,406 @@ cordion,)%0A%0A%0A +def _update_annotations_for_group(%0A annotations_container: widgets.VBox,%0A group: constraints.Constraints,%0A capture: ContextManager) -%3E None:%0A children = %5B%5D%0A for key, value, annotation in group:%0A children.append(annotation_widget.AnnotationWidget(%0A annotation, group, key, value, capture))%0A _widget_util.merge_assign_children(annotations_container, children)%0A%0A%0A def _update_
912b1e33eff873a07ca089c69fef51bf05e79051
Add User and Group to admin custom site
ideas/admin.py
ideas/admin.py
from .models import Idea, Outstanding
from django.contrib import admin
from django.contrib.admin import AdminSite


class MyAdminSite(AdminSite):
    site_header = "Hackatrix Backend"
    site_title = "Hackatrix Backend"
    index_title = "Administrator"


class IdeaAdmin(admin.ModelAdmin):
    list_display = ('name', 'votes', 'description', 'register', 'is_active')

    def save_model(self, request, obj, form, change):
        if getattr(obj, 'register', None) is None:
            obj.register = request.user
        obj.save()


class OutstandingAdmin(admin.ModelAdmin):
    list_display = ('name', 'email', 'comment', 'register')

    def save_model(self, request, obj, form, change):
        if getattr(obj, 'register', None) is None:
            obj.register = request.user
        obj.save()


admin_site = MyAdminSite(name='myadmin')
admin_site.register(Idea, IdeaAdmin)
admin_site.register(Outstanding, OutstandingAdmin)
Python
0
@@ -107,16 +107,67 @@ minSite%0A +from django.contrib.auth.models import User, Group%0A %0A%0Aclass @@ -890,16 +890,69 @@ admin')%0A +admin_site.register(User)%0Aadmin_site.register(Group)%0A admin_si
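Decoded, the two hunks add an auth import and register Django's built-in User and Group models on the custom site. The affected lines after the change would look like this (the unchanged admin classes are elided):

from django.contrib.auth.models import User, Group  # added import

# ... MyAdminSite, IdeaAdmin and OutstandingAdmin unchanged ...

admin_site = MyAdminSite(name='myadmin')
admin_site.register(User)   # added
admin_site.register(Group)  # added
admin_site.register(Idea, IdeaAdmin)
admin_site.register(Outstanding, OutstandingAdmin)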
c0fc14f3f9f33e20650113803f8a0a81dd49f3ec
generate result.json
example_config.py
example_config.py
import os
import logging

from apscheduler.triggers.cron import CronTrigger

if __name__ == "__main__":
    raise SystemExit("Not meant to be run directly!")


def _rsync_cmd(dest):
    cmd = ("rsync --delete-delay --recursive --times --stats "
           "'{output}/' '{dest}'")
    return cmd.format(dest=dest, output="{output}")


# configure the logger
logging.basicConfig(level=logging.DEBUG, format='%(asctime)s %(message)s')

# make sure git does not block giving pw prompts, git 2.3+ only
os.environ["GIT_TERMINAL_PROMPT"] = "0"

# needs to be a byte like object
GITHUB_SECRET = b"changetosomethingrandomlong"

RUNNERS = {
    # unique name of the runner, avoid spaces and other obscure characters
    "website_master": {
        # directory where building takes place, will be created if not there
        # multiple runners may point to the same one
        "working_directory": "/tmp/test",

        # upstream url of the repository which contains the website
        # use https://git::@github.com... to avoid pw prompts and instead fail
        # (e.g. if github gives errornously 401 temporarily, git would block)
        # os.environ["GIT_TERMINAL_PROMPT"] = "0" does the same but git 2.3+only
        "clone_url": "https://git::@github.com/IEEE-SB-Passau/pelican-ieee-passau.git",

        # branch which will be built
        "git_branch": "master",

        # command which installs the generated directory tree to it's final
        # destination (the wwwroot) e.g. rsync. {output} will be replaced by
        # the path to the generator output
        "final_install_command": _rsync_cmd("/tmp/testroot"),

        # command which builds the website
        # important: specify {output} as output path of the generator
        "build_command": 'tox -e pelican --recreate -- -d --output "{output}"',

        # will be added to env when running build_command
        "build_env": {"PELICAN_SITEURL": "//apu:800"}
    }
}

# define crojobs as sequence of (runner, trigger) pairs, for cron triggers see
# http://apscheduler.readthedocs.io/en/latest/modules/triggers/cron.html
SCHEDULED_BUILD_JOBS = [
    ("website_master", CronTrigger(minute="*/5"))
]
Python
0.998579
@@ -1756,16 +1756,17 @@ mmand%22: +( 'tox -e @@ -1773,16 +1773,82 @@ pelican +--result-json %22%7Boutput%7D/result.json%22 '%0A ' --recrea @@ -1876,16 +1876,17 @@ utput%7D%22' +) ,%0A%0A
917c0c58a0aaa62639d9b98d4aad88b3b8d55d2a
mark as skip
.github/workflows/flexci_dispatcher.py
.github/workflows/flexci_dispatcher.py
#!/usr/bin/env python3 # # FlexCI Dispatcher: Trigger FlexCI based on webhooks. # import argparse import hmac import json import os import re import sys from typing import Any, Dict, Optional, Set import urllib.request import github def _log(msg: str) -> None: sys.stderr.write(msg) sys.stderr.write('\n') sys.stderr.flush() def _forward_to_flexci( event_name: str, payload: Dict[str, Any], secret: str, projects: Set[str], base_url: str) -> bool: """ Submits the GitHub webhook payload to FlexCI. """ payload_enc = json.dumps(payload).encode('utf-8') project_list = ','.join(projects) url = f'{base_url}/x/github_webhook?project={project_list}&rule={event_name}:.%2B&quiet=true' # NOQA _log(f'Request URI: {url}') req = urllib.request.Request( url, data=payload_enc, headers={ 'User-Agent': 'FlexCI-Dispatcher', 'Content-Type': 'application/json', 'X-GitHub-Event': event_name, 'X-Hub-Signature': 'sha1={}'.format( hmac.new(secret.encode(), payload_enc, 'sha1').hexdigest()), 'X-Hub-Signature-256': 'sha256={}'.format( hmac.new(secret.encode(), payload_enc, 'sha256').hexdigest()), }, ) with urllib.request.urlopen(req) as res: response = json.loads(res.read()) if 'job_ids' in response: for job in response['job_ids']: _log(f'Triggered: {base_url}/r/job/{job["id"]}') return True elif 'message' in response: _log(f'Failed to submit webhook payload: {response["message"]}') return False raise RuntimeError(f'unexpected response: {response}') def _fill_commit_status( event_name: str, payload: Dict[str, Any], token: str, projects: Set[str], context_prefix: str, base_url: str) -> None: gh_repo = github.Github(token).get_repo(payload['repository']['full_name']) if event_name == 'push': sha = payload['after'] elif event_name == 'issue_comment': sha = gh_repo.get_pull(payload['issue']['number']).head.sha else: assert False _log(f'Retrieving commit {sha}') gh_commit = gh_repo.get_commit(sha) _log('Setting dashboard url to commit status') gh_commit.create_status( state='success', context=f'{context_prefix} (dashboard)', target_url=f'{base_url}/p/dashboard_by_commit_id?commit_id={sha}', ) if len(projects) == 0: _log('No projects to complement commit status') return _log(f'Checking statuses for commit {sha}') contexts = [s.context for s in gh_commit.get_statuses()] for prj in projects: context = f'{context_prefix}/{prj}' if context in contexts: # Preserve status set via previous (real) CI run. continue _log(f'Setting status as skipped: {context}') gh_commit.create_status( state='success', description='Skipped', context=context) def extract_requested_tags(comment: str) -> Optional[Set[str]]: """ Returns the set of test tags requested in the comment. 
""" for line in comment.splitlines(): match = re.fullmatch(r'/test ([\w,\- ]+)', line) if match is not None: return set([x.strip() for x in match.group(1).split(',')]) return None def parse_args(argv: Any) -> Any: parser = argparse.ArgumentParser() parser.add_argument( '--event', type=str, required=True, choices=['issue_comment', 'push'], help='The name of the event') parser.add_argument( '--webhook', type=str, required=True, help='Path to the JSON file containing the webhook payload') parser.add_argument( '--projects', type=str, required=True, help='Path to the JSON file containing map from FlexCI project to ' 'list of tags') parser.add_argument( '--flexci-uri', type=str, default='https://ci.preferred.jp', help='Base URI of the FlexCI server (default: %(default)s)') parser.add_argument( '--flexci-context', type=str, default='pfn-public-ci', help='Context prefix of the FlexCI server (default: %(default)s)') parser.add_argument( '--external-tag', action='append', default=[], help='Test tags to be ignored by FlexCI Dispatcher') return parser.parse_args(argv[1:]) def main(argv: Any) -> int: options = parse_args(argv) webhook_secret = str(os.environ['FLEXCI_WEBHOOK_SECRET']) github_token = str(os.environ['GITHUB_TOKEN']) event_name = options.event with open(options.webhook, 'rb') as f: payload = json.load(f) with open(options.projects) as f2: project_tags = json.load(f2) requested_tags = None if event_name == 'push': requested_tags = {'@push'} _log('Requesting tests with @push tag') elif event_name == 'issue_comment': action = payload['action'] if action != 'created': _log(f'Invalid issue_comment action: {action}') return 1 requested_tags = extract_requested_tags(payload['comment']['body']) if requested_tags is None: _log('No test requested in comment.') return 0 if len(requested_tags - set(options.external_tag)) == 0: _log('All tests requested are not for FlexCI') return 0 # Note: this is not for security but to show a friendly message. # FlexCI server also validates the membership of the user triggered. association = payload['comment']['author_association'] if association not in ('OWNER', 'MEMBER'): _log(f'Tests cannot be triggered by {association}') return 1 _log(f'Requesting tests with tags: {requested_tags}') else: _log(f'Invalid event name: {event_name}') return 1 projects_dispatch: Set[str] = set() projects_skip: Set[str] = set() for project, tags in project_tags.items(): dispatch = (len(set(tags) & requested_tags) != 0) if dispatch: projects_dispatch.add(project) else: projects_skip.add(project) _log(f'Project: {"✅" if dispatch else "🚫"} {project} (tags: {tags})') if len(projects_dispatch) == 0: if requested_tags == {'skip'}: _log('Skipping all projects as requested') else: _log('No projects matched with the requested tag') return 1 else: _log(f'Dispatching projects: {projects_dispatch}') success = _forward_to_flexci( event_name, payload, webhook_secret, projects_dispatch, options.flexci_uri) if not success: _log('Failed to dispatch') return 1 _fill_commit_status( event_name, payload, github_token, projects_skip, options.flexci_context, options.flexci_uri) return 0 if __name__ == '__main__': sys.exit(main(sys.argv))
Python
0.999995
@@ -5401,22 +5401,39 @@ re -turn 0 +quested_tags = %7B'skip'%7D %0A%0A
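Decoded, the hunk swaps an early `return 0` for `requested_tags = {'skip'}`. Read against the surrounding code, this most plausibly lands in the branch where every requested tag is external, so instead of exiting, the dispatcher falls through to the existing skip handling and marks the FlexCI projects' commit statuses as skipped. A reading of the hunk, not verified against the repository:

if len(requested_tags - set(options.external_tag)) == 0:
    _log('All tests requested are not for FlexCI')
    requested_tags = {'skip'}   # was: return 0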
5efe24880fd6b3cd796c309c91569af53ac1386b
version 0.0.24
datary/version.py
datary/version.py
#!/usr/bin/env python
__version__ = "0.0.23"
Python
0.000001
@@ -39,7 +39,7 @@ .0.2 -3 +4 %22%0A
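Decoded, the hunk bumps the version; datary/version.py after the change reads:

#!/usr/bin/env python
__version__ = "0.0.24"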
1db8627731a2e23693cd9fe38a455956b783c0cd
Update NoticiasTecnologicas.py
03-RSSTelegram/NoticiasTecnologicas.py
03-RSSTelegram/NoticiasTecnologicas.py
#!/usr/bin/env python3
# -*- coding: iso-8859-1 -*-
"""
	Ejemplo: Leer Noticias RSS en Telegram (III)
	Libreria: pyTelegramBotAPI 1.4.2 [ok]
	Libreria: pyTelegramBotAPI 2.0 [ok]
	Python: 3.5.1
"""
import telebot
import sys
import feedparser

url = "http://blog.bricogeek.com/noticias/arduino/rss/"
rss = feedparser.parse(url)

servicio="Servicio del Bot de Telegram"
inicio_servicio="Iniciando..."+servicio
print (inicio_servicio),

TOKEN = 'AQUÍ EL NUMERO DE VUESTRO TOKEN' #Ponemos nuestro TOKEN generado con el @BotFather
telegram = telebot.TeleBot(TOKEN) # Combinamos la declaración del Token con la función de la API

def listener(messages):
    for m in messages:
        chatID = m.chat.id
        if m.content_type == 'text':
            for noticia in rss.entries:
                evento=noticia.title+"\n"+noticia.link
                telegram.send_message(chatID, evento)

try:
    telegram.get_me() # Comprobar el API. Devuelve un objeto
    print ("-> OK")
    print ("Token: "+TOKEN)
    print ("- Presionar Ctrl+C para parar el servicio...")
    telegram.set_update_listener(listener)
except Exception as e:
    print ("-> ERROR")
    print (e)
    sys.exit(0)

telegram.polling(none_stop=False)

# Interval setup. Sleep 3 secs between request new message.
telegram.polling(interval=3)
telegram.polling()

try:
    while True:
        pass
except KeyboardInterrupt:
    print ("Programa Finalizado...")
    sys.exit(0)
Python
0
@@ -91,17 +91,16 @@ gram (II -I )%0A%09Libre
f68a10fec5d4dbc743c5d84f8b26d122e81b26e4
Use standard urlencode() for encoding URLs
derpibooru/request.py
derpibooru/request.py
# Copyright (c) 2014, Joshua Stone # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # * Redistributions of source code must retain the above copyright notice, this # list of conditions and the following disclaimer. # # * Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE # DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE # FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL # DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR # SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER # CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, # OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. from requests import get, codes from sys import version_info from .image import Image if version_info < (3, 0): from urllib import quote_plus else: from urllib.parse import quote_plus def join_tags(tags): q = quote_plus(",".join(tags)) return q def join_parameters(parameters): p = ["{}={}".format(k, v) for k, v in parameters.items()] return p def url(parameters): url, p = "https://derpiboo.ru/search.json?", {} for key, value in parameters.items(): if key == "key": if value: p["key"] = value elif key == "q": p["q"] = join_tags(value) if value else "*" else: p[key] = value url += "&".join(join_parameters(p)) return url def request(parameters): p = parameters p.update({ "page": 1, "perpage": 50}) request = get(url(p)) while request.status_code == codes.ok: for image in request.json()["search"]: yield Image(image) parameters["page"] += 1 request = get(url(p)) yield None
Python
0.000006
@@ -1465,26 +1465,25 @@ import -quote_plus +urlencode %0Aelse:%0A @@ -1512,261 +1512,45 @@ ort -quote_plus%0A%0A%0Adef join_tags(tags):%0A q = quote_plus(%22,%22.join(tags))%0A%0A return q%0A%0Adef join_parameters(parameters):%0A p = %5B%22%7B%7D=%7B%7D%22.format(k, v) for k, v in parameters.items()%5D%0A%0A return p%0A%0Adef url(parameters):%0A url, p = %22https://derpiboo.ru/search.json?%22, +urlencode%0A%0Adef url(parameters):%0A p = %7B%7D%0A @@ -1692,17 +1692,16 @@ %5D = +%22,%22. join -_tags (val @@ -1766,35 +1766,63 @@ url -+ = %22 -&%22.join(join_parameters +https://derpiboo.ru/search.json?%7B%7D%22.format(urlencode (p))
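Decoded, the hunks drop the hand-rolled join_tags/join_parameters helpers in favour of the standard library's urlencode. Pieced together from the hunks and the old contents, the rewritten url() would read roughly as follows; this is a reassembly of the hunks with formatting normalized, not the verified final file:

from urllib.parse import urlencode  # the Python 2 branch keeps `from urllib import urlencode`

def url(parameters):
    p = {}
    for key, value in parameters.items():
        if key == "key":
            if value:
                p["key"] = value
        elif key == "q":
            p["q"] = ",".join(value) if value else "*"
        else:
            p[key] = value
    url = "https://derpiboo.ru/search.json?{}".format(urlencode(p))
    return url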
8188008cf1bd41c1cbe0452ff635dd0319dfecd9
Add trailing slash to url
derrida/books/urls.py
derrida/books/urls.py
from django.conf.urls import url
from django.contrib.admin.views.decorators import staff_member_required
from derrida.books.views import (
    PublisherAutocomplete, LanguageAutocomplete, InstanceDetailView,
    InstanceListView
)


urlpatterns = [
    # TODO: come up with cleaner url patterns/names for autocomplete views
    url(r'^publishers/autocomplete/$',
        staff_member_required(PublisherAutocomplete.as_view()),
        name='publisher-autocomplete'),
    url(r'^languages/autocomplete/$',
        staff_member_required(LanguageAutocomplete.as_view()),
        name='language-autocomplete'),
    url(r'^(?P<pk>\d+)$', InstanceDetailView.as_view(), name='detail'),
    url(r'^$', InstanceListView.as_view(), name='list'),
]
Python
0.000004
@@ -607,16 +607,17 @@ %3Cpk%3E%5Cd+) +/ $', Inst
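Decoded, the hunk inserts the missing slash before the end-of-string anchor; the detail route after the change reads:

url(r'^(?P<pk>\d+)/$', InstanceDetailView.as_view(), name='detail'),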
60b2d812b71fd6e9ab0b464ee9b3f42a2ef16ed2
fix imports
.config/orcsome/rc.py
.config/orcsome/rc.py
import sys from orcsome import get_wm from orcsome.actions import spawn sys.modules.pop('rsi', None) import rsi TERMINAL = 'urxvtc -title terminal' wm = get_wm() wm.on_key('Shift+Mod+r').restart() wm.on_key('Shift+Mod+c').close() wm.on_key('Shift+Mod+e').spawn('external-monitor') wm.on_key('Ctrl+Alt+x').spawn(TERMINAL) wm.on_key('Mod+e').spawn('menu.sh') wm.on_key('Mod+[').spawn('mpc volume -2') wm.on_key('Mod+]').spawn('mpc volume +2') wm.on_key('Mod+.').spawn('mpc next') wm.on_key('Mod+,').spawn('mpc prev') wm.on_key('Mod+/').spawn('mpc-forward') wm.on_key('Mod+~').spawn('mpc-trash') wm.on_key("Mod+'").spawn('mpc-add-tag -D trash listen -A blade') wm.on_key('Mod+;').spawn('mpc-add-tag -D trash blade -A listen') wm.on_key('Mod+p').spawn('mpc toggle') wm.on_key('Mod+x').spawn('lock.sh') wm.on_key('XF86_MonBrightnessUp').spawn('xbacklight -inc 15') wm.on_key('XF86_MonBrightnessDown').spawn('xbacklight -dec 1') wm.on_key('XF86_PowerOff').spawn('sudo pm-suspend') # wm.on_key('Mod+i').spawn_or_raise('urxvtc -name weechat -e weechat-curses', name='weechat') wm.on_key('Mod+l').spawn_or_raise('urxvtc -g 100x30 -name ranger -e ranger', name='ranger') wm.on_key('Mod+j f').spawn_or_raise('firefox', cls='Firefox') ################################ # Handle quick apps window close restore_focus = {'on_create': lambda desktop, window: wm.on_destroy(wm.event_window).activate_desktop(desktop)} wm.on_key('Ctrl+Alt+p').spawn_or_raise( 'urxvtc -name ncmpcpp -e ncmpcpp', name='ncmpcpp', **restore_focus) wm.on_key('Mod+n').spawn_or_raise( 'urxvtc -title mutt -name mutt -e startmutt.sh', name='mutt', **restore_focus) wm.on_key('Ctrl+Alt+m').spawn_or_raise( 'urxvtc -name alsamixer -e alsamixer', name='alsamixer', **restore_focus) wm.on_key('Mod+k').spawn_or_raise( 'urxvtc -name rtorrent -e transmission-remote-cli', name='rtorrent', **restore_focus) @wm.on_key('Ctrl+Mod+space') def maximize_window(): w = wm.current_window if w.maximized_vert and w.maximized_horz: wm.set_window_state(w, vmax=False, hmax=False, decorate=True, otaskbar=False) else: wm.set_window_state(w, vmax=True, hmax=True, decorate=False, otaskbar=True) ########################## # Terminal desktop control @wm.on_key('Ctrl+Alt+c') def toggle_console(): cd = wm.current_desktop if cd == 1: wm.activate_desktop(0) else: clients = wm.find_clients(wm.get_clients(), cls="URxvt") if clients: wm.activate_desktop(1) else: spawn(TERMINAL) @wm.on_manage(cls='URxvt') def bind_urxvt_keys(): wm.on_key(wm.event_window, 'Shift+Right').focus_next() wm.on_key(wm.event_window, 'Shift+Left').focus_prev() def app_rules(w): desktop = 0 decorate = None maximize = None otaskbar = None if w.matches(name='vial'): maximize = True decorate = False elif w.matches(name='ranger'): otaskbar = False elif w.matches(name='Navigator', cls='Firefox'): decorate = False elif w.matches(cls='URxvt'): desktop = 1 decorate = False maximize = True elif w.matches(name='pinentry', cls='Pinentry'): desktop = -1 elif w.matches(cls='bmpanel'): return wm.change_window_desktop(w, desktop) if decorate is not None or maximize is not None or otaskbar is not None: wm.set_window_state(w, vmax=maximize, hmax=maximize, decorate=decorate, otaskbar=otaskbar) cd = wm.current_desktop if desktop >=0 and desktop != cd: wm.activate_desktop(desktop) ######################### # Apply application rules @wm.on_create def on_create(): w = wm.event_window if w.desktop is None: @wm.on_property_change(w, '_NET_WM_DESKTOP') def property_was_set(): property_was_set.remove() app_rules(wm.event_window) else: app_rules(w) 
@wm.on_timer(120) def reset_dpms_for_fullscreen_windows(): w = wm.current_window if w and w.fullscreen: wm.reset_dpms() ########################## # Start RSI prevent module @wm.on_init def init(): r = rsi.init(wm) wm.on_key('Mod+b').do(r.start_rest)
Python
0.000015
@@ -34,42 +34,8 @@ t_wm -%0Afrom orcsome.actions import spawn %0A%0Asy @@ -190,16 +190,23 @@ ').close +_window ()%0Awm.on @@ -2495,16 +2495,19 @@ +wm. spawn(TE
66568b681307835aa36da291581eea7e29d22d54
Fix BUG in backfill
backfill.py
backfill.py
import titanic import requests import json import time ''' Status new updated building running done ''' server = 'http://0.0.0.0:8314/' auth = None # auth = ('<username>@mozilla.com', '<password>') def updateJob(jobID, branch, buildername, revision, delta=7): revList, buildList = titanic.runAnalysis( branch, buildername, revision, delta) print revList print buildList if not (revList or buildList): return 401 buildRevs = ','.join(buildList) revs = ','.join(revList) data = {'id': jobID, 'buildrevs': buildRevs, 'analyzerevs': revs} headers = {'Content-type': 'application/json', 'Accept': 'text/plain'} r = requests.post(server + 'update', data=json.dumps(data), headers=headers) print r.status_code return r.status_code def updateStatus(jobID, status): data = {'id': jobID, 'status': status} headers = {'Content-type': 'application/json', 'Accept': 'text/plain'} r = requests.post(server + 'update_status', data=json.dumps(data), headers=headers) def processJob(job): if job['status'] == 'error': return if job['status'] == 'new': print 'New Job...' updateJob(job['id'], job['branch'], job['buildername'], job['revision']) updateStatus(job['id'], 'updated') print 'Updated Job...' if job['status'] == 'updated': if not (job['buildrevs'] == ''): buildList = job['buildrevs'].split(',') for rev in buildList: print rev titanic.triggerBuild(job['branch'], job['buildername'], rev, auth) updateStatus(job['id'], 'building') print 'Building for Job...' if job['status'] == 'building': print 'Builds are triggered!' buildFlag = 1 revList = job['analyzerevs'].split(',') for rev in revList: if (titanic.isBuildPending(job['branch'], job['buildername'], rev, auth) \ or titanic.isBuildRunning(job['branch'], job['buildername'], rev, auth)): buildFlag = 0 continue elif not titanic.isBuildSuccessful(job['branch'], job['buildername'], rev, auth): print 'Error: For ' + rev + ' ' + job['buildername'] updateStatus(job['id'], 'error') buildFlag = 0 continue if buildFlag: print 'Builds are done!' for rev in revList: titanic.triggerJob(job['branch'], job['buildername'], rev, auth) if not (titanic.isJobPending(job['branch'], job['buildername'], rev, auth) \ or titanic.isJobRunning(job['branch'], job['buildername'], rev, auth)): updateStatus(job['id'], 'error') updateStatus(job['id'], 'running') print 'Running Jobs...' if job['status'] == 'running': doneFlag = 1 revList = job['analyzerevs'].split(',') for rev in revList: if (titanic.isJobPending(job['branch'], job['buildername'], rev, auth) \ or titanic.isJobRunning(job['branch'], job['buildername'], rev, auth)): doneFlag = 0 if doneFlag: updateStatus(job['id'], 'done') print 'Done' def processCron(): jobsJSON = requests.get(server + 'active_jobs') jobs = json.loads(jobsJSON.text) for job in jobs['jobs']: processJob(job) # Schedule backfill.py to run every few minutes! if __name__ == '__main__': processCron()
Python
0.000073
@@ -2175,22 +2175,16 @@ e'%5D, rev -, auth ):%0A
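Decoded, the hunk removes a trailing `, auth` from a call whose context ends in `'], rev, auth):`; the only call in the old file matching that shape is the isBuildSuccessful check, so the fixed line would read (an inference from the hunk context):

elif not titanic.isBuildSuccessful(job['branch'], job['buildername'], rev):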
c4ea3ce306d4464ac0bc80286a60689972c7bc63
Test isolation.
agon/tests.py
agon/tests.py
from threading import Thread

from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.test import TestCase
from django.contrib.auth.models import User

from agon.models import award_points, points_awarded


class PointsTestCase(TestCase):
    def setUp(self):
        self.users = [
            User.objects.create_user("user_%d" % i, "user_%d@example.com" % i, str(i))
            for i in xrange(1)
        ]

    def setup_points(self, value):
        settings.AGON_POINT_VALUES = value

    def test_improperly_configured(self):
        user = self.users[0]
        try:
            award_points(user, "JOINED_SITE")
        except ImproperlyConfigured, e:
            self.assertEqual(str(e), "You must define 'AGON_POINT_VALUES' in settings")
        self.setup_points({})
        try:
            award_points(user, "JOINED_SITE")
        except ImproperlyConfigured, e:
            self.assertEqual(str(e), "You must define a point value for 'JOINED_SITE'")

    def test_simple_user_point_award(self):
        self.setup_points({
            "JOINED_SITE": 1,
        })
        user = self.users[0]
        award_points(user, "JOINED_SITE")
        self.assertEqual(points_awarded(user), 1)

    def test_concurrent_award(self):
        user = self.users[0]
        return
        def run():
            award_points(user, "TEST_1")
        threads = []
        for i in xrange(5):
            t = Thread(target=run)
            threads.append(t)
            t.start()
        for t in threads:
            t.join()
        self.assertEqual(points_awarded(user), 50)
Python
0
@@ -452,24 +452,147 @@ %5D%0A %0A + def tearDown(self):%0A if hasattr(settings, %22AGON_POINT_VALUES%22):%0A del settings.AGON_POINT_VALUES%0A %0A def setu @@ -1435,32 +1435,97 @@ = self.users%5B0%5D%0A + self.setup_points(%7B%0A %22TEST_1%22: 10,%0A %7D)%0A return%0A
4d4b497dcc6afeffcc9455ac6013a35c824bc876
Update docstring for new host argument
depot/io/awss3.py
depot/io/awss3.py
""" Provides FileStorage implementation for MongoDB GridFS. This is useful for storing files inside a mongodb database. """ from __future__ import absolute_import from datetime import datetime import uuid from boto.s3.connection import S3Connection from depot._compat import unicode_text from .interfaces import FileStorage, StoredFile from . import utils class S3StoredFile(StoredFile): def __init__(self, file_id, key): _check_file_id(file_id) self._key = key metadata_info = {'filename': key.get_metadata('x-depot-filename'), 'content_type': key.content_type, 'content_length': key.size, 'last_modified': None} try: last_modified = key.get_metadata('x-depot-modified') if last_modified: metadata_info['last_modified'] = datetime.strptime(last_modified, '%Y-%m-%d %H:%M:%S') except: pass super(S3StoredFile, self).__init__(file_id=file_id, **metadata_info) def read(self, n=-1): if self.closed: raise ValueError("cannot read from a closed file") return self._key.read(n) def close(self): self._key.close() @property def closed(self): return self._key.closed @property def public_url(self): return self._key.generate_url(expires_in=0, query_auth=False) class S3Storage(FileStorage): """:class:`depot.io.interfaces.FileStorage` implementation that stores files on S3. All the files are stored inside a bucket named ``bucket`` to which Depot connects to using ``access_key_id`` and ``secret_access_key``. """ def __init__(self, access_key_id, secret_access_key, bucket=None, host=None): if bucket is None: bucket = 'filedepot-%s' % (access_key_id.lower(),) kw = {} if host is not None: kw['host'] = host self._conn = S3Connection(access_key_id, secret_access_key, **kw) self._bucket = self._conn.lookup(bucket) if self._bucket is None: self._bucket = self._conn.create_bucket(bucket) def get(self, file_or_id): fileid = self.fileid(file_or_id) _check_file_id(fileid) key = self._bucket.get_key(fileid) if key is None: raise IOError('File %s not existing' % fileid) return S3StoredFile(fileid, key) def __save_file(self, key, content, filename, content_type=None): key.set_metadata('content-type', content_type) key.set_metadata('x-depot-filename', filename) key.set_metadata('x-depot-modified', utils.timestamp()) key.set_metadata('Content-Disposition', 'inline; filename="%s"' % filename) if hasattr(content, 'read'): can_seek_and_tell = True try: pos = content.tell() content.seek(pos) except: can_seek_and_tell = False if can_seek_and_tell: key.set_contents_from_file(content, policy='public-read') else: key.set_contents_from_string(content.read(), policy='public-read') else: if isinstance(content, unicode_text): raise TypeError('Only bytes can be stored, not unicode') key.set_contents_from_string(content, policy='public-read') def create(self, content, filename=None, content_type=None): content, filename, content_type = self.fileinfo(content, filename, content_type) new_file_id = str(uuid.uuid1()) key = self._bucket.new_key(new_file_id) self.__save_file(key, content, filename, content_type) return new_file_id def replace(self, file_or_id, content, filename=None, content_type=None): fileid = self.fileid(file_or_id) _check_file_id(fileid) content, filename, content_type = self.fileinfo(content, filename, content_type) if filename is None: f = self.get(fileid) filename = f.filename content_type = f.content_type key = self._bucket.get_key(fileid) self.__save_file(key, content, filename, content_type) return fileid def delete(self, file_or_id): fileid = self.fileid(file_or_id) _check_file_id(fileid) k = self._bucket.get_key(fileid) if 
k: k.delete() def exists(self, file_or_id): fileid = self.fileid(file_or_id) _check_file_id(fileid) k = self._bucket.get_key(fileid) return k is not None def _check_file_id(file_id): # Check that the given file id is valid, this also # prevents unsafe paths. try: uuid.UUID('{%s}' % file_id) except: raise ValueError('Invalid file id %s' % file_id)
Python
0
@@ -1666,18 +1666,27 @@ ucket%60%60 -t o +n %60%60host%60%60 which D @@ -1689,16 +1689,20 @@ ch Depot +%0A connect @@ -1702,20 +1702,16 @@ connects -%0A to usin @@ -1756,16 +1756,78 @@ s_key%60%60. + If %60%60host%60%60 is%0A omitted the Amazon AWS S3 storage is used. %0A%0A %22%22
ac084c574b58771bd240af3fa4b4a000fc742229
update to handle different kinds of files
projects/allan_cont/showlog_long.py
projects/allan_cont/showlog_long.py
import numpy as np
import pylab as pl

from ourgui import openFile

def plotline(maxx, minx=0, value=0, style="k-", plotfunc=pl.plot):
    plotfunc([minx, maxx], [value, value], style)

def quickplot(filename):
    data = np.loadtxt(filename, comments="#")
    maxdata, mindata, stddata, meandata = np.max(data), np.min(data), np.std(data), np.mean(data)
    n = len(data)
    pl.subplot(211)
    pl.plot(data,'k.')
    plotline(n, value=maxdata, style="g-")
    plotline(n, value=mindata, style="r-")
    plotline(n, value=meandata, style="k-")
    plotline(n, value=(meandata+stddata), style="b-")
    plotline(n, value=(meandata-stddata), style="b-")
    pl.xlabel('data points')
    pl.ylabel('Frequency (Hz)')
    pl.title("Frequency: %f (+- %f) Hz" %(meandata, stddata))
    pl.subplot(212)
    n, bins, patches = pl.hist(data-meandata, 100, normed=1, facecolor='green', alpha=0.75)
    pl.xlabel('Frequency deviation from mean (Hz)')
    pl.ylabel('distribution')
    pl.show()

filename = openFile("log")
if filename:
    quickplot(filename)
Python
0
@@ -215,16 +215,19 @@ ):%0D%0A +all data = n @@ -258,17 +258,199 @@ ents=%22#%22 -) +, delimiter=%22,%22)%0D%0A datashape = np.shape(alldata)%0D%0A try:%0D%0A col = np.shape(alldata)%5B1%5D%0D%0A data = alldata%5B:, col-1%5D%0D%0A except (IndexError):%0D%0A data = alldata%0D%0A %0D%0A ma
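Decoded, the hunks make quickplot() handle both single-column and comma-separated multi-column logs: the file is loaded into alldata, the last column is used when a second dimension exists, and the plain vector is used otherwise. The reworked top of quickplot() would read (reassembled from the hunks; the file uses CRLF line endings, shown here as plain newlines):

def quickplot(filename):
    alldata = np.loadtxt(filename, comments="#", delimiter=",")
    datashape = np.shape(alldata)
    try:
        col = np.shape(alldata)[1]
        data = alldata[:, col-1]
    except (IndexError):
        data = alldata
    # ... statistics and plotting continue as before ...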
5dd73ca95ff06a7b53309def5fea3a4ec9692b3f
Add company email validation test data
core/tests/test_core.py
core/tests/test_core.py
# -*- coding: utf-8 -*- from odoo.tests.common import TransactionCase from psycopg2 import IntegrityError from odoo.exceptions import UserError class TestCore(TransactionCase): def test_partner(self): ''' 测试删除已有客户的分类报错 ''' return # 测试已通过,但会在log里报ERROR,所以暂时去掉 with self.assertRaises(IntegrityError): self.env.ref('core.customer_category_1').unlink() def test_partner_name_search(self): """ partner在many2one字段中支持按编号搜索 """ partner = self.env.ref('core.jd') # 使用 name 来搜索京东 result = self.env['partner'].name_search(u'京东') real_result = [(partner.id, partner.name)] self.assertEqual(result, real_result) # 使用 code 来搜索京东 res = self.env['partner'].name_search('jd') self.assertEqual(res, real_result) def test_partner_write(self): ''' 测试 业务伙伴应收/应付余额不为0时,不允许取消对应的客户/供应商身份 ''' partner = self.env.ref('core.jd') partner.receivable = 100 with self.assertRaises(UserError): partner.c_category_id = False partner = self.env.ref('core.lenovo') partner.payable = 100 with self.assertRaises(UserError): partner.s_category_id = False def test_res_currency(self): """测试阿拉伯数字转换成中文大写数字的方法""" self.env['res.currency'].rmb_upper(10000100.3) # 测试输入value为负时的货币大写问题 self.assertTrue( self.env['res.currency'].rmb_upper(-10000100.3) == u'负壹仟万零壹佰元叁角整') class TestResUsers(TransactionCase): def test_write(self): '''修改管理员权限''' user_demo = self.env.ref('base.user_demo') user_demo.groups_id = [(4, self.env.ref('base.group_erp_manager').id)] user_admin = self.env.ref('base.user_root') env2 = self.env(self.env.cr, user_demo.id, self.env.context) with self.assertRaises(UserError): user_admin.with_env(env2).name = 'adsf' # with self.assertRaises(UserError): user_admin.groups_id = [(3, self.env.ref('base.group_erp_manager').id)] class TestBusinessData(TransactionCase): def test_business_data_table(self): ''' 选择model填充table名''' business_data_table = self.env['business.data.table'] business_data_table_row = business_data_table.create( {'name': 'home.report.type'}) business_data_table_row.onchange_model() def test_clean_business_data(self): ''' 测试清空业务数据 表存在''' business_data_table = self.env['business.data.table'] clean_business_data = self.env['clean.business.data'] business_data_table.create({'name': 'home.report.type'}) clean_business_data.create({'create_uid': self.env.uid}).remove_data() def test_clean_business_data_no_table(self): ''' 测试清空业务数据 表不存在会报错''' business_data_table = self.env['business.data.table'] clean_business_data = self.env['clean.business.data'] business_data_table.create({'name': 'ABCD'}) with self.assertRaises(UserError): clean_business_data.create( {'create_uid': self.env.uid}).remove_data() class TestResCompany(TransactionCase): def test_get_logo(self): ''' 取默认logo ''' self.env['res.company'].create({ 'name': 'demo company', 'partner_id': self.env.ref('core.zt').id })
Python
0
@@ -136,16 +136,32 @@ serError +,ValidationError %0A%0A%0Aclass @@ -3377,8 +3377,395 @@ %7D)%0A +%0A def test_check_email(self):%0A ''' test check email '''%0A company = self.env%5B'res.company'%5D.create(%7B%0A 'name': 'demo company',%0A 'partner_id': self.env.ref('core.zt').id%0A %7D)%0A # %E9%82%AE%E7%AE%B1%E6%A0%BC%E5%BC%8F%E6%AD%A3%E7%A1%AE%0A company.email = 'gooderp@osbzr.com'%0A%0A # %E9%82%AE%E7%AE%B1%E6%A0%BC%E5%BC%8F%E4%B8%8D%E6%AD%A3%E7%A1%AE%EF%BC%8C%E6%8A%A5%E9%94%99%0A with self.assertRaises(ValidationError):%0A company.email = 'gooderp'%0A
47854856322cc16bbfeb96a581208d41e6c4466e
return from dumps
mifkit/mif.py
mifkit/mif.py
import json import objects from mifkit.util.mif_encoder import MifEncoder from mifkit.util.case import keys_to_snake_case from mifkit.util.case import to_capitalized_camel_case def dump(mif_object, fp, **kwargs): """ Convert this object into a JSON-encoded string and save it in a file. :param mif_object: Object to serialize. :type mif_object: Single MifObject-type object or list of MifObject-type objects. :param fp: Object to write the serialization to. :type fp: File-like object supporting .write() method. :param kwargs: Any options available to json.dump(). """ json.dump(mif_object, fp, cls=MifEncoder, **kwargs) def dumps(mif_object, **kwargs): """ Convert this object into a JSON-encoded string. :param mif_object: Object to serialize. :type mif_object: Single MifObject-type object or list of MifObject-type objects. :param kwargs: Any options available to json.dumps(). """ json.dumps(mif_object, cls=MifEncoder, **kwargs) def load(fp, **kwargs): """ Convert content in a JSON-encoded string to a Mif object. :param fp: Object to deserialize from. :type fp: File-like object supporting .read() method. :param kwargs: Any options available to json.load(). :return: Single MifObject-type object or list of MifObject-type objects. """ return _to_mif_object(json.load(fp, **kwargs)) def loads(s, **kwargs): """ Convert content in a JSON-encoded string to a Mif object. :param s: String to deserialize from. :type s: String. :param kwargs: Any options available to json.loads(). :return: Single MifObject-type object or list of MifObject-type objects. """ return _to_mif_object(json.loads(s, **kwargs)) def from_dict(obj): """ Convert content in a list or dictionary to :param obj: Python object to convert to MifObject type. :type obj: List or dictionary. :return: Single MifObject-type object or list of MifObject-type objects. """ return _to_mif_object(obj) def _to_mif_object(obj): """ Convert a dictionary or list of a single or multiple MifObject objects. :param obj: Object to convert. :type obj: Dictionary or list. :return: A single MifObject object or a list of MifObject objects. """ if isinstance(obj, list): return [_dict_to_mif_object(i) for i in obj] elif isinstance(obj, dict): return [_dict_to_mif_object(obj)] else: raise ValueError('expecting list or dictionary as outermost structure') def _dict_to_mif_object(obj): """ Convert a dictionary to a MifObject object based on its name. :param obj: Object to convert to a MifObject object. :type obj: Dictionary. :return: MifObject with the content of obj. """ if len(obj) != 1: raise ValueError('Top-level mif object must contain exactly one key') key = obj.keys()[0] value = obj[key] if not isinstance(value, dict): raise ValueError(key + ' must have a value that is a dictionary') return getattr(objects, to_capitalized_camel_case(key))(**keys_to_snake_case(value)) class Mif(object): """ Legacy class. Don't use this. It's only here to prevent old scripts from breaking. """ def __init__(self, sample=None): """ Constructor. :param sample: Samples to sample. :type sample: Sample object or list of Sample objects. """ super(Mif, self).__init__() self.sample = sample def to_json(self, indent=None): """ Convert this object into a JSON-encoded string. :param indent: Indent to apply to the json string. :returns: JSON-encoded string with the content of this object. """ return json.dumps(self.sample) if indent is None else json.dumps(self.sample, indent=indent)
Python
0
@@ -596,32 +596,39 @@ ().%0A %22%22%22%0A +return json.dump(mif_ob @@ -960,16 +960,23 @@ %22%22%22%0A +return json.dum
c206936120519912762f30eb269f1733b5593bf8
fix window edges
contrib/spryte/balls.py
contrib/spryte/balls.py
import random from pyglet import window, clock, gl, event from pyglet.window import key import spryte win = window.Window(vsync=False) fps = clock.ClockDisplay(color=(1, 1, 1, 1)) layer = spryte.Layer() balls = [] for i in range(200): balls.append(spryte.Sprite('ball.png', layer, win.width * random.random(), win.height * random.random(), dx=-50 + 100*random.random(), dy=-50 + 100*random.random(), dead=False)) def animate(dt): for ball in balls: ball.x += ball.dx * dt ball.y += ball.dy * dt if ball.x > win.width or ball.x < 0: ball.dx *= -1 if ball.y > win.height or ball.y < 0: ball.dy *= -1 clock.schedule(animate) layer2 = spryte.Layer() car = spryte.Sprite('car.png', layer2, win.width/2, win.height/2) keyboard = key.KeyStateHandler() win.push_handlers(keyboard) def animate(dt): car.x += (keyboard[key.RIGHT] - keyboard[key.LEFT]) * 200 * dt car.y += (keyboard[key.UP] - keyboard[key.DOWN]) * 200 * dt for i, ball in enumerate(balls): if ball.intersects(car): if ball.width > ball.image.width * 2: # pop! balls[i].delete() balls[i] = spryte.Sprite('ball.png', layer, win.width * random.random(), win.height * random.random(), dx=-50 + 100*random.random(), dy=-50 + 100*random.random()) else: ball.width += 1 ball.height += 1 clock.schedule(animate) while not win.has_exit: clock.tick() win.dispatch_events() win.clear() gl.glPushAttrib(gl.GL_ENABLE_BIT) gl.glEnable(gl.GL_BLEND) gl.glBlendFunc(gl.GL_SRC_ALPHA, gl.GL_ONE_MINUS_SRC_ALPHA) layer.draw() layer2.draw() gl.glPopAttrib() fps.draw() win.flip()
Python
0.000001
@@ -283,32 +283,33 @@ layer,%0A +( win.width * rand @@ -293,32 +293,38 @@ (win.width + - 64) * random.random @@ -319,32 +319,33 @@ andom.random(), +( win.height * ran @@ -330,32 +330,38 @@ m(), (win.height + - 64) * random.random @@ -575,16 +575,29 @@ ball.x ++ ball.width %3E win.wi @@ -647,16 +647,30 @@ ball.y ++ ball.height %3E win.he
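Decoded, the hunks keep the sprites fully on screen: the spawn positions subtract the sprite size from the playfield, and the bounce test compares the sprite's far edge rather than its origin. The touched lines after the change would read (reassembled from the hunks, with continuation indentation approximated):

balls.append(spryte.Sprite('ball.png', layer,
    (win.width - 64) * random.random(), (win.height - 64) * random.random(),
    dx=-50 + 100*random.random(), dy=-50 + 100*random.random(),
    dead=False))

# ... inside animate(dt) ...
if ball.x + ball.width > win.width or ball.x < 0:
    ball.dx *= -1
if ball.y + ball.height > win.height or ball.y < 0:
    ball.dy *= -1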
b77cb1ac7524e76fd1f29ee6c8e214d12d04226f
Improve variable names.
scripts/gen_regex.py
scripts/gen_regex.py
import unicodedata from ftfy import chardata import pathlib from pkg_resources import resource_filename CATEGORIES = [unicodedata.category(chr(i)) for i in range(0x110000)] DATA_PATH = pathlib.Path(resource_filename('wordfreq', 'data')) def func_to_regex(func): """ Given a function that returns True or False for a numerical codepoint, return a regex character class accepting the characters resulting in True. Ranges separated only by unassigned characters are merged for efficiency. """ # Where the last range would end if it also included unassigned codepoints. # If we need to add a codepoint right after this point, we extend the # range; otherwise we start a new one. tentative_end = None ranges = [] for i, cat in enumerate(CATEGORIES): if func(i): if tentative_end == i - 1: ranges[-1][1] = i else: ranges.append([i, i]) tentative_end = i elif cat == 'Cn' and tentative_end == i - 1: tentative_end = i return '[%s]' % ''.join(chr(r[0]) + '-' + chr(r[1]) for r in ranges) def cache_regex_from_func(filename, func): """ Generates a regex from a function that accepts a single unicode character, and caches it in the data path at filename. """ with (DATA_PATH / filename).open(mode='w') as file: file.write(func_to_regex(func)) def _is_emoji_codepoint(i): """ Report whether a numerical codepoint is (likely) an emoji: a Unicode 'So' character (as future-proofed by the ftfy chardata module) but excluding symbols like © and ™ below U+2600 and the replacement character U+FFFD. """ return chardata.CHAR_CLASS_STRING[i] == '3' and i >= 0x2600 and i != 0xfffd def _is_non_punct_codepoint(i): """ Report whether a numerical codepoint is not one of the following classes: - P: punctuation - S: symbols - Z: separators - C: control characters This will classify symbols, including emoji, as punctuation; users that want to accept emoji should add them separately. """ return CATEGORIES[i][0] not in 'PSZC' def _is_combining_mark_codepoint(i): """ Report whether a numerical codepoint is a combining mark (Unicode 'M'). """ return CATEGORIES[i][0] == 'M' if __name__ == '__main__': cache_regex_from_func('emoji.txt', _is_emoji_codepoint) cache_regex_from_func('non_punct.txt', _is_non_punct_codepoint) cache_regex_from_func('combining_mark.txt', _is_combining_mark_codepoint)
Python
0.999999
@@ -248,24 +248,31 @@ nc_to_regex( +accept_ func):%0A %22 @@ -765,14 +765,27 @@ for -i, cat +codepoint, category in @@ -822,14 +822,29 @@ if -func(i +accept_func(codepoint ):%0A @@ -866,33 +866,41 @@ entative_end == -i +codepoint - 1:%0A @@ -921,17 +921,25 @@ 1%5D%5B1%5D = -i +codepoint %0A @@ -984,12 +984,28 @@ nd(%5B -i, i +codepoint, codepoint %5D)%0A @@ -1031,17 +1031,25 @@ e_end = -i +codepoint %0A @@ -1057,16 +1057,21 @@ elif cat +egory == 'Cn' @@ -1092,17 +1092,25 @@ _end == -i +codepoint - 1:%0A @@ -1135,17 +1135,25 @@ e_end = -i +codepoint %0A%0A re
0d44fd6ae6f8a7d51cbb6933c9fddda1b2f61f8b
Remove debug statement
extract_scales.py
extract_scales.py
#!/usr/bin/env python import json from lxml import etree ZOOM = [ 559082264, 279541132, 139770566, 69885283, 34942642, 17471321, 8735660, 4367830, 2183915, 1091958, 545979, 272989, 136495, 68247, 34124, 17062, 8531, 4265, 2133, 1066, 533 ] DEFAULT_ZOOM_LEVELS = range(10, 21) class Styles(list): def __init__(self): super(Styles, self).__init__() def __getitem__(self, index): if isinstance(index, str) or isinstance(index, unicode): return self.find_by_name(index) else: return super(Styles, self).__getitem__(index) def find_by_name(self, name): res = [s for s in self if s['name'] == name] if not res: raise KeyError('No style with name: "{}"'.format(name)) return res[0] def find_by_scale(self, scale_min, scale_max): new_styles = Styles() for style in self: flag = False for rule in style['rules']: flag_min = False flag_max = False rmin = int(rule.get('MinScaleDenominator', 0)) rmax = int(rule.get('MaxScaleDenominator', ZOOM[0])) if rmin <= scale_min: flag_min = True if rmax >= scale_max: flag_max = True if flag_min and flag_max: flag = True if flag: new_styles.append(style) break if flag: continue return new_styles def with_scale_leq_than(self, scale_max): return self.find_by_scale(scale_min=0, scale_max=scale_max) def with_scale_geq_than(self, scale_min): return self.find_by_scale(scale_min=scale_min, scale_max=ZOOM[0]) @staticmethod def zoom_limits(zoom): if zoom < -1 or zoom > 20: raise IndexError('Zoom level should be an integer z with' ' 0 <= z <= 20' ) zmax = ZOOM[zoom] if zoom == 20: zmin = 0 else: zmin = ZOOM[zoom+1] return (zmin, zmax) def visible_at_zoom_level(self, zoom): zmin, zmax = self.zoom_limits(zoom) return self.find_by_scale(zmin, zmax) class Layers(list): def __init__(self): super(Layers, self).__init__() def __getitem__(self, index): if isinstance(index, str) or isinstance(index, unicode): return self.find_by_name(index) else: return super(Layers, self).__getitem__(index) def find_by_name(self, name): res = [l for l in self if l['name'] == name] if not res: raise KeyError('No layer with name: "{}"'.format(name)) return res[0] def with_style(self, style): new_layers = Layers() for l in self: if isinstance(style, str) or isinstance(style, unicode): if style in l['styles']: new_layers.append(l) elif isinstance(style, list) or isinstance(style, tuple): set_styles = set(style) if set_styles.intersection(l['styles']): new_layers.append(l) return new_layers def make_range(interval_str): ranges = (x.split("-") for x in interval_str.split(",")) interval = [i for r in ranges for i in range(int(r[0]), int(r[-1]) + 1)] return [i for i in interval if i in DEFAULT_ZOOM_LEVELS] if __name__ == '__main__': import argparse parser = argparse.ArgumentParser(description='Process some integers.') parser.add_argument('-x', '--xml', dest='input_xml', default='mapnik.xml', help='Mapnik xml file [default: mapnik.xml]' ) parser.add_argument('-z', '--zoom-levels', dest='zoom_levels', default=DEFAULT_ZOOM_LEVELS, help='Zoom levels to be processed [default: 10-20]' ) parser.add_argument('-o', '--outfile-prefix', dest='prefix', default='zoom_', help='Prefix for the output files [default: zoom_]' ) args = parser.parse_args() zoom_levels = [] if args.zoom_levels: zoom_levels = make_range(args.zoom_levels) with open(args.input_xml, 'r') as infile: text = infile.read() root = etree.fromstring(text) styles = Styles() for style in root.iterfind('Style'): el_style = {} el_style.update(style.items()) el_style['rules'] = [] for rule in style.iterchildren(): if rule.tag != 'Rule': import pdb pdb.set_trace() 
el_rule = {} ch = None for ch in rule.iterchildren(): if ch.tag in ['MinScaleDenominator', 'MaxScaleDenominator']: el_rule[ch.tag] = ch.text el_style['rules'].append(el_rule) styles.append(el_style) layers = Layers() for layer in root.iterfind('Layer'): el_layer = {} el_layer.update(layer.items()) el_layer['datasources'] = [] el_layer['styles'] = [] for child in layer.iterchildren(): if child.tag == 'StyleName': el_layer['styles'].append(child.text) elif child.tag == 'Datasource': for parameter in child.iterchildren(): el_parameter = {} el_parameter.update(parameter.items()) el_parameter['parameter'] = parameter.text el_layer['datasources'].append(el_parameter) layers.append(el_layer) for zlev in zoom_levels: styles_zoom = styles.visible_at_zoom_level(zlev) layers_zoom = layers.with_style([s['name'] for s in styles_zoom]) filename = args.prefix + str(zlev) + '.json' with open(filename, 'w+') as outfile: json.dump(layers_zoom, outfile) import pdb pdb.set_trace()
Python
0.000021
@@ -6227,40 +6227,4 @@ le)%0A -%0A import pdb%0A pdb.set_trace()%0A
b9c7ea4613773d7414acca2e066b174447b0d75a
Fix conf_name handling in fabd.conf
fabdeploy/fabd.py
fabdeploy/fabd.py
import os import shutil import logging from fabric.api import env, run, sudo, puts, abort from . import users, ssh from .containers import conf as conf_dec from .task import Task __all__ = [ 'mkdirs', 'remove_src', 'debug', 'conf', 'default_conf', 'create_user', 'create_configs', ] logger = logging.getLogger('fabdeploy.fabd') class Mkdirs(Task): """ Create all known remote dirs. We treat config variables ending with ``_path`` postfix as dir. """ def do(self): home_dirs, sudo_dirs = [], [] for k, v in self.conf.items(): if k.endswith('_path'): if v.startswith(self.conf.home_path): home_dirs.append(v) else: sudo_dirs.append(v) run('mkdir --parents %s' % ' '.join(home_dirs)) sudo('mkdir --parents %s' % ' '.join(sudo_dirs)) mkdirs = Mkdirs() class RemoveSrc(Task): """ Remove ``src_path`` dir. This is usefull when you want to perform clean deploy. See also ``virtualenv.remove``. """ def do(self): sudo('rm --recursive --force %(src_path)s' % self.conf) remove_src = RemoveSrc() class Debug(Task): """Print config variable.""" def do(self): if 'var' in self.conf: puts(self.conf[self.conf.var]) else: out = '\n' for k, v in self.conf.items(): out += '%s = %s\n' % (k, v) puts(out) def run(self, var=None, **kwargs): if var is not None: kwargs.setdefault('var', var) super(Debug, self).run(**kwargs) debug = Debug() class Conf(Task): def _conf_name(self, name): return ''.join([p[:1].upper() + p[1:] for p in name.split('_')]) + 'Conf' def get_conf(self): try: import fabconf as config except ImportError: abort('Can not import fabconf.py.') name = self._conf_name(self.conf.name) conf = getattr(config, name)(name='fabd.conf') if self.conf.name == 'default': conf.set_globally('conf_name', self.conf.name) return conf def create_conf(self): conf = self.get_conf() for k, v in self.task_kwargs.items(): conf[k] = v return conf def do(self): env.conf = self.create_conf() env.hosts = [env.conf.address] def run(self, name, **kwargs): kwargs.setdefault('name', name) return super(Conf, self).run(**kwargs) conf = Conf() class DefaultConf(Conf): def get_conf(self): from .containers import DefaultConf return DefaultConf(name='default') def run(self, **kwargs): return super(Conf, self).run(**kwargs) default_conf = DefaultConf() class CreateUser(Task): @conf_dec def fabd_user(self): return 'fabdeploy' def do(self): users.create.run(user=self.conf.fabd_user) ssh.push_key.run( user=self.conf.fabd_user, pub_key_file='~/.ssh/id_rsa.pub') users.grant_sudo.run(user=self.conf.fabd_user) create_user = CreateUser() class CreateConfigs(Task): """Creates config_templates directory with all available configs.""" @conf_dec def configs_src(self): return os.path.join( os.path.dirname(__file__), 'config_templates') @conf_dec def configs_target(self): return os.path.join(os.getcwd(), 'config_templates') def do(self): for (dirpath, dirnames, filenames) in os.walk(self.conf.configs_src): for filename in filenames: src_filepath = os.path.join(dirpath, filename) name = src_filepath.replace(self.conf.configs_src + '/', '') target_filepath = os.path.join( self.conf.configs_target, name) if os.path.exists(target_filepath): continue puts('Copying %s...' % filename) try: os.makedirs(os.path.dirname(target_filepath)) except OSError, exc: logger.debug('CreateConfigs: %s' % exc) shutil.copyfile(src_filepath, target_filepath) create_configs = CreateConfigs()
Python
0.00028
@@ -2041,16 +2041,17 @@ .conf')%0A +%0A @@ -2059,24 +2059,29 @@ f self.conf. +conf_ name == 'def
960618782d81035dd9671c973ad6d95c55ff6534
Use the firefox capabilities if wires exist
tests/functional_tests_gerrit.py
tests/functional_tests_gerrit.py
#!/bin/env python import unittest import os import yaml from selenium import webdriver from selenium.webdriver.common.keys import Keys from selenium.webdriver.support.ui import WebDriverWait from selenium.webdriver.support import expected_conditions from selenium.webdriver.common.desired_capabilities import DesiredCapabilities from gerrit import Gerrit MAX_WAIT = 10 class TestGerrit(unittest.TestCase): @classmethod def setUpClass(cls): try: with open('test-config.yaml', 'r') as ymlfile: cls.config = yaml.load(ymlfile) except IOError: # No config file, set defaults cls.config = {'webdriver': 'firefox', 'url': 'http://localhost:8080/', 'username': 'felix', 'password': '<password>', } if cls.config.get('webdriver') == 'firefox': if os.path.isfile('./wires'): firefox_capabilities = DesiredCapabilities.FIREFOX firefox_capabilities['marionette'] = True firefox_capabilities['binary'] = os.environ.get('firefox_path', '/usr/bin/firefox') cls._browser = webdriver.Firefox() elif cls.config.get('webdriver') == 'chrome': cls._browser = webdriver.Chrome() else: raise Exception('Webdriver not supported') cls._url = cls.config.get('url') cls._username = cls.config.get('username') cls._password = cls.config.get('password') # Log in once to make user admin cls._browser.get('%slogin' % cls._url) cls._browser.implicitly_wait(MAX_WAIT) elem = cls._browser.find_element_by_id('f_user') elem.send_keys(cls._username) elem = cls._browser.find_element_by_id('f_pass') elem.send_keys(cls._password + Keys.RETURN) element = WebDriverWait(cls._browser, MAX_WAIT).until( expected_conditions.title_contains('My Reviews') ) @classmethod def tearDownClass(cls): cls._browser.quit() def test_add_project(self): # Felix wants to add a project, he uses the gerrit module to do this gerrit = Gerrit( url=self._url, auth_type='http', auth_id=self._username, auth_pw=self._password, ) project = gerrit.create_project('my project') # Felix can now access his project in the web interface self._browser.get('%s#/admin/projects/my+project' % self._url) element = WebDriverWait(self._browser, MAX_WAIT).until( expected_conditions.title_contains('Project my project') ) self.assertIn('Project my project', self._browser.title)
Python
0.000001
@@ -1189,16 +1189,122 @@ refox')%0A + cls._browser = webdriver.Firefox(capabilities=firefox_capabilities)%0A else:%0A @@ -2200,12 +2200,13 @@ ser. -quit +close ()%0A%0A
439b977b14b12d42ee886a432f3a4af555d8de10
add storage stuctures
minMaxCalc.py
minMaxCalc.py
import pandas as pd # read in dataset xl = pd.ExcelFile("data/130N_Cycles_1-47.xlsx") df = xl.parse("Specimen_RawData_1") df """ This is what the dataset currently looks like - it has 170,101 rows and two columns. The dataset contains data from 47 cycles following an experiment. The output of these experiments form the two columns:<br> - time (seconds) - load (exerted force, in Newtons) My task is to find the local maxima and minima in the dataset, and mark these values in a database. Initially, the database will consist of four columns: time, load, max, and min. It can be modified or condensed later on to fit further requirements. This is the criteria I will use to find the maxima: - write each row in the db to a cache - initialize a flag value to false - if the force in the previous row is smaller than the force in the next row, write the new row to the cache (leave the flag as false) - if the force in the previous row is bigger than the force in the next row, write the new row to cache and mark it as a max cycle (change the flag to true) This is the criteria I will use to find the minima: - write each row in the db to a cache - initialize a flag value to false - if the force in the previous row is bigger than the force in the next row, write the new row to the cache (leave the flag as false) - if the force in the previous row is smaller than the force in the next row, write the new row to the cache and mark it as a min cycle (change the flag to true) """ # append data from time column to list time = [] for item in df.index: time.append(df["Time"][item]) # append data from load column to list load = [] for item in df.index: load.append(df["Load"][item]) # create list of tuples for time and load data = [] for i, j in zip(time, load): data.append((i,j)) # apply algorithm for finding maxima in data max_data = [] for idx, item in enumerate(data): prev = data[idx-1][1] curr = item[1] if prev > curr: max_data.append(item + ("max",)) else: max_data.append(item + ("",)) # apply algorithm for finding minima in data min_data = [] for idx, item in enumerate(max_data): prev = max_data[idx-1][1] curr = item[1] if prev < curr: min_data.append(item + ("min",)) else: min_data.append(item + ("",)) all_data = min_data # count maxima number max_count = 0 for item in all_data: if item[2] == "max": max_count += 1 print(max_count) # count minima number min_count = 0 for item in all_data: if item[3] == "min": min_count += 1 print(min_count) df = pd.DataFrame(data, columns=['Time', 'Load', 'Max', 'Min']) df
Python
0
@@ -2622,71 +2622,62 @@ %0A -df = pd.DataFrame(data, columns=%5B'Time', 'Load', 'Max', 'Min'%5D)%0Adf%0A +# create db model%0Adb = %5B%5D%0A%0A# create cache store%0Acache = %5B%5D
642cd34041a579fa37ea3790143d79842c7141f3
add implementation for all makers
ismrmrdpy/backend/acquisition.py
ismrmrdpy/backend/acquisition.py
# -*- coding: utf-8 -*-
#
# Copyright (c) 2014-2015, Ghislain Antony Vaillant
# All rights reserved.
#
# This file is distributed under the BSD License, see the LICENSE file or
# checkout the license terms at http://opensource.org/licenses/BSD-2-Clause).

from __future__ import absolute_import, division, print_function

from .constants import Constants, AcquisitionFlags, acquisition_header_dtype


def make_header(*args, **kwargs):
    pass

def make_dtype(header):
    pass

def make_array(header=None, *args, **kwargs):
    pass

def frombytes(bytestring):
    pass

def set_flags(header, flags=None):
    pass

def clear_flags(header, flags=None):
    pass

def is_flag_set(header, flag):
    pass

def _verify_flags(flags):
    pass

def set_channels(header, channels=None):
    pass

def clear_channels(header, channels=None):
    pass

def is_channel_set(header, channel):
    pass

def _verify_channels(flags):
    pass
Python
0
@@ -371,167 +371,1248 @@ gs, -acquisition_header_dtype%0A%0A%0Adef make_header(*args, **kwargs):%0A pass%0A%0Adef make_dtype(header):%0A pass%0A%0Adef make_array(header=None, *args, **kwargs):%0A pass +DataTypes%0Afrom .constants import acquisition_header_dtype, ismrmrd_to_numpy_dtypes%0Aimport numpy%0A%0A%0Adef make_header(version=Constants.version, *args, **kwargs):%0A header = numpy.zeros((), dtype=acquisition_header_dtype)%0A header%5B'version'%5D = version%0A for key in kwargs:%0A if key in acquisition_header_dtype.fields:%0A header%5Bkey%5D = kwargs%5Bkey%5D%0A return header %0A%0Adef make_dtype(header):%0A data_dtype = ismrmrd_to_numpy_dtypes%5BDataTypes.cxfloat%5D%0A data_shape = (header%5B'active_channels'%5D,%0A header%5B'number_of_samples'%5D)%0A traj_dtype = ismrmrd_to_numpy_dtypes%5BDataTypes.float%5D%0A traj_shape = (header%5B'number_of_samples'%5D,%0A header%5B'trajectory_dimensions'%5D)%0A return numpy.dtype(%5B%0A ('head', acquisition_header_dtype),%0A ('traj', (traj_dtype, traj_shape)),%0A ('data', (data_dtype, data_shape)),%0A %5D)%0A%0Adef make_array(header=None, *args, **kwargs):%0A header = header or make_header(**kwargs)%0A trajectory = None%0A data = None%0A dtype = make_dtype(header)%0A array = numpy.zeros((), dtype=dtype)%0A array%5B'head'%5D = header%0A if trajectory is not None:%0A array%5B'traj'%5D = trajectory%0A if data is not None:%0A array%5B'data'%5D = data %0A%0Ade
72067069138ce9568c06140d23bd07cc6741a30e
Test case can't throw away windows, it needs shared context space to continue. XXX fix this in pyglet, ideally.
tests/resource/RES_LOAD_IMAGE.py
tests/resource/RES_LOAD_IMAGE.py
#!/usr/bin/python # $Id:$ import os import sys import unittest from pyglet.gl import * from pyglet import image from pyglet import resource from pyglet import window __noninteractive = True # Test image is laid out # M R # B G # In this test the image is sampled at four points from top-right clockwise: # R G B M (red, green, blue, magenta) class TestCase(unittest.TestCase): def setUp(self): self.w = window.Window(width=10, height=10) self.w.dispatch_events() resource.path.append('@' + __name__) resource.reindex() def tearDown(self): self.w.close() def check(self, img, colors): glClear(GL_COLOR_BUFFER_BIT) glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST) glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST) img.blit(0, 0) buffer = image.get_buffer_manager().get_color_buffer().image_data buffer.format = 'RGBA' buffer.pitch = len(buffer.format) * buffer.width bytes = buffer.data def sample(x, y): i = y * buffer.pitch + x * len(buffer.format) r, g, b, _ = bytes[i:i+len(buffer.format)] r, g, b = map(ord, (r, g, b)) return { (255, 0, 0): 'r', (0, 255, 0): 'g', (0, 0, 255): 'b', (255, 0, 255): 'm'}.get((r, g, b), 'x') samples = ''.join([ sample(3, 3), sample(3, 0), sample(0, 0), sample(0, 3)]) self.assertTrue(samples == colors, samples) def test0(self): self.check(resource.image('rgbm.png'), 'rgbm') def test1(self): self.check(resource.image('rgbm.png', pad=1), 'rgbm') def test1a(self): self.check(resource.image('rgbm.png', pad=2), 'rgbm') def test1b(self): self.check(resource.image('rgbm.png', pad=4), 'rgbm') def test2(self): self.check(resource.image('rgbm.png', flip_x=True), 'mbgr') def test3(self): self.check(resource.image('rgbm.png', flip_y=True), 'grmb') def test4(self): self.check(resource.image('rgbm.png', flip_x=True, flip_y=True), 'bmrg') def test5(self): self.check(resource.image('rgbm.png', rotate=90), 'mrgb') def test5a(self): self.check(resource.image('rgbm.png', rotate=-270), 'mrgb') def test6(self): self.check(resource.image('rgbm.png', rotate=180), 'bmrg') def test6a(self): self.check(resource.image('rgbm.png', rotate=-180), 'bmrg') def test7(self): self.check(resource.image('rgbm.png', rotate=270), 'gbmr') def test7a(self): self.check(resource.image('rgbm.png', rotate=-90), 'gbmr') if __name__ == '__main__': unittest.main()
Python
0.000001
@@ -613,24 +613,25 @@ ):%0D%0A +# self.w.close @@ -633,16 +633,30 @@ close()%0D +%0A pass%0D %0A%0D%0A d
a80e063a4afb65018a8b137f1909956839f42767
Test default search context
tests/sentry/interfaces/tests.py
tests/sentry/interfaces/tests.py
# -*- coding: utf-8 -*- from __future__ import absolute_import import pickle from sentry.interfaces import Interface, Message, Query, Stacktrace from sentry.models import Event from sentry.testutils import TestCase, fixture class InterfaceBase(TestCase): @fixture def event(self): return Event( id=1, ) class InterfaceTest(InterfaceBase): @fixture def interface(self): return Interface(foo=1) def test_init_sets_attrs(self): assert self.interface.attrs == ['foo'] def test_setstate_sets_attrs(self): data = pickle.dumps(self.interface) obj = pickle.loads(data) assert obj.attrs == ['foo'] def test_to_html_default(self): assert self.interface.to_html(self.event) == '' def test_to_string_default(self): assert self.interface.to_string(self.event) == '' class MessageTest(InterfaceBase): @fixture def interface(self): return Message(message='Hello there %s!', params=('world',)) def test_serialize_behavior(self): assert self.interface.serialize() == { 'message': self.interface.message, 'params': self.interface.params, } def test_get_hash_uses_message(self): assert self.interface.get_hash() == [self.interface.message] def test_get_search_context_with_params_as_list(self): interface = self.interface interface.params = ['world'] assert interface.get_search_context(self.event) == { 'text': [interface.message] + list(interface.params) } def test_get_search_context_with_params_as_tuple(self): assert self.interface.get_search_context(self.event) == { 'text': [self.interface.message] + list(self.interface.params) } def test_get_search_context_with_params_as_dict(self): interface = self.interface interface.params = {'who': 'world'} interface.message = 'Hello there %(who)s!' assert self.interface.get_search_context(self.event) == { 'text': [interface.message] + interface.params.values() } class QueryTest(InterfaceBase): @fixture def interface(self): return Query(query='SELECT 1', engine='psycopg2') def test_serialize_behavior(self): assert self.interface.serialize() == { 'query': self.interface.query, 'engine': self.interface.engine, } def test_get_hash_uses_query(self): assert self.interface.get_hash() == [self.interface.query] def test_get_search_context(self): assert self.interface.get_search_context(self.event) == { 'text': [self.interface.query], }
Python
0.000001
@@ -874,16 +874,131 @@ == ''%0A%0A + def test_get_search_context_default(self):%0A assert self.interface.get_search_context(self.event) == %7B%7D%0A%0A %0Aclass M
59ba038f117744ca0c5fe8c24b97b64830f8e7ec
Put bulk data into db
court_bulk_collector.py
court_bulk_collector.py
from courtreader import readers
from courtutils.logger import get_logger
from datetime import datetime, timedelta
import pymongo
import os
import sys
import time

# configure logging
log = get_logger()
log.info('Worker running')

def get_db_connection():
    return pymongo.MongoClient(os.environ['MONGO_DB'])['va_court_search']

# Fill in cases
court_reader = None
current_court_fips = None
db = get_db_connection()
court_fips = '013'
case_type = 'R'
year = 2015

reader = readers.CircuitCourtReader()
reader.connect()

date = datetime(year, 12, 31)
while date.year == year:
    dateStr = date.strftime('%m/%d/%Y')
    log.info('Getting cases on ' + dateStr)
    cases = reader.get_cases_by_date(court_fips, case_type, dateStr)
    for case in cases:
        case['details'] = reader.get_case_details_by_number( \
                          court_fips, \
                          case_type, \
                          case['case_number'])
        case['details_fetched'] = datetime.utcnow()
        print case['case_number'], case['defendant'], case['details']['Filed']
        break
    date += timedelta(days=-1)
reader.log_off()
Python
0.000004
@@ -443,17 +443,24 @@ type = ' -R +criminal '%0Ayear = @@ -527,101 +527,38 @@ )%0A%0Ad -ate = datetime(year, 12, 31)%0Awhile date.year == year:%0A dateStr = date.strftime('%25m/%25d/%25Y') +ef get_cases_on_date(dateStr): %0A @@ -937,16 +937,56 @@ tcnow()%0A + case%5B'court_fips'%5D = court_fips%0A @@ -1068,13 +1068,623 @@ -break +db.circuit_court_detailed_cases.find_one_and_replace(%7B%0A 'court_fips': case%5B'court_fips'%5D,%0A 'case_number': case%5B'case_number'%5D%0A %7D, case, upsert=True)%0A%0Adate = datetime(year, 12, 31)%0Awhile date.year == year:%0A date_search = %7B%0A 'court_fips': court_fips,%0A 'case_type': case_type,%0A 'date': date%0A %7D%0A dateStr = date.strftime('%25m/%25d/%25Y')%0A if db.circuit_court_dates_collected.find_one(date_search) != None:%0A log.info(dateStr + ' already searched')%0A else:%0A get_cases_on_date(dateStr)%0A db.circuit_court_dates_collected.insert_one(date_search) %0A
6e61156b00759a05321d65c50bb223cfa46b97bc
add fuzzing test for ZstdCompressor.compressobj()
tests/test_compressor_fuzzing.py
tests/test_compressor_fuzzing.py
import io import os try: import unittest2 as unittest except ImportError: import unittest try: import hypothesis import hypothesis.strategies as strategies except ImportError: raise unittest.SkipTest('hypothesis not available') import zstd from . common import ( make_cffi, random_input_data, ) @unittest.skipUnless('ZSTD_SLOW_TESTS' in os.environ, 'ZSTD_SLOW_TESTS not set') @make_cffi class TestCompressor_write_to_fuzzing(unittest.TestCase): @hypothesis.given(original=strategies.sampled_from(random_input_data()), level=strategies.integers(min_value=1, max_value=5), write_size=strategies.integers(min_value=1, max_value=1048576)) def test_write_size_variance(self, original, level, write_size): refctx = zstd.ZstdCompressor(level=level) ref_frame = refctx.compress(original) cctx = zstd.ZstdCompressor(level=level) b = io.BytesIO() with cctx.write_to(b, size=len(original), write_size=write_size) as compressor: compressor.write(original) self.assertEqual(b.getvalue(), ref_frame) @unittest.skipUnless('ZSTD_SLOW_TESTS' in os.environ, 'ZSTD_SLOW_TESTS not set') @make_cffi class TestCompressor_copy_stream_fuzzing(unittest.TestCase): @hypothesis.given(original=strategies.sampled_from(random_input_data()), level=strategies.integers(min_value=1, max_value=5), read_size=strategies.integers(min_value=1, max_value=1048576), write_size=strategies.integers(min_value=1, max_value=1048576)) def test_read_write_size_variance(self, original, level, read_size, write_size): refctx = zstd.ZstdCompressor(level=level) ref_frame = refctx.compress(original) cctx = zstd.ZstdCompressor(level=level) source = io.BytesIO(original) dest = io.BytesIO() cctx.copy_stream(source, dest, size=len(original), read_size=read_size, write_size=write_size) self.assertEqual(dest.getvalue(), ref_frame) @unittest.skipUnless('ZSTD_SLOW_TESTS' in os.environ, 'ZSTD_SLOW_TESTS not set') class TestCompressor_multi_compress_to_buffer_fuzzing(unittest.TestCase): @hypothesis.given(original=strategies.lists(strategies.sampled_from(random_input_data()), min_size=1, max_size=1024), threads=strategies.integers(min_value=1, max_value=8), use_dict=strategies.booleans()) def test_data_equivalence(self, original, threads, use_dict): kwargs = {} # Use a content dictionary because it is cheap to create. if use_dict: kwargs['dict_data'] = zstd.ZstdCompressionDict(original[0]) cctx = zstd.ZstdCompressor(level=1, threads=threads, write_content_size=True, write_checksum=True, **kwargs) result = cctx.multi_compress_to_buffer(original) self.assertEqual(len(result), len(original)) # The frame produced via the batch APIs may not be bit identical to that # produced by compress() because compression parameters are adjusted # from the first input in batch mode. So the only thing we can do is # verify the decompressed data matches the input. dctx = zstd.ZstdDecompressor(**kwargs) for i, frame in enumerate(result): self.assertEqual(dctx.decompress(frame), original[i])
Python
0
@@ -2082,32 +2082,1144 @@ ), ref_frame)%0A%0A%0A +@unittest.skipUnless('ZSTD_SLOW_TESTS' in os.environ, 'ZSTD_SLOW_TESTS not set')%0A@make_cffi%0Aclass TestCompressor_compressobj_fuzzing(unittest.TestCase):%0A @hypothesis.given(original=strategies.sampled_from(random_input_data()),%0A level=strategies.integers(min_value=1, max_value=5),%0A chunk_sizes=strategies.streaming(%0A strategies.integers(min_value=1, max_value=4096)))%0A def test_random_input_sizes(self, original, level, chunk_sizes):%0A chunk_sizes = iter(chunk_sizes)%0A%0A refctx = zstd.ZstdCompressor(level=level)%0A ref_frame = refctx.compress(original)%0A%0A cctx = zstd.ZstdCompressor(level=level)%0A cobj = cctx.compressobj(size=len(original))%0A%0A chunks = %5B%5D%0A i = 0%0A while True:%0A chunk_size = next(chunk_sizes)%0A source = original%5Bi:i + chunk_size%5D%0A if not source:%0A break%0A%0A chunks.append(cobj.compress(source))%0A i += chunk_size%0A%0A chunks.append(cobj.flush())%0A%0A self.assertEqual(b''.join(chunks), ref_frame)%0A%0A%0A @unittest.skipUn
fa6cecee83ce90b95b35dd96ebd9dced71189578
Simplify block and debug log true pid
instana/fsm.py
instana/fsm.py
import subprocess import os import psutil import socket import threading as t import fysom as f import instana.log as l import instana.agent_const as a class Discovery(object): pid = 0 name = None args = None fd = -1 inode = "" def __init__(self, **kwds): self.__dict__.update(kwds) def to_dict(self): kvs = dict() kvs['pid'] = self.pid kvs['name'] = self.name kvs['args'] = self.args kvs['fd'] = self.fd kvs['inode'] = self.inode return kvs class Fsm(object): RETRY_PERIOD = 30 agent = None fsm = None timer = None def __init__(self, agent): l.info("Stan is on the scene. Starting Instana instrumentation.") l.debug("initializing fsm") self.agent = agent self.fsm = f.Fysom({ "initial": "lostandalone", "events": [ ("startup", "*", "lostandalone"), ("lookup", "lostandalone", "found"), ("announce", "found", "announced"), ("ready", "announced", "good2go")], "callbacks": { "onlookup": self.lookup_agent_host, "onannounce": self.announce_sensor, "onchangestate": self.printstatechange}}) def printstatechange(self, e): l.debug('========= (%i#%s) FSM event: %s, src: %s, dst: %s ==========' % \ (os.getpid(), t.current_thread().name, e.event, e.src, e.dst)) def reset(self): self.fsm.lookup() def lookup_agent_host(self, e): if self.agent.sensor.options.agent_host != "": host = self.agent.sensor.options.agent_host else: host = a.AGENT_DEFAULT_HOST h = self.check_host(host) if h == a.AGENT_HEADER: self.agent.set_host(host) self.fsm.announce() return True elif os.path.exists("/proc/"): host = self.get_default_gateway() if host: h = self.check_host(host) if h == a.AGENT_HEADER: self.agent.set_host(host) self.fsm.announce() return True else: l.error("Cannot lookup agent host. Scheduling retry.") self.schedule_retry(self.lookup_agent_host, e, "agent_lookup") return False def get_default_gateway(self): l.debug("checking default gateway") try: proc = subprocess.Popen( "/sbin/ip route | awk '/default/' | cut -d ' ' -f 3 | tr -d '\n'", shell=True, stdout=subprocess.PIPE) addr = proc.stdout.read() return addr.decode("UTF-8") except Exception as e: l.error(e) return None def check_host(self, host): l.debug("checking host", host) (_, h) = self.agent.request_header( self.agent.make_host_url(host, "/"), "GET", "Server") return h def announce_sensor(self, e): l.debug("announcing sensor to the agent") p = psutil.Process(os.getpid()) s = None d = Discovery(pid=p.pid, name=p.cmdline()[0], args=p.cmdline()[1:]) # If we're on a system with a procfs if os.path.exists("/proc/"): s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) s.connect((self.agent.host, 42699)) path = "/proc/%d/fd/%d" % (p.pid, s.fileno()) d.fd = s.fileno() d.inode = os.readlink(path) (b, _) = self.agent.request_response( self.agent.make_url(a.AGENT_DISCOVERY_URL), "PUT", d) if not b: l.error("Cannot announce sensor. Scheduling retry.") self.schedule_retry(self.announce_sensor, e, "announce") return False else: self.agent.set_from(b) self.fsm.ready() l.warn("Host agent available. We're in business. (Announced pid: %i)" % p.pid) return True def schedule_retry(self, fun, e, name): l.debug("Scheduling: " + name) self.timer = t.Timer(self.RETRY_PERIOD, fun, [e]) self.timer.daemon = True self.timer.name = name self.timer.start() l.debug('Threadlist: ', str(t.enumerate())) def test_agent(self, e): l.debug("testing communication with the agent") (b, _) = self.agent.head(self.agent.make_url(a.AGENT_DATA_URL)) if not b: self.schedule_retry(self.test_agent, e, "agent test") else: self.fsm.test()
Python
0.000521
@@ -3738,36 +3738,32 @@ , d)%0A if -not b:%0A l @@ -3765,128 +3765,186 @@ -l.error(%22Cannot announce sensor. Scheduling retry.%22)%0A self.schedule_retry(self.announce_sensor, e, %22announce%22 +self.agent.set_from(b)%0A self.fsm.ready()%0A l.warn(%22Host agent available. We're in business. Announced pid: %25i (true pid: %25i)%22 %25 (p.pid, self.agent.from_.pid) )%0A @@ -3960,20 +3960,19 @@ return -Fals +Tru e%0A @@ -3995,149 +3995,128 @@ -self.agent.set_from(b)%0A self.fsm.ready()%0A l.warn(%22Host agent available. We're in business. (Announced pid: %25i)%22 %25 p.pid +l.error(%22Cannot announce sensor. Scheduling retry.%22)%0A self.schedule_retry(self.announce_sensor, e, %22announce%22 )%0A @@ -4121,30 +4121,27 @@ - return -Tru +Fals e%0A%0A d
8c0dc68c41137cd809d4403045834ab4f876294c
Add small test for parsing the Var datashape
tests/test_datashape_creation.py
tests/test_datashape_creation.py
import blaze from blaze import datashape import numpy as np import unittest class TestDatashapeCreation(unittest.TestCase): def test_raise_on_bad_input(self): # Make sure it raises exceptions on a few nonsense inputs self.assertRaises(TypeError, blaze.dshape, None) self.assertRaises(TypeError, blaze.dshape, lambda x: x+1) def test_atom_shapes(self): self.assertEqual(blaze.dshape('bool'), datashape.bool_) self.assertEqual(blaze.dshape('int8'), datashape.int8) self.assertEqual(blaze.dshape('int16'), datashape.int16) self.assertEqual(blaze.dshape('int32'), datashape.int32) self.assertEqual(blaze.dshape('int64'), datashape.int64) self.assertEqual(blaze.dshape('uint8'), datashape.uint8) self.assertEqual(blaze.dshape('uint16'), datashape.uint16) self.assertEqual(blaze.dshape('uint32'), datashape.uint32) self.assertEqual(blaze.dshape('uint64'), datashape.uint64) self.assertEqual(blaze.dshape('float32'), datashape.float32) self.assertEqual(blaze.dshape('float64'), datashape.float64) self.assertEqual(blaze.dshape('complex64'), datashape.complex64) self.assertEqual(blaze.dshape('complex128'), datashape.complex128) def test_atom_shape_errors(self): self.assertRaises(TypeError, blaze.dshape, 'boot') self.assertRaises(TypeError, blaze.dshape, 'int33') self.assertRaises(TypeError, blaze.dshape, '12') def test_type_decl(self): self.assertRaises(TypeError, blaze.dshape, 'type X T = 3, T') self.assertEqual(blaze.dshape('3, int32'), blaze.dshape('type X = 3, int32')) def test_string_atom(self): self.assertEqual(blaze.dshape('string'), blaze.dshape("string('U8')")) self.assertEqual(blaze.dshape("string('ascii')").encoding, 'A') self.assertEqual(blaze.dshape("string('A')").encoding, 'A') self.assertEqual(blaze.dshape("string('utf-8')").encoding, 'U8') self.assertEqual(blaze.dshape("string('U8')").encoding, 'U8') self.assertEqual(blaze.dshape("string('utf-16')").encoding, 'U16') self.assertEqual(blaze.dshape("string('U16')").encoding, 'U16') self.assertEqual(blaze.dshape("string('utf-32')").encoding, 'U32') self.assertEqual(blaze.dshape("string('U32')").encoding, 'U32') def test_struct_of_array(self): self.assertEqual(str(blaze.dshape('5, int32')), '5, int32') self.assertEqual(str(blaze.dshape('{field: 5, int32}')), '{ field : 5, int32 }') self.assertEqual(str(blaze.dshape('{field: M, int32}')), '{ field : M, int32 }') if __name__ == '__main__': unittest.main()
Python
0.000001
@@ -2677,16 +2677,135 @@ 32 %7D')%0A%0A + def test_ragged_array(self):%0A self.assertTrue(isinstance(blaze.dshape('3, Var, int32')%5B1%5D, datashape.Var))%0A%0A if __nam
55bff70c3dabe5251ed23720c9f2491cc8bd1ed1
Add support for django 1.8+
favit/managers.py
favit/managers.py
# -*- coding: utf-8 -*- from django.contrib.contenttypes.models import ContentType from django.db import models from django.db.models import get_model def _get_content_type_and_obj(obj, model=None): if isinstance(model, basestring): model = get_model(*model.split(".")) if isinstance(obj, (int, long)): obj = model.objects.get(pk=obj) return ContentType.objects.get_for_model(type(obj)), obj class FavoriteManager(models.Manager): """ A Manager for Favorite objects """ def for_user(self, user, model=None): """ Returns a Favorite objects queryset for a given user. If a model params is provided, it returns only the favorited objects of that model class Usage: Favorite.objects.for_user(user) Favorite.objects.for_user(user, model=Song) Favorite.objects.for_user(user, model="music.song") """ qs = self.get_query_set().filter(user=user) if model: if isinstance(model, basestring): model = get_model(*model.split(".")) content_type = ContentType.objects.get_for_model(model) qs = qs.filter(target_content_type=content_type) return qs.order_by("-timestamp") def for_model(self, model): """ Returns a Favorite objects queryset for a given model. `model` may be a django model class or an string representing a model in module-notation, ie: "auth.User" Usage: Favorite.objects.for_model(Song) Favorite.objects.for_model("music.Song") """ # if model is an app_label.model string make it a Model class if isinstance(model, basestring): model = get_model(*model.split(".")) content_type = ContentType.objects.get_for_model(model) qs = self.get_query_set().filter( target_content_type=content_type ) return qs.order_by("-timestamp") def for_object(self, obj, model=None): """ Returns a Favorite objects queryset for a given object Usage: Favorite.objects.for_object(1, "music.Song") Favorite.objects.for_object(1, Song) or given a music app with a Song model: song = Song.objects.get(pk=1) Favorite.objects.for_object(song) """ content_type, obj = _get_content_type_and_obj(obj, model) qs = self.get_query_set().filter( target_content_type=content_type, target_object_id=obj.pk ) return qs.order_by("-timestamp") def get_favorite(self, user, obj, model=None): """ Returns a Favorite instance if the `user` has favorited the given object `obj`. Otherwise returns None Usage: Favorite.objects.get_favorite(user, 1, "music.Song") Favorite.objects.get_favorite(user, 1, Song) or given a music app with a Song model: song = Song.objects.get(pk=1) Favorite.objects.get_favorite(user, song) """ content_type, obj = _get_content_type_and_obj(obj, model) try: return self.get_query_set().get( user=user, target_content_type=content_type, target_object_id=obj.id ) except self.model.DoesNotExist: return None def create(self, user, obj, model=None): """ Creates and returns a new Favorite obj for the given user and obj """ content_type, content_object = _get_content_type_and_obj(obj, model) fav = super(FavoriteManager, self).create( user=user, target_content_type=content_type, target_object_id=content_object.pk, target=content_object ) return fav
Python
0.000007
@@ -504,24 +504,151 @@ ects%0A %22%22%22 +%0A from django import VERSION%0A if VERSION %3E (1,8):%0A def get_query_set(self):%0A return self.get_queryset() %0A%0A def fo
e3a9db58f03eb73635a94ed6249e3c2a308f4ad0
Fix some typos found in staging.
fedmsg_genacls.py
fedmsg_genacls.py
# -*- coding: utf-8 -*-
""" A fedmsg consumer that listens to pkgdb messages to update gitosis acls

Authors:    Janez Nemanič <janez.nemanic@gmail.com>
            Ralph Bean <rbean@redhat.com>

"""

import pprint
import subprocess
import os

import fedmsg.consumers
import moksha.hub.reactor


class GenACLsConsumer(fedmsg.consumers.FedmsgConsumer):

    # Really, we want to use this specific topic to listen to.
    topic = 'org.fedoraproject.prod.pkgdb.acl.update'

    # But for testing, we'll just listen to all topics with this:
    #topic = '*'

    config_key = 'genacls.consumer.enabled'

    def __init__(self, hub):
        super(GenACLsConsumer, self).__init__(hub)

        # This is required. It is the number of seconds that we should wait
        # until we ultimately act on a pkgdb message.
        self.delay = self.hub.config['genacls.consumer.delay']

        # We use this to manage our state
        self.queued_messages = []

    def consume(self, msg):
        msg = msg['body']
        self.log.info("Got a message %r" % msg['topic'])

        def delayed_consume():
            if self.queued_messages:
                try:
                    self.action(self.queued_messages)
                finally:
                    # Empty our list at the end of the day.
                    self.queued_messages = []
            else:
                self.log.debug("Woke up, but there were no messages.")

        self.queued_messages.append(msg)

        moksha.hub.reactor.reactor.callLater(self.delay, delayed_consume)

    def action(self, messages):
        self.log.debug("Acting on %r" % pprint.pformat(messages))

        command = '/usr/local/bin/genacls.sh'

        genacls_UID = 417
        genacls_GID = 417

        def change_subprocess_id():
            os.setuid(user_UID)
            os.setgid(user_GID)

        return_code = subprocess.Popen(
            args=command,
            preexec_fn=change_subprocess_id)

        if return_code == 0:
            self.log.info("%r successful" % command)
        else:
            self.log.error("%r exited with %r" % (command, return_code))
Python
0.000004
@@ -1604,17 +1604,17 @@ ing on %25 -r +s %22 %25 ppri @@ -1636,16 +1636,201 @@ ages))%0A%0A + # This script and the UID/GID are found in our puppet repo.%0A # The fedmsg user must be given passwordless sudo as the gen-acls user%0A # for this to work correctly.%0A @@ -1978,20 +1978,23 @@ .setuid( -user +genacls _UID)%0A @@ -2013,20 +2013,23 @@ .setgid( -user +genacls _GID)%0A%0A
a2475c22f15e565f1a64022fa6b7a1ba791b8ced
Test the empty case
tests/testapp/tests/test_page.py
tests/testapp/tests/test_page.py
from __future__ import absolute_import, unicode_literals from django.contrib.auth.models import User from django.test import TestCase # from django.utils import timezone try: from django.urls import reverse except ImportError: # pragma: no cover from django.core.urlresolvers import reverse from content_editor.models import Region, ContentProxy, MPTTContentProxy from testapp.models import Article, RichText, Download, Page, PageText class ContentEditorTest(TestCase): def login(self): u = User( username='test', is_active=True, is_staff=True, is_superuser=True) u.set_password('test') u.save() self.assertTrue(self.client.login(username='test', password='test')) def test_stupid_coverage(self): self.assertRaises(TypeError, Region, bla='bla') def test_stuff(self): # Smoke test some stuff article = Article.objects.create( title='Test', ) richtext = article.testapp_richtext_set.create( text='<p>bla</p>', region='main', ordering=10, ) with self.assertNumQueries(2): # Two content types. content = ContentProxy(article, plugins=[RichText, Download]) self.assertEqual(content.main[0], richtext) self.assertEqual(content.main[0].parent, article) self.assertEqual(len(content.main), 1) self.assertEqual(len(content.sidebar), 0) self.assertRaises(AttributeError, lambda: content.bla) response = self.client.get(article.get_absolute_url()) self.assertContains(response, '<h1>Test</h1>') self.assertContains(response, '<p>bla</p>') def test_admin(self): self.login() response = self.client.get(reverse('admin:testapp_article_add')) self.assertContains(response, 'content-editor-script', 1) self.assertContains(response, 'class="richtext"', 1) self.assertContains( response, '[&quot;testapp_richtext&quot;, &quot;Rich text&quot;]', 1, ) self.assertContains( response, '[&quot;testapp_download&quot;, &quot;Download&quot;]', 1, ) self.assertContains( response, '[[&quot;main&quot;, &quot;main region&quot;],' ' [&quot;sidebar&quot;, &quot;sidebar region&quot;]', 1, ) article = Article.objects.create(title='Test') response = self.client.get(reverse( 'admin:testapp_article_change', args=(article.pk,), )) self.assertContains( response, 'value="Test"', 1, ) def test_empty(self): article = Article.objects.create( title='Test', ) with self.assertNumQueries(2): content = ContentProxy(article, plugins=[RichText, Download]) self.assertEqual(content.main, []) with self.assertNumQueries(0): content = ContentProxy(article, plugins=[]) self.assertEqual(content.main, []) def test_hierarchy(self): page = Page.objects.create(title='root') child = page.children.create(title='child 1') page.refresh_from_db() child.refresh_from_db() self.assertEqual( list(child.get_ancestors()), [page], ) page.testapp_pagetext_set.create( region='main', ordering=10, text='page main text', ) page.testapp_pagetext_set.create( region='sidebar', ordering=20, text='page sidebar text', ) with self.assertNumQueries(2): content = MPTTContentProxy(child, plugins=[PageText]) self.assertEqual( content.main, [], ) self.assertEqual( [c.text for c in content.sidebar], ['page sidebar text'], ) self.assertEqual(content.sidebar[0].parent, page) child.testapp_pagetext_set.create( region='sidebar', ordering=10, text='child sidebar text', ) child.testapp_pagetext_set.create( region='main', ordering=20, text='child main text', ) with self.assertNumQueries(2): content = MPTTContentProxy(child, plugins=[PageText]) self.assertEqual( [c.text for c in content.main], ['child main text'], ) self.assertEqual( [c.text for c in content.sidebar], ['child sidebar text'], ) 
self.assertEqual(content.sidebar[0].parent, child)
Python
1
@@ -3446,32 +3446,329 @@ ge%5D,%0A )%0A%0A + with self.assertNumQueries(2):%0A content = MPTTContentProxy(child, plugins=%5BPageText%5D)%0A self.assertEqual(%0A content.main,%0A %5B%5D,%0A )%0A self.assertEqual(%0A content.sidebar,%0A %5B%5D,%0A )%0A%0A page.tes
d5e418e24990c2b7294f3fd6fd8ef94819ddfe66
Allow regular users to view any feedback issue that is public.
feedback/views.py
feedback/views.py
from django.contrib.auth.decorators import login_required from django.core.urlresolvers import reverse from django.views.generic import ListView, DetailView, CreateView, UpdateView from .forms import IssueForm, IssueUpdateStatusForm, CommentForm from .models import Issue, Discussion class LoginRequiredMixin(object): @classmethod def as_view(cls, **initkwargs): view = super(LoginRequiredMixin, cls).as_view(**initkwargs) return login_required(view) class ProfileNameMixin(object): def get_context_data(self, **kwargs): context = super(ProfileNameMixin, self).get_context_data(**kwargs) context['profile_name'] = self.request.user.username return context class IssueList(LoginRequiredMixin, ProfileNameMixin, ListView): model = Issue def get_queryset(self): if self.request.user.is_superuser: return Issue.objects.all() else: return Issue.objects.filter(user=self.request.user) class IssueCreate(LoginRequiredMixin, ProfileNameMixin, CreateView): model = Issue form_class = IssueForm def form_valid(self, form): form.instance.user = self.request.user form.instance.status = Issue.STATUS_UNREVIEWED return super(IssueCreate, self).form_valid(form) class IssueDetail(LoginRequiredMixin, ProfileNameMixin, DetailView): model = Issue def get_context_data(self, **kwargs): context = super(IssueDetail, self).get_context_data(**kwargs) comment_form = CommentForm() comment_form.helper.form_action = reverse('feedback:comment_add', kwargs={'issue_pk': self.kwargs['pk']}) context['comment_form'] = comment_form if self.request.user.is_superuser: status_form = IssueUpdateStatusForm(instance=self.object) status_form.helper.form_action = reverse('feedback:issue_status_update', kwargs={'pk': self.kwargs['pk']}) context['status_form'] = status_form return context class IssueUpdateStatus(LoginRequiredMixin, ProfileNameMixin, UpdateView): model = Issue form_class = IssueUpdateStatusForm def get_success_url(self): print 'success hurr durr' return reverse('feedback:issue_detail', kwargs={'pk': self.kwargs['pk']}) class CommentCreate(LoginRequiredMixin, ProfileNameMixin, CreateView): model = Discussion form_class = CommentForm def form_valid(self, form): form.instance.user = self.request.user form.instance.feedback = Issue.objects.get(pk=self.kwargs['issue_pk']) return super(CommentCreate, self).form_valid(form) def get_success_url(self): return reverse('feedback:issue_detail', kwargs={'pk': self.kwargs['issue_pk']})
Python
0
@@ -173,16 +173,47 @@ dateView +%0Afrom django.db.models import Q %0A%0Afrom . @@ -987,16 +987,18 @@ .filter( +Q( user=sel @@ -1012,16 +1012,34 @@ st.user) + %7C Q(public=True)) %0A%0A%0Aclass
d6342967598ae7fa822592b42e0f85de2beaf916
use constants
tests/twisted/test-self-alias.py
tests/twisted/test-self-alias.py
# # Test alias setting for the self handle # from sofiatest import exec_test from servicetest import tp_name_prefix import dbus TEXT_TYPE = tp_name_prefix + '.Channel.Type.Text' ALIASING_INTERFACE = tp_name_prefix + '.Connection.Interface.Aliasing' CONTACTS_INTERFACE = tp_name_prefix + '.Connection.Interface.Contacts' def test(q, bus, conn, sip_proxy): conn.Connect() q.expect('dbus-signal', signal='StatusChanged', args=[0, 1]) self_handle = conn.GetSelfHandle() default_alias = conn.Aliasing.GetAliases([self_handle])[self_handle] conn.Aliasing.SetAliases({self_handle: 'foo@bar.baz'}) event = q.expect('dbus-signal', signal='AliasesChanged', args=[[(self_handle, u'foo@bar.baz')]]) handle = conn.RequestHandles(1, ['sip:user@somewhere.com'])[0] assert ALIASING_INTERFACE in \ conn.Properties.Get(CONTACTS_INTERFACE, "ContactAttributeInterfaces") attrs = conn.Contacts.GetContactAttributes([self_handle, handle], [ALIASING_INTERFACE], False) assert ALIASING_INTERFACE + "/alias" in attrs[self_handle] assert attrs[self_handle][ALIASING_INTERFACE + "/alias"] == u'foo@bar.baz' conn.RequestChannel(TEXT_TYPE, 1, handle, True) event = q.expect('dbus-signal', signal='NewChannel') text_iface = dbus.Interface(bus.get_object(conn.bus_name, event.args[0]), TEXT_TYPE) text_iface.Send(0, 'Check the display name in From') event = q.expect('sip-message') self_uri = conn.InspectHandles(1, [self_handle])[0] from_header = event.sip_message.headers['from'][0] assert from_header.startswith('"foo@bar.baz" <' + self_uri + '>'), from_header # Test setting of the default alias conn.Aliasing.SetAliases({self_handle: default_alias}) text_iface.Send(0, 'The display name should be missing in From') event = q.expect('sip-message') from_header = event.sip_message.headers['from'][0] assert from_header.startswith('<' + self_uri + '>'), from_header # Test if escaping and whitespace normalization works conn.Aliasing.SetAliases({self_handle: 'foo " bar \\\r\n baz\t'}) text_iface.Send(0, 'Check display name escaping in From') event = q.expect('sip-message') from_header = event.sip_message.headers['from'][0] assert from_header.startswith(r'"foo \" bar \\ baz " <' + self_uri + '>'), from_header if __name__ == '__main__': exec_test(test)
Python
0.00001
@@ -75,252 +75,43 @@ est%0A -from servicetest import tp_name_prefix%0A%0Aimport dbus%0A%0ATEXT_TYPE = tp_name_prefix + '.Channel.Type.Text'%0AALIASING_INTERFACE = tp_name_prefix + '.Connection.Interface.Aliasing'%0ACONTACTS_INTERFACE = tp_name_prefix + '.Connection.Interface.Contacts' +import constants as cs%0A%0Aimport dbus %0A%0Ade @@ -591,34 +591,38 @@ assert -ALIASING_INTERFACE +cs.CONN_IFACE_ALIASING in %5C%0A @@ -651,26 +651,30 @@ Get( -CONTACTS_INTERFACE +cs.CONN_IFACE_CONTACTS , %22C @@ -777,65 +777,73 @@ ,%0A%09%5B -ALIASING_INTERFACE%5D, False)%0A assert ALIASING_INTERFACE +cs.CONN_IFACE_ALIASING%5D, False)%0A assert cs.CONN_IFACE_ALIASING + %22 @@ -906,26 +906,30 @@ le%5D%5B -ALIASING_INTERFACE +cs.CONN_IFACE_ALIASING + %22 @@ -980,25 +980,36 @@ Channel( -TEXT_TYPE +cs.CHANNEL_TYPE_TEXT , 1, han @@ -1172,36 +1172,28 @@ - TEXT_TYPE +cs.CHANNEL_TYPE_TEXT )%0A
16abb3720d9c41b130ea83a4b678ec99521567eb
Fix Grid unit test
tests/unit/analysis/test_grid.py
tests/unit/analysis/test_grid.py
# """Unit tests for cartoframes.analysis.grid""" import os import pytest import numpy as np from pandas import read_csv from geopandas import GeoDataFrame from shapely.geometry import box, shape from cartoframes.utils import decode_geometry from cartoframes.analysis.grid import QuadGrid from geopandas.testing import assert_geodataframe_equal # DATA FRAME SRC BBOX pol_1 = box(1, 1, 2, 2) pol_2 = box(3, 3, 4, 4) GDF_BOX = GeoDataFrame({'id': [1, 2], 'geom': [pol_1, pol_2]}, columns=['id', 'geom'], geometry='geom') pol_geojson = { 'type': 'Polygon', 'coordinates': [ [ [ -5.899658203125, 38.436379603 ], [ -6.690673828125, 37.67512527892127 ], [ -6.15234375, 37.43997405227057 ], [ -5.8447265625, 37.70120736474139 ], [ -6.13037109375, 37.82280243352756 ], [ -5.877685546874999, 38.02213147353745 ], [ -6.009521484375, 38.12591462924157 ], [ -5.5810546875, 38.1777509666256 ], [ -5.899658203125, 38.436379603 ] ] ] } GDF_IRREGULAR = GeoDataFrame({'id': [1], 'geom': [shape(pol_geojson)]}, columns=['id', 'geom'], geometry='geom') BASE_FOLDER = os.path.dirname(os.path.abspath(__file__)) class TestGrid(object): def _load_test_gdf(self, fname): fname = os.path.join(BASE_FOLDER, fname) df = read_csv(fname, dtype={'id': np.int64, 'geom': object, 'quadkey': object}) gdf_test = GeoDataFrame(df, geometry=decode_geometry(df['geom']), crs='epsg:4326') return gdf_test @pytest.mark.skip() def test_quadgrid_polyfill_box(self, mocker): """cartoframes.analysis.grid.QuadGrid.polyfill""" gdf = QuadGrid().polyfill(GDF_BOX, 12) assert isinstance(gdf, GeoDataFrame) # Check both dataframes are equals gdf_test = self._load_test_gdf('grid_quadkey_bbox.csv') assert_geodataframe_equal(gdf, gdf_test, check_less_precise=True) @pytest.mark.skip() def test_quadgrid_polyfill_pol(self, mocker): """cartoframes.analysis.grid.QuadGrid.polyfill""" gdf = QuadGrid().polyfill(GDF_IRREGULAR, 12) assert isinstance(gdf, GeoDataFrame) # Check both dataframes are equals gdf_test = self._load_test_gdf('grid_quadkey_pol.csv') assert_geodataframe_equal(gdf, gdf_test, check_less_precise=True)
Python
0
@@ -221,22 +221,19 @@ import -decode +set _geometr @@ -1832,21 +1832,16 @@ gdf -_test = GeoDa @@ -1856,23 +1856,36 @@ df, -geometry=decode +crs='epsg:4326')%0A set _geo @@ -1894,36 +1894,33 @@ try( +g df -%5B +, 'geom' -%5D), crs='epsg:4326' +, inplace=True )%0A @@ -1935,21 +1935,16 @@ turn gdf -_test %0A%0A @p
2877e2722ba99aaa5ee41f9f063629efbfc75bf5
store radosgw_agent server/port
teuthology/task/radosgw-agent.py
teuthology/task/radosgw-agent.py
import contextlib import logging import argparse from ..orchestra import run from teuthology import misc as teuthology import teuthology.task_util.rgw as rgw_utils log = logging.getLogger(__name__) def run_radosgw_agent(ctx, config): """ Run a single radosgw-agent. See task() for config format. """ return_list = list() for (client, cconf) in config.items(): # don't process entries that are not clients if not client.startswith('client.'): log.debug('key {data} does not start with \'client.\', moving on'.format( data=client)) continue src_client = cconf['src'] dest_client = cconf['dest'] src_zone = rgw_utils.zone_for_client(ctx, src_client) dest_zone = rgw_utils.zone_for_client(ctx, dest_client) log.info("source is %s", src_zone) log.info("dest is %s", dest_zone) testdir = teuthology.get_testdir(ctx) (remote,) = ctx.cluster.only(client).remotes.keys() # figure out which branch to pull from branch = cconf.get('force-branch', None) if not branch: branch = cconf.get('branch', 'master') sha1 = cconf.get('sha1') remote.run( args=[ 'cd', testdir, run.Raw('&&'), 'git', 'clone', '-b', branch, 'https://github.com/ceph/radosgw-agent.git', 'radosgw-agent.{client}'.format(client=client), ] ) if sha1 is not None: remote.run( args=[ 'cd', testdir, run.Raw('&&'), run.Raw('&&'), 'git', 'reset', '--hard', sha1, ] ) remote.run( args=[ 'cd', testdir, run.Raw('&&'), 'cd', 'radosgw-agent.{client}'.format(client=client), run.Raw('&&'), './bootstrap', ] ) src_host, src_port = rgw_utils.get_zone_host_and_port(ctx, src_client, src_zone) dest_host, dest_port = rgw_utils.get_zone_host_and_port(ctx, dest_client, dest_zone) src_access, src_secret = rgw_utils.get_zone_system_keys(ctx, src_client, src_zone) dest_access, dest_secret = rgw_utils.get_zone_system_keys(ctx, dest_client, dest_zone) sync_scope = cconf.get('sync-scope', None) port = cconf.get('port', 8000) daemon_name = '{host}.{port}.syncdaemon'.format(host=remote.name, port=port) in_args=[ '{tdir}/daemon-helper'.format(tdir=testdir), 'kill', '{tdir}/radosgw-agent.{client}/radosgw-agent'.format(tdir=testdir, client=client), '-v', '--src-access-key', src_access, '--src-secret-key', src_secret, '--src-host', src_host, '--src-port', str(src_port), '--src-zone', src_zone, '--dest-access-key', dest_access, '--dest-secret-key', dest_secret, '--dest-host', dest_host, '--dest-port', str(dest_port), '--dest-zone', dest_zone, '--daemon-id', daemon_name, '--log-file', '{tdir}/archive/rgw_sync_agent.{client}.log'.format( tdir=testdir, client=client), ] # the test server and full/incremental flags are mutually exclusive if sync_scope is None: in_args.append('--test-server-host') in_args.append('0.0.0.0') in_args.append('--test-server-port') in_args.append(str(port)) log.debug('Starting a sync test server on {client}'.format(client=client)) else: in_args.append('--sync-scope') in_args.append(sync_scope) log.debug('Starting a {scope} sync on {client}'.format(scope=sync_scope,client=client)) return_list.append((client, remote.run( args=in_args, wait=False, stdin=run.PIPE, logger=log.getChild(daemon_name), ))) return return_list @contextlib.contextmanager def task(ctx, config): """ Run radosgw-agents in test mode. Configuration is clients to run the agents on, with settings for source client, destination client, and port to listen on. Binds to 0.0.0.0. Port defaults to 8000. This must be run on clients that have the correct zone root pools and rgw zone set in ceph.conf, or the task cannot read the region information from the cluster. 
By default, this task will start an HTTP server that will trigger full or incremental syncs based on requests made to it. Alternatively, a single full sync can be triggered by specifying 'sync-scope: full' or a loop of incremental syncs can be triggered by specifying 'sync-scope: incremental' (the loop will sleep '--incremental-sync-delay' seconds between each sync, default is 20 seconds). An example:: tasks: - ceph: conf: client.0: rgw zone = foo rgw zone root pool = .root.pool client.1: rgw zone = bar rgw zone root pool = .root.pool2 - rgw: # region configuration omitted for brevity - radosgw-agent: client.0: branch: wip-next-feature-branch src: client.0 dest: client.1 sync-scope: full # port: 8000 (default) client.1: src: client.1 dest: client.0 port: 8001 """ assert isinstance(config, dict), 'rgw_sync_agent requires a dictionary config' log.debug("config is %s", config) overrides = ctx.config.get('overrides', {}) # merge each client section, but only if it exists in config since there isn't # a sensible default action for this task for client in config.iterkeys(): if config[client]: log.debug('config[{client}]: {data}'.format(client=client, data=config[client])) teuthology.deep_merge(config[client], overrides.get('radosgw-agent', {})) ctx.radosgw_agent = argparse.Namespace() ctx.radosgw_agent.config = config procs = run_radosgw_agent(ctx, config) ctx.radosgw_agent.procs = procs try: yield finally: testdir = teuthology.get_testdir(ctx) try: for client, proc in procs: log.info("shutting down sync agent on %s", client) proc.stdin.close() proc.exitstatus.get() finally: for client, proc in procs: ctx.cluster.only(client).run( args=[ 'rm', '-rf', '{tdir}/radosgw-agent.{client}'.format(tdir=testdir, client=client) ] )
Python
0.000003
@@ -4000,32 +4000,176 @@ client=client))%0A + # Stash the radosgw-agent server / port # for use by subsequent tasks %0A ctx.radosgw_agent.endpoint = (client, str(port))%0A else:%0A
42599f84c68821695b2d378f7261e4c74095272f
Should be logger.debug
thumbor/storages/file_storage.py
thumbor/storages/file_storage.py
#!/usr/bin/python
# -*- coding: utf-8 -*-

# thumbor imaging service
# https://github.com/thumbor/thumbor/wiki

# Licensed under the MIT license:
# http://www.opensource.org/licenses/mit-license
# Copyright (c) 2011 globo.com timehome@corp.globo.com

import os
from shutil import move
from json import dumps, loads
from datetime import datetime
from os.path import exists, dirname, getmtime, splitext
import hashlib
from uuid import uuid4

import thumbor.storages as storages
from thumbor.utils import logger
from tornado.concurrent import return_future


class Storage(storages.BaseStorage):

    def put(self, path, bytes):
        file_abspath = self.path_on_filesystem(path)
        temp_abspath = "%s.%s" % (file_abspath, str(uuid4()).replace('-', ''))
        file_dir_abspath = dirname(file_abspath)
        logger.debug('creating tempfile for %s in %s...' % (path, temp_abspath))

        self.ensure_dir(file_dir_abspath)

        with open(temp_abspath, 'w') as _file:
            _file.write(bytes)

        logger.debug('moving tempfile %s to %s...' % (temp_abspath, file_abspath))
        move(temp_abspath, file_abspath)

        return path

    def put_crypto(self, path):
        if not self.context.config.STORES_CRYPTO_KEY_FOR_EACH_IMAGE:
            return

        file_abspath = self.path_on_filesystem(path)
        file_dir_abspath = dirname(file_abspath)

        self.ensure_dir(file_dir_abspath)

        if not self.context.server.security_key:
            raise RuntimeError("STORES_CRYPTO_KEY_FOR_EACH_IMAGE can't be True if no SECURITY_KEY specified")

        crypto_path = '%s.txt' % splitext(file_abspath)[0]
        temp_abspath = "%s.%s" % (crypto_path, str(uuid4()).replace('-', ''))
        with open(temp_abspath, 'w') as _file:
            _file.write(self.context.server.security_key)

        move(temp_abspath, crypto_path)
        logger.error('Stored crypto at %s (security key: %s)' % (crypto_path, self.context.server.security_key))

        return file_abspath

    def put_detector_data(self, path, data):
        file_abspath = self.path_on_filesystem(path)

        path = '%s.detectors.txt' % splitext(file_abspath)[0]
        temp_abspath = "%s.%s" % (path, str(uuid4()).replace('-', ''))

        file_dir_abspath = dirname(file_abspath)
        self.ensure_dir(file_dir_abspath)

        with open(temp_abspath, 'w') as _file:
            _file.write(dumps(data))

        move(temp_abspath, path)

        return file_abspath

    @return_future
    def get(self, path, callback):
        abs_path = self.path_on_filesystem(path)

        def file_exists(resource_available):
            if not resource_available:
                callback(None)
            else:
                callback(open(self.path_on_filesystem(path), 'r').read())

        self.exists(None, file_exists, path_on_filesystem=abs_path)

    @return_future
    def get_crypto(self, path, callback):
        file_abspath = self.path_on_filesystem(path)
        crypto_file = "%s.txt" % (splitext(file_abspath)[0])

        if not exists(crypto_file):
            callback(None)
        else:
            with open(crypto_file, 'r') as crypto_f:
                callback(crypto_f.read())

    @return_future
    def get_detector_data(self, path, callback):
        file_abspath = self.path_on_filesystem(path)
        path = '%s.detectors.txt' % splitext(file_abspath)[0]

        def file_exists(resource_available):
            if not resource_available:
                callback(None)
            else:
                callback(loads(open(path, 'r').read()))

        self.exists(None, file_exists, path_on_filesystem=path)

    def path_on_filesystem(self, path):
        digest = hashlib.sha1(path.encode('utf-8')).hexdigest()
        return "%s/%s/%s" % (
            self.context.config.FILE_STORAGE_ROOT_PATH.rstrip('/'),
            digest[:2],
            digest[2:]
        )

    @return_future
    def exists(self, path, callback, path_on_filesystem=None):
        if path_on_filesystem is None:
            path_on_filesystem = self.path_on_filesystem(path)
        callback(os.path.exists(path_on_filesystem) and not self.__is_expired(path_on_filesystem))

    def remove(self, path):
        n_path = self.path_on_filesystem(path)
        return os.remove(n_path)

    def __is_expired(self, path):
        if self.context.config.STORAGE_EXPIRATION_SECONDS is None:
            return False
        timediff = datetime.now() - datetime.fromtimestamp(getmtime(path))
        return timediff.seconds > self.context.config.STORAGE_EXPIRATION_SECONDS
Python
0.999486
@@ -1880,13 +1880,13 @@ ger. -error +debug ('St
ae896f3c8eaa7fa9863a862f0679065348a7b971
Remove obsolete argument from workflow cli
src/tmlib/tmaps/args.py
src/tmlib/tmaps/args.py
from ..args import Args class TmapsSubmitArgs(Args): def __init__(self, **kwargs): ''' Initialize an instance of class TmapsSubmitArgs. Parameters ---------- **kwargs: dict arguments as key-value pairs ''' self.stage = self._stage_params['default'] self.step = self._step_params['default'] self.backup = self._backup_params['default'] super(TmapsSubmitArgs, self).__init__(**kwargs) @property def _required_args(self): return set() @property def _persistent_attrs(self): return {'stage', 'step', 'backup'} @property def stage(self): ''' Returns ------- str name of the stage from where workflow should be started (default: ``None``) ''' return self._stage @stage.setter def stage(self, value): if not(isinstance(value, self._stage_params['type']) or value is None): raise TypeError('Attribute "stage" must have type %s' % self._stage_params['type']) self._stage = value @property def _stage_params(self): return { 'type': str, 'default': None, 'help': ''' name of the stage from where workflow should be started ''' } @property def step(self): ''' Returns ------- str name of the step from where workflow should be started (default: ``None``) ''' return self._step @step.setter def step(self, value): if not(isinstance(value, self._step_params['type']) or value is None): raise TypeError('Attribute "step" must have type %s' % self._step_params['type']) self._step = value @property def _step_params(self): return { 'type': str, 'default': None, 'help': ''' name of the step from where workflow should be started ''' } @property def backup(self): ''' Returns ------- bool indicator that an existing session should be overwritten (default: ``False``) ''' return self._backup @backup.setter def backup(self, value): if not isinstance(value, self._backup_params['type']): raise TypeError('Attribute "backup" must have type %s.' % self._backup_params['type']) self._backup = value @property def _backup_params(self): return { 'type': bool, 'default': False, 'help': ''' backup an existing session ''' }
Python
0.000003
@@ -2120,719 +2120,4 @@ %7D%0A -%0A @property%0A def backup(self):%0A '''%0A Returns%0A -------%0A bool%0A indicator that an existing session should be overwritten%0A (default: %60%60False%60%60)%0A '''%0A return self._backup%0A%0A @backup.setter%0A def backup(self, value):%0A if not isinstance(value, self._backup_params%5B'type'%5D):%0A raise TypeError('Attribute %22backup%22 must have type %25s.'%0A %25 self._backup_params%5B'type'%5D)%0A self._backup = value%0A%0A @property%0A def _backup_params(self):%0A return %7B%0A 'type': bool,%0A 'default': False,%0A 'help': '''%0A backup an existing session%0A '''%0A %7D%0A
82921fb53db2b6e7fdd731f23addd413a6f87673
Add function to sign SSH key
misc/sshca.py
misc/sshca.py
#!/usr/bin/python
import confluent.collective.manager as collective
import eventlet.green.subprocess as subprocess
import glob
import os

def normalize_uid():
    curruid = os.getuid()
    neededuid = os.stat('/etc/confluent').st_uid
    if curruid != neededuid:
        os.setuid(neededuid)
    if os.getuid() != neededuid:
        raise Exception('Need to run as root or owner of /etc/confluent')

def initialize_ca():
    normalize_uid()
    try:
        os.makedirs('/etc/confluent/ssh', mode=0o700)
    except OSError as e:
        if e.errno != 17:
            raise
    caname = '{0} SSH CA'.format(collective.get_myname())
    subprocess.check_call(
        ['ssh-keygen', '-C', caname, '-t', 'ed25519', '-f',
         '/etc/confluent/ssh/ca', '-N', ''])
    try:
        os.makedirs('/var/lib/confluent/ssh', mode=0o755)
    except OSError as e:
        if e.errno != 17:
            raise
    currknownhosts = []
    try:
        with open('/var/lib/confluent/ssh/ssh_known_hosts', 'r') as skh:
            for ent in skh:
                descr = ent.split(' ', 4)[-1].strip()
                if descr != caname:
                    currknownhosts.append(ent)
    except OSError as e:
        if e.errno != 2:
            raise
    with open('/etc/confluent/ssh/ca.pub', 'r') as capub:
        newent = '@cert-authority * ' + capub.read()
        currknownhosts.append(newent)
    with open('/var/lib/confluent/ssh/ssh_known_hosts', 'w') as skh:
        for ckh in currknownhosts:
            skh.write(ckh)

def initialize_root_key():
    authorized = []
    for currkey in glob.glob('/root/.ssh/*.pub'):
        authorized.append(open(currkey).read())
    if not authorized:
        subprocess.check_call(
            ['ssh-keygen', '-t', 'ed25519', '-f', '/root/.ssh/id_ed25519',
             '-N', ''])
        for currkey in glob.glob('/root/.ssh/*.pub'):
            authorized.append(open(currkey).read())
    try:
        os.makedirs('/var/lib/confluent/ssh', mode=0o755)
        neededuid = os.stat('/etc/confluent').st_uid
        os.chown('/var/lib/confluent/ssh', neededuid, -1)
    except OSError as e:
        if e.errno != 17:
            raise
    for auth in authorized:
        if 'PRIVATE' in auth:
            continue
        currcomment = auth.split(' ', 2)[-1].strip()
        curralgo = auth.split(' ', 1)[0]
        authed = []
        try:
            with open('/var/lib/confluent/ssh/authorized_keys', 'r') as ak:
                for keyline in ak:
                    comment = keyline.split(' ', 2)[-1].strip()
                    algo = keyline.split(' ', 1)[0]
                    if currcomment != comment or algo != curralgo:
                        authed.append(keyline)
        except OSError as e:
            if e.errno != 2:
                raise
        authed.append(auth)
        with open('/var/lib/confluent/ssh/authorized_keys', 'w') as ak:
            for auth in authed:
                ak.write(auth)

def ca_exists():
    return os.path.exists('/etc/confluent/ssh/ca')

if __name__ == '__main__':
    initialize_root_key()
    if not ca_exists():
        initialize_ca()
Python
0
@@ -130,16 +130,46 @@ mport os +%0Aimport shutil%0Aimport tempfile %0A%0Adef no @@ -1529,16 +1529,548 @@ e(ckh)%0A%0A +def sign_host_key(pubkey, nodename):%0A tmpdir = tempfile.mkdtemp()%0A try:%0A pkeyname = os.path.join(tmpdir, 'hostkey.pub')%0A with open(pkeyname, 'w') as pubfile:%0A pubfile.write(pubkey)%0A subprocess.check_call(%0A %5B'ssh-keygen', '-s', '/etc/confluent/ssh/ca', '-I', nodename,%0A '-n', nodename, '-h', pkeyname%5D)%0A certname = pkeyname.replace('.pub', '-cert.pub')%0A with open(certname) as cert:%0A return cert.read()%0A finally:%0A shutil.rmtree(tmpdir)%0A%0A def init @@ -3629,24 +3629,24 @@ a_exists():%0A - init @@ -3640,24 +3640,130 @@ %0A initialize_ca() +%0A print(repr(sign_host_key(open('/etc/ssh/ssh_host_ed25519_key.pub').read(), collective.get_myname())))
0a57bcc2faca88d0527bb1f14dae2b0b9b5168f2
bump filer version to 0.9pbs.54
filer/__init__.py
filer/__init__.py
#-*- coding: utf-8 -*-
# version string following pep-0396 and pep-0386
__version__ = '0.9pbs.53' # pragma: nocover
Python
0
@@ -88,17 +88,17 @@ 0.9pbs.5 -3 +4 ' # pra
006cbb88f2a06cd1411f88126ccf4a43121aa858
Update app startup process with new servicemanager and websocket communication.
app/main.py
app/main.py
""" The main module for HomePiServer. Initializes SocketIO, ServiceManager, NavigationChannel, View Manager. """ import signal from threading import Thread from gevent import monkey from flask import Flask from flask_socketio import SocketIO from .controllers import CONTROLLERS from .core.socketchannel import NavigationChannel from .core.logger import configure_logging from .services import ServiceManager, SERVICES from .views import ViewManager monkey.patch_all() class HomePiServer(object): """ Encapsulates the entire server. """ def __init__(self, config): params = { "template_folder": "../templates", "static_folder": "../static" } self.flask_app = Flask(__name__, **params) self.flask_app.config.from_object(config) self.register_blueprints(self.flask_app, CONTROLLERS) self.app = SocketIO(self.flask_app) self.nav_channel = NavigationChannel("/navigation", self.app) self.app.on_namespace(self.nav_channel) self.view_manager = ViewManager(self.nav_channel) self.nav_channel.display = self.view_manager self.service_manager = ServiceManager(SERVICES, self.view_manager) configure_logging(self.flask_app) self.start_services() def start_services(self): """Starts self.service_manager.start() on a new thread.""" self.service_thread = Thread(target=self.service_manager.start).start() @staticmethod def register_blueprints(app, params): """ Registers all the blueprints in controllers list. Args: app: Flask app to register the blueprint with. controllers: List like: [(prefix, blueprint), ...] """ for prefix, controller in params: app.register_blueprint(controller, url_prefix=prefix) def shutdown(self): pass def setup_signals(app): """ Listen for SIGTERM and SIGINIT and calls app.shutdown()""" def make_new_handler(prev_handler_func): def new_handler(var1, var2): app.shutdown() if prev_handler_func: prev_handler_func(var1, var2) return new_handler for sig in (signal.SIGTERM, signal.SIGINT): prev_handler = signal.getsignal(sig) signal.signal(sig, make_new_handler(prev_handler)) def create_app(config=None): """ Returns a new instance of HomePiServer.""" if config is None: import app.config config = app.config app = HomePiServer(config) setup_signals(app) return app.flask_app, app.app
Python
0
@@ -291,103 +291,111 @@ ore. -socketchannel import NavigationChannel%0Afrom .core.logger import configure_logging +logger import configure_logging%0Afrom .core.websocket_manager import WebSocketManager %0Afrom +.core .service s im @@ -390,17 +390,23 @@ .service -s +manager import @@ -423,49 +423,39 @@ ager -, SERVICES%0Afrom .views import ViewManager +%0Afrom .services import SERVICES %0A%0Amo @@ -927,131 +927,14 @@ elf. -nav_channel = NavigationChannel(%22/navigation%22, self.app)%0A self.app.on_namespace(self.nav_channel)%0A%0A self.view +socket _man @@ -940,20 +940,25 @@ nager = -View +WebSocket Manager( @@ -966,75 +966,13 @@ elf. -nav_channel +app )%0A - self.nav_channel.display = self.view_manager%0A%0A @@ -1032,12 +1032,14 @@ elf. -view +socket _man @@ -2428,8 +2428,9 @@ app.app%0A +%0A
7202f65e2a59f913b70a09e5b8352f91a68e98c2
add pandas
Python/create_climatology/createclim_ts_template.py
Python/create_climatology/createclim_ts_template.py
""" Create SubX daily climatology. The file is filled in by generate_ts_clim.ksh. """ import os import xarray as xr import numpy as np # Inputs outPath = 'outdir' ft = 'ftype' mo = 'mod' ins = 'inst' va = 'var' pl = plev yv = lat.0 xv = lon.0 subsampletime = subsampleS starttime = 'startS' endtime = 'endS' ysave = str(int(yv)) xsave = str(int(xv)) url = 'http://iridl.ldeo.columbia.edu/SOURCES/.Models/.SubX/' ddir = outPath+ft+'/'+mo+'/'+va+'/'+str(pl)+'/daily/ts/' outclimDir = outPath+ft+'/'+mo+'/'+va+'/'+str(pl)+'/daily/clim/' if not os.path.isdir(outclimDir): os.makedirs(outclimDir) climfname = 'day_clim.y'+ysave+'.x'+xsave+'.nc' sclimfname = 'smooth_day_clim.y'+ysave+'.x'+xsave+'.nc' # Find out how many ensembles associated with the model: _rd = xr.open_dataarray(url+ins+'/.'+mo+'/.'+ft+'/.'+va+'/dods') nens = len(_rd.M.values) # Use solution given in https://bit.ly/2xyhjCy _l = [] for e in range(1, nens+1): ens = 'e%d' % e _l.append(xr.open_mfdataset(ddir+'*.'+ens+'.y'+ysave+'.x'+xsave+'.nc', autoclose=True)) ds = xr.concat(_l, dim='M') # Drop 1 dimensional coordinates ds = ds.squeeze() # Obtain data varialbe da = ds[va] # Sub-sample time if 1 == subsampletime: da = da.sel(S=slice(starttime, endtime)) else: starttime = pd.Timestamp(da.S.values[0]).strftime('%Y-%m-%d') endtime = pd.Timestamp(da.S.values[-1]).strftime('%Y-%m-%d') # Update save file same climfname = starttime+'.'+endtime+'.'+climfname sclimfname = starttime+'.'+endtime+'.'+sclimfname # Ensemble mean da_ensmean = da.mean(dim='M') # Average daily data da_day_clim = da_ensmean.groupby('S.dayofyear').mean('S') # Save file da_day_clim.to_netcdf(outclimDir+climfname) # Pad the daily climatolgy with nans x = np.empty((366, len(da_day_clim.L))) x.fill(np.nan) _da = xr.DataArray(x, coords=[np.linspace(1, 366, num=366, dtype=np.int64), da_day_clim.L], dims = da_day_clim.dims) da_day_clim_wnan = da_day_clim.combine_first(_da) # Period rolling twice to make it triangular smoothing # See https://bit.ly/2H3o0Mf da_day_clim_smooth = da_day_clim_wnan.copy() for i in range(2): # Extand the DataArray to allow rolling to do periodic da_day_clim_smooth = xr.concat([da_day_clim_smooth[-15:], da_day_clim_smooth, da_day_clim_smooth[:15]], 'dayofyear') # Rolling mean da_day_clim_smooth = da_day_clim_smooth.rolling(dayofyear=31, center=True, min_periods=1).mean() # Drop the periodic boundaries da_day_clim_smooth = da_day_clim_smooth.isel(dayofyear=slice(15, -15)) # Extract the original days da_day_clim_smooth = da_day_clim_smooth.sel(dayofyear=da_day_clim.dayofyear) # Save file da_day_clim_smooth.to_netcdf(outclimDir+sclimfname)
Python
0.999689
@@ -129,16 +129,36 @@ y as np%0A +import pandas as pd%0A %0A%0A# Inpu
19dbb6c960d299acf813e96a102a4e14ef7b3ef8
make N/A case references just blank [skip ci]
custom/enikshay/management/commands/base_data_dump.py
custom/enikshay/management/commands/base_data_dump.py
# -*- coding: utf-8 -*- from __future__ import absolute_import import os import csv import tempfile from datetime import datetime from django.core.management.base import BaseCommand from django.urls import reverse from couchexport.models import Format from corehq.blobs import get_blob_db from corehq.form_processor.interfaces.dbaccessors import CaseAccessors from corehq.util.files import safe_filename_header from dimagi.utils.django.email import send_HTML_email from soil.util import expose_blob_download DOMAIN = "enikshay" LIMITED_TEST_DUMP_SIZE = 500 class BaseDataDump(BaseCommand): TASK_NAME = "" INPUT_FILE_NAME = "" def __init__(self, *args, **kwargs): super(BaseDataDump, self).__init__(*args, **kwargs) self.log_progress = None self.result_file_name = None self.case_type = None self.report = {} self.notes = {} self.column_statuses = {} self.result_file_headers = [] self.recipient = None self.full = False def add_arguments(self, parser): parser.add_argument('--recipient', type=str) parser.add_argument('--full', action='store_true', dest='full', default=False) def handle(self, recipient, *args, **options): self.recipient = recipient self.full = options.get('full') self.setup() temp_file_path = self.generate_dump() download_id = self.save_dump_to_blob(temp_file_path) if self.recipient: self.email_result(download_id) def setup_result_file_name(self): result_file_name = "{dump_title}_{timestamp}.csv".format( dump_title=self.TASK_NAME, timestamp=datetime.now().strftime("%Y-%m-%d--%H-%M-%S"), ) return result_file_name def setup(self): input_file_path = '%s/%s' % (os.path.dirname(os.path.realpath(__file__)), self.INPUT_FILE_NAME) with open(input_file_path, 'rU') as input_file: reader = csv.DictReader(input_file) for row in reader: self.report[row['Column Name']] = { row['Case Reference']: row['Calculation'] } self.notes[row['Column Name']] = row.get('Notes') self.column_statuses[row['Column Name']] = row.get('Column Status') self.result_file_headers.append(row['Column Name']) self.result_file_name = self.setup_result_file_name() def include_case_in_dump(self, case): return True def generate_dump(self): _, temp_path = tempfile.mkstemp() with open(temp_path, 'w') as csvfile: writer = csv.DictWriter(csvfile, fieldnames=self.result_file_headers) writer.writeheader() writer.writerow(self.notes) writer.writerow(self.column_statuses) # iterate cases for case in self.get_cases(self.case_type): # store any references like last_episode or any data point # that might be needed repeatedly for the same case and is expensive call self.context = {} case_row = {} if not self.include_case_in_dump(case): continue # iterate columns to be generated # details is a dict with key in [ # "N/A" -> not to be populated so ignore it # self -> value would be a case property or some meta on the case itself # custom -> value would be some custom logic to be manually coded # specific case reference/association -> value would be case property on this associated case] for column_name, details in self.report.items(): for case_reference, calculation in details.items(): if case_reference == "N/A": case_row[column_name] = "N/A" elif case_reference == 'self': if calculation == 'caseid': case_row[column_name] = case.case_id else: column_value = case.get_case_property(calculation) if column_value and not isinstance(column_value, bool): column_value = column_value.encode("utf-8") case_row[column_name] = column_value elif case_reference == 'custom': try: case_row[column_name] = self.get_custom_value(column_name, case) except Exception as 
e: case_row[column_name] = str(e) else: try: column_value = self.get_case_reference_value( case_reference, case, calculation) if column_value: column_value = column_value.encode("utf-8") case_row[column_name] = column_value except Exception as e: case_row[column_name] = str(e) writer.writerow(case_row) return temp_path def save_dump_to_blob(self, temp_path): with open(temp_path, 'rb') as file_: blob_db = get_blob_db() blob_db.put(file_, self.result_file_name, timeout=60 * 48) # 48 hours file_format = Format.from_format(Format.CSV) blob_dl_object = expose_blob_download( self.result_file_name, mimetype=file_format.mimetype, content_disposition=safe_filename_header(self.result_file_name, file_format.extension), ) return blob_dl_object.download_id def email_result(self, download_id): url = reverse('ajax_job_poll', kwargs={'download_id': download_id}) send_HTML_email('%s Download for %s Finished' % (DOMAIN, self.case_type), self.recipient, 'Simple email, just to let you know that there is a ' 'download waiting for you at %s' % url) def get_cases(self, case_type): case_accessor = CaseAccessors(DOMAIN) case_ids_query = self.get_case_ids_query(case_type) if not self.full: case_ids_query = case_ids_query.size(LIMITED_TEST_DUMP_SIZE) case_ids = case_ids_query.get_ids() return case_accessor.iter_cases(case_ids) def get_case_ids_query(self, case_type): raise NotImplementedError def get_custom_value(self, column_name, case): raise NotImplementedError def get_case_reference_value(self, case_reference, case, calculation): raise NotImplementedError
Python
0.000001
@@ -3950,19 +3950,16 @@ ame%5D = %22 -N/A %22%0A
cbcc75a495851c1c8bacdeaf0ade5952fc19c3f3
enhance reference title
doc/source/conf.py
doc/source/conf.py
# -*- coding: utf-8 -*- # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. import os import sys BASE_DIR = os.path.dirname(os.path.abspath(__file__)) ROOT = os.path.abspath(os.path.join(BASE_DIR, "..", "..")) sys.path.insert(0, ROOT) sys.path.insert(0, BASE_DIR) def gen_ref(ver, title, names): refdir = os.path.join(BASE_DIR, "ref") pkg = "gnocchiclient" if ver: pkg = "%s.%s" % (pkg, ver) refdir = os.path.join(refdir, ver) if not os.path.exists(refdir): os.makedirs(refdir) idxpath = os.path.join(refdir, "index.rst") with open(idxpath, "w") as idx: idx.write(("%(title)s\n" "%(signs)s\n" "\n" ".. toctree::\n" " :maxdepth: 1\n" "\n") % {"title": title, "signs": "=" * len(title)}) for name in names: idx.write(" %s\n" % name) rstpath = os.path.join(refdir, "%s.rst" % name) with open(rstpath, "w") as rst: rst.write(("%(title)s\n" "%(signs)s\n" "\n" ".. automodule:: %(pkg)s.%(name)s\n" " :members:\n" " :undoc-members:\n" " :show-inheritance:\n" " :noindex:\n") % {"title": name.capitalize(), "signs": "=" * len(name), "pkg": pkg, "name": name}) gen_ref("v1", "Version 1 API", ["client", "resource", "archive_policy", "archive_policy_rule"]) # -- General configuration ---------------------------------------------------- # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones. extensions = [ 'sphinx.ext.autodoc', #'sphinx.ext.intersphinx', 'oslosphinx' ] # autodoc generation is a bit aggressive and a nuisance when doing heavy # text edit cycles. # execute "export SPHINX_DEBUG=1" in your terminal to disable # The suffix of source filenames. source_suffix = '.rst' # The master toctree document. master_doc = 'index' # General information about the project. project = u'python-gnocchiclient' copyright = u'2013, OpenStack Foundation' # If true, '()' will be appended to :func: etc. cross-reference text. add_function_parentheses = True # If true, the current module name will be prepended to all description # unit titles (such as .. function::). add_module_names = True # The name of the Pygments (syntax highlighting) style to use. pygments_style = 'sphinx' # -- Options for HTML output -------------------------------------------------- # The theme to use for HTML and HTML Help pages. Major themes that come with # Sphinx are currently 'default' and 'sphinxdoc'. # html_theme_path = ["."] # html_theme = '_theme' # html_static_path = ['static'] # Output file base name for HTML help builder. htmlhelp_basename = '%sdoc' % project # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, author, documentclass # [howto/manual]). latex_documents = [ ('index', '%s.tex' % project, u'%s Documentation' % project, u'OpenStack Foundation', 'manual'), ] # Example configuration for intersphinx: refer to the Python standard library. #intersphinx_mapping = {'http://docs.python.org/': None}
Python
0
@@ -1909,20 +1909,27 @@ title%22: -name +%22 %22.join(%5Bn .capital @@ -1933,16 +1933,91 @@ talize() +%0A for n in name.split(%22_%22)%5D) ,%0A
8d8dec3dba4db223fc0254d11d3a81d9c079a9d1
Change request var name
dockci/handlers.py
dockci/handlers.py
""" Handlers for Flask, and Flask plugins """ import json import logging import jwt import rollbar from flask import (abort, flash, got_request_exception, redirect, request, request_finished, Response, ) from flask_login import login_url from flask_security.utils import verify_and_update_password from redis.exceptions import RedisError from .api.base import BaseRequestParser from .api.util import clean_attrs from .models.auth import User from .server import APP, CONFIG, DB, MAIL, redis_pool from .util import (check_auth_fail, is_api_request, ) SECURITY_STATE = APP.extensions['security'] LOGIN_MANAGER = SECURITY_STATE.login_manager LOGIN_FORM = BaseRequestParser() @LOGIN_MANAGER.unauthorized_handler def unauthorized_handler(): """ Handler for unauthorized user requests. If API request, handle with a basic auth dialog (for users) and a JSON response (for APIs). Otherwise, treat the login like ``flask-login`` treats them. In most cases (all cases for DockCI; extra code left for completeness), this redirects to the login form """ message = None if LOGIN_MANAGER.login_message: message = LOGIN_MANAGER.login_message if LOGIN_MANAGER.localize_callback is not None: message = LOGIN_MANAGER.localize_callback(message) if is_api_request(request): args = clean_attrs(LOGIN_FORM.parse_args()) if 'username' in args or 'password' in args or 'api_key' in args: message = "Invalid credentials" return Response( json.dumps({'message': message or "Unauthorized"}), 401, { 'Content-Type': 'application/json', 'WWW-Authenticate': 'Basic realm="DockCI API"', }, ) else: if not LOGIN_MANAGER.login_view: abort(401) if message: flash(message, category=LOGIN_MANAGER.login_message_category) return redirect(login_url(LOGIN_MANAGER.login_view, request.url)) @LOGIN_MANAGER.request_loader def request_loader(req): # has request as arg """ Request loader that first tries the ``LOGIN_FORM`` request parser (see ``try_reqparser``), then basic auth (see ``try_basic_auth``) """ idents_set = set() try: with redis_pool() as redis_pool_: req_windows, unthrottled = check_auth_fail( (req.remote_addr,), redis_pool_, ) if not unthrottled: return None user = try_reqparser(idents_set) or try_basic_auth(idents_set) ident_windows, unthrottled = check_auth_fail( idents_set, redis_pool_, ) if not unthrottled: return None if user is not None: return user # Only update where a login attempt was made if len(idents_set) > 0: # Unique value in all windows value = str(hash(req)) for window in req_windows + ident_windows: window.add(value) except RedisError: logging.exception("Authentication throttling disabled") return try_reqparser(idents_set) or try_basic_auth(idents_set) @SECURITY_STATE.send_mail_task def security_mail_task(message): """ Handle mail failures in Flask-Security by flashing a message """ try: MAIL.send(message) except Exception: # pylint:disable=broad-except flash("Couldn't send email message", 'danger') def try_jwt(token, idents_set): """ Check a JWT token """ if token is None: return None try: jwt_data = jwt.decode(token, CONFIG.secret) except jwt.exceptions.InvalidTokenError: return None else: idents_set.add(str(jwt_data['sub'])) user = User.query.get(jwt_data['sub']) if user is not None: idents_set.add(user.email.lower()) return user def try_user_pass(password, lookup, idents_set): """ Try to authenticate a user based on first a user ID, if ``lookup`` can be parsed into an ``int``, othewise it's treated as a user email. 
Uses ``verify_and_update_password`` to check the password """ if lookup is not None: idents_set.add(str(lookup).lower()) if password is None or lookup is None: return None user = SECURITY_STATE.datastore.get_user(lookup) if not user: return None idents_set.add(user.email.lower()) idents_set.add(user.id) if verify_and_update_password(password, user): return user return None def try_all_auth(api_key, password, username, idents_set): """ Attempt auth with the API key, then username/password """ user = try_jwt(api_key, idents_set) if user is not None: return user user = try_user_pass(password, username, idents_set) if user is not None: return user return None def try_reqparser(idents_set): """ Use ``try_all_auth`` to attempt authorization from the ``LOGIN_FORM`` ``RequestParser``. Will take JWT keys from ``x_dockci_api_key``, and ``x_dockci_username``/``x_dockci_password`` combinations """ args = LOGIN_FORM.parse_args() return try_all_auth( args['x_dockci_api_key'] or args['hx_dockci_api_key'], args['x_dockci_password'] or args['hx_dockci_password'], args['x_dockci_username'] or args['hx_dockci_username'], idents_set, ) def try_basic_auth(idents_set): """ Use ``try_all_auth`` to attempt authorization from HTTP basic auth. Only the password is used for API key """ auth = request.authorization if not auth: return None return try_all_auth( auth.password, auth.password, auth.username, idents_set, ) @got_request_exception.connect @request_finished.connect def db_rollback(*args, **kwargs): # pylint:disable=unused-argument """ Rollback the DB transaction when the request completes """ dirty = DB.session.dirty if dirty: message = ( "Dirty session had to be rolled back. Objects were: %s" % dirty ) rollbar.report_message(message, 'warning') logging.error(message) DB.session.rollback()
Python
0.000002
@@ -2229,32 +2229,15 @@ (req -): # has request as arg +uest_): %0A @@ -2539,16 +2539,21 @@ (req +uest_ .remote_ @@ -3135,16 +3135,21 @@ hash(req +uest_ ))%0A%0A
a0a92e237ca91dc8f0318a27dfeec9b9c8e95de5
Add utility to guess livelock file for an owner
lib/utils/livelock.py
lib/utils/livelock.py
#
#

# Copyright (C) 2014 Google Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA.


"""Lockfiles to prove liveliness

When requesting resources, like locks, from wconfd, requesters have
to provide the name of a file they own an exclusive lock on, to prove
that they are still alive. Provide methods to obtain such a file.

"""

import fcntl
import os
import struct
import time

from ganeti import pathutils


class LiveLock(object):
  """Utility for a lockfile needed to request resources from WconfD.

  """
  def __init__(self, name=None):
    if name is None:
      name = "pid%d_" % os.getpid()
    # to avoid reusing existing lock files, extend name
    # by the current time
    name = "%s_%d" % (name, int(time.time()))
    fname = os.path.join(pathutils.LIVELOCK_DIR, name)
    self.lockfile = open(fname, 'w')
    fcntl.fcntl(self.lockfile, fcntl.F_SETLKW,
                struct.pack('hhllhh', fcntl.F_WRLCK, 0, 0, 0, 0, 0))

  def close(self):
    """Close the lockfile and clean it up.

    """
    self.lockfile.close()
    os.remove(self.lockfile.name)
Python
0.000116
@@ -1034,16 +1034,55 @@ t time%0A%0A +from ganeti.utils.algo import NiceSort%0A from gan @@ -1758,12 +1758,491 @@ kfile.name)%0A +%0A%0Adef GuessLockfileFor(name):%0A %22%22%22For a given name, take the latest file matching.%0A%0A @return: the file with the latest name matching the given%0A prefix in LIVELOCK_DIR, or the plain name, if none%0A exists.%0A %22%22%22%0A lockfiles = filter(lambda n: n.startswith(name),%0A os.listdir(pathutils.LIVELOCK_DIR))%0A if len(lockfiles) %3E 0:%0A lockfile = NiceSort(lockfiles)%5B-1%5D%0A else:%0A lockfile = name%0A%0A return os.path.join(pathutils.LIVELOCK_DIR, lockfile)%0A
27b0b1cf34ea21b48cb2ae3669ee3d6c44b73bf6
Allow smoketest.py to load a different test list
scripts/smoketest.py
scripts/smoketest.py
#!/usr/bin/python
import os
import pbutil
import progress
import re
import subprocess
import sys
import configtool
import time
import shutil
from xml.dom.minidom import parse

CHECK=True

check_exclude=[
    "convolution/Convolution",       # Difference
    "multiply/strassen",             # Difference, why???
    "regression/whereclause",        # Difference, why???
    "simple/matrixrotate",           # NewProgramCrash
    "multiply/multiply",             # NewProgramCrash
    "regression/params",             # AlwaysCrashes
    "convolution2/ConvFFTRecursion",
    "convolution2/Convolution",
    "convolution2/ConvLinAlg",
    "kclustering/kmeans",            # (Variable accuracy)
    "matrixapprox/matrixapprox",     # (Variable accuracy)
    "regression/accuracymetric",     # (Variable accuracy)
    "preconditioner/preconditioner", # (Variable accuracy)
    "kernel/nwkdeVA",                # (Variable accuracy)
    "kernel/nwkde",                  # floating-point precision errors
    "regression/floattunables",
    "regression/floattunables2",
  ]

def resolveInputPath(path):
  if os.path.isfile("./testdata/"+path):
    return "./testdata/"+path
  return path

def forkrun(cmd):
  null=open("/dev/null","w")
  return subprocess.Popen(cmd, stdout=null, stderr=null)

def run(cmd):
  return forkrun(cmd).wait()

def diffFiles(a, b):
  '''true if files differ'''
  try:
    af=open(a)
    bf=open(b)
    rv = (af.read() != bf.read())
    af.close()
    bf.close()
    return rv
  except Exception, e:
    print "ERROR: ",e
    return True

def checkBenchmark(b):
  if b in check_exclude or not CHECK:
    return True
  import sgatuner, warnings, tunerwarnings
  warnings.resetwarnings()
  warnings.simplefilter('error', tunerwarnings.TunerWarning)
  warnings.simplefilter('ignore', DeprecationWarning)
  warnings.simplefilter('ignore', tunerwarnings.IgnoredTunerWarning)
  warnings.simplefilter('ignore', tunerwarnings.InitialProgramCrash)
  warnings.simplefilter('ignore', tunerwarnings.ProgramTimeout)
  try:
    sgatuner.regression_check(b)
    print "check PASSED"
    return True
  except tunerwarnings.TunerWarning, e:
    print "check FAILED (%s: %s)" % (e.__class__.__name__, str(e))
    return False
  except:
    import traceback
    traceback.print_exc(10)
    return False

def testBenchmark(b):
  name=b[0]
  bin=pbutil.benchmarkToBin(name)
  cfg=pbutil.benchmarkToCfg(name)

  if not os.path.isfile(bin):
    return False

  #build cmd
  hash=name
  iofiles=[]
  for x in b[1:]:
    iofiles.append(resolveInputPath(x))
    hash+=" "+os.path.basename(x)
  outfile="./testdata/.output/"+re.sub("[ /.]",'_',hash)
  iofiles.append(outfile+".latest")

  try:
    cmd=[bin, '--fixedrandom', '--config=%s.cfg'%outfile, '--reset']
    if run(cmd) != 0:
      print "ERROR: reset config failed"
      return False
  except OSError:
    print "ERROR: program not runnable"
    return False

  if os.path.isfile("%s.cfg.default"%outfile):
    shutil.copy("%s.cfg.default"%outfile, "%s.cfg"%outfile)

  try:
    infoxml=parse(pbutil.benchmarkToInfo(name))
  except:
    print "invalid *.info file"
    return False

  def test():
    if isFloatingPoint() and os.path.exists(outfile+".float"):
      ext = ".float"
      print "FLOAT"
    else:
      ext = ""

    #run cpu config
    cmd=[bin, '--fixedrandom', '--config=%s.cfg'%outfile]
    cmd.extend(iofiles)
    t1=time.time()
    rv = run(cmd)
    t2=time.time()
    if rv != 0:
      print "run FAILED (status=%d, cmd=%s)"%(rv, ' '.join(cmd))
      return False

    if diffFiles(outfile+ext, outfile+".latest"):
      time.sleep(0.1) #try letting the filesystem settle down
      if diffFiles(outfile+ext, outfile+".latest"):
        print "run FAILED (wrong output)"
        return False

    print "run PASSED (took %.2fs)" % (t2-t1)

    if (not haveOpenCL()) or (not os.path.exists(outfile+".gpucfg")):
      return True

    #run gpu config
    cmd=[bin, '--fixedrandom', '--noisolation', '--config=%s.gpucfg'%outfile]
    cmd.extend(iofiles)
    t1=time.time()
    rv = run(cmd)
    t2=time.time()
    if rv != 0:
      print "gpu FAILED (status=%d, cmd=%s)"%(rv, ' '.join(cmd))
      return False

    if diffFiles(outfile+ext, outfile+".latest"):
      time.sleep(0.1) #try letting the filesystem settle down
      if diffFiles(outfile+ext, outfile+".latest"):
        print "gpu FAILED (wrong output)"
        return False

    print "gpu PASSED (took %.2fs)" % (t2-t1)
    return True

  return test()

def isFloatingPoint():
  for line in open("./src/config.h"):
    if "MATRIX_ELEMENT_T" in line and "float" in line:
      return True
  return False

def haveOpenCL():
  for line in open("./src/config.h"):
    if "HAVE_OPENCL" in line:
      if "/*" in line:
        return False
      else:
        return True
  return False

if 'nocheck' in sys.argv[1:]:
  sys.argv[1:] = filter(lambda x: x!='nocheck', sys.argv[1:])
  CHECK = False

from optparse import OptionParser
parser = OptionParser(usage="usage: smoketest.py [options]")
parser.add_option("--learning", action="store_true", dest="learning", default=False, help="enable heuristics learning")
parser.add_option("--heuristics", type="string", help="name of the file containing the set of heuristics to use. Automatically enables --learning", default=None)

(options, args) = parser.parse_args()

if options.heuristics:
  options.learning = True

if options.learning:
  print "Learning of heuristics is ACTIVE"
  if options.heuristics:
    print "Using heuristics file: "+ str(options.heuristics)
  else:
    print "Using only heuristics in the database"

t1=time.time()
results,b=pbutil.loadAndCompileBenchmarks("./scripts/smoketest.tests", args, testBenchmark, postfn=checkBenchmark, learning=options.learning, heuristicSetFileName=options.heuristics, excludeBenchmarks=check_exclude)
t2=time.time()

passed=len(filter(lambda x: x.rv==0, results))
total=len(results)

print "%d of %d tests passed (took %.2fs)"%(passed,total,(t2-t1))

sys.exit(min(total-passed, 124))
Python
0.000001
@@ -5424,16 +5424,177 @@ lt=None) +%0Aparser.add_option(%22--testlist%22, type=%22string%22, help=%22name of the file containing the list of tests to be run%22, default=%22./scripts/smoketest.tests%22) %0A%0A(optio @@ -5948,35 +5948,24 @@ rks( -%22./scripts/smoketest +options .test -s%22 +list , ar
ea97edc9937119c4a3c00123b0cd54e2cdb33fb6
Update document
chainer/functions/util/forget.py
chainer/functions/util/forget.py
import chainer
from chainer import function
from chainer import function_node
from chainer import variable


def _call_func(func, xs):
    outs = func(*xs)

    if isinstance(outs, tuple):
        for i, out in enumerate(outs):
            if isinstance(out, variable.Variable):
                continue
            n = i + 1
            suffix = {1: 'st', 2: 'nd', 3: 'rd'}.get(
                n if n < 20 else n % 10, 'th')
            msg = ('{}{} element of a returned tuple is not Variable, '
                   'but is {}').format(n, suffix, type(out))
            raise RuntimeError(msg)
    elif isinstance(outs, variable.Variable):
        outs = (outs,)
    else:
        msg = ('A tuple of Variables or a Variable are expected, but {} '
               'is returned.'.format(type(outs)))
        raise RuntimeError(msg)

    return outs


class Forget(function_node.FunctionNode):

    def __init__(self, func):
        if not callable(func):
            raise TypeError('func must be callable')
        self.func = func

    def forward(self, inputs):
        self.retain_inputs(tuple(range(len(inputs))))
        with function.no_backprop_mode():
            xs = [variable.Variable(x) for x in inputs]
            outs = _call_func(self.func, xs)
        return tuple(out.data for out in outs)

    def backward(self, indexes, grad_outputs):
        # Double backprop is not allowed
        if chainer.config.enable_backprop:
            raise RuntimeError('double backpropagation in functions.forget is '
                               'not allowed.')

        inputs = self.get_retained_inputs()
        # Create new variables that have no creators
        dummy_inputs = tuple([variable.Variable(inp.array) for inp in inputs])

        with function.force_backprop_mode(),\
                chainer.using_config('recompute', True):
            outs = _call_func(self.func, dummy_inputs)
            assert len(outs) == len(grad_outputs)
            if len(outs) > 1:
                # Avoid doing backward multiple times when `outs` is a tuple
                outs = chainer.functions.identity(*outs)

        for out, grad_output in zip(outs, grad_outputs):
            out.grad_var = grad_output
        outs[0].backward()

        return tuple([inp.grad_var for inp in dummy_inputs])


def forget(func, *xs):
    """Calls a function without storing intermediate results.

    On a forward propagation, Chainer normally stores all intermediate results
    of :class:`~chainer.variable.VariableNode`\\ s on a computational graph as
    they are required on backward propagation.
    Sometimes these results consume too much memory.
    ``F.forget`` *forgets* such intermediate results on forward propagation,
    and still supports backpropagation with recalculation.

    On a forward propagation, ``F.forget`` calls a given function with given
    variables without creating a computational graph. That means, no
    intermediate results are stored.
    On a backward propagation, ``F.forget`` calls the given function again to
    create a computational graph for backpropagation.

    ``F.forget`` reduces internal memory usage, whereas it requires more
    calculation time as it calls the function twice.

    .. admonition:: Example

       Let ``f`` be a function defined as:

       >>> def f(a, b):
       ...     return a + b + a

       and, ``x`` and ``y`` be :class:`~chainer.Variable`\\ s:

       >>> x = chainer.Variable(np.random.uniform(-1, 1, 5).astype(np.float32))
       >>> y = chainer.Variable(np.random.uniform(-1, 1, 5).astype(np.float32))

       When ``z`` is calculated as ``z = f(x, y)``, its intermediate result
       ``x + y`` is stored in memory. Instead, if you call ``f`` with
       ``F.forget``:

       >>> z = F.forget(f, x, y)

       intermediate ``x + y`` is forgotten.

    .. note::

        ``F.forget`` does not support functions which behave differently in
        multiple calls with the same inputs, such as
        :meth:`F.dropout() <chainer.functions.dropout>` and
        :meth:`F.negative_sampling() <chainer.functions.negative_sampling>`.

    .. note::

        In case input argument variables are of class :class:`numpy.ndarray`
        or :class:`cupy.ndarray` objects, arguments will automatically be
        converted to :class:`~chainer.Variable`\\ s.
        This conversion takes place to ensure that this function is included
        in the computational graph to enable backward computations.

    .. note::

        ``F.forget`` does not support double backpropagation.

    Args:
        func (callable): A function to call. It needs to be called with
            :class:`~chainer.Variable` object(s) and to return a
            :class:`~chainer.Variable` object or a tuple of
            :class:`~chainer.Variable` objects.
        xs (~chainer.Variable): Argument variables of the function.

    Returns:
        ~chainer.Variable: A variable ``func`` returns. If it returns a tuple,
        the method returns a tuple too.

    """
    xs = tuple(x if isinstance(x, variable.Variable) else
               variable.Variable(x, requires_grad=True) for x in xs)
    y = Forget(func).apply(xs)
    if len(y) == 1:
        y, = y
    return y
Python
0
@@ -4543,24 +4543,518 @@ opagation.%0A%0A + .. note::%0A%0A If you want to use %60%60F.forget%60%60 to a link which updates the link's%0A internal information every time the forward computation is called,%0A please ensure that the information is updated just once in a single%0A iteration. You may use the %60%60chainer.config.recompute%60%60 flag to check%0A if the forward computation is the first call in an iteration.%0A Please see the implementation of%0A :class:%60~chainer.links.BatchNormalization%60 for detail.%0A%0A Args:%0A
f431f2408ca1e1a38479a9ac1224bd608df1c0d4
Test build.source rather than legacy attributes
changes/listeners/green_build.py
changes/listeners/green_build.py
import logging
import requests

from datetime import datetime
from flask import current_app

from changes.config import db
from changes.constants import Result
from changes.db.utils import create_or_update
from changes.models import (
    Build, Event, EventType, ProjectOption, RepositoryBackend
)
from changes.utils.http import build_uri
from changes.utils.locking import lock

logger = logging.getLogger('green_build')


def get_options(project_id):
    return dict(
        db.session.query(
            ProjectOption.name, ProjectOption.value
        ).filter(
            ProjectOption.project_id == project_id,
            ProjectOption.name.in_([
                'green-build.notify',
                'green-build.project',
            ])
        )
    )


@lock
def build_finished_handler(build_id, **kwargs):
    build = Build.query.get(build_id)
    if build is None:
        return

    if build.result != Result.passed:
        return

    url = current_app.config.get('GREEN_BUILD_URL')
    if not url:
        logger.info('GREEN_BUILD_URL not set')
        return

    auth = current_app.config['GREEN_BUILD_AUTH']
    if not auth:
        logger.info('GREEN_BUILD_AUTH not set')
        return

    # we only want to identify stable revisions
    if build.patch_id or not build.revision_sha:
        logger.debug('Ignoring build due to non-commit: %s', build.id)
        return

    options = get_options(build.project_id)

    if options.get('green-build.notify', '1') != '1':
        logger.info('green-build.notify disabled for project: %s', build.project_id)
        return

    if build.repository.backend != RepositoryBackend.hg:
        logger.info('Repository backend is not supported: %s', build.repository.id)
        return

    vcs = build.repository.get_vcs()
    if vcs is None:
        logger.info('Repository has no VCS set: %s', build.repository.id)
        return

    # ensure we have the latest changes
    vcs.update()

    release_id = vcs.run(['log', '-r %s' % (build.revision_sha,), '--limit=1', '--template={rev}:{node|short}'])

    project = options.get('green-build.project') or build.project.slug

    try:
        requests.post(url, auth=auth, data={
            'project': project,
            'id': release_id,
            'build_url': build_uri('/projects/{0}/builds/{1}/'.format(
                build.project.slug, build.id.hex)),
            'build_server': 'changes',
        })
    except Exception:
        logger.exception('Failed to report green build')
        status = 'fail'
    else:
        status = 'success'

    create_or_update(Event, where={
        'type': EventType.green_build,
        'item_id': build.id,
    }, values={
        'data': {
            'status': status,
        },
        'date_modified': datetime.utcnow(),
    })
Python
0
@@ -1249,16 +1249,23 @@ f build. +source. patch_id @@ -1274,24 +1274,31 @@ r not build. +source. revision_sha
1bc98a6f8a2f4a77a465147fd0b9cf3c6ef9edba
Remove dead 'first' parameter.
api/common.py
api/common.py
#!/usr/bin/python # Copyright (C) 2012 Humbug, Inc. # # Permission is hereby granted, free of charge, to any person # obtaining a copy of this software and associated documentation files # (the "Software"), to deal in the Software without restriction, # including without limitation the rights to use, copy, modify, merge, # publish, distribute, sublicense, and/or sell copies of the Software, # and to permit persons to whom the Software is furnished to do so, # subject to the following conditions: # # The above copyright notice and this permission notice shall be # included in all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, # EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF # MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND # NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS # BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN # ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN # CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. import simplejson import requests import time import traceback import urlparse import sys import os # Check that we have a recent enough version # Older versions don't provide the 'json' attribute on responses. assert(requests.__version__ > '0.12') API_VERSTRING = "/api/v1/" class HumbugAPI(object): def __init__(self, email, api_key=None, api_key_file=None, verbose=False, retry_on_errors=True, site="https://humbughq.com", client="API"): if api_key is None: if api_key_file is None: api_key_file = os.path.join(os.environ["HOME"], ".humbug-api-key") if not os.path.exists(api_key_file): raise RuntimeError("api_key not specified and %s does not exist" % (api_key_file,)) with file(api_key_file, 'r') as f: api_key = f.read().strip() self.api_key = api_key self.email = email self.verbose = verbose self.base_url = site self.retry_on_errors = retry_on_errors self.client_name = client def do_api_query(self, orig_request, url, longpolling = False): request = {} request["email"] = self.email request["api-key"] = self.api_key request["client"] = self.client_name for (key, val) in orig_request.iteritems(): if not (isinstance(val, str) or isinstance(val, unicode)): request[key] = simplejson.dumps(val) else: request[key] = val query_state = { 'had_error_retry': False, 'request': request, 'failures': 0, } def error_retry(error_string): if not self.retry_on_errors or query_state["failures"] >= 10: return False if self.verbose: if not query_state["had_error_retry"]: sys.stdout.write("humbug API(%s): connection error%s -- retrying." % \ (url.split(API_VERSTRING, 2)[1], error_string,)) query_state["had_error_retry"] = True else: sys.stdout.write(".") sys.stdout.flush() query_state["request"]["dont_block"] = simplejson.dumps(True) time.sleep(1) query_state["failures"] += 1 return True def end_error_retry(succeeded): if query_state["had_error_retry"] and self.verbose: if succeeded: print "Success!" else: print "Failed!" 
while True: try: res = requests.post(urlparse.urljoin(self.base_url, url), data=query_state["request"], verify=True, timeout=55) # On 50x errors, try again after a short sleep if str(res.status_code).startswith('5'): if error_retry(" (server %s)" % (res.status_code,)): continue # Otherwise fall through and process the python-requests error normally except (requests.exceptions.Timeout, requests.exceptions.SSLError) as e: # Timeouts are either a Timeout or an SSLError; we # want the later exception handlers to deal with any # non-timeout other SSLErrors if (isinstance(e, requests.exceptions.SSLError) and str(e) != "The read operation timed out"): raise if longpolling: # When longpolling, we expect the timeout to fire, # and the correct response is to just retry continue else: end_error_retry(False) return {'msg': "Connection error:\n%s" % traceback.format_exc(), "result": "connection-error"} except requests.exceptions.ConnectionError: if error_retry(""): continue end_error_retry(False) return {'msg': "Connection error:\n%s" % traceback.format_exc(), "result": "connection-error"} except Exception: # We'll split this out into more cases as we encounter new bugs. return {'msg': "Unexpected error:\n%s" % traceback.format_exc(), "result": "unexpected-error"} if res.json is not None: end_error_retry(True) return res.json end_error_retry(False) return {'msg': res.text, "result": "http-error", "status_code": res.status_code} @classmethod def _register(cls, name, url=None, make_request=(lambda request={}: request), **query_kwargs): if url is None: url = name def call(self, *args, **kwargs): request = make_request(*args, **kwargs) return self.do_api_query(request, API_VERSTRING + url, **query_kwargs) call.func_name = name setattr(cls, name, call) def call_on_each_message(self, callback, options = {}): max_message_id = None while True: if max_message_id is not None: options["first"] = "0" options["last"] = str(max_message_id) res = self.get_messages(options) if 'error' in res.get('result'): if self.verbose: if res["result"] == "http-error": print "HTTP error fetching messages -- probably a server restart" elif res["result"] == "connection-error": print "Connection error fetching messages -- probably server is temporarily down?" else: print "Server returned error:\n%s" % res["msg"] # TODO: Make this back off once it's more reliable time.sleep(1) continue for message in sorted(res['messages'], key=lambda x: int(x["id"])): max_message_id = max(max_message_id, int(message["id"])) callback(message) def _mk_subs(streams): return {'subscriptions': streams} HumbugAPI._register('send_message', make_request=(lambda request: request)) HumbugAPI._register('get_messages', longpolling=True) HumbugAPI._register('get_profile') HumbugAPI._register('get_public_streams') HumbugAPI._register('list_subscriptions', url='subscriptions/list') HumbugAPI._register('add_subscriptions', url='subscriptions/add', make_request=_mk_subs) HumbugAPI._register('remove_subscriptions', url='subscriptions/remove', make_request=_mk_subs)
Python
0
@@ -6477,47 +6477,8 @@ ne:%0A - options%5B%22first%22%5D = %220%22%0A
a281fd3c49b86012fd370ae82df19525af89ff1c
Disable swift test
parsl/tests/test_swift.py
parsl/tests/test_swift.py
import pytest

import parsl
from parsl import *

parsl.set_stream_logger()

from parsl.executors.swift_t import *


def foo(x, y):
    return x * y


def slow_foo(x, y):
    import time
    time.sleep(x)
    return x * y


def bad_foo(x, y):
    time.sleep(x)
    return x * y


@pytest.mark.local
def test_simple():
    print("Start")
    tex = TurbineExecutor()
    x = tex.submit(foo, 5, 10)
    print("Got: ", x)
    print("X result: ", x.result())
    assert x.result() == 50, "X != 50"
    print("done")


@pytest.mark.local
def test_slow():
    futs = {}
    tex = TurbineExecutor()
    for i in range(0, 3):
        futs[i] = tex.submit(slow_foo, 1, 2)

    total = sum([futs[i].result(timeout=10) for i in futs])
    assert total == 6, "expected 6, got {}".format(total)


@pytest.mark.local
def test_except():
    with pytest.raises(NameError):
        tex = TurbineExecutor()
        x = tex.submit(bad_foo, 5, 10)
        x.result()


if __name__ == "__main__":
    # test_simple()
    # test_slow()
    test_except()
    print("Done")
Python
0.000004
@@ -272,16 +272,58 @@ x * y%0A%0A%0A +@pytest.mark.skip('fails intermittently')%0A @pytest. @@ -325,32 +325,32 @@ test.mark.local%0A - def test_simple( @@ -558,32 +558,74 @@ test.mark.local%0A +@pytest.mark.skip('fails intermittently')%0A def test_slow(): @@ -855,24 +855,24 @@ at(total)%0A%0A%0A - @pytest.mark @@ -870,32 +870,74 @@ test.mark.local%0A +@pytest.mark.skip('fails intermittently')%0A def test_except(
e9980d7498c0889ecd795a4d2977c1893e0ad7e3
comment on md5 usage
app/util.py
app/util.py
import bcrypt
import md5

def hash_pwd(password):
    return bcrypt.hashpw(password, bcrypt.gensalt())

def check_pwd(password, hashed):
    return bcrypt.hashpw(password, hashed) == hashed

def validate_time(time):
    return True

def gravatar_html(email):
    h = md5.md5(email.lower()).hexdigest()
    html = '<img src="http://www.gravatar.com/avatar/%s.jpg?s=15" />' % h
    return html
Python
0
@@ -228,16 +228,57 @@ True %0A%0A +# XXX md5 module deprecated, use hashlib%0A def grav
1d443973e8db6265268dd2afe6b6ad7748526335
Add _read_test_file() function.
ipymd/utils.py
ipymd/utils.py
# -*- coding: utf-8 -*-

"""Utils"""

#------------------------------------------------------------------------------
# Imports
#------------------------------------------------------------------------------

import os
import os.path as op
import difflib

from .six import exec_


#------------------------------------------------------------------------------
# Utils
#------------------------------------------------------------------------------

def _script_dir():
    return op.dirname(op.realpath(__file__))


def _test_file_path(filename):
    """Return the full path to an example filename in the 'examples'
    directory."""
    return op.realpath(op.join(_script_dir(), '../examples', filename))


def _exec_test_file(filename):
    """Return the 'output' object defined in a Python file."""
    path = _test_file_path(filename)
    with open(path, 'r') as f:
        contents = f.read()
    ns = {}
    exec_(contents, ns)
    return ns.get('output', None)


def _diff_removed_lines(diff):
    return ''.join(x[2:] for x in diff if x.startswith('- '))


def _diff(text_0, text_1):
    """Return a diff between two strings."""
    diff = difflib.ndiff(text_0.splitlines(), text_1.splitlines())
    return _diff_removed_lines(diff)
Python
0.000001
@@ -959,24 +959,177 @@ t', None)%0A%0A%0A +def _read_test_file(filename):%0A %22%22%22Read a test file.%22%22%22%0A path = _test_file_path(filename)%0A with open(path, 'r') as f:%0A return f.read()%0A%0A%0A def _diff_re
95ad2c65fb1b4aacea668c8d9474183b4f107d56
Test with multi args
paver/tests/test_shell.py
paver/tests/test_shell.py
import sys

from paver.deps.six import b
from mock import patch, Mock

from paver import easy
from subprocess import PIPE, STDOUT


@patch('subprocess.Popen')
def test_sh_raises_BuildFailure(popen):
    popen.return_value.returncode = 1
    popen.return_value.communicate.return_value = [b('some stderr')]

    try:
        easy.sh('foo')
    except easy.BuildFailure:
        e = sys.exc_info()[1]
        args = e.args
        assert args == ('Subprocess return code: 1', )
    else:
        assert False, 'Failed to raise BuildFailure'

    assert popen.called
    assert popen.call_args[0][0] == 'foo'
    assert popen.call_args[1]['shell'] == True
    assert 'stdout' not in popen.call_args[1]


@patch('paver.easy.error')
@patch('subprocess.Popen')
def test_sh_with_capture_raises_BuildFailure(popen, error):
    popen.return_value.returncode = 1
    popen.return_value.communicate.return_value = [b('some stderr')]

    try:
        easy.sh('foo', capture=True)
    except easy.BuildFailure:
        e = sys.exc_info()[1]
        args = e.args
        assert args == ('Subprocess return code: 1', )
    else:
        assert False, 'Failed to raise BuildFailure'

    assert popen.called
    assert popen.call_args[0][0] == 'foo'
    assert popen.call_args[1]['shell'] == True
    assert popen.call_args[1]['stdout'] == PIPE
    assert popen.call_args[1]['stderr'] == STDOUT

    assert error.called
    assert error.call_args == (('some stderr', ), {})


@patch('subprocess.Popen')
def test_sh_ignores_error(popen):
    popen.return_value.communicate.return_value = [b('some stderr')]
    popen.return_value.returncode = 1
    easy.sh('foo', ignore_error=True)

    assert popen.called
    assert popen.call_args[0][0] == 'foo'
    assert popen.call_args[1]['shell'] == True
    assert 'stdout' not in popen.call_args[1]


@patch('subprocess.Popen')
def test_sh_ignores_error_with_capture(popen):
    popen.return_value.returncode = 1
    popen.return_value.communicate.return_value = [b('some stderr')]
    easy.sh('foo', capture=True, ignore_error=True)

    assert popen.called
    assert popen.call_args[0][0] == 'foo'
    assert popen.call_args[1]['shell'] == True
    assert popen.call_args[1]['stdout'] == PIPE
    assert popen.call_args[1]['stderr'] == STDOUT
Python
0
@@ -704,20 +704,21 @@ ('paver. -easy +shell .error') @@ -2242,28 +2242,302 @@ args%5B1%5D%5B'stderr'%5D == STDOUT%0A +%0A@patch('subprocess.Popen')%0Adef test_sh_with_multi_command(popen):%0A popen.return_value.returncode = 0%0A%0A easy.sh(%5B'foo', ' bar', 'fi%22zz'%5D)%0A%0A assert popen.called%0A assert popen.call_args%5B0%5D%5B0%5D == %22foo ' bar' 'fi%5C%22zz'%22%0A assert popen.call_args%5B1%5D%5B'shell'%5D == True%0A
d329787dc6f862e749ca6f490a155186b48553a7
Fix one more bug; interpreter still broken
bfinterp.py
bfinterp.py
import sys
import collections

import getch

from parser import parse, optimize
from parser import OUTPUT, INPUT, LOOPSTART, LOOPEND, MOVE
from parser import ADD, SET, MULCOPY, SCAN

BUFSIZE = 8192

def interp(code):
    tokens = parse(code)
    tokens = optimize(tokens)
    i = 0
    loops = []
    mem = bytearray(BUFSIZE)
    cur = int(BUFSIZE/2)
    skiploop = False
    while i < len(tokens)-1:
        #print("%d:%s cur:%d mem[cur]:%d" % (i, code[i], cur, mem[cur]))
        #print(loops)
        token, value = tokens[i]
        if skiploop:
            if token == LOOPEND:
                skiploop = False
            continue
        if token == OUTPUT:
            print(chr(mem[cur]), end='')
        elif token == INPUT:
            mem[cur] == ord(getch.getch())
        elif token == MOVE:
            cur += value
        elif token == ADD:
            offset, add = value
            newval = mem[cur+offset] + add
            newval %= 256
            mem[cur+offset] = newval
        elif token == SET:
            offset, val = value
            mem[cur+offset] = val
        elif token == MULCOPY:
            src, dest, mul = value
            newval = mem[cur+dest] + mem[cur+src] * mul
            newval %= 256
            mem[cur+dest] = newval
        elif token == SCAN:
            while mem[cur] != 0:
                cur += value
        elif token == LOOPSTART:
            if mem[cur]:
                loops.append(i)
            else:
                skiploop = True
        elif token == LOOPEND:
            if mem[cur] == 0:
                loops.pop()
            else:
                i = loops[-1]
        else:
            raise ValueError('Token not handled')
        i += 1

if __name__ == '__main__':
    with open(sys.argv[1]) as bffile:
        interp(bffile.read())
Python
0
@@ -606,24 +606,43 @@ oop = False%0A + i += 1%0A
47cf1fd8990d6b791d0a6847500abcf14ab46064
Fix on_new_event
mods/email.py
mods/email.py
from django.conf.urls import url from django.http import HttpResponse, Http404 from django.core.urlresolvers import reverse from django.core.exceptions import PermissionDenied from django.template import Template, Context from mod import PatchewModule import smtplib import email import uuid import traceback from api.models import Message from event import register_handler, get_events_info from schema import * _default_config = """ [smtp] server = smtp.example.com ssl = True port = 465 username = youruser password = yourpassword from = your@email.com """ class EmailModule(PatchewModule): """ Documentation ------------- Email information is configured in "INI" style: """ + _default_config name = "email" # The notify method name default_config = _default_config email_schema = \ ArraySchema("email_notification", "Email Notification", desc="Email notification", members=[ BooleanSchema("enabled", "Enabled", desc="Whether this event is enabled", default=True), BooleanSchema("reply_to_all", "Reply to all", desc='Whether to "reply to all" if the event has an associated email message', default=False), StringSchema("to", "To", desc="Send email to"), EnumSchema("event", "Event", enums=lambda: get_events_info(), required=True, desc="Which event to trigger the email notification"), StringSchema("subject_template", "Subject template", desc="""The django template for subject""", required=True), StringSchema("body_template", "Body template", desc="""The django template for email body. If rendered to empty, the email will not be sent""", multiline=True, required=True), ]) project_property_schema = \ ArraySchema("email", desc="Configuration for email module", members=[ MapSchema("notifications", "Email notifications", desc="Email notifications", item=email_schema), ]) def __init__(self): register_handler("NewEvent", self.on_new_event) def on_new_event(self, name, params): if name == "NewEvent": return register_handler(name, self.on_event) def _get_smtp(self): server = self.get_config("smtp", "server") port = self.get_config("smtp", "port") username = self.get_config("smtp", "username") password = self.get_config("smtp", "password") ssl = self.get_config("smtp", "ssl", "getboolean") if ssl: smtp = smtplib.SMTP_SSL(server, port) else: smtp = smtplib.SMTP(server, port) smtp.login(username, password) return smtp def _send_series_recurse(self, sendmethod, s): sendmethod(s) for i in s.get_replies(): self._send_series_recurse(sendmethod, i) def _smtp_send(self, to, cc, message): from_addr = self.get_config("smtp", "from") message["From"] = from_addr if cc: message["Cc"] = cc else: message.__delitem__("Cc") smtp = self._get_smtp() smtp.sendmail(from_addr, to, message.as_string()) def www_view_email_bounce(self, request, message_id): if not request.user.is_authenticated(): raise PermissionDenied() m = Message.objects.find_series(message_id) if not m: raise Http404("Series not found: " + message_id) def send_one(m): msg = m.get_mbox() message = email.message_from_string(msg) self._smtp_send(request.user.email, None, message) self._send_series_recurse(send_one, m) return HttpResponse("email bounced") def www_url_hook(self, urlpatterns): urlpatterns.append(url(r"^email-bounce/(?P<message_id>.*)/", self.www_view_email_bounce, name="email-bounce")) def prepare_message_hook(self, request, message): if message.is_series_head and request.user.is_authenticated(): message.extra_ops.append({"url": reverse("email-bounce", kwargs={"message_id": message.message_id}), "title": "Bounce to me"}) def _sections_by_event(self, event): 
conf = self.get_config_obj() for sec in conf.sections(): if sec.startswith("mail ") and conf.get(sec, "event") == event: yield sec def _send_email(self, to, cc, headers, body): message = email.message.Message() for k, v in headers.iteritems(): message[k] = v message.set_payload(body) self._smtp_send(to, cc, message) def gen_message_id(self): return "<%s@patchew.org>" % uuid.uuid1() def get_notifications(self, project): ret = {} for k, v in project.get_properties().iteritems(): if not k.startswith("email.notifictions."): continue tn = k[len("email.notifications."):] if "." not in tn: continue an = tn[tn.find(".") + 1:] tn = tn[:tn.find(".")] ret.setdefault(tn, {}) ret[tn][an] = v ret[tn]["name"] = tn return ret def on_event(self, **params): obj = params.get("obj") headers = {} msg_to = [] msg_cc = [] if isinstance(obj, Project): po = obj elif isinstance(obj, Message) and obj.is_series_head: headers["In-Reply-To"] = "<%s>" % obj.message_id po = obj.project msg_to = obj.get_sender() msg_cc = obj.get_receivers() else: return for nt in self.get_notifications(po): if not nt.enabled: continue ctx = Context(params) subject = Template(nt.subject_template, ctx) body = Template(nt.body_template, ctx) to = nt.to.split(",") + msg_to cc = msg_cc if not (subject or body or to): continue headers["Subject"] = subject print to, cc, headers, body #self._send_email(to, cc, headers, body) def prepare_project_hook(self, request, project): if not project.maintained_by(request.user): return project.extra_info.append({"title": "Email notifications", "class": "info", "content": self.build_config_html(request, project)})
Python
0
@@ -2741,16 +2741,23 @@ nt(self, + event, name, p
fe785a6c7f9ea5232cfda964fa155c1ccdee3af0
remove unused import
samples/python/uff_ssd/utils/model.py
samples/python/uff_ssd/utils/model.py
# # SPDX-FileCopyrightText: Copyright (c) 1993-2022 NVIDIA CORPORATION & AFFILIATES. All rights reserved. # SPDX-License-Identifier: Apache-2.0 # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # Model extraction and UFF convertion utils import os import sys import tarfile import requests import tensorflow as tf import tensorrt as trt import graphsurgeon as gs import uff import time import math from utils.paths import PATHS from utils.modeldata import ModelData # UFF conversion functionality def ssd_unsupported_nodes_to_plugin_nodes(ssd_graph): """Makes ssd_graph TensorRT comparible using graphsurgeon. This function takes ssd_graph, which contains graphsurgeon DynamicGraph data structure. This structure describes frozen Tensorflow graph, that can be modified using graphsurgeon (by deleting, adding, replacing certain nodes). The graph is modified by removing Tensorflow operations that are not supported by TensorRT's UffParser and replacing them with custom layer plugin nodes. Note: This specific implementation works only for ssd_inception_v2_coco_2017_11_17 network. Args: ssd_graph (gs.DynamicGraph): graph to convert Returns: gs.DynamicGraph: UffParser compatible SSD graph """ # Create TRT plugin nodes to replace unsupported ops in Tensorflow graph channels = ModelData.get_input_channels() height = ModelData.get_input_height() width = ModelData.get_input_width() Input = gs.create_plugin_node(name="Input", op="Placeholder", dtype=tf.float32, shape=[1, channels, height, width]) PriorBox = gs.create_plugin_node( name="GridAnchor", op="GridAnchor_TRT", minSize=0.2, maxSize=0.95, aspectRatios=[1.0, 2.0, 0.5, 3.0, 0.33], variance=[0.1, 0.1, 0.2, 0.2], featureMapShapes=[19, 10, 5, 3, 2, 1], numLayers=6, ) NMS = gs.create_plugin_node( name="NMS", op="NMS_TRT", shareLocation=1, varianceEncodedInTarget=0, backgroundLabelId=0, confidenceThreshold=1e-8, nmsThreshold=0.6, topK=100, keepTopK=100, numClasses=91, inputOrder=[0, 2, 1], confSigmoid=1, isNormalized=1, ) concat_priorbox = gs.create_node("concat_priorbox", op="ConcatV2", dtype=tf.float32, axis=2) concat_box_loc = gs.create_plugin_node( "concat_box_loc", op="FlattenConcat_TRT", dtype=tf.float32, axis=1, ignoreBatch=0 ) concat_box_conf = gs.create_plugin_node( "concat_box_conf", op="FlattenConcat_TRT", dtype=tf.float32, axis=1, ignoreBatch=0 ) # Create a mapping of namespace names -> plugin nodes. namespace_plugin_map = { "MultipleGridAnchorGenerator": PriorBox, "Postprocessor": NMS, "Preprocessor": Input, "ToFloat": Input, "image_tensor": Input, "MultipleGridAnchorGenerator/Concatenate": concat_priorbox, "MultipleGridAnchorGenerator/Identity": concat_priorbox, "concat": concat_box_loc, "concat_1": concat_box_conf, } # Create a new graph by collapsing namespaces ssd_graph.collapse_namespaces(namespace_plugin_map) # Remove the outputs, so we just have a single output node (NMS). # If remove_exclusive_dependencies is True, the whole graph will be removed! 
ssd_graph.remove(ssd_graph.graph_outputs, remove_exclusive_dependencies=False) return ssd_graph def model_to_uff(model_path, output_uff_path, silent=False): """Takes frozen .pb graph, converts it to .uff and saves it to file. Args: model_path (str): .pb model path output_uff_path (str): .uff path where the UFF file will be saved silent (bool): if False, writes progress messages to stdout """ dynamic_graph = gs.DynamicGraph(model_path) dynamic_graph = ssd_unsupported_nodes_to_plugin_nodes(dynamic_graph) uff.from_tensorflow( dynamic_graph.as_graph_def(), [ModelData.OUTPUT_NAME], output_filename=output_uff_path, text=True ) # Model extraction functionality def maybe_print(should_print, print_arg): """Prints message if supplied boolean flag is true. Args: should_print (bool): if True, will print print_arg to stdout print_arg (str): message to print to stdout """ if should_print: print(print_arg) def maybe_mkdir(dir_path): """Makes directory if it doesn't exist. Args: dir_path (str): directory path """ if not os.path.exists(dir_path): os.makedirs(dir_path) def _extract_model(silent=False): """Extract model from Tensorflow model zoo. Args: silent (bool): if False, writes progress messages to stdout """ maybe_print(not silent, "Preparing pretrained model") model_dir = PATHS.get_models_dir_path() maybe_mkdir(model_dir) model_archive_path = PATHS.get_data_file_path("samples/python/uff_ssd/ssd_inception_v2_coco_2017_11_17.tar.gz") maybe_print(not silent, "Unpacking {}".format(model_archive_path)) with tarfile.open(model_archive_path, "r:gz") as tar: tar.extractall(path=model_dir) maybe_print(not silent, "Model ready") def prepare_ssd_model(model_name="ssd_inception_v2_coco_2017_11_17", silent=False): """Extract pretrained object detection model and converts it to UFF. The model is downloaded from Tensorflow object detection model zoo. Currently only ssd_inception_v2_coco_2017_11_17 model is supported due to model_to_uff() using logic specific to that network when converting. Args: model_name (str): chosen object detection model silent (bool): if False, writes progress messages to stdout """ if model_name != "ssd_inception_v2_coco_2017_11_17": raise NotImplementedError("Model {} is not supported yet".format(model_name)) _extract_model(silent) ssd_pb_path = PATHS.get_model_pb_path(model_name) ssd_uff_path = PATHS.get_model_uff_path(model_name) model_to_uff(ssd_pb_path, ssd_uff_path, silent)
Python
0
@@ -767,24 +767,8 @@ le%0A%0A -import requests%0A impo
7048bb1e378df0458b0a721ed056428d8688f067
add requirement and decoding
molprobity.py
molprobity.py
#!/usr/bin/env python # Copyright 2013, Joao Rodrigues """ Python script to assign histidine protonation states using Molprobity / Reduce. Joao Rodrigues @ 2013 Adapted from Sjoerd's WHATIF code syntax: molprobity.py <PDB-file> """ import io import os import sys import subprocess import tempfile def _check_molprobity_path(custom_path=None): """ Tries to find 'reduce' executable in the system path. """ if custom_path: if os.path.isfile(custom_path) and os.access(custom_path, os.X_OK): return custom_path else: raise Exception("Could not find path to 'reduce' executable: {0} does not exist or is not executable\n". format(custom_path)) else: try: path = os.getenv('PATH').split(os.pathsep) except KeyError: raise Exception("Could not find path to 'reduce' executable: environment variable PATH not defined\n") for directory in path: if not os.path.exists(directory): continue if 'reduce' in os.listdir(directory): reduce_path = os.path.join(directory, 'reduce') if os.path.isfile(reduce_path) and os.access(reduce_path, os.X_OK): return reduce_path else: raise Exception("Found 'reduce' but it is either not executable or a directory.. ({0})\n". format(reduce_path)) raise Exception("Could not find path to 'reduce' executable: Are you sure it is installed?\n") def run_molprobity(pdbdata, molprobity_executable=None): """ Reads a PDB file and outputs the corrected structure and a dictionary with protonation states. Expects either an open file handle or a string with a PDB formatted structure. Option strip_header removes all lines not starting with ATOM, TER, END, etc.. (check PDB format) """ reduce_exec = _check_molprobity_path(molprobity_executable) cmd_string = [reduce_exec, '-build', '-Xplor', '-quiet'] # File Handle vs Data String if isinstance(pdbdata, io.TextIOBase): cmd_stdin = pdbdata.read() else: cmd_stdin = pdbdata # Temporary File for Reduce tmp_file = tempfile.NamedTemporaryFile(mode="w+t") tmp_file.write(cmd_stdin) tmp_file.flush() # Force write to file otherwise might be incomplete cmd_string.append(tmp_file.name) try: process_handle = subprocess.Popen(cmd_string, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, close_fds=False) except Exception as e: raise Exception("There was an error running the 'reduce': {0}\n({1})\n".format(' '.join(cmd_string), e)) p_stdout, p_stderr = process_handle.communicate() return p_stdout, p_stderr def analyze_protonation_state(pdbdata, pdbname="the PDB file"): his_db = {} hisprotonatoms = {" HD1": 'd1', " HD2": 'd2', " HE1": 'e1', " HE2": 'e2'} # Build Histidine 'Database' # Assign protonation states based on presence of atoms in the PDB file for line in pdbdata.splitlines(): if not line.startswith('ATOM'): continue aname = line[12:16] resn = line[17:20] resi = int(line[22:26]) if resn != "HIS": continue elif resn == "HIS" and aname in hisprotonatoms: if resi not in his_db: his_db[resi] = {} currhis = his_db[resi] histidine_state = hisprotonatoms.get(aname) currhis[histidine_state] = True # Decide on Protonation State for CNS/HADDOCK ret = [] for resi in his_db: his = his_db[resi] dcount = his.get('d1', 0) + his.get('d2', 0) ecount = his.get('e1', 0) + his.get('e2', 0) total_count = dcount + ecount if total_count == 4: ret.append(dict(resid=resi, state="HIS+")) elif total_count == 3: if dcount == 2: ret.append(dict(resid=resi, state="HISD")) else: ret.append(dict(resid=resi, state="HISE")) else: raise Exception("Molprobity could not guess the protonation state of histidine {0:n} in {1}: dcount={2}, " "ecount={3}".format(resi, pdbname, 
dcount, ecount)) return ret if __name__ == "__main__": # Quick and dirty usage example if not sys.argv[1:]: print("usage: {0} <pdb file> <pdb file 2> ...".format(sys.argv[0])) sys.exit(1) for ppath in sys.argv[1:]: print("## Executing Reduce to assign histidine protonation states") print("## Input PDB: {0} ".format(ppath)) open_fhandle = open(ppath) hadded, process_error = run_molprobity(open_fhandle) hadded = hadded.decode() # Write Optimized PDB, without USER nor Hs pname = os.path.basename(ppath)[:-4] fout = open("{0}_optimized.pdb".format(pname), 'w') for line in hadded.splitlines(): fout.write(line+'\n') fout.close() ret = analyze_protonation_state(hadded) open_fhandle.close() sorted_states = sorted(ret, key=lambda x: (x['state'], x['resid'])) for his in sorted_states: print("HIS ( {0:n} )\t-->\t{1}".format(his['resid'], his['state']))
Python
0
@@ -303,16 +303,90 @@ mpfile%0A%0A +if sys.version_info%5B0%5D %3C 3:%0A print(%22Python 3+ needed%22)%0A sys.exit()%0A%0A %0Adef _ch @@ -4961,16 +4961,23 @@ .decode( +'utf-8' )%0A
2d3c0f8072a90b9531d581ddf1b8c980310dd08c
Connect to s3 on every request.
moneypenny.py
moneypenny.py
#!/usr/bin/python """ A Flask app to post visitor information to reddit. Requirements: * Envoy account (https://signwithenvoy.com/) * reddit account (http://www.reddit.com/) * PRAW library (https://github.com/praw-dev/praw/) """ from babel.dates import format_datetime from ConfigParser import RawConfigParser from datetime import datetime from string import Template import hashlib import hmac import json from flask import Flask, abort, request import boto import collections import praw import pytz import requests import string app = Flask(__name__) app.debug = True config = RawConfigParser() config.read("moneypenny.ini") # Envoy api_key = config.get("envoy", "api_key") # AWS s3_key_id = None s3_secret_key = None if config.has_option("aws", "s3_key_id"): s3_key_id = config.get("aws", "s3_key_id") s3_secret_key = config.get("aws", "s3_secret_key") s3_bucket = config.get("aws", "s3_bucket") s3 = boto.connect_s3(s3_key_id, s3_secret_key) # reddit username = config.get("reddit", "username") password = config.get("reddit", "password") subreddit = config.get("reddit", "subreddit") link_format = Template(config.get("reddit", "link_format")) r = praw.Reddit(user_agent="Mrs. Moneypenny by /u/rram") r.login(username, password) sr = r.get_subreddit(subreddit) # locations location_db = {} for short, info in config.items("locations"): location_db[short] = map(string.strip, info.split(",")) # Moneypenny default_img_url = config.get("DEFAULT", "default_image_url") # Taken from http://documentation.mailgun.com/user_manual.html#webhooks def verify_message(token, timestamp, signature): return signature == hmac.new( key=api_key, msg="{}{}".format(timestamp, token), digestmod=hashlib.sha256).hexdigest() @app.route("/") def health(): return "Hello, James." @app.route("/visitor/<location>", methods=["POST"]) def visitor(location): loc_info = location_db[location] entry = request.form["entry"] status = request.form["status"] timestamp = request.form["timestamp"] token = request.form["token"] signature = request.form["signature"] if not verify_message(token, timestamp, signature): app.logger.warning("Message failed to verify, aborting!") abort(400) if status != "sign_in": app.logger.debug("Ignorning non-sign in: %s", status) return "" entry = json.loads(entry) date = entry.get("signed_in_time_utc") date = datetime.strptime(date, "%Y-%m-%d %H:%M:%S") date = date.replace(tzinfo=pytz.UTC) date = date.astimezone(pytz.timezone(loc_info[1])) visitor_name = entry.get("your_full_name") # Copy the image from Envoy to our own S3 bucket r = requests.get(entry['photo_url'], stream=True) if r.status_code == 200: keyname = "{}/{}.jpg".format(location, entry["id"]) bucket = s3.get_bucket(s3_bucket) key = boto.s3.key.Key(bucket) key.key = "{}.jpg".format(entry["id"]) key.set_metadata("Content-Type", "image/jpeg") key.set_contents_from_file(r.raw) key.set_acl("public-read") img_url = "http://s3.amazonaws.com/{}".format(keyname) else: app.logger.debug("Got status code of %i, using default image", r.status_code) img_url = default_img_url title = link_format.substitute( date=format_datetime(date, locale='en_US'), location=loc_info[0], visitor_name=visitor_name ) # Note: I've patched PRAW to pass along the resubmit parameter in order to # prevent it from raising an AlreadySubmitted exception. s = sr.submit(title, url=img_url, raise_captcha_exception=True, resubmit=True) if isinstance(s, basestring): return s else: return s.short_link if __name__ == "__main__": app.run()
Python
0
@@ -922,55 +922,8 @@ et%22) -%0As3 = boto.connect_s3(s3_key_id, s3_secret_key) %0A%0A# @@ -2752,16 +2752,71 @@ == 200:%0A + s3 = boto.connect_s3(s3_key_id, s3_secret_key)%0A
4cd44a177147569767a8f53aed67cbee0f759667
bump version to 3.0.0-alpha
pyani/__init__.py
pyani/__init__.py
# python package version # should match r"^__version__ = '(?P<version>[^']+)'$" for setup.py """Module with main code for pyani application/package.""" __version__ = '0.3.0.dev'
Python
0.000001
@@ -169,10 +169,12 @@ .3.0 -.dev +-alpha '%0A
a176d3609e0b7f0523320322ce364544b64f8721
Update contribution team (#13472)
Utils/github_workflow_scripts/handle_external_pr.py
Utils/github_workflow_scripts/handle_external_pr.py
#!/usr/bin/env python3 import json from typing import List import urllib3 from blessings import Terminal from github import Github from github.Repository import Repository from utils import get_env_var, timestamped_print urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) print = timestamped_print REVIEWERS = ['adi88d', 'Noy-Maimon', 'Bargenish', 'yaakovi'] WELCOME_MSG = 'Thank you for your contribution. Your generosity and caring are unrivaled! Make sure to register your ' \ 'contribution by filling the [Contribution Registration](https://forms.gle/XDfxU4E61ZwEESSMA) form, ' \ 'so our content wizard @{selected_reviewer} will know he can start review the proposed changes. ' def determine_reviewer(potential_reviewers: List[str], repo: Repository) -> str: """Checks the number of open 'Contribution' PRs that have either been assigned to a user or a review was requested from the user for each potential reviewer and returns the user with the smallest amount Args: potential_reviewers (List): The github usernames from which a reviewer will be selected repo (Repository): The relevant repo Returns: str: The github username to assign to a PR """ label_to_consider = 'contribution' pulls = repo.get_pulls(state='OPEN') assigned_prs_per_potential_reviewer = {reviewer: 0 for reviewer in potential_reviewers} for pull in pulls: # we only consider 'Contribution' prs when computing who to assign pr_labels = [label.name.casefold() for label in pull.labels] if label_to_consider not in pr_labels: continue assignees = set([assignee.login for assignee in pull.assignees]) requested_reviewers, _ = pull.get_review_requests() requested_reviewers = set([requested_reviewer.login for requested_reviewer in requested_reviewers]) combined_list = assignees.union(requested_reviewers) for reviewer in potential_reviewers: if reviewer in combined_list: assigned_prs_per_potential_reviewer[reviewer] = assigned_prs_per_potential_reviewer.get(reviewer) + 1 selected_reviewer = sorted(assigned_prs_per_potential_reviewer, key=assigned_prs_per_potential_reviewer.get)[0] return selected_reviewer def main(): """Handles External PRs (PRs from forks) Performs the following operations: 1. If the external PR's base branch is master we create a new branch and set it as the base branch of the PR. 2. Labels the PR with the "Contribution" label. (Adds the "Hackathon" label where applicable.) 3. Assigns a Reviewer. 4. 
Creates a welcome comment Will use the following env vars: - CONTENTBOT_GH_ADMIN_TOKEN: token to use to update the PR - EVENT_PAYLOAD: json data from the pull_request event """ t = Terminal() payload_str = get_env_var('EVENT_PAYLOAD') if not payload_str: raise ValueError('EVENT_PAYLOAD env variable not set or empty') payload = json.loads(payload_str) print(f'{t.cyan}Processing PR started{t.normal}') org_name = 'demisto' repo_name = 'content' gh = Github(get_env_var('CONTENTBOT_GH_ADMIN_TOKEN'), verify=False) content_repo = gh.get_repo(f'{org_name}/{repo_name}') pr_number = payload.get('pull_request', {}).get('number') pr = content_repo.get_pull(pr_number) # Add 'Contribution' Label to PR contribution_label = 'Contribution' pr.add_to_labels(contribution_label) print(f'{t.cyan}Added "Contribution" label to the PR{t.normal}') # check base branch is master if pr.base.ref == 'master': print(f'{t.cyan}Determining name for new base branch{t.normal}') branch_prefix = 'contrib/' new_branch_name = f'{branch_prefix}{pr.head.label.replace(":", "_")}' existant_branches = content_repo.get_git_matching_refs(f'heads/{branch_prefix}') potential_conflicting_branch_names = [branch.ref.lstrip('refs/heads/') for branch in existant_branches] # make sure new branch name does not conflict with existing branch name while new_branch_name in potential_conflicting_branch_names: # append or increment digit if not new_branch_name[-1].isdigit(): new_branch_name += '-1' else: digit = str(int(new_branch_name[-1]) + 1) new_branch_name = f'{new_branch_name[:-1]}{digit}' master_branch_commit_sha = content_repo.get_branch('master').commit.sha # create new branch print(f'{t.cyan}Creating new branch "{new_branch_name}"{t.normal}') content_repo.create_git_ref(f'refs/heads/{new_branch_name}', master_branch_commit_sha) # update base branch of the PR pr.edit(base=new_branch_name) print(f'{t.cyan}Updated base branch of PR "{pr_number}" to "{new_branch_name}"{t.normal}') # assign reviewers / request review from reviewer_to_assign = determine_reviewer(REVIEWERS, content_repo) pr.add_to_assignees(reviewer_to_assign) pr.create_review_request(reviewers=[reviewer_to_assign]) print(f'{t.cyan}Assigned user "{reviewer_to_assign}" to the PR{t.normal}') print(f'{t.cyan}Requested review from user "{reviewer_to_assign}"{t.normal}') # create welcome comment body = WELCOME_MSG.format(selected_reviewer=reviewer_to_assign) pr.create_issue_comment(body) print(f'{t.cyan}Created welcome comment{t.normal}') if __name__ == "__main__": main()
Python
0
@@ -331,52 +331,63 @@ = %5B' -adi88d', 'Noy-Maimon', 'Bargenish', 'yaakovi +Itay4', 'yaakovpraisler', 'Shellyber', 'tomneeman151293 '%5D%0AW
35ab3d0e5a4a608339cd908ee4db1c9aefe3c5f4
Add find me service
pybotvac/robot.py
pybotvac/robot.py
import requests import hashlib import hmac import time import os.path # Disable warning due to SubjectAltNameWarning in certificate requests.packages.urllib3.disable_warnings() SUPPORTED_SERVICES = ['basic-1', 'minimal-2', 'basic-2', 'basic-3'] class UnsupportedDevice(Exception): pass class Robot: """Data and methods for interacting with a Neato Botvac Connected vacuum robot""" def __init__(self, serial, secret, traits, name=''): """ Initialize robot :param serial: Robot serial :param secret: Robot secret :param name: Name of robot (optional) :param traits: Extras the robot supports """ self.name = name self.serial = serial self.secret = secret self.traits = traits self._url = 'https://nucleo.neatocloud.com/vendors/neato/robots/{0}/messages'.format(self.serial) self._headers = {'Accept': 'application/vnd.neato.nucleo.v1'} if self.service_version not in SUPPORTED_SERVICES: raise UnsupportedDevice("Version " + self.service_version + " of service houseCleaning is not known") def __str__(self): return "Name: %s, Serial: %s, Secret: %s Traits: %s" % (self.name, self.serial, self.secret, self.traits) def _message(self, json): """ Sends message to robot with data from parameter 'json' :param json: dict containing data to send :return: server response """ cert_path = os.path.join(os.path.dirname(__file__), 'cert', 'neatocloud.com.crt') response = requests.post(self._url, json=json, verify=cert_path, auth=Auth(self.serial, self.secret), headers=self._headers) response.raise_for_status() return response def start_cleaning(self, mode=2, navigation_mode=1, category=None): # mode & naivigation_mode used if applicable to service version # mode: 1 eco, 2 turbo # navigation_mode: 1 normal, 2 extra care, 3 deep # category: 2 non-persistent map, 4 persistent map #Default to using the persistent map if we support basic-3. 
if category is None: category = 4 if self.service_version == 'basic-3' else 2 if self.service_version == 'basic-1': json = {'reqId': "1", 'cmd': "startCleaning", 'params': { 'category': 2, 'mode': mode, 'modifier': 1} } elif self.service_version == 'basic-3': json = {'reqId': "1", 'cmd': "startCleaning", 'params': { 'category': 2, 'mode': mode, 'modifier': 1, "navigationMode": navigation_mode} } elif self.service_version == 'minimal-2': json = {'reqId': "1", 'cmd': "startCleaning", 'params': { 'category': 2, "navigationMode": navigation_mode} } else: # self.service_version == 'basic-2' json = {'reqId': "1", 'cmd': "startCleaning", 'params': { 'category': 2, 'mode': mode, 'modifier': 1, "navigationMode": navigation_mode} } return self._message(json) def pause_cleaning(self): return self._message({'reqId': "1", 'cmd': "pauseCleaning"}) def resume_cleaning(self): return self._message({'reqId': "1", 'cmd': "resumeCleaning"}) def stop_cleaning(self): return self._message({'reqId': "1", 'cmd': "stopCleaning"}) def send_to_base(self): return self._message({'reqId': "1", 'cmd': "sendToBase"}) def get_robot_state(self): return self._message({'reqId': "1", 'cmd': "getRobotState"}) def enable_schedule(self): return self._message({'reqId': "1", 'cmd': "enableSchedule"}) def disable_schedule(self): return self._message({'reqId': "1", 'cmd': "disableSchedule"}) def get_schedule(self): return self._message({'reqId': "1", 'cmd': "getSchedule"}) @property def schedule_enabled(self): return self.get_robot_state().json()['details']['isScheduleEnabled'] @schedule_enabled.setter def schedule_enabled(self, enable): if enable: self.enable_schedule() else: self.disable_schedule() @property def state(self): return self.get_robot_state().json() @property def available_services(self): return self.state['availableServices'] @property def service_version(self): return self.available_services['houseCleaning'] class Auth(requests.auth.AuthBase): """Create headers for request authentication""" def __init__(self, serial, secret): self.serial = serial self.secret = secret def __call__(self, request): date = time.strftime('%a, %d %b %Y %H:%M:%S', time.gmtime()) + ' GMT' try: # Attempt to decode request.body (assume bytes received) msg = '\n'.join([self.serial.lower(), date, request.body.decode('utf8')]) except AttributeError: # Decode failed, assume request.body is already type str msg = '\n'.join([self.serial.lower(), date, request.body]) signing = hmac.new(key=self.secret.encode('utf8'), msg=msg.encode('utf8'), digestmod=hashlib.sha256) request.headers['Date'] = date request.headers['Authorization'] = "NEATOAPP " + signing.hexdigest() return request
Python
0.000003
@@ -4475,24 +4475,113 @@ tSchedule%22%7D) +%0A %0A def locate(self):%0A return self._message(%7B'reqId': %221%22, 'cmd': %22findMe%22%7D) %0A%0A @prope
3fda8faef7dccaefc29bb9c4a84fce4819141118
update some comments and names for readability
src/watchdog/observers/inotify_buffer.py
src/watchdog/observers/inotify_buffer.py
# -*- coding: utf-8 -*- # # Copyright 2014 Thomas Amland <thomas.amland@gmail.com> # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import time import logging import threading from collections import deque from watchdog.utils import DaemonThread from .inotify_c import Inotify STOP_EVENT = object() class _Worker(DaemonThread): """ Thread that reads events from `inotify` and writes to `queue`. """ def __init__(self, inotify, queue): DaemonThread.__init__(self) self._read_events = inotify.read_events self._queue = queue def run(self): while self.should_keep_running(): inotify_events = self._read_events() for inotify_event in inotify_events: logging.debug("worker: in event %s", inotify_event) if inotify_event.is_moved_to: from_event = self._queue._catch(inotify_event.cookie) if from_event: self._queue._put((from_event, inotify_event)) else: logging.debug("worker: could not find maching move_from event") self._queue._put(inotify_event) else: self._queue._put(inotify_event) class InotifyBuffer(object): """ A wrapper for `Inotify` that keeps events in memory for `delay` seconds. IN_MOVED_FROM and IN_MOVED_TO events are paired during this time. """ def __init__(self, path, recursive=False): self.delay = 0.5 self._lock = threading.Lock() self._not_empty = threading.Condition(self._lock) self._queue = deque() self._inotify = Inotify(path, recursive) self._worker = _Worker(self._inotify, self) self._worker.start() def read_event(self): """ Returns a single event or a tuple of from/to events in case of a paired move event. """ while True: # wait for queue self._not_empty.acquire() while len(self._queue) == 0: self._not_empty.wait() head, insert_time = self._queue[0] self._not_empty.release() # wait for delay time_left = insert_time + self.delay - time.time() while time_left > 0: time.sleep(time_left) time_left = insert_time + self.delay - time.time() # return if event is still here self._lock.acquire() try: if len(self._queue) > 0 and self._queue[0][0] is head: self._queue.popleft() return head finally: self._lock.release() def close(self): self._worker.stop() self._inotify.close() self._worker.join() # Add the stop event to unblock the read_event which waits for # events in the queue... even after inotify buffer is closed. self._put(STOP_EVENT) def _put(self, elem): self._lock.acquire() self._queue.append((elem, time.time())) self._not_empty.notify() self._lock.release() def _catch(self, cookie): self._lock.acquire() ret = None for i, elem in enumerate(self._queue): event, _ = elem try: if event.is_moved_from and event.cookie == cookie: ret = event del self._queue[i] break except AttributeError: pass self._lock.release() return ret
Python
0
@@ -769,16 +769,45 @@ notify%0A%0A +__all__ = %5B'InotifyBuffer'%5D%0A%0A STOP_EVE @@ -828,17 +828,16 @@ %0A%0Aclass -_ Worker(D @@ -920,15 +920,24 @@ to -%60queue%60 +an InotifyBuffer .%0A @@ -975,21 +975,22 @@ notify, -queue +buffer ):%0A @@ -1086,21 +1086,23 @@ lf._ -queue = queue +buffer = buffer %0A%0A @@ -1411,21 +1411,22 @@ = self._ -queue +buffer ._catch( @@ -1508,29 +1508,30 @@ self._ -queue +buffer ._put((from_ @@ -1689,37 +1689,38 @@ self._ -queue +buffer ._put(inotify_ev @@ -1772,21 +1772,22 @@ self._ -queue +buffer ._put(in @@ -2263,17 +2263,16 @@ orker = -_ Worker(s @@ -3374,138 +3374,50 @@ # -Add the stop event to unblock the read_event which waits for%0A # events in the queue... even after inotify buffer is closed. +Interrupt thread calling %60self.read_event%60 %0A @@ -3464,19 +3464,20 @@ (self, e -lem +vent ):%0A @@ -3529,19 +3529,20 @@ ppend((e -lem +vent , time.t @@ -3638,24 +3638,97 @@ f, cookie):%0A + %22%22%22 Remove and return the MOVE_FROM event matching %60cookie%60. %22%22%22%0A self
f257aa683e9d7a1d9881434c03c6571391ef34af
fix a bug where django versions prior to 1.4 couldn't show headers
pennyblack/models/mail.py
pennyblack/models/mail.py
import hashlib import random from rfc822 import dump_address_pair from django.contrib import admin from django.contrib.contenttypes import generic from django.core import mail from django.core.validators import email_re from django.db import models from django.http import HttpRequest from django.template.loader import render_to_string from django.template import RequestContext from pennyblack import settings try: from django.utils.timezone import now except ImportError: from datetime import datetime now = datetime.now() #----------------------------------------------------------------------------- # Mail #----------------------------------------------------------------------------- class Mail(models.Model): """ This is a single Mail, it's part of a Job """ viewed = models.DateTimeField(default=None, null=True) bounced = models.BooleanField(default=False) sent = models.BooleanField(default=False) content_type = models.ForeignKey('contenttypes.ContentType') object_id = models.PositiveIntegerField() person = generic.GenericForeignKey('content_type', 'object_id') job = models.ForeignKey('pennyblack.Job', related_name="mails") mail_hash = models.CharField(max_length=32, blank=True) email = models.EmailField() # the address is stored when the mail is sent class Meta: verbose_name = 'mail' verbose_name_plural = 'mails' app_label = 'pennyblack' def __unicode__(self): return u'%s to %s' % (self.job, self.person,) def save(self, **kwargs): if self.mail_hash == u'': self.mail_hash = hashlib.md5(str(self.id) + str(random.random())).hexdigest() super(Mail, self).save(**kwargs) def mark_sent(self): """ Marks the email as beeing sent. """ self.sent = True self.save() def mark_viewed(self): """ Marks the email as beeing viewed and if it's not already viewed it stores the view date. """ if not self.viewed: self.viewed = now() self.save() def on_landing(self, request): """ Is executed every time a user landed on the website after clicking on a link in this email. It tries to execute the on_landing method on the person object and on the group object. """ self.mark_viewed() if hasattr(self.person, 'on_landing') and hasattr(self.person.on_landing, '__call__'): self.person.on_landing(request) if self.job.content_type is not None and \ hasattr(self.job.group_object, 'on_landing') and \ hasattr(self.job.group_object.on_landing, '__call__'): self.group_object.on_landing(request) def bounce(self): """ Is executed if this email is bounced. """ self.bounced = True self.save() self.person.on_bounce(self) def unsubscribe(self): """ Is executed if the unsubscribe link is clicked. """ return self.person.unsubscribe() def is_valid(self): """ Checks if this Mail is valid by validating the email address. """ return email_re.match(self.person.get_email()) def get_email(self): """ Gets the email address. If it has no email address set, it tries to get it from the person object. """ if self.email != '': return self.email return self.person.get_email() get_email.short_description = "E-Mail" def get_message(self): """ Returns a email message object """ self.email = self.person.get_email() if self.job.newsletter.reply_email != '': headers = {'Reply-To': self.job.newsletter.reply_email} else: headers = {} message = mail.EmailMessage( self.job.newsletter.subject, self.get_content(), dump_address_pair((self.job.newsletter.sender.name, self.job.newsletter.sender.email)), [self.email], headers=headers, ) message.content_subtype = "html" return message def get_content(self, webview=False): """ Renders the email content. 
If webview is True it includes also a html header and doesn't display the webview link. """ newsletter = self.job.newsletter context = self.get_context() context['newsletter'] = newsletter context['webview'] = webview request = HttpRequest() request.content_context = context return render_to_string(newsletter.template.path, context, context_instance=RequestContext(request)) def get_context(self): """ Returns the context of this email as a dict. """ return { 'person': self.person, 'group_object': self.job.group_object, 'mail': self, 'base_url': self.job.newsletter.get_base_url() } def get_header_url(self): """ Gets the header url for this email. """ return self.job.newsletter.header_url_replaced.replace('{{mail.mail_hash}}', self.mail_hash).replace('{{base_url}}', self.job.newsletter.get_base_url()) class MailInline(admin.TabularInline): model = Mail max_num = 0 can_delete = False fields = ('get_email',) readonly_fields = ('get_email',) def queryset(self, request): """ Don't display Inlines if there are more than a certain amount """ if request._pennyblack_job_obj.mails.count() > settings.JOB_MAIL_INLINE_COUNT: return super(MailInline, self).queryset(request).filter(pk=0) return super(MailInline, self).queryset(request)
Python
0
@@ -531,18 +531,16 @@ time.now -() %0A%0A%0A#----
3066837091621720be0b0338d12ed66fd24a86b1
bump version
pyiso/__init__.py
pyiso/__init__.py
import imp import os.path __version__ = '0.2.6' BALANCING_AUTHORITIES = { 'BPA': {'module': 'bpa', 'class': 'BPAClient'}, 'CAISO': {'module': 'caiso', 'class': 'CAISOClient'}, 'ERCOT': {'module': 'ercot', 'class': 'ERCOTClient'}, 'ISONE': {'module': 'isone', 'class': 'ISONEClient'}, 'MISO': {'module': 'miso', 'class': 'MISOClient'}, 'NEVP': {'module': 'nvenergy', 'class': 'NVEnergyClient'}, 'NYISO': {'module': 'nyiso', 'class': 'NYISOClient'}, 'PJM': {'module': 'pjm', 'class': 'PJMClient'}, 'SPPC': {'module': 'nvenergy', 'class': 'NVEnergyClient'}, 'SPP': {'module': 'spp', 'class': 'SPPClient'}, } def client_factory(client_name, **kwargs): """Return a client for an external data set""" # set up dir_name = os.path.dirname(os.path.abspath(__file__)) error_msg = 'No client found for name %s' % client_name client_key = client_name.upper() # find client try: client_vals = BALANCING_AUTHORITIES[client_key] module_name = client_vals['module'] class_name = client_vals['class'] except KeyError: raise ValueError(error_msg) # find module try: fp, pathname, description = imp.find_module(module_name, [dir_name]) except ImportError: raise ValueError(error_msg) # load try: mod = imp.load_module(module_name, fp, pathname, description) finally: # Since we may exit via an exception, close fp explicitly. if fp: fp.close() # instantiate class try: client_inst = getattr(mod, class_name)(**kwargs) except AttributeError: raise ValueError(error_msg) # set name client_inst.NAME = client_name return client_inst
Python
0
@@ -44,9 +44,9 @@ 0.2. -6 +7 '%0A%0A%0A
7e41ab10d169b1a8e6533b3287d95d9b4c6c7203
Handle None in convert_tstamp()
djstripe/utils.py
djstripe/utils.py
# -*- coding: utf-8 -*- """ .. module:: djstripe.utils. :synopsis: dj-stripe - Utility functions related to the djstripe app. .. moduleauthor:: @kavdev, @pydanny, @wahuneke """ from __future__ import unicode_literals import datetime from django.conf import settings from django.contrib.auth import get_user_model from django.contrib.auth.models import AnonymousUser from django.core.exceptions import ImproperlyConfigured from django.utils import timezone ANONYMOUS_USER_ERROR_MSG = ( "dj-stripe's payment checking mechanisms require the user " "be authenticated before use. Please use django.contrib.auth's " "login_required decorator or a LoginRequiredMixin. " "Please read the warning at " "http://dj-stripe.readthedocs.org/en/latest/usage.html#ongoing-subscriptions." ) def subscriber_has_active_subscription(subscriber, plan=None): """ Helper function to check if a subscriber has an active subscription. Throws improperlyConfigured if the subscriber is an instance of AUTH_USER_MODEL and get_user_model().is_anonymous == True. Activate subscription rules (or): * customer has active subscription If the subscriber is an instance of AUTH_USER_MODEL, active subscription rules (or): * customer has active subscription * user.is_superuser * user.is_staff :param subscriber: The subscriber for which to check for an active subscription. :type subscriber: dj-stripe subscriber :param plan: The plan for which to check for an active subscription. If plan is None and there exists only one subscription, this method will check if that subscription is active. Calling this method with no plan and multiple subscriptions will throw an exception. :type plan: Plan or string (plan ID) """ if isinstance(subscriber, AnonymousUser): raise ImproperlyConfigured(ANONYMOUS_USER_ERROR_MSG) if isinstance(subscriber, get_user_model()): if subscriber.is_superuser or subscriber.is_staff: return True from .models import Customer customer, created = Customer.get_or_create(subscriber) if created or not customer.has_active_subscription(plan): return False return True def get_supported_currency_choices(api_key): """ Pull a stripe account's supported currencies and returns a choices tuple of those supported currencies. :param api_key: The api key associated with the account from which to pull data. :type api_key: str """ import stripe stripe.api_key = api_key account = stripe.Account.retrieve() supported_payment_currencies = stripe.CountrySpec.retrieve(account["country"])["supported_payment_currencies"] return [(currency, currency.upper()) for currency in supported_payment_currencies] def dict_nested_accessor(d, name): """ Access a dictionary value, possibly in a nested dictionary. >>> dict_nested_accessor({'id': 'joe'}, 'id') "joe" >>> dict_nested_accessor({'inner': {'id': 'joe'}}, 'inner.id') "joe" :type d: dict """ names = name.split(".", 1) if len(names) > 1: return dict_nested_accessor(d[names[0]], names[1]) else: return d[name] def clear_expired_idempotency_keys(): from .models import IdempotencyKey threshold = timezone.now() - datetime.timedelta(hours=24) IdempotencyKey.objects.filter(created__lt=threshold).delete() def convert_tstamp(response): """ Convert a Stripe API timestamp response (unix epoch) to a native datetime. :rtype: datetime """ # Overrides the set timezone to UTC - I think... tz = timezone.utc if settings.USE_TZ else None return datetime.datetime.fromtimestamp(response, tz) # TODO: Finish this. 
CURRENCY_SIGILS = { "CAD": "$", "EUR": "€", "GBP": "£", "USD": "$", } def get_friendly_currency_amount(amount, currency): currency = currency.upper() sigil = CURRENCY_SIGILS.get(currency, "") return "{sigil}{amount} {currency}".format(sigil=sigil, amount=amount, currency=currency)
Python
0.000002
@@ -3616,24 +3616,123 @@ ime%0A %22%22%22%0A + if response is None:%0A # Allow passing None to convert_tstamp()%0A return response%0A%0A # Overri
76c8096b3aed79391614b32608ab446613c42034
Add LOG_LEVEL global set by DEBUG=True in environment
pyiso/__init__.py
pyiso/__init__.py
import imp import os.path __version__ = '0.2.11' BALANCING_AUTHORITIES = { 'AZPS': {'module': 'sveri', 'class': 'SVERIClient'}, 'BPA': {'module': 'bpa', 'class': 'BPAClient'}, 'CAISO': {'module': 'caiso', 'class': 'CAISOClient'}, 'DEAA': {'module': 'sveri', 'class': 'SVERIClient'}, 'ELE': {'module': 'sveri', 'class': 'SVERIClient'}, 'ERCOT': {'module': 'ercot', 'class': 'ERCOTClient'}, 'HGMA': {'module': 'sveri', 'class': 'SVERIClient'}, 'IID': {'module': 'sveri', 'class': 'SVERIClient'}, 'ISONE': {'module': 'isone', 'class': 'ISONEClient'}, 'GRIF': {'module': 'sveri', 'class': 'SVERIClient'}, 'MISO': {'module': 'miso', 'class': 'MISOClient'}, 'NEVP': {'module': 'nvenergy', 'class': 'NVEnergyClient'}, 'NYISO': {'module': 'nyiso', 'class': 'NYISOClient'}, 'PJM': {'module': 'pjm', 'class': 'PJMClient'}, 'PNM': {'module': 'sveri', 'class': 'SVERIClient'}, 'SPPC': {'module': 'nvenergy', 'class': 'NVEnergyClient'}, 'SPP': {'module': 'spp', 'class': 'SPPClient'}, 'SRP': {'module': 'sveri', 'class': 'SVERIClient'}, 'TEPC': {'module': 'sveri', 'class': 'SVERIClient'}, 'WALC': {'module': 'sveri', 'class': 'SVERIClient'}, 'EU': {'module': 'eu', 'class': 'EUClient'}, } def client_factory(client_name, **kwargs): """Return a client for an external data set""" # set up dir_name = os.path.dirname(os.path.abspath(__file__)) error_msg = 'No client found for name %s' % client_name client_key = client_name.upper() # find client try: client_vals = BALANCING_AUTHORITIES[client_key] module_name = client_vals['module'] class_name = client_vals['class'] except KeyError: raise ValueError(error_msg) # find module try: fp, pathname, description = imp.find_module(module_name, [dir_name]) except ImportError: raise ValueError(error_msg) # load try: mod = imp.load_module(module_name, fp, pathname, description) finally: # Since we may exit via an exception, close fp explicitly. if fp: fp.close() # instantiate class try: client_inst = getattr(mod, class_name)(**kwargs) except AttributeError: raise ValueError(error_msg) # set name client_inst.NAME = client_name return client_inst
Python
0.000064
@@ -23,33 +23,418 @@ ath%0A -%0A%0A__version__ = '0.2.11'%0A +from os import environ%0Afrom logging import DEBUG, INFO%0A%0A#########################################%0A# For Testing Purposes%0A# Add caching to unittesting%0A# Print every time the testing hits the cache successfully%0Aimport requests%0Aimport requests_cache%0Arequests_cache.install_cache(expire_after=60*10)%0A%0A__version__ = '0.2.11'%0A%0Alog_dict = %7B'True': DEBUG, False: INFO%7D%0ALOG_LEVEL = log_dict%5Benviron.get('DEBUG', False)%5D %0A%0ABA @@ -2035,16 +2035,17 @@ odule'%5D%0A +%0A
42ea9fef4203d5acd73e732dbe0e4d8672e81d17
bump version for pypi
jax/version.py
jax/version.py
# Copyright 2018 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. __version__ = "0.1.33"
Python
0
@@ -589,11 +589,11 @@ = %220.1.3 -3 +4 %22%0A
bcc8164f2e6ed4401dc5ecb74a28ebe8554f7b82
Add Windows support.
binding.gyp
binding.gyp
{ 'targets': [{ 'target_name': 'robotjs', 'include_dirs': [ '<!(node -e \'require("nan")\')' ], 'cflags': [ '-Wall', '-Wparentheses', '-Winline', '-Wbad-function-cast', '-Wdisabled-optimization' ], 'conditions': [ ['OS == "mac"', { 'include_dirs': [ 'System/Library/Frameworks/CoreFoundation.Framework/Headers', 'System/Library/Frameworks/Carbon.Framework/Headers', 'System/Library/Frameworks/ApplicationServices.framework/Headers', 'System/Library/Frameworks/OpenGL.framework/Headers', ], 'link_settings': { 'libraries': [ '-framework Carbon', '-framework CoreFoundation', '-framework ApplicationServices', '-framework OpenGL' ] } }], ['OS == "linux"', { 'link_settings': { 'libraries': [ '-lpng', '-lz', '-lX11', '-lXtst' ] }, 'sources': [ 'src/xdisplay.c' ] }] ], 'sources': [ 'src/robotjs.cc', 'src/deadbeef_rand.c', 'src/mouse.c', 'src/keypress.c', 'src/keycode.c', 'src/screen.c', 'src/screengrab.c', 'src/MMBitmap.c' ] }] }
Python
0
@@ -76,38 +76,25 @@ ' -%3C!( node - -e %5C'require(%22nan%22)%5C') +_modules/nan/ '%0A @@ -1097,24 +1097,89 @@ %5D%0A + %7D%5D,%0A %5B%22OS=='win'%22, %7B%0A 'defines': %5B%0A 'IS_WINDOWS'%5D%0A %7D%5D%0A %5D @@ -1367,24 +1367,48 @@ eengrab.c',%0A + 'src/snprintf.c',%0A 'src/M @@ -1430,9 +1430,8 @@ %5D%0A %7D%5D%0A%7D -%0A
c5da75e3acb4ba4c69204ff1ad3e7e89d6710001
Add whitespace in tests
client/tests/framework_test.py
client/tests/framework_test.py
#!/usr/bin/python3 import unittest import ok class TestProtocol(ok.Protocol): name = "test" def __init__(self, args, src_files): ok.Protocol.__init__(args, src_files) self.called_start = 0 self.called_interact = 0 def on_start(self, buf): self.called_start += 1 def on_interact(self, buf): self.called_interact += 1 class OkTest(unittest.TestCase): def setUp(self): self.hw1 = './demo_assignments/hw1.py' self.hw1_tests = './demo_assignments/hw1_tests.py' def test_parse_input(self): _ = ok.parse_input() # Does not crash and returns a value. def test_is_src_file(self): self.assertTrue(ok.is_src_file('hw1.py')) self.assertFalse(ok.is_src_file('hw1_tests.py')) self.assertFalse(ok.is_src_file('hw1_tests')) self.assertFalse(ok.is_src_file('hw1.html')) self.assertFalse(ok.is_src_file('ok.py')) def test_get_assignment(self): self.assertTrue(ok.get_assignment(self.hw1) == 'hw1') self.assertFalse(ok.get_assignment(self.hw1_tests)) def test_group_by_assignment(self): paths = [self.hw1, self.hw1_tests] groups = ok.group_by_assignment(paths) self.assertIn('hw1', groups) self.assertEqual(groups['hw1'], paths[0:1]) def test_find_assignment(self): assignment, src_files = ok.find_assignment(None, '.') self.assertEqual(assignment, 'hw1') self.assertEqual(src_files, [self.hw1]) self.assertRaises(Exception, ok.find_assignment, [None, 'tests']) self.assertRaises(Exception, ok.find_assignment, ['hw2', '.'])
Python
0.999029
@@ -40,16 +40,17 @@ ort ok%0A%0A +%0A class Te @@ -372,16 +372,17 @@ t += 1%0A%0A +%0A class Ok
c552dc428b78fae168d59d3ff5af1818cf56f0e2
use DNSServiceGetAddrInfo(…) on Mac OS
binding.gyp
binding.gyp
{ 'targets': [ { 'target_name': 'dns_sd_bindings' , 'sources': [ 'src/dns_sd.cpp' , 'src/dns_service_browse.cpp' , 'src/dns_service_enumerate_domains.cpp' , 'src/dns_service_get_addr_info.cpp' , 'src/dns_service_process_result.cpp' , 'src/dns_service_ref.cpp' , 'src/dns_service_ref_deallocate.cpp' , 'src/dns_service_ref_sock_fd.cpp' , 'src/dns_service_register.cpp' , 'src/dns_service_resolve.cpp' , 'src/mdns_utils.cpp' , 'src/txt_record_ref.cpp' , 'src/txt_record_create.cpp' , 'src/txt_record_deallocate.cpp' , 'src/txt_record_set_value.cpp' , 'src/txt_record_get_length.cpp' , 'src/txt_record_buffer_to_object.cpp' , 'src/socket_watcher.cpp' ] , 'conditions': [ [ 'OS!="mac" and OS!="win"', { 'libraries': [ '-ldns_sd' ] }] , ['OS=="win"', { 'include_dirs': [ '$(BONJOUR_SDK_HOME)Include' ] , 'defines': [ 'HAVE_DNSSERVICEGETADDRINFO' ] , 'libraries' : [ '-l$(BONJOUR_SDK_HOME)Lib/$(Platform)/dnssd.lib' , '-lws2_32.lib' ] }] ] # The following breaks the debug build, so just ignore the warning for now. #, 'msbuild_settings': { # 'ClCompile': { 'ExceptionHandling': 'Sync' } # , 'Link' : { 'IgnoreSpecificDefaultLibraries': [ 'LIBCMT' ] } # } , 'configurations': { 'Release': { 'xcode_settings': { 'GCC_OPTIMIZATION_LEVEL': 3 } , 'cflags': [ '-O3' ] , 'ldflags': [ '-O3' ] } , 'Debug': { 'xcode_settings': { 'GCC_OPTIMIZATION_LEVEL': 0 } , 'cflags': [ '-g', '-O0', ] , 'ldflags': [ '-g', '-O0' ] } , 'Coverage': { 'xcode_settings': { 'GCC_OPTIMIZATION_LEVEL': 0 , 'OTHER_LDFLAGS': ['--coverage'] , 'OTHER_CFLAGS': ['--coverage'] } , 'cflags': [ '-O0', '--coverage' ] , 'ldflags': [ '--coverage' ] } } } ] } # vim: filetype=python shiftwidth=2 softtabstop=2 :
Python
0
@@ -1028,16 +1028,18 @@ + 'librari @@ -1065,24 +1065,116 @@ %0A %7D%5D%0A + , %5B 'OS==%22mac%22', %7B%0A 'defines': %5B 'HAVE_DNSSERVICEGETADDRINFO' %5D%0A %7D%5D%0A , %5B'OS @@ -1177,32 +1177,34 @@ %5B'OS==%22win%22', %7B%0A + 'inclu @@ -1325,19 +1325,16 @@ braries' - : %5B '-l$ @@ -1403,17 +1403,16 @@ - , '-lws2 @@ -1416,25 +1416,24 @@ ws2_32.lib'%0A -
c7764ac8c1363701b4e7fab1d8ae0e3197853b48
Update __init__.py
pylsy/__init__.py
pylsy/__init__.py
#__init__.py from .pylsy import PylsyTable __version__="1.001"
Python
0.000072
@@ -53,11 +53,11 @@ __=%221.00 -1 +3 %22%0A
8eae324c0030221a93b202a419db3f7301ad486c
read config only if file exists
pymzn/__init__.py
pymzn/__init__.py
""" PyMzn is a Python library that wraps and enhances the MiniZinc tools for CSP modelling and solving. It is built on top of the libminizinc library (version 2.0) and provides a number of off-the-shelf functions to readily solve problems encoded in MiniZinc and parse the solutions into Python objects. """ import ast import yaml import appdirs import logging from . import _utils from . import bin from . import _dzn from ._dzn import * from . import _mzn from ._mzn import * __version__ = '0.10.8' __all__ = ['debug', 'config', 'bin', 'gecode'] __all__.extend(_dzn.__all__) __all__.extend(_mzn.__all__) # TODO: update python2 branch # TODO: config solver function and default arguments to solver # TODO: mzn2doc # TODO: check the import of other files in minizinc # TODO: make it work on windows # TODO: check the ctrl+C thing which seems to not work anymore _debug_handler = None _pymzn_logger = logging.getLogger(__name__) _pymzn_logger.addHandler(logging.NullHandler()) def debug(dbg=True): global _debug_handler if dbg and _debug_handler is None: _debug_handler = logging.StreamHandler() _pymzn_logger.addHandler(_debug_handler) _pymzn_logger.setLevel(logging.DEBUG) elif not dbg and _debug_handler is not None: _pymzn_logger.removeHandler(_debug_handler) _debug_handler = None _pymzn_logger.setLevel(logging.WARNING) with open(os.path.join(appdirs.user_config_dir(__name__), 'config.yml')) as f: config = yaml.load(f) # Solvers gecode = Gecode(path=config.get('gecode')) def main(): import argparse desc = 'PyMzn is a wrapper for the MiniZinc tool pipeline.' p = argparse.ArgumentParser(description=desc) p.add_argument('--debug', action='store_true', help='display debug messages on standard output') p.add_argument('mzn', help='the mzn file to solve') p.add_argument('dzn_files', nargs='*', help='additional dzn files') p.add_argument('--data', type=ast.literal_eval, help='additional inline data') p.add_argument('-k', '--keep', action='store_true', help='whether to keep generated files') p.add_argument('-o', '--output-base', help='base name for generated files') p.add_argument('-G', '--mzn-globals-dir', help='directory of global files in the standard library') p.add_argument('-f', '--fzn-fn', help='name of proxy function for the solver') p.add_argument('--fzn-args', type=ast.literal_eval, default={}, help='arguments to pass to the solver') args = p.parse_args() if args.debug: debug() other_args = {**{'data': args.data, 'keep': args.keep, 'output_base': args.output_base, 'mzn_globals_dir': args.mzn_globals_dir, 'fzn_fn': args.fzn_fn}, **args.fzn_args} print(minizinc(args.mzn, *args.dzn_files, raw_output=True, **other_args))
Python
0.000001
@@ -1389,18 +1389,31 @@ )%0A%0A%0A -with open( +config = %7B%7D%0Acfg_file = os.p @@ -1473,16 +1473,68 @@ ml') +%0Aif os.path.isfile(cfg_file):%0A with open(cfg_file ) as f:%0A @@ -1529,16 +1529,20 @@ ) as f:%0A + conf @@ -1560,16 +1560,17 @@ oad(f)%0A%0A +%0A # Solver
45141fe7f34e0522b2270047af796644406213dc
Add user help text to error output of do_fish_indent
do_fish_indent.py
do_fish_indent.py
import sublime, sublime_plugin import os.path import subprocess # Only a TextCommand can use replace() class DoFishIndentCommand(sublime_plugin.TextCommand): def is_enabled(self): # We are very incompatible with ST1 and probably ST4 one day return 2 <= int(sublime.version()[0]) <= 3 def is_visible(self): return 'source.shell.fish' in self.view.scope_name(self.view.sel()[0].begin()) def description(self): return 'Indent and Prettify' def run(self, edit): versionAPI = int(sublime.version()[0]) # Check for executable exe = 'fish_indent' pathToDir = self.view.settings().get('fish_indent_directory') if pathToDir: exe = os.path.join(pathToDir, exe) # Select the entire contents of the file fileRegion = sublime.Region(0, self.view.size()) fileContent = self.view.substr(fileRegion) # Note the file encoding, converting to lowercase as expected by Python # However, fish_indent assumes UTF-8 encoding so the user may get unexpected results if this file's encoding is different enc = self.view.encoding().lower() if enc == 'undefined': # ie, temp file enc = 'utf-8' print('Running {0} on file with encoding {1}'.format(exe, enc)) # Run the program, which is searched for on PATH if necessary try: # Pipe the file content into fish_indent and catch the outputs p = subprocess.Popen(exe, stdin = subprocess.PIPE, stdout = subprocess.PIPE, stderr = subprocess.PIPE) out, err = p.communicate(input = fileContent.encode(enc)) except OSError: # Soft compatibility with Python 2 msg = "Couldn't find {0}".format(exe) sublime.error_message(msg) return if err: sublime.message_dialog(err.decode('utf-8')) # Create a copy of all current cursor positions pos = list( self.view.sel() ); # Replace the entire contents of the file with the output of fish_indent self.view.replace(edit, fileRegion, out.decode(enc)) # Note the user's current settings for this buffer indentUsingSpaces = self.view.settings().get('translate_tabs_to_spaces') tabSize = self.view.settings().get('tab_size') # Convert the format to the user's preferred format if indentUsingSpaces and tabSize == 4: # Do nothing as this is the format produced by fish_indent pass else: # Convert sets of 4 spaces to tabs # Note that running unexpand_tabs will set translate_tabs_to_spaces to False self.view.settings().set('tab_size', 4) self.view.run_command('unexpand_tabs') if not indentUsingSpaces: # User prefers tabs if tabSize == 4: # Conversion finished pass else: # Resize self.view.settings().set('tab_size', tabSize) else: # User prefers spaces, so reset to True self.view.settings().set('translate_tabs_to_spaces', True) # Resize tabs, then convert back into spaces self.view.settings().set('tab_size', tabSize) self.view.run_command('expand_tabs') # Revert back to the old cursor positions and centre on the first one self.view.sel().clear() if versionAPI == 3: self.view.sel().add_all(pos) elif versionAPI == 2: map(self.view.sel().add, pos) self.view.show_at_center(pos[0])
Python
0.000009
@@ -1635,16 +1635,172 @@ at(exe)%0A + if not pathToDir:%0A msg += %22. Specify a nonstandard install location in Preferences %3E Package Settings %3E friendly interactive shell %3E Settings%22%0A su
eaa17491581cbb52242fbe543dd09929f537a8bc
Add option to ignore static.
mysettings.py
mysettings.py
from src.markdown.makrdown import jinja_aware_markdown PREFERRED_URL_SCHEME = 'http' SERVER_NAME = 'localhost:5000' FLATPAGES_EXTENSION = '.md' FLATPAGES_HTML_RENDERER = jinja_aware_markdown FREEZER_IGNORE_404_NOT_FOUND = True FLATPAGES_AUTO_RELOAD = True GITHUB_URL = 'https://github.com/JetBrains/kotlin' TWITTER_URL = 'https://twitter.com/kotlin' EDIT_ON_GITHUB_URL = 'https://github.com/JetBrains/kotlin-web-site/edit/master/' PDF_URL = '/docs/kotlin-docs.pdf' FORUM_URL = 'http://devnet.jetbrains.com/community/kotlin' SITE_GITHUB_URL = 'http://github.com/JetBrains/kotlin-web-site' CODE_URL = 'https://github.com/JetBrains/kotlin-examples/tree/master' TEXT_USING_GRADLE = "In this tutorial we're going to be using Gradle but the same can be accomplished using either IntelliJ IDEA project structure or Maven. For details on setting up Gradle to work with Kotlin, see [Using Gradle](/docs/reference/using-gradle.html)."
Python
0
@@ -250,16 +250,46 @@ = True%0A +FREEZER_STATIC_IGNORE = %5B%22*%22%5D%0A GITHUB_U
9a40bd0d82c5215a8978a7d1c95f2910ee8f7f09
add UserToken model
api/models.py
api/models.py
from django.db import models from django.db.models import Q from django.utils import timezone class MaintenanceRecord(models.Model): start_date = models.DateTimeField() end_date = models.DateTimeField(blank=True, null=True) title = models.CharField(max_length=256) message = models.TextField() disable_login = models.BooleanField(default=True) created_date = models.DateTimeField(auto_now_add=True) modified_date = models.DateTimeField(auto_now=True) @classmethod def active(cls, provider=None): """ Return records that are active """ now = timezone.now() records = MaintenanceRecord.objects.filter( Q(start_date__lt=now), Q(end_date__gt=now) | Q(end_date__isnull=True)) return records.all() @classmethod def disable_login_access(cls, request): """ Return true if any active record wants login disabled """ disable_login = False records = MaintenanceRecord.active() for record in records: if record.disable_login: disable_login = True return disable_login
Python
0
@@ -87,16 +87,60 @@ imezone%0A +from django.contrib.auth.models import User%0A %0A%0Aclass @@ -1185,20 +1185,249 @@ return disable_login +%0A%0A%0Aclass UserToken(models.Model):%0A token = models.CharField(max_length=128)%0A user = models.ForeignKey(User)%0A created_date = models.DateTimeField(auto_now_add=True)%0A modified_date = models.DateTimeField(auto_now=True)%0A
44242e8c539c79793f4fe3a6ce716d3edbe79782
Fix follow board
pypin/__init__.py
pypin/__init__.py
""" Module doc string """ import json import requests import urllib.request class PyPin(object): """Python client consume Pinterest API""" TIMEOUT = 5 API_HOST = 'https://api.pinterest.com/' def __init__(self, accesstoken, version='v1'): self.accesstoken = accesstoken self.api_verson = version @staticmethod def call(url, method='get', params=None): """ API call to Pinterest url: String, API endpoint with access token attached method: String, HTTP method, get post put delete params: Dict, optional, supply necessary parameters fields: String, optional, expected return fields, will return default fields if not specified """ request = getattr(requests, method)(url, timeout=PyPin.TIMEOUT, data=params) print (request.json()) if request.status_code in [200, 201]: return request.json()['data'] else: raise RuntimeError('API request return status code '+str(request.status_code)) def get_me(self): """Get the authenticated user's Pinterest account info""" api_endpoint = PyPin.API_HOST + self.api_verson +'/me/' request_url = api_endpoint + '?access_token=' + self.accesstoken return PyPin.call(request_url) def get_likes(self): """Get the pins that the authenticated user likes""" api_endpoint = PyPin.API_HOST + self.api_verson +'/me/likes/' request_url = api_endpoint + '?access_token=' + self.accesstoken return PyPin.call(request_url) def get_followers(self): """Get the authenticated user's followers""" api_endpoint = PyPin.API_HOST + self.api_verson +'/me/followers/' request_url = api_endpoint + '?access_token=' + self.accesstoken return PyPin.call(request_url) def get_following_boards(self): """Get the boards that the authenticated user follows""" api_endpoint = PyPin.API_HOST + self.api_verson +'/me/following/boards/' request_url = api_endpoint + '?access_token=' + self.accesstoken return PyPin.call(request_url) def get_following_users(self): """Get the Pinterest users that the authenticated user follows""" api_endpoint = PyPin.API_HOST + self.api_verson +'/me/following/users/' request_url = api_endpoint + '?access_token=' + self.accesstoken return PyPin.call(request_url) def get_following_interests(self): """Get the interests that the authenticated user follows""" api_endpoint = PyPin.API_HOST + self.api_verson +'/me/following/interests/' request_url = api_endpoint + '?access_token=' + self.accesstoken return PyPin.call(request_url) def follow_user(self, user_name): """Follow a user parameters: name: 'user_name', description: 'user name' """ api_endpoint = PyPin.API_HOST + self.api_verson +'/me/following/users/' request_url = api_endpoint + '?access_token=' + self.accesstoken return PyPin.call(request_url, 'post', { 'user': user_name }) def unfollow_user(self, user_name): """Unfollow a user parameters: name: 'user_name', description: 'user name' """ api_endpoint = PyPin.API_HOST + self.api_verson +'/me/following/users/' + user_name request_url = api_endpoint + '?access_token=' + self.accesstoken return PyPin.call(request_url, 'delete') def follow_board(self, board_id): """Follow a board parameters: name: 'board_id', description: 'board name' """ api_endpoint = PyPin.API_HOST + self.api_verson +'/me/following/boards/' request_url = api_endpoint + '?access_token=' + self.accesstoken return PyPin.call(request_url, 'post', board_id) def unfollow_board(self, board_id): """Unfollow a board parameters: name: 'board_id', description: 'board name' """ api_endpoint = PyPin.API_HOST + self.api_verson +'/me/following/boards/' + board_id request_url = api_endpoint + '?access_token=' + self.accesstoken return 
PyPin.call(request_url, 'delete') def follow_interest(self, interest_id): """Follow an interest parameters: name: 'interest_id', description: 'interest name' """ api_endpoint = PyPin.API_HOST + self.api_verson +'/me/following/interests/' request_url = api_endpoint + '?access_token=' + self.accesstoken return PyPin.call(request_url, 'post', interest_id) def unfollow_interest(self, interest_id): """Unfollow an interest parameters: name: 'interest_id', description: 'interest name' """ api_endpoint = PyPin.API_HOST + self.api_verson +'/me/following/interests/' request_url = api_endpoint + '?access_token=' + self.accesstoken return PyPin.call(request_url, 'delete', interest_id) def get_pins(self): """Get all of authenticated users's pins""" api_endpoint = PyPin.API_HOST + self.api_verson +'/me/pins/' request_url = api_endpoint + '?access_token=' + self.accesstoken return PyPin.call(request_url) def get_boards(self): """Get all of authenticated users's boards""" api_endpoint = PyPin.API_HOST + self.api_verson +'/me/boards/' request_url = api_endpoint + '?access_token=' + self.accesstoken return PyPin.call(request_url, 'get') def get_user(self, username): """Get the account info for a Pinterest user""" pass def create_board(self, board_info): """Create a new board parameters: name: 'board name', description: 'Board description, optional' """ api_endpoint = PyPin.API_HOST + self.api_verson +'/boards/' request_url = api_endpoint + '?access_token=' + self.accesstoken return PyPin.call(request_url, 'post', board_info) def create_pin(self, pin_info): """Create a pin on a board pinInfo structure: board: '<username>/<board_name>' OR '<board_id>', note: 'My note' link: 'https://www.google.com', image_url: 'http://marketingland.com/pinterest-logo-white-1920.png' """ api_endpoint = PyPin.API_HOST + self.api_verson +'/pins/' request_url = api_endpoint + '?access_token=' + self.accesstoken return PyPin.call(request_url, 'post', pin_info)
Python
0.000191
@@ -3865,32 +3865,43 @@ est_url, 'post', + %7B 'board': board_id)%0A%0A @@ -3893,16 +3893,18 @@ board_id + %7D )%0A%0A d
379068d31623662c0b349f26d1cd610612963b82
add re module to be more reliable
joinstsfile.py
joinstsfile.py
#!/usr/bin/env python3 import os path='/home/ruan/git/stm/' #path为文件所在目录,windows下如‘D:\\data\\’,直接覆盖源文件,请注意保存原始数据 for file in os.listdir(path): os.rename(os.path.join(path,file),os.path.join(path,file.split('.')[2][2:])) filenu = len(os.listdir(path)) + 1 data=[] with open(os.path.join(path,'001'),'rb') as fo: for line in fo.readlines()[526:]: data.append([line.decode('ascii').split('\t')[1],line.decode('ascii').split('\t')[5]]) j=2 while j<filenu : with open(os.path.join(path,str(j).zfill(3)),'rb') as fo: i=0 for line in fo.readlines()[526:]: data[i].append(line.decode('ascii').split('\t')[5]) i+=1 j+=1 with open(os.path.join(path,'final.txt'),'w') as fout: i=len(data) j=len(data[0]) k=0 while k<i: l=0 while l<j: fout.write(data[k][l]) fout.write('\t') l+=1 fout.write('\n') k=k+1
Python
0
@@ -29,14 +29,19 @@ t os +, re %0Apath= +r '/ho @@ -62,147 +62,127 @@ m/'%0A -#path%E4%B8%BA%E6%96%87%E4%BB%B6%E6%89%80%E5%9C%A8%E7%9B%AE%E5%BD%95%EF%BC%8Cwindows%E4%B8%8B%E5%A6%82%E2%80%98D:%5C%5Cdata%5C%5C%E2%80%99,%E7%9B%B4%E6%8E%A5%E8%A6%86%E7%9B%96%E6%BA%90%E6%96%87%E4%BB%B6%EF%BC%8C%E8%AF%B7%E6%B3%A8%E6%84%8F%E4%BF%9D%E5%AD%98%E5%8E%9F%E5%A7%8B%E6%95%B0%E6%8D%AE%0Afor file in os.listdir(path):%0A os.rename(os.path.join(path,file),os.path.join(path, +namespace=%7B%7D%0Adata=%5B%5D%0Afor file in os.listdir(path):%0A if re.match('A%5Cd%7B6%7D%5C.%5Cd%7B6%7D%5C.L%5Cd%7B4%7D%5C.VERT',file): namespace%5Bint( file @@ -204,52 +204,57 @@ 2:%5D) -)%0A +%5D= file -nu = len(os.listdir(path)) + 1%0Adata=%5B%5D +%0Akeys=sorted(%5Bx for x in namespace.keys()%5D) %0Awit @@ -282,13 +282,26 @@ ath, -'001' +namespace%5Bkeys%5B0%5D%5D ),'r @@ -410,65 +410,24 @@ %5B1%5D, -line.decode('ascii').split('%5Ct')%5B5%5D%5D)%0Aj=2%0Awhile j%3Cfilenu +%5D)%0Afor i in keys :%0A @@ -460,23 +460,20 @@ ath, -str(j).zfill(3) +namespace%5Bi%5D ),'r @@ -491,17 +491,17 @@ -i +j =0%0A @@ -554,17 +554,17 @@ data%5B -i +j %5D.append @@ -613,17 +613,8 @@ - i+=1%0A @@ -681,173 +681,83 @@ -i=len( +for line in data -) +: %0A -j=len(data%5B0%5D)%0A k=0%0A while k%3Ci:%0A l=0%0A while l%3Cj:%0A fout.write(data%5Bk%5D%5Bl%5D)%0A fout.write('%5Ct')%0A l+=1 + for num in line:%0A fout.write(num+'%5Ct') %0A @@ -782,18 +782,4 @@ n')%0A - k=k+1%0A
85d5712fa1dde952783cbc8d78f904e08cfc9b50
Remove duplicated dependency
server/setup.py
server/setup.py
from pathlib import Path from setuptools import Command, find_packages, setup class GenerateCommand(Command): description = "generates manticore_server server protobuf + grpc code from protobuf specification file" user_options = [] def initialize_options(self): pass def finalize_options(self): pass def run(self): from grpc.tools import protoc protoc.main( [ "grpc_tools.protoc", "-I.", "--python_out=.", "--grpc_python_out=.", "--mypy_out=.", "./manticore_server/ManticoreServer.proto", ] ) setup( name="manticore_server", version="0.0.1", packages=find_packages(exclude=["tests", "tests.*"]), python_requires=">=3.7", install_requires=[ f"manticore[native] @ file://{Path(__file__).parent.resolve()}/..", "protobuf~=3.20", "grpcio~=1.46", "crytic-compile>=0.2.2", ], extras_require={ "dev": [ "grpcio-tools", "mypy-protobuf", "shiv~=1.0.1", "types-setuptools", "mypy-protobuf", "black~=22.0", "isort==5.10.1", "mypy==0.942", ] }, entry_points={ "console_scripts": [ "manticore_server=manticore_server.manticore_server:main", ], "distutils.commands": ["generate = GenerateCommand"], }, cmdclass={ "generate": GenerateCommand, }, )
Python
0
@@ -1166,37 +1166,8 @@ s%22,%0A - %22mypy-protobuf%22,%0A
ba43de958266a2906f3ee4cad23b20361db2637a
Add arguments to job
scripts/submitJob.py
scripts/submitJob.py
#!/usr/bin/env python # SIM-CITY client # # Copyright 2015 Netherlands eScience Center # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. ''' Combines createTask and startJob, to create a task from a command and then start a job. ''' from __future__ import print_function import simcity import argparse import sys import json if __name__ == '__main__': parser = argparse.ArgumentParser(description="start a job") parser.add_argument('command', help="command to run") parser.add_argument('host', help="host to run pilot job on") parser.add_argument( '-m', '--max', help="only run if there are less than MAX jobs running", default=2) parser.add_argument( '-c', '--config', help="configuration file", default=None) parser.add_argument( '-i', '--input', help="JSON parameter file", default=None) args = parser.parse_args() simcity.init(config=args.config) try: properties = {'command': args.command} try: with open(args.input) as f: properties['input'] = json.load(f) except TypeError: pass task, job = simcity.run_task(properties, args.host, int(args.max)) except Exception as ex: print("Task could not be added to the database: %s" % str(ex), file=sys.stderr) sys.exit(1) print("Task %s added to the database" % task.id) if job is None: print("Let task be processed by existing pilot-job scripts") else: print("Job %s (ID: %s) will process task" % (job['batch_id'], job.id))
Python
0.00008
@@ -907,16 +907,81 @@ a job%22)%0A + parser.add_argument('host', help=%22host to run pilot job on%22)%0A pars @@ -1059,45 +1059,49 @@ nt(' -host', help=%22host to run pilot job on +args', nargs='*', help=%22command arguments %22)%0A @@ -1507,16 +1507,29 @@ ties = %7B +%0A 'command @@ -1543,16 +1543,62 @@ .command +,%0A 'arguments': args.args,%0A %7D%0A%0A
8d56a45d0b01dff3e8cd041e7ba09c882d7cbb30
add logging to file and stdout
phabricator-proxy/main.py
phabricator-proxy/main.py
import flask import requests import os from urllib.parse import urlparse, parse_qs import json app = flask.Flask(__name__) app.config["DEBUG"] = False buildkite_api_token = os.getenv("BUILDKITE_API_TOKEN", "") @app.route('/', methods=['GET']) def home(): return "Hi LLVM!" @app.route('/build', methods=['POST', 'GET']) def build(): app.logger.info('request: %s %s', flask.request, flask.request.url) app.logger.info('headers: %s', flask.request.headers) if flask.request.method == 'POST': app.logger.info('data: %s', flask.request.data) app.logger.info('form: %s', flask.request.form) url = urlparse(flask.request.url) params = parse_qs(url.query) build_env = {} for k, v in params.items(): if len(v) == 1: build_env['ph_' + k] = v[0] refspec = 'main' if 'ph_scripts_refspec' in build_env: refspec = build_env['ph_scripts_refspec'] build_request = { 'commit': 'HEAD', 'branch': refspec, 'env': build_env, 'message': f'D{build_env["ph_buildable_revision"]}', } app.logger.info('buildkite request: %s', build_request) headers = {'Authorization': f'Bearer {buildkite_api_token}'} response = requests.post( 'https://api.buildkite.com/v2/organizations/llvm-project' '/pipelines/diff-checks/builds', json=build_request, headers=headers) app.logger.info('buildkite response: %s %s', response.status_code, response.text) rjs = json.loads(response.text) return rjs['web_url'] else: return "expected POST request" if __name__ == '__main__': app.run(host='0.0.0.0:8080')
Python
0
@@ -1,215 +1,932 @@ -import flask%0Aimport requests%0Aimport os%0Afrom urllib.parse import urlparse, parse_qs%0Aimport json%0A%0Aapp = flask.Flask(__name__)%0Aapp.config%5B%22DEBUG%22%5D = False%0Abuildkite_api_token = os.getenv(%22BUILDKITE_API_TOKEN%22, %22%22)%0A +from cmath import log%0Afrom flask.logging import default_handler%0Afrom urllib.parse import urlparse, parse_qs%0Aimport flask%0Aimport json%0Aimport logging%0Aimport logging.handlers%0Aimport os%0Aimport requests%0A%0A%0Abuildkite_api_token = os.getenv(%22BUILDKITE_API_TOKEN%22, %22%22)%0A%0Aapp = flask.Flask(__name__)%0Aapp.config%5B%22DEBUG%22%5D = False%0Aformatter = logging.Formatter('%25(asctime)s %25(levelname)s %25(message)s')%0AerrHandler = logging.FileHandler('error.log', encoding='utf-8',)%0AerrHandler.setLevel(logging.ERROR)%0AerrHandler.setFormatter(formatter)%0Aapp.logger.addHandler(errHandler)%0ArotatingHandler = logging.handlers.TimedRotatingFileHandler('info.log', when='D', encoding='utf-8', backupCount=8)%0ArotatingHandler.setFormatter(formatter)%0Aapp.logger.addHandler(rotatingHandler)%0Aapp.logger.setLevel(logging.INFO)%0AstdoutLog = logging.StreamHandler()%0AstdoutLog.setFormatter(formatter)%0Aapp.logger.addHandler(stdoutLog)%0Aapp.logger.removeHandler(default_handler) %0A%0A@a
c90dbc5007b5627b264493c2d16af79cff9c2af0
Add better custom has_permission check.
joku/checks.py
joku/checks.py
""" Specific checks. """ from discord.ext.commands import CheckFailure def is_owner(ctx): if not ctx.bot.owner_id == ctx.message.author.id: raise CheckFailure(message="You are not the owner.") return True
Python
0
@@ -63,16 +63,23 @@ kFailure +, check %0A%0A%0Adef i @@ -223,8 +223,595 @@ rn True%0A +%0A%0Adef has_permissions(**perms):%0A def predicate(ctx):%0A if ctx.bot.owner_id == ctx.message.author.id:%0A return True%0A msg = ctx.message%0A ch = msg.channel%0A permissions = ch.permissions_for(msg.author)%0A if all(getattr(permissions, perm, None) == value for perm, value in perms.items()):%0A return True%0A%0A # Raise a custom error message%0A raise CheckFailure(message=%22You do not have any of the required permissions: %7B%7D%22.format(%0A ', '.join(%5Bperm.upper() for perm in perms%5D)%0A ))%0A%0A return check(predicate)%0A
f9a827b41ed925e22bf1e873e5989bdd327fabbf
Add RefugeeCamp name formatting
api/models.py
api/models.py
from django.db import models class RefugeeCamp(models.Model): # Location city = models.CharField(max_length=64) postcode = models.CharField(max_length=16) street = models.CharField(max_length=128) streetnumber = models.CharField(max_length=32) class ObjectCategory(models.Model): title = models.CharField(max_length=64) def __str__(self): return self.title class ObjectSubCategory(models.Model): title = models.CharField(max_length=64) parent = models.ForeignKey(ObjectCategory) def __str__(self): return "{0}/{1}".format(self.parent, self.title) class SimpleOffer(models.Model): category = models.ForeignKey(ObjectCategory, null=True) title = models.CharField(max_length=64) description = models.CharField(max_length=4096) create_time = models.DateTimeField(auto_now_add=True) image = models.ImageField(upload_to='api.UploadedFile/bytes/filename/mimetype', blank=True, null=True) # Owner's info city = models.CharField(max_length=64) telephone = models.CharField(max_length=64) email = models.CharField(max_length=128) class HelpTimeSearch(models.Model): start_time = models.DateTimeField() end_time = models.DateTimeField() camp = models.ForeignKey(RefugeeCamp) class UploadedFile(models.Model): bytes = models.TextField() filename = models.CharField(max_length=255) mimetype = models.CharField(max_length=50)
Python
0.000001
@@ -259,16 +259,180 @@ th=32)%0A%0A + def __str__(self):%0A return %22%7B0%7D %7B1%7D: %7B2%7D %7B3%7D%22.format(self.postcode, self.city,%0A self.street, self.streetnumber)%0A%0A class Ob
c720f9c385a785b8905991465fb74c75fca42220
fix bug
cloudify_cloudinit/__init__.py
cloudify_cloudinit/__init__.py
# Copyright (c) 2017-2018 Cloudify Platform Ltd. All rights reserved # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import yaml import base64 from cloudify import ctx class CloudInit(object): def __init__(self, operation_inputs): """ Sets the properties that all operations need. :param operation_inputs: The inputs from the operation. """ self.config = self.get_config(operation_inputs) @staticmethod def get_external_resource(config): for f in config.get('write_files', []): if not isinstance(f, dict): break try: if 'content' not in f: continue resource_type = f['content'].get('resource_type', '') resource_name = f['content'].get('resource_name', '') template_variables = f['content'].get('template_variables', {}) if 'file_resource' == resource_type: f['content'] = ctx.get_resource_and_render( resource_name, template_variables) except ValueError: ctx.logger.debug('No external resource recognized.') pass return config def get_config(self, inputs): config = ctx.node.properties.get('resource_config', {}) config.update( ctx.instance.runtime_properties.get('resource_config', {})) config.update(inputs.get('resource_config', {})) config.update(self.get_external_resource(config.copy())) return config @property def __str__(self): """Override the string implementation of object.""" cloud_init = yaml.dump(self.config) cloud_init_string = str(cloud_init).replace('!!python/unicode ', '') header = ctx.node.properties.get('header') if header: cloud_init_string = \ header + '\n' + cloud_init_string if ctx.node.properties.get('encode_base64'): cloud_init_string = \ base64.encodestring(cloud_init_string) return cloud_init_string def update(self, **_): ctx.instance.runtime_properties['resource_config'] = self.config ctx.instance.runtime_properties['cloud_config'] = self.__str__ def delete(self, **_): # cleanup runtime properties keys = ctx.instance.runtime_properties.keys() for key in keys: del ctx.instance.runtime_properties[key]
Python
0.000001
@@ -1137,60 +1137,85 @@ -if ' content -' not in f:%0A continue%0A + = f.get('content')%0A if isinstance(content, dict):%0A @@ -1238,36 +1238,31 @@ urce_type = -f%5B' content -'%5D .get('resour @@ -1283,32 +1283,36 @@ + resource_name = @@ -1311,28 +1311,23 @@ _name = -f%5B' content -'%5D .get('re @@ -1336,32 +1336,36 @@ urce_name', '')%0A + @@ -1385,28 +1385,23 @@ ables = -f%5B' content -'%5D .get('te @@ -1439,16 +1439,20 @@ + + if 'file @@ -1480,16 +1480,20 @@ e_type:%0A + @@ -1548,16 +1548,20 @@ render(%0A +
e94031c99f4ce0a07a2da86423839d963e70c224
Fix off-by-one typo
py/desisim/scripts/pixsim_nights.py
py/desisim/scripts/pixsim_nights.py
""" desisim.scripts.pixsim_nights ====================== Entry point for simulating multiple nights. """ from __future__ import absolute_import, division, print_function import sys import os import re import argparse import traceback import numpy as np from desispec.util import option_list from desispec.parallel import stdouterr_redirected import desispec.io as specio from .. import io as simio from ..io import SimSpec from . import pixsim def parse(options=None): parser = argparse.ArgumentParser( description = 'Generate pixel-level simulated DESI data for one or more nights', ) parser.add_argument("--nights", type=str, default=None, required=False, help="YEARMMDD,YEARMMDD,YEARMMDD") parser.add_argument("--verbose", action="store_true", help="Include debug log info") parser.add_argument("--overwrite", action="store_true", help="Overwrite existing raw and simpix files") parser.add_argument("--cosmics", action="store_true", help="Add simulated cosmics") parser.add_argument("--preproc", action="store_true", help="Run the preprocessing") parser.add_argument("--seed", type=int, default=123456, required=False, help="random number seed") parser.add_argument("--cameras", type=str, default=None, help="cameras, e.g. b0,r5,z9") parser.add_argument("--camera_procs", type=int, default=1, help="Number " "of MPI processes to use per camera") args = None if options is None: args = parser.parse_args() else: options = [str(x) for x in options] args = parser.parse_args(options) return args def main(args, comm=None): rank = 0 nproc = 1 if comm is not None: rank = comm.rank nproc = comm.size # Determine which nights we are using nights = None if args.nights is not None: nights = args.nights.split(",") else: if rank == 0: rawdir = os.path.abspath(specio.rawdata_root()) nights = [] nightpat = re.compile(r"\d{8}") for root, dirs, files in os.walk(rawdir, topdown=True): for d in dirs: nightmat = nightpat.match(d) if nightmat is not None: nights.append(d) break if comm is not None: nights = comm.bcast(nights, root=0) # Get the list of exposures for each night night_expid = {} all_expid = [] exp_to_night = {} if rank == 0: for nt in nights: night_expid[nt] = specio.get_exposures(nt, raw=True) all_expid.extend(night_expid[nt]) for ex in night_expid[nt]: exp_to_night[ex] = nt if comm is not None: night_expid = comm.bcast(night_expid, root=0) all_expid = comm.bcast(all_expid, root=0) exp_to_night = comm.bcast(exp_to_night, root=0) expids = np.array(all_expid, dtype=np.int32) nexp = len(expids) # Get the list of cameras cams = None if args.cameras is not None: cams = args.cameras.split(",") else: cams = [] for band in ['b', 'r', 'z']: for spec in range(10): cams.append('{}{}'.format(band, spec)) # number of cameras ncamera = len(cams) # check that our communicator is an appropriate size if comm is not None: if ncamera * args.camera_procs > comm.size: if comm.rank == 0: print("Communicator size ({}) too small for {} cameras each with {} procs".format(comm.size, ncamera, args.camera_procs), flush=True) comm.Abort() # create a set of reproducible seeds for each exposure np.random.seed(args.seed) maxexp = np.max(expids) allseeds = np.random.randint(2**32, size=maxexp) seeds = allseeds[-nexp:] taskproc = ncamera * args.camera_procs comm_group = comm comm_rank = None group = comm.rank ngroup = comm.size group_rank = 0 if comm is not None: from mpi4py import MPI if taskproc > 1: ngroup = int(comm.size / taskproc) group = int(comm.rank / taskproc) group_rank = comm.rank % taskproc comm_group = comm.Split(color=group, 
key=group_rank) comm_rank = comm.Split(color=group_rank, key=group) else: comm_group = MPI.COMM_SELF comm_rank = comm myexpids = np.array_split(expids, ngroup)[group] for ex in myexpids: nt = exp_to_night[ex] # path to raw file simspecfile = simio.findfile('simspec', nt, ex) rawfile = specio.findfile('raw', nt, ex) rawfile = os.path.join(os.path.dirname(simspecfile), rawfile) # Is this exposure already finished? done = True if group_rank == 0: if not os.path.isfile(rawfile): done = False if args.preproc: for c in cams: pixfile = specio.findfile('pix', night=nt, expid=ex, camera=c) if not os.path.isfile(pixfile): done = False if comm_group is not None: done = comm_group.bcast(done, root=0) if done and not args.overwrite: if group_rank == 0: print("Skipping completed exposure {:08d} on night {}".format(ex, nt)) continue # Write per-process logs to a separate directory, # since there are so many of them. logdir = "{}_logs".format(rawfile) if group_rank == 0: if not os.path.isdir(logdir): os.makedirs(logdir) if comm_group is not None: comm_group.barrier() tasklog = os.path.join(logdir, "pixsim") with stdouterr_redirected(to=tasklog, comm=comm_group): try: options = {} options["night"] = nt options["expid"] = int(ex) options["cosmics"] = args.cosmics options["seed"] = seeds[ex] options["cameras"] = ",".join(cams) options["mpi_camera"] = args.camera_procs options["verbose"] = args.verbose options["preproc"] = args.preproc optarray = option_list(options) pixargs = pixsim.parse(optarray) pixsim.main(pixargs, comm_group) except: exc_type, exc_value, exc_traceback = sys.exc_info() lines = traceback.format_exception(exc_type, exc_value, exc_traceback) print("".join(lines), flush=True)
Python
0.999999
@@ -3794,22 +3794,26 @@ 2, size= +( maxexp ++1) )%0A se
ee1f958cb3611ecc3af0329deda7fde5d5281c32
remove obsolete model creation
core/models/__init__.py
core/models/__init__.py
from core.models.allocation_strategy import Allocation, AllocationStrategy from core.models.application import Application, ApplicationMembership,\ ApplicationScore, ApplicationBookmark from core.models.application_tag import ApplicationTag from core.models.application_version import ApplicationVersion, ApplicationVersionMembership from core.models.cloud_admin import CloudAdministrator from core.models.credential import Credential, ProviderCredential from core.models.export_request import ExportRequest from core.models.group import Group, IdentityMembership,\ InstanceMembership, Leadership from core.models.identity import Identity from core.models.instance_tag import InstanceTag from core.models.profile import UserProfile from core.models.project import Project from core.models.project_instance import ProjectInstance from core.models.project_volume import ProjectVolume from core.models.provider import AccountProvider, ProviderType, PlatformType,\ Provider, ProviderInstanceAction, ProviderDNSServerIP from core.models.license import LicenseType, License, ApplicationVersionLicense from core.models.machine import ProviderMachine, ProviderMachineMembership from core.models.machine_request import MachineRequest from core.models.match import PatternMatch, MatchType from core.models.maintenance import MaintenanceRecord from core.models.instance import Instance, InstanceStatusHistory,\ InstanceStatus, InstanceAction, InstanceSource from core.models.node import NodeController from core.models.boot_script import ScriptType, BootScript, ApplicationVersionBootScript from core.models.quota import Quota from core.models.resource_request import ResourceRequest from core.models.size import Size from core.models.t import T from core.models.tag import Tag from core.models.user import AtmosphereUser from core.models.volume import Volume def get_or_create(Model, *args, **kwargs): return Model.objects.get_or_create(*args, **kwargs)[0] def create_machine_model(name, provider, provider_alias, created_by, description): name = _get_valid_name(name, provider_alias) new_machine = get_or_create(Application, name=name, description=description, created_by=created_by) provider_machine = get_or_create(ProviderMachine, machine=new_machine, provider=provider, identifier=provider_alias) return (new_machine, provider_machine) def get_or_create_instance_model(name, provider, provider_alias, image_alias, ip_address, created_by): name = _get_valid_name(name, provider_alias) provider_machine = _get_or_create_provider_machine( provider, image_alias, created_by ) return get_or_create(Instance, name=name, provider_alias=provider_alias, provider_machine=provider_machine, ip_address=ip_address, created_by=created_by) def _get_valid_name(name, alias): """ Make sure there is a good default name if no name exists. """ if name is None or len(name) == 0: name = alias return name def _get_or_create_provider_machine(provider, image_alias, created_by): """ Get or create a ProviderMachine. If ProviderMachine does not already exist create a new Machine and related ProviderMachine. """ provider_machine = None filtered_machines = ProviderMachine.objects.filter(identifier=image_alias) if filtered_machines: provider_machine = filtered_machines[0] else: (created, provider_machine) = create_machine_model( None, provider, image_alias, created_by, "Created to support instanceModel") return provider_machine
Python
0.000003
@@ -1,24 +1,92 @@ +# -*- coding: utf-8 -*-%0A# flake8: noqa%0A%22%22%22%0ACollection of models%0A%22%22%22%0A from core.models.allocat @@ -1888,16 +1888,16 @@ ereUser%0A + from cor @@ -1930,2165 +1930,4 @@ ume%0A -%0A%0Adef get_or_create(Model, *args, **kwargs):%0A return Model.objects.get_or_create(*args, **kwargs)%5B0%5D%0A%0A%0Adef create_machine_model(name, provider, provider_alias,%0A created_by, description):%0A name = _get_valid_name(name, provider_alias)%0A new_machine = get_or_create(Application,%0A name=name,%0A description=description,%0A created_by=created_by)%0A provider_machine = get_or_create(ProviderMachine,%0A machine=new_machine,%0A provider=provider,%0A identifier=provider_alias)%0A return (new_machine, provider_machine)%0A%0A%0Adef get_or_create_instance_model(name, provider, provider_alias,%0A image_alias, ip_address, created_by):%0A name = _get_valid_name(name, provider_alias)%0A provider_machine = _get_or_create_provider_machine(%0A provider,%0A image_alias,%0A created_by%0A )%0A return get_or_create(Instance,%0A name=name,%0A provider_alias=provider_alias,%0A provider_machine=provider_machine,%0A ip_address=ip_address,%0A created_by=created_by)%0A%0A%0Adef _get_valid_name(name, alias):%0A %22%22%22%0A Make sure there is a good default name if no name exists.%0A %22%22%22%0A if name is None or len(name) == 0:%0A name = alias%0A return name%0A%0A%0Adef _get_or_create_provider_machine(provider, image_alias, created_by):%0A %22%22%22%0A Get or create a ProviderMachine.%0A If ProviderMachine does not already exist%0A create a new Machine and related ProviderMachine.%0A %22%22%22%0A provider_machine = None%0A filtered_machines = ProviderMachine.objects.filter(identifier=image_alias)%0A if filtered_machines:%0A provider_machine = filtered_machines%5B0%5D%0A else:%0A (created, provider_machine) = create_machine_model(%0A None,%0A provider,%0A image_alias,%0A created_by,%0A %22Created to support instanceModel%22)%0A return provider_machine%0A
6e17556192274ea64635be61af0ea363e0ba898e
Allow running as non-root, with a warning.
snapshot.py
snapshot.py
#!/usr/bin/env python from subprocess import Popen, PIPE from sh import whoami import sys import time from parsers.tail import read_tailed_files from db import Database from util import LOGGER def parse_args(): from argparse import ArgumentParser parser = ArgumentParser(description='Snapshot statistics from a machine') parser.add_argument('--host', default='', help='connect to a remote host (recommended)') parser.add_argument('--password', help='the password for the remote user given with --user') # Multiple pids could be set using bash expansion: {1234,2345} parser.add_argument('-p', '--pid', default='*', help='the pid(s) to look up (default: *)') parser.add_argument('-u', '--user', default='root', help='user to log into remote host with (default: root)') parser.add_argument('-v', '--verbose', action='store_true', help='enable more verbose logging') parser.add_argument('--overwrite', action='store_true', help='always overwrite the db, even if it exists') parser.add_argument('-d', '--db', required=True, help='path to store the data to (sqlite format)') parser.add_argument('-c', '--count', default=1, type=int, help='the number of snapshots to collect') parser.add_argument('--period', type=int, default=0, help='number of seconds between subsequent snapshots') args = parser.parse_args() if args.count > 1 and 0 == args.period: print ('Error: You must set the period if count > 1\n') parser.print_help() sys.exit(1) return args def read_stats(args): # This is the command to grap all of the necessary info. # Note that -v is passed to tail - this is so we always the filename # given to us, which is needed for parsing. # As processes can be transient, we can get errors here about # non-existant files, so ignore them, this is expectedself. cmd = 'nice tail -v -n +1 '\ '/proc/%s/{cmdline,smaps} '\ '/proc/meminfo '\ '/proc/loadavg '\ '/proc/uptime '\ '/proc/vmstat '\ '2>/dev/null; ' \ 'nice find /proc/%s -type f -name stat '\ '-exec tail -v -n +1 {} \; 2>/dev/null | '\ 'awk \''\ '/==>/ {print} '\ '/^[0-9]/ {print \$2, \$10, \$12, \$14, \$15, \$22}\';' # Accept a space-separated list of pids as that is what pidof(8) returns and # it's quite likely you'll want to invoke this script with something like: # # --pid "`pidof foobar`" # # at some point. 
if args.pid.isdigit() or args.pid == '*': pids = args.pid else: pids = '{%s}' % args.pid.replace(' ', ',') if args.host == '': LOGGER.info('Loading local procfs files') if whoami().strip() != "root": LOGGER.error("Requires root privileges to run locally") sys.exit(1) cmd = "bash -c \"%s\"" % (cmd % (pids, pids)) elif args.host != '': ssh = ( "ssh %s@%s" " -o UserKnownHostsFile=/dev/null" " -o StrictHostKeyChecking=no" " -o LogLevel=error" % (args.user, args.host) ) if args.password: ssh = "sshpass -p %s %s" % (args.password, ssh) else: ssh = "%s -o PasswordAuthentication=no" % ssh cmd = """%s "%s" """ % (ssh, cmd % (pids, pids)) LOGGER.info('Reading procfs with cmd: %s' % cmd) p = Popen(cmd, shell=True, bufsize=-1, stdout=PIPE, stderr=PIPE) stats = read_tailed_files(p.stdout) if p.poll() != 0: LOGGER.error("Command failed with: %r" % p.stderr.read().strip()) sys.exit(1) return stats def main(args): import logging if args.verbose: LOGGER.setLevel(logging.DEBUG) else: LOGGER.setLevel(logging.INFO) # Get the database handle db = Database(args.db, args.overwrite) for i in range(args.count): if i > 0: time.sleep(args.period) # Read all the data we need system_stats, processes, memory_stats = read_stats(args) LOGGER.info('Found {} process(es) and {} used memory fragments'.format( len(processes), len(memory_stats))) LOGGER.info('Regions: %s' % memory_stats) db.add(args.host if len(args.host) else '[local]', system_stats, memory_stats, processes) if __name__ == '__main__': main(parse_args())
Python
0
@@ -2909,88 +2909,206 @@ -if args.host == '':%0A LOGGER.info('Loading local procfs files')%0A if +# root can see all of /proc, another user is likely not going to be able%0A # to read all of it. This isn't a hard error, but won't give a full view%0A # of the system.%0A if (args.host == '' and who @@ -3130,25 +3130,29 @@ = %22root%22 -: +) or%5C %0A LOG @@ -3147,91 +3147,199 @@ - LOGGER.error(%22Requires root privileges to run locally +(args.host != '' and args.user != 'root'):%0A LOGGER.warning(%22If not running as root you may not see all info. %22)%0A +%0A - sys.exit(1 +if args.host == '':%0A LOGGER.info('Loading local procfs files' )%0A
3f5fc855369f61e39674c052a872fec4828bebf0
[Mails] typo
bin/Mail.py
bin/Mail.py
#!/usr/bin/env python3 # -*-coding:UTF-8 -* """ The Mail Module ====================== This module is consuming the Redis-list created by the Categ module. It apply mail regexes on paste content and warn if above a threshold. """ import os import re import sys import redis import time import datetime import dns.exception from packages import Paste from packages import lib_refine from pubsublogger import publisher from pyfaup.faup import Faup from Helper import Process sys.path.append(os.path.join(os.environ['AIL_BIN'], 'packages')) import Item import signal class TimeoutException(Exception): pass def timeout_handler(signum, frame): raise TimeoutException signal.signal(signal.SIGALRM, timeout_handler) max_execution_time = 30 if __name__ == "__main__": publisher.port = 6380 publisher.channel = "Script" config_section = 'Mail' faup = Faup() p = Process(config_section) addr_dns = p.config.get("Mail", "dns") # REDIS # r_serv_cache = redis.StrictRedis( host=p.config.get("Redis_Cache", "host"), port=p.config.getint("Redis_Cache", "port"), db=p.config.getint("Redis_Cache", "db"), decode_responses=True) # ARDB # server_statistics = redis.StrictRedis( host=p.config.get("ARDB_Statistics", "host"), port=p.config.getint("ARDB_Statistics", "port"), db=p.config.getint("ARDB_Statistics", "db"), decode_responses=True) # FUNCTIONS # publisher.info("Suscribed to channel mails_categ") # FIXME For retro compatibility channel = 'mails_categ' prec_item_id = None # Log as critical if there are more that that amout of valid emails is_critical = 10 max_execution_time = 60 email_regex = "[a-zA-Z0-9._%+-]+@[a-zA-Z0-9.-]+\.[a-zA-Z]{2,6}" MX_values = None while True: message = p.get_from_set() if message is not None: item_id, score = message.split() if prec_item_id is None or item_id != prec_item_id: PST = Paste.Paste(item_id) # max execution time on regex signal.alarm(max_execution_time) try: l_mails = re.findall(email_regex, Item.get_item_content()) except TimeoutException: p.incr_module_timeout_statistic() # add encoder type err_mess = "Mail: processing timeout: {}".format(item_id) print(err_mess) publisher.info(err_mess) continue else: signal.alarm(0) l_mails = list(set(l_mails)) # max execution time on regex signal.alarm(max_execution_time) try: # Transforming the set into a string MXdomains = re.findall("@[a-zA-Z0-9.-]+\.[a-zA-Z]{2,20}", str(l_mails).lower()) except TimeoutException: p.incr_module_timeout_statistic() # add encoder type err_mess = "Mail: processing timeout: {}".format(item_id) print(err_mess) publisher.info(err_mess) continue else: signal.alarm(0) MX_values = lib_refine.checking_MX_record(r_serv_cache, MXdomains, addr_dns) if MX_values[0] >= 1: PST.__setattr__(channel, MX_values) PST.save_attribute_redis(channel, (MX_values[0], list(MX_values[1]))) to_print = 'Mails;{};{};{};Checked {} e-mail(s);{}'.\ format(PST.p_source, PST.p_date, PST.p_name, MX_values[0], PST.p_rel_path) if MX_values[0] > is_critical: publisher.warning(to_print) #Send to duplicate p.populate_set_out(item_id, 'Duplicate') msg = 'infoleak:automatic-detection="mail";{}'.format(item_id) p.populate_set_out(msg, 'Tags') #create country statistics date = datetime.datetime.now().strftime("%Y%m") for mail in MX_values[1]: print('mail;{};{};{}'.format(MX_values[1][mail], mail, PST.p_date)) p.populate_set_out('mail;{};{};{}'.format(MX_values[1][mail], mail, PST.p_date), 'ModuleStats') faup.decode(mail) tld = faup.get()['tld'] try: tld = tld.decode() except: pass server_statistics.hincrby('mail_by_tld:'+date, tld, MX_values[1][mail]) 
else: publisher.info(to_print) #create country statistics for mail in MX_values[1]: print('mail;{};{};{}'.format(MX_values[1][mail], mail, PST.p_date)) p.populate_set_out('mail;{};{};{}'.format(MX_values[1][mail], mail, PST.p_date), 'ModuleStats') prec_item_id = item_id else: publisher.debug("Script Mails is Idling 10s") print('Sleeping') time.sleep(10)
Python
0.999884
@@ -2250,16 +2250,23 @@ content( +item_id ))%0A
1c3e8def9f46ee0f21d1172287af0b4fadf67884
Add some more backwards compatibility: also add intent to outcome
src/wit_ros/wit_node.py
src/wit_ros/wit_node.py
#!/usr/bin/env python """ROS node for the Wit.ai API""" global APIKEY APIKEY = None import rospy import requests import json from wit import Wit from wit_ros.srv import Interpret, InterpretResponse, ListenAndInterpret, ListenAndInterpretResponse from wit_ros.msg import Outcome, Entity class WitRos(object): def __init__(self, api_key): self.wit = Wit(api_key) self.pub = rospy.Publisher('stt', Outcome, queue_size=1) def start(self): rospy.Service('wit/interpret', Interpret, self.interpret) # rospy.Service('wit/listen_interpret', ListenAndInterpret, self.listen_and_interpret) def parse_response(self, response, klass): rospy.logdebug("Data: '{0}'".format(json.dumps(response, indent=4, separators=(',', ': ')))) ros_entities = [] if "WARNING" in response: rospy.logwarn("Response contains a warning: {warn}".format(warn=response["WARNING"])) entities = [] if "entities" in response: entities = response["entities"] elif "outcomes" in response: entities = response["outcomes"][0]["entities"] for entity_name, entity_properties in entities.iteritems(): entity_properties = entity_properties[0] rospy.logdebug("Entity '{name}' has properties{prop}".format(name=entity_name, prop=entity_properties)) entity = Entity(name=str(entity_name)) if 'type' in entity_properties: entity.type = str(entity_properties["type"]) if 'value' in entity_properties: entity.value = str(entity_properties["value"]) if 'unit' in entity_properties: entity.unit = str(entity_properties["unit"]) if 'suggested' in entity_properties: entity.suggested = str(entity_properties["suggested"]) if 'confidence' in entity_properties: entity.confidence = float(entity_properties["confidence"]) rospy.logdebug("Adding {ent}".format(ent=entity)) ros_entities += [entity] outcome = Outcome(entities = ros_entities, intent = str(response.get("intent", "")), text = str(response["_text"])) response = klass( msg_body = str(response), msg_id = str(response["msg_id"]), outcome = outcome) self.pub.publish(outcome) return response def interpret(self, rosrequest): sentence = rosrequest.sentence rospy.logdebug("Interpreting '{0}'".format(sentence)) wit_response = self.wit.message(sentence) rospy.logdebug("WitResponse: {0}".format(wit_response)) #response = json.loads(wit_response) #rospy.logdebug("Response: {0}".format(response)) return self.parse_response(wit_response, InterpretResponse) # TODO: wit.voice_query_auto used to take care of oudio recording, now it needs an audio file or encoded audio byte # def listen_and_interpret(self, rosrequest): # rospy.logdebug("About to record audio") # response = json.loads(self.wit.voice_query_auto(APIKEY)) # rospy.logdebug("Response: {0}".format(response)) # if not response: # return None # # return self.parse_response(response, ListenAndInterpretResponse) if __name__ == "__main__": rospy.init_node("wit_ros", log_level=rospy.INFO) if rospy.has_param('~api_key'): APIKEY = rospy.get_param("~api_key") wr = WitRos(APIKEY) wr.start() rospy.spin() else: rospy.logerr("No API key set (via parameter server). Please set one. " + "API keys can be obtained via the http://www.wit.ai")
Python
0
@@ -923,24 +923,47 @@ ARNING%22%5D))%0A%0A + outcome = None%0A enti @@ -972,16 +972,17 @@ es = %5B%5D%0A +%0A @@ -1097,32 +1097,31 @@ -entities +outcome = response%5B @@ -1134,16 +1134,47 @@ mes%22%5D%5B0%5D +%0A entities = outcome %5B%22entiti @@ -2227,35 +2227,47 @@ str( -response.get(%22intent%22, %22%22)) +outcome%5B%22intent%22%5D) if outcome else None ,%0A
1b2f0be67a8372a652b786c8b183cd5edf1807cd
Swap back to Fuzzer, no monkey patching
config/fuzz_pox_mesh.py
config/fuzz_pox_mesh.py
from experiment_config_lib import ControllerConfig from sts.topology import MeshTopology from sts.control_flow import Fuzzer from sts.input_traces.input_logger import InputLogger from sts.invariant_checker import InvariantChecker from sts.simulation_state import SimulationConfig # Use POX as our controller command_line = ('''./pox.py --verbose --no-cli sts.syncproto.pox_syncer ''' '''samples.topo forwarding.l2_multi ''' '''sts.util.socket_mux.pox_monkeypatcher ''' '''openflow.of_01 --address=../sts_socket_pipe''') controllers = [ControllerConfig(command_line, address="sts_socket_pipe", cwd="pox", sync="tcp:localhost:18899")] topology_class = MeshTopology topology_params = "num_switches=4" dataplane_trace = "dataplane_traces/ping_pong_same_subnet_4_switches.trace" simulation_config = SimulationConfig(controller_configs=controllers, topology_class=topology_class, topology_params=topology_params, dataplane_trace=dataplane_trace, multiplex_sockets=True) control_flow = Fuzzer(simulation_config, check_interval=1, halt_on_violation=True, input_logger=InputLogger(), invariant_check=InvariantChecker.check_liveness)
Python
0
@@ -117,16 +117,29 @@ t Fuzzer +, Interactive %0Afrom st @@ -357,17 +357,28 @@ ose ---no-cli +openflow.debug ''' # sts. @@ -425,21 +425,8 @@ ''' -samples.topo forw @@ -461,16 +461,17 @@ +# '''sts.u @@ -555,26 +555,35 @@ ess= -../sts_socket_pipe +__address__ --port=__port__ ''') @@ -633,71 +633,18 @@ ne, -address=%22sts_socket_pipe%22, cwd=%22pox%22, sync=%22tcp:localhost:18899 +cwd=%22betta %22)%5D%0A @@ -706,17 +706,17 @@ witches= -4 +2 %22%0Adatapl @@ -770,19 +770,8 @@ bnet -_4_switches .tra @@ -1107,19 +1107,20 @@ sockets= -Tru +Fals e)%0A%0Acont @@ -1175,10 +1175,33 @@ val= -1, +80,%0A hal @@ -1219,11 +1219,12 @@ ion= -Tru +Fals e,%0A @@ -1337,14 +1337,93 @@ eck_ -liveness +connectivity)%0A#control_flow = Interactive(simulation_config, input_logger=InputLogger() )%0A
ffd14af829bd3f7bf52cb0af5306550b51ab8712
Remove mox from tests/unit/compute/test_compute_xen.py
nova/tests/unit/compute/test_compute_xen.py
nova/tests/unit/compute/test_compute_xen.py
# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Tests for expectations of behaviour from the Xen driver.""" from oslo_utils import importutils from nova.compute import power_state import nova.conf from nova import context from nova import objects from nova.objects import instance as instance_obj from nova.tests.unit.compute import eventlet_utils from nova.tests.unit import fake_instance from nova.tests.unit.virt.xenapi import stubs from nova.virt.xenapi import vm_utils CONF = nova.conf.CONF CONF.import_opt('compute_manager', 'nova.service') class ComputeXenTestCase(stubs.XenAPITestBaseNoDB): def setUp(self): super(ComputeXenTestCase, self).setUp() self.flags(compute_driver='xenapi.XenAPIDriver') self.flags(connection_url='test_url', connection_password='test_pass', group='xenserver') stubs.stubout_session(self.stubs, stubs.FakeSessionForVMTests) self.compute = importutils.import_object(CONF.compute_manager) # execute power syncing synchronously for testing: self.compute._sync_power_pool = eventlet_utils.SyncPool() def test_sync_power_states_instance_not_found(self): db_instance = fake_instance.fake_db_instance() ctxt = context.get_admin_context() instance_list = instance_obj._make_instance_list(ctxt, objects.InstanceList(), [db_instance], None) instance = instance_list[0] self.mox.StubOutWithMock(objects.InstanceList, 'get_by_host') self.mox.StubOutWithMock(self.compute.driver, 'get_num_instances') self.mox.StubOutWithMock(vm_utils, 'lookup') self.mox.StubOutWithMock(self.compute, '_sync_instance_power_state') objects.InstanceList.get_by_host(ctxt, self.compute.host, expected_attrs=[], use_slave=True).AndReturn(instance_list) self.compute.driver.get_num_instances().AndReturn(1) vm_utils.lookup(self.compute.driver._session, instance['name'], False).AndReturn(None) self.compute._sync_instance_power_state(ctxt, instance, power_state.NOSTATE) self.mox.ReplayAll() self.compute._sync_power_states(ctxt)
Python
0.000002
@@ -630,16 +630,28 @@ er.%22%22%22%0A%0A +import mock%0A from osl @@ -1996,32 +1996,73 @@ -self.mox.StubOutWithMock +@mock.patch.object(vm_utils, 'lookup')%0A @mock.patch.object (obj @@ -2107,32 +2107,26 @@ -self.mox.StubOutWithMock +@mock.patch.object (sel @@ -2176,85 +2176,26 @@ -self.mox.StubOutWithMock(vm_utils, 'lookup')%0A self.mox.StubOutWithMock +@mock.patch.object (sel @@ -2235,25 +2235,24 @@ state')%0A -%0A objects. @@ -2247,143 +2247,237 @@ -objects.InstanceList.get_by_host(ctxt,%0A self.compute.host, expected_attrs=%5B%5D,%0A use_slave=True).AndReturn( +def do_test(mock_compute_sync_powerstate,%0A mock_compute_get_num_instances,%0A mock_instance_list_get_by_host,%0A mock_vm_utils_lookup):%0A mock_instance_list_get_by_host.return_value = inst @@ -2485,17 +2485,16 @@ nce_list -) %0A @@ -2498,77 +2498,85 @@ -self.compute.driver.get_num_instances().AndReturn(1)%0A + mock_compute_get_num_instances.return_value = 1%0A mock_ vm_utils .loo @@ -2575,16 +2575,49 @@ tils -. +_ lookup -( +.return_value = None%0A%0A self @@ -2629,145 +2629,311 @@ ute. -driv +_sync_pow er -. _s -ession, instance%5B'name'%5D,%0A False).AndReturn(None)%0A self.compute._sync_instance +tates(ctxt)%0A%0A mock_instance_list_get_by_host.assert_called_once_with(%0A ctxt, self.compute.host, expected_attrs=%5B%5D, use_slave=True)%0A mock_compute_get_num_instances.assert_called_once_with()%0A mock_compute_sync _power -_ state -(ctxt, instance, +.assert_called_once_with( %0A @@ -2944,16 +2944,31 @@ +ctxt, instance, power_s @@ -2983,40 +2983,83 @@ TATE -)%0A%0A self.mox.ReplayAll()%0A +, use_slave=True)%0A mock_vm_utils_lookup.assert_called_once_with( %0A @@ -3055,32 +3055,39 @@ e_with(%0A + self.compute._sy @@ -3083,33 +3083,84 @@ compute. -_sync_pow +driv er +. _s -tates(ctxt +ession, instance%5B'name'%5D,%0A False)%0A%0A do_test( )%0A
6eeadf2246c5aa09bbec6fd5b6bb0d9fde25d348
Remove dots from rendered maze
bin/maze.py
bin/maze.py
# Use case: A randomly generated maze won when the user reaches the end # Example: from vengeance.game import Direction from vengeance.game import Game from vengeance.game import Location import random width = 8 height = 8 north = Direction('north') south = Direction('south') north.opposite = south east = Direction('east') west = Direction('west') east.opposite = west def set_exits(x, y, location_grid, visited_locations): location = location_grid[x][y] allowed_location_coords = [] if x in range(0, width-1) and not_visited(location_grid[x+1][y]): allowed_location_coords.append([x+1, y]) if x in range(1, width) and not_visited(location_grid[x-1][y]): allowed_location_coords.append([x-1, y]) if y in range(0, height-1) and not_visited(location_grid[x][y+1]): allowed_location_coords.append([x, y+1]) if y in range(1, height) and not_visited(location_grid[x][y-1]): allowed_location_coords.append([x, y-1]) count = len(allowed_location_coords) if count == 0: if len(visited_locations) != 0: previous_location = visited_locations.pop() for i in range(width): for j in range(height): current_location = location_grid[i][j] if previous_location.name == current_location.name: set_exits(i, j, location_grid, visited_locations) return visited_locations.append(location) location_coords = allowed_location_coords[random.randrange(count)] new_x = location_coords[0] new_y = location_coords[1] new_location = location_grid[new_x][new_y] direction = None if new_x < x: direction = west elif new_x > x: direction = east elif new_y < y: direction = south else: direction = north location.add_exit(direction, new_location) set_exits(new_x, new_y, location_grid, visited_locations) def not_visited(location): return not location.exits def render_maze(location_grid): result = '.' + width * '_.' result += '\n' for y in range(height-1, -1, -1): result += '|' for x in range(width): location = location_grid[x][y] if y == 0 or has_south_wall(location): result += '_' else: result += ' ' if x == width-1 or has_east_wall(location): result += '|' else: result += '.' result += '\n' return result def has_south_wall(location): for exit in location.exits: if exit.direction.name == south.name: return False return True def has_east_wall(location): for exit in location.exits: if exit.direction.name == east.name: return False return True def random_coords(): return random.randrange(width), random.randrange(height) # Create maze (a grid of locations) location_grid = [] for x in range(width): locations_at_x = [] location_grid.append(locations_at_x) for y in range(height): locations_at_x.append(Location('' + str(x) + ', ' + str(y))) # Pick a random starting location starting_x, starting_y = random_coords() visited_locations = [] set_exits(starting_x, starting_y, location_grid, visited_locations) print(render_maze(location_grid)) locations = [] for x in range(width): for y in range(height): locations.append(location_grid[x][y]) game = Game(locations) game.run()
Python
0.000002
@@ -2049,17 +2049,17 @@ sult = ' -. + ' + widt @@ -2064,17 +2064,17 @@ dth * '_ -. + '%0A re @@ -2483,17 +2483,17 @@ ult += ' -. + '%0A%0A
d0ca9aa6cf39c4743e398f65e4c7f5bbc3c03d78
Clarify API sample
api_sample.py
api_sample.py
# Add ./lib to the path for importing nassl
import os
import sys
sys.path.insert(1, os.path.join(os.path.dirname(os.path.abspath(__file__)), 'lib'))

from sslyze.plugins_finder import PluginsFinder
from sslyze.plugins_process_pool import PluginsProcessPool
from sslyze.server_connectivity import ServerConnectivityInfo, ServerConnectivityError
from sslyze.ssl_settings import TlsWrappedProtocolEnum

# Setup the servers to scan and ensure they are reachable
hostname = 'smtp.gmail.com'
try:
    server_info = ServerConnectivityInfo(hostname=hostname, port=587,
                                         tls_wrapped_protocol=TlsWrappedProtocolEnum.STARTTLS_SMTP)
    server_info.test_connectivity_to_server()
except ServerConnectivityError as e:
    # Could not establish an SSL connection to the server
    raise RuntimeError('Error when connecting to {}: {}'.format(hostname, e.error_msg))

# Get the list of available plugins
sslyze_plugins = PluginsFinder()

# Create a process pool to run scanning commands concurrently
plugins_process_pool = PluginsProcessPool(sslyze_plugins)

# Queue some scan commands
print '\nQueuing some commands...'
plugins_process_pool.queue_plugin_task(server_info, 'sslv3')
plugins_process_pool.queue_plugin_task(server_info, 'reneg')
plugins_process_pool.queue_plugin_task(server_info, 'certinfo_basic')

# Process the results
reneg_result = None
print '\nProcessing results...'
for server_info, plugin_command, plugin_result in plugins_process_pool.get_results():
    # Each plugin result has attributes with the information you're looking for, specific to each plugin
    # All these attributes are documented within each plugin's module
    if plugin_result.plugin_command == 'sslv3':
        # Do something with the result
        print 'SSLV3 cipher suites'
        for cipher in plugin_result.accepted_cipher_list:
            print ' {}'.format(cipher.name)

    elif plugin_result.plugin_command == 'reneg':
        reneg_result = plugin_result
        print 'Client renegotiation: {}'.format(plugin_result.accepts_client_renegotiation)
        print 'Secure renegotiation: {}'.format(plugin_result.supports_secure_renegotiation)

    elif plugin_result.plugin_command == 'certinfo_basic':
        print 'Server Certificate CN: {}'.format(plugin_result.certificate_chain[0].as_dict['subject']['commonName'])

# All plugin results also always expose two APIs:
# What the SSLyze CLI would output to the console
print '\nSSLyze text output'
for line in reneg_result.as_text():
    print line

print '\nSSLyze XML node'
# The XML node for the SSLyze CLI XML output
print reneg_result.as_xml()

# You should use the process pool to make scans quick, but you can also call plugins directly
from sslyze.plugins.openssl_cipher_suites_plugin import OpenSslCipherSuitesPlugin
print '\nCalling a plugin directly...'
plugin = OpenSslCipherSuitesPlugin()
plugin_result = plugin.process_task(server_info, 'tlsv1')
for cipher in plugin_result.accepted_cipher_list:
    print ' {}'.format(cipher.name)
Python
0.000003
@@ -1103,16 +1103,91 @@ commands +; the commands are same as what is described in the SSLyze CLI --help text. %0Aprint '
22feb7ac3e8a5d4ef7c0e371752cae24974445a8
fix dj21: removed CommandParser argument cmd
django_smoke_tests/management/commands/smoke_tests.py
django_smoke_tests/management/commands/smoke_tests.py
import argparse
import os

from django.core.management import BaseCommand, CommandParser
from django.core.management.base import CommandError

from ...generator import SmokeTestsGenerator


class Command(BaseCommand):
    help = "Smoke tests for Django endpoints."

    def create_parser(self, prog_name, subcommand):
        """
        Override in order to skip default parameters like verbosity, version, etc.
        """
        parser = CommandParser(
            self, prog="%s %s" % (os.path.basename(prog_name), subcommand),
            description=self.help or None,
        )
        # create hidden options (required by BaseCommand)
        parser.add_argument('--no-color', help=argparse.SUPPRESS)
        parser.add_argument('--pythonpath', help=argparse.SUPPRESS)
        parser.add_argument('--traceback', help=argparse.SUPPRESS)
        self.add_arguments(parser)
        return parser

    def add_arguments(self, parser):
        methods_group = parser.add_mutually_exclusive_group()
        methods_group.add_argument(
            '--http-methods', default=None, type=str,
            help='comma separated HTTP methods that will be executed for all endpoints, '
                 'eg. GET,POST,DELETE [default: GET,POST,PUT,DELETE]'
        )
        methods_group.add_argument(
            '-g', '--get-only', action='store_true', default=False, dest='get_only',
            help='shortcut for --http-methods GET'
        )
        parser.add_argument(
            '--allow-status-codes', default=None, type=str,
            help='comma separated HTTP status codes that will be considered as success responses, '
                 'eg. 200,201,204 [default: 200,201,301,302,304,405]'
        )
        parser.add_argument(
            '--disallow-status-codes', default=None, type=str,
            help='comma separated HTTP status codes that will be considered as fail responses, '
                 'eg. 404,500'
        )
        parser.add_argument(
            '--settings',
            help=(
                'path to the Django settings module, eg. myproject.settings'
            ),
        )
        parser.add_argument(
            '--configuration',
            help=(
                'name of the configuration class to load, e.g. Development'
            ),
        )
        parser.add_argument(
            '--fixture',
            help=(
                'Django fixture JSON file to be loaded before executing smoke tests'
            ),
        )
        parser.add_argument(
            '--no-migrations', dest='no_migrations', action='store_true',
            help='flag for skipping migrations, database will be created directly from models'
        )
        parser.set_defaults(no_migrations=False)
        parser.add_argument(
            '--no-db', dest='no_db', action='store_true',
            help='flag for skipping database creation'
        )
        parser.set_defaults(no_db=False)
        parser.add_argument(
            'app_names', default=None, nargs='?',
            help='names of apps to test',
        )

    def handle(self, *args, **options):
        if options.get('get_only'):
            methods_to_test = ['GET']
        else:
            methods_to_test = self._get_list_from_string(options.get('http_methods'))

        allowed_status_codes = self._get_list_from_string(options.get('allow_status_codes'))
        disallowed_status_codes = self._get_list_from_string(options.get('disallow_status_codes'))
        disable_migrations = options.get('no_migrations')
        use_db = not options.get('no_db')
        app_names = self._get_list_from_string(options.get('app_names'))
        settings_module = options.get('settings')
        configuration = options.get('configuration')
        fixture_path = options.get('fixture')

        if allowed_status_codes and disallowed_status_codes:
            raise CommandError(
                'You can either specify --allow-status-codes or --disallow-status-codes. '
                'You must not specify both.'
            )

        generator = SmokeTestsGenerator(
            http_methods=methods_to_test,
            allowed_status_codes=allowed_status_codes,
            disallowed_status_codes=disallowed_status_codes,
            use_db=use_db,
            app_names=app_names,
            disable_migrations=disable_migrations,
            settings_module=settings_module,
            configuration=configuration,
            fixture_path=fixture_path,
        )
        generator.execute()

        if generator.warnings:
            self.stdout.write(
                'Some tests were skipped. Please report on '
                'https://github.com/kamilkijak/django-smoke-tests/issues.'
            )
            self.stdout.write('\n'.join(generator.warnings))

    @staticmethod
    def _get_list_from_string(options):
        """
        Transforms comma separated string into a list of those elements.
        Transforms strings to ints if they are numbers.
        Eg.: "200,'400','xxx'" => [200, 400, 'xxx']
        """
        if options:
            return [
                int(option) if option.isdigit() else option.strip('/')
                for option in options.split(',')
            ]
        return None
Python
0.000174
@@ -430,16 +430,53 @@ -parser = +def _create_parser(*args):%0A return Com @@ -499,21 +499,42 @@ -self, + *args,%0A prog=%22%25 @@ -596,16 +596,20 @@ + descript @@ -631,32 +631,198 @@ r None,%0A + )%0A try: # django 2.1+%0A parser = _create_parser()%0A except TypeError: # django 2.0-%0A parser = _create_parser(self )%0A # crea
aace4c7449f964109e422e38a669f72ec62c7767
Make sure filter arguments evaluates to True before setting
pysnow/request.py
pysnow/request.py
# -*- coding: utf-8 -*-

import json
import requests

from .response import Response
from .query import Query
from .report import Report
from .exceptions import InvalidUsage


class PreparedRequest(object):
    """Creates a new :class:`PreparedRequest` object.

    :param request_params: Request parameters to pass along with the request
    :param session: :class:`request.Session` object
    :param generator_size: Generator size / internal page size
    :param enable_reporting: Generate a resource-response report for this request
    :param generator_size: Sets the size of each yield, a higher value might increases performance some
        but will cause pysnow to consume more memory when serving big results.
    :param raise_on_empty: Whether or not to raise an exception on 404 (no matching records)
    :param base_url: Base URL to use for requests
    :param base_path: Base path to use for requests (e.g. /api/now)
    :param api_path: API path to use for requests (e.g. /table/incident)
    """

    def __init__(self, request_params=None, session=None, generator_size=None,
                 enable_reporting=False, raise_on_empty=True, resource=None,
                 base_url=None, base_path=None, api_path=None):

        self._request_params = request_params
        self._session = session
        self._generator_size = generator_size
        self._enable_reporting = enable_reporting
        self._raise_on_empty = raise_on_empty
        self._resource = resource
        self._base_url = base_url
        self._base_path = base_path
        self._api_path = api_path
        self._url = self._get_url()

        if self._enable_reporting:
            self._report = Report(resource, generator_size, session)
        else:
            self._report = None

    def _get_url(self, sys_id=None):
        """Builds a full URL using base_url, base_path and api_path

        :param sys_id: (optional) Appends the provided sys_id to the URL
        :return: URL string
        """

        url_str = self._base_url + self._base_path + self._api_path

        if sys_id:
            return "%s/%s" % (url_str, sys_id)

        return url_str

    def _send(self, method, url=None, **kwargs):
        """Prepares and sends a new :class:`requests.Request` object, uses prepare() as it makes
        wrapping simpler. Also, sets request params in report, if reporting is enabled.

        :param method: Request method
        :param url: (optional) URL override (instead of :prop:`_url`)
        :param kwargs: kwargs to pass along to Request
        :return: :class:`requests.Response` object
        """

        url = url or self._url
        params = kwargs.pop('params', self._request_params)

        request = requests.Request(method, url, auth=self._session.auth, params=params, **kwargs)

        if self._enable_reporting:
            self._report.request_params = params

        prepared = request.prepare()
        response = self._session.send(prepared)

        return response

    def _get_response(self, method, **kwargs):
        """Response wrapper - creates a :class:`requests.Response` object and passes along to
        :class:`pysnow.Response` for validation and parsing.

        :param args: args to pass along to _send()
        :param kwargs: kwargs to pass along to _send()
        :return: :class:`pysnow.Response` object
        """

        return Response(self._send(method, **kwargs),
                        request_callback=self._send,
                        raise_on_empty=self._raise_on_empty,
                        report=self._report)

    def _get_request_params(self, query=None, fields=list(), limit=None,
                            order_by=list(), offset=None):
        """Constructs request params dictionary to pass along with a :class:`requests.PreparedRequest` object

        :param query: Dictionary, string or :class:`QueryBuilder` object
        :param limit: Limits the number of records returned
        :param fields: List of fields to include in the response
        :param order_by: List of columns used in sorting.
            Example: ['category', '-created_on'] would sort the category field in ascending order,
            with a secondary sort by created_on in descending order.
        :param offset: Number of records to skip before returning records
        :return: :class:`pysnow.Query` dictionary-like object
        """

        query_params = Query(query, self._request_params)

        # Generator responses creates its "iterable chunks" using `sysparm_limit` and relies on the
        # use of link headers, which set_limit() disables, effectively disabling the use of generators.
        if not limit:
            query_params.set_generator_size(self._generator_size)
        else:
            query_params.set_limit(limit)

        query_params.set_fields(fields)
        query_params.set_offset(offset)
        query_params.set_sorting(order_by)

        return query_params.as_dict()

    def get(self, **kwargs):
        """Fetches one or more records, exposes a public API of :class:`pysnow.Response`

        :param kwargs: kwargs to pass along to :class:`requests.Request`
        :return: :class:`pysnow.Response` object
        """

        request_params = self._get_request_params(**kwargs)
        return self._get_response('GET', params=request_params)

    def custom(self, method, path_append=None, headers=None, **kwargs):
        """Creates a custom request

        :param method: HTTP method
        :param path_append: (optional) append path to resource.api_path
        :param headers: (optional) Dictionary of headers to add or override
        :param kwargs: kwargs to pass along to :class:`requests.Request`
        :return: :class:`pysnow.Response` object
        """

        if headers:
            self._session.headers.update(headers)

        if path_append is not None:
            if isinstance(path_append, str) and path_append.startswith('/'):
                self._url = "%s/%s" % (self._url, path_append)
            else:
                raise InvalidUsage("Argument 'path_append' must be a string in the following format: "
                                   "/path-to-append[/.../...]")

        return self._get_response(method, **kwargs)

    def insert(self, payload):
        """Creates a new record

        :param payload: Dictionary payload
        :return: Dictionary containing the inserted record
        """

        return self._get_response('POST', data=json.dumps(payload)).one()

    def update(self, query, payload):
        """Updates a record

        :param query: Dictionary, string or :class:`QueryBuilder` object
        :param payload: Dictionary payload
        :return: Dictionary containing the updated record
        """

        if not isinstance(payload, dict):
            raise InvalidUsage("Update payload must be of type dict")

        record = self.get(query=query).one()
        url = self._get_url(sys_id=record['sys_id'])

        return self._get_response('PUT', url=url, data=json.dumps(payload)).one()

    def delete(self, query):
        """Deletes a record

        :param query: Dictionary, string or :class:`QueryBuilder` object
        :return: Dictionary containing the result
        """

        record = self.get(query=query).one()
        url = self._get_url(sys_id=record['sys_id'])

        return self._get_response('DELETE', url=url).one()
Python
0.000608
@@ -4739,24 +4739,47 @@ mit(limit)%0A%0A + if fields:%0A quer @@ -4802,24 +4802,48 @@ lds(fields)%0A +%0A if offset:%0A quer @@ -4870,16 +4870,42 @@ offset)%0A +%0A if order_by:%0A
e7dac5b5cc02f1692a00c39a82eeb62b620aeac4
normalize all paths (remove .., .)
bin/misc.py
bin/misc.py
#! /usr/bin/env python2.5
# -*- coding: utf-8 -*-
# vim: set expandtab :

from __future__ import with_statement

__author__ = 'Alexandru Mosoi <brtzsnr@gmail.com>'

import fcntl
import os
import socket
import struct
import ConfigParser

DATE_FORMAT = '%Y.%m.%d %H:%M:%S'
VMCHECKER_INI = 'vmchecker_storer.ini'

_config = None


def config():
    """Returns a RawConfigParse containing vmchecker's configuration."""
    global _config
    if _config is None:
        _config = ConfigParser.RawConfigParser()
        with open(vmcheckerPaths.config_file) as handle:
            _config.readfp(handle)
    return _config


def relative_path(*args):
    """Joins the arguments and returns a path relative to root"""
    return os.path.join(vmcheckerPaths.root, os.path.join(*args))


def repository(assignment):
    """Returns repository where sources for assignment are stored.

    NOTE: Full path where they are actually stored is
    `repository/assignment'"""
    return relative_path(config().get(assignment, 'Repository'))


def get_ip_address(ifname):
    """Returns ip address for network interface 'ifname'
    in standard dotted notation.

    Source from: http://code.activestate.com/recipes/439094/"""
    s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    return socket.inet_ntoa(fcntl.ioctl(
        s.fileno(),
        0x8915,  # SIOCGIFADDR
        struct.pack('256s', ifname[:15]))[20:24])


#todo
def db_file():
    """ The name of the DataBase file
    @return
        - absolute path of config file
        - None if the path isn't a file"""
    path = vmcheckerPaths.db_file
    if os.path.isfile(path):
        return path
    else:
        return None


class VmcheckerPaths(object):
    """ All paths related to vmchecker. """
    def __init__(self):
        pass

    def abs_path(self, relative):
        return os.path.join(self.root, relative)

    @property
    def root(self):
        assert 'VMCHECKER_ROOT' in os.environ, (
            'VMCHECKER_ROOT environment variable not defined')
        return os.path.abspath(os.environ['VMCHECKER_ROOT'])

    @property
    def tester_paths(self):
        """ A list of all the paths relevant to the tester machine."""
        return [self.dir_queue]

    @property
    def storer_paths(self):
        """ A list of all the paths relevant to the storer machine."""
        return [self.dir_unchecked, self.dir_checked,
                self.dir_backup, self.dir_tests]

    @property
    def dir_unchecked(self):
        """ The absolute path of the unchecked homeworks are kept.
        This path is valid on the storer machine."""
        return self.abs_path("unchecked")

    @property
    def dir_checked(self):
        """ The absolute path of the checked homeworks are kept.
        This path is valid on the storer machine."""
        return self.abs_path("checked")

    @property
    def dir_tests(self):
        """ The absolute path of the test archives for the homeworks are kept.
        This path is valid on the storer machine."""
        return self.abs_path("tests")

    @property
    def dir_queue(self):
        """ The absolute path of the task queue directory.
        This path is valid on the tester machine."""
        return self.abs_path("queue")

    @property
    def dir_backup(self):
        """ The absolute path of the directory where backups of tasks are kept.
        This path is valid on the storer machine."""
        return self.abs_path("back")

    @property
    def db_file(self):
        """ The absolute path of the database file """
        return self.abs_path("vmchecker.db")

    @property
    def config_file(self):
        """Returns absolute path for config file 'VMCHECKER_INI'"""
        path = self.abs_path(VMCHECKER_INI)
        assert os.path.isfile(path), '%s (%s) is not a file' % (
            VMCHECKER_INI, path)
        return path


vmcheckerPaths = VmcheckerPaths()
Python
0.000223
@@ -1838,32 +1838,49 @@ %0A return +os.path.normpath( os.path.join(sel @@ -1896,16 +1896,17 @@ elative) +) %0A%0A @p
4ca8c46eaf35c726604c6da93c7503d5b24416a1
fix race condition in getting floating ip (flake8)
perfkitbenchmarker/openstack/os_virtual_machine.py
perfkitbenchmarker/openstack/os_virtual_machine.py
# Copyright 2015 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import logging
import threading
import time

from perfkitbenchmarker import virtual_machine, linux_virtual_machine
from perfkitbenchmarker import flags
from perfkitbenchmarker import vm_util
from perfkitbenchmarker.openstack import os_disk
from perfkitbenchmarker.openstack import os_network
from perfkitbenchmarker.openstack import utils as os_utils

UBUNTU_IMAGE = 'ubuntu-14.04'
NONE = 'None'

FLAGS = flags.FLAGS

flags.DEFINE_boolean('openstack_config_drive', False,
                     'Add possibilities to get metadata from external drive')

flags.DEFINE_boolean('openstack_boot_from_volume', False,
                     'Boot from volume instead of an image')

flags.DEFINE_integer('openstack_volume_size', None,
                     'Size of the volume (GB)')

flags.DEFINE_enum('openstack_scheduler_policy', NONE,
                  [NONE, 'affinity', 'anti-affinity'],
                  'Add possibility to use affinity or anti-affinity '
                  'policy in scheduling process')


class OpenStackVirtualMachine(virtual_machine.BaseVirtualMachine):
  """Object representing an OpenStack Virtual Machine"""

  CLOUD = 'OpenStack'
  DEFAULT_USERNAME = 'ubuntu'
  # Subclasses should override the default image.
  DEFAULT_IMAGE = None

  _floating_ip_lock = threading.Lock()

  def __init__(self, vm_spec, network, firewall):
    """Initialize an OpenStack virtual machine.

    Args:
      vm_spec: virtual_machine.BaseVirtualMachineSpec object of the vm.
      network: network.BaseNetwork object corresponding to the VM.
      firewall: network.BaseFirewall object corresponding to the VM.
    """
    super(OpenStackVirtualMachine, self).__init__(
        vm_spec, network, firewall)
    self.name = 'perfkit_vm_%d_%s' % (self.instance_number, FLAGS.run_uri)
    self.key_name = 'perfkit_key_%d_%s' % (self.instance_number,
                                           FLAGS.run_uri)
    self.client = os_utils.NovaClient()
    self.public_network = os_network.OpenStackPublicNetwork(
        FLAGS.openstack_public_network
    )
    self.id = None
    self.pk = None
    self.user_name = self.DEFAULT_USERNAME
    self.boot_wait_time = None
    self.image = self.image or self.DEFAULT_IMAGE

  def _Create(self):
    image = self.client.images.findall(name=self.image)[0]
    flavor = self.client.flavors.findall(name=self.machine_type)[0]

    network = self.client.networks.find(
        label=FLAGS.openstack_private_network)
    nics = [{'net-id': network.id}]
    image_id = image.id
    boot_from_vol = []
    scheduler_hints = None

    if FLAGS.openstack_scheduler_policy != NONE:
      group_name = 'perfkit_%s' % FLAGS.run_uri
      try:
        group = self.client.server_groups.findall(name=group_name)[0]
      except IndexError:
        group = self.client.server_groups.create(
            policies=[FLAGS.openstack_scheduler_policy],
            name=group_name)
      scheduler_hints = {'group': group.id}

    if FLAGS.openstack_boot_from_volume:
      if FLAGS.openstack_volume_size:
        volume_size = FLAGS.openstack_volume_size
      else:
        volume_size = flavor.disk

      image_id = None
      boot_from_vol = [{'boot_index': 0,
                        'uuid': image.id,
                        'volume_size': volume_size,
                        'source_type': 'image',
                        'destination_type': 'volume',
                        'delete_on_termination': True}]

    vm = self.client.servers.create(
        name=self.name,
        image=image_id,
        flavor=flavor.id,
        key_name=self.key_name,
        security_groups=['perfkit_sc_group'],
        nics=nics,
        availability_zone=self.zone,
        block_device_mapping_v2=boot_from_vol,
        scheduler_hints=scheduler_hints,
        config_drive=FLAGS.openstack_config_drive)
    self.id = vm.id

  @vm_util.Retry(max_retries=4, poll_interval=2)
  def _PostCreate(self):
    status = 'BUILD'
    instance = None
    while status == 'BUILD':
      time.sleep(5)
      instance = self.client.servers.get(self.id)
      status = instance.status

    with self._floating_ip_lock:
      self.floating_ip = self.public_network.get_or_create()
      instance.add_floating_ip(self.floating_ip)
      logging.info('floating-ip associated: {}'.format(self.floating_ip.ip))

    while not self.public_network.is_attached(self.floating_ip):
      time.sleep(1)

    self.ip_address = self.floating_ip.ip
    self.internal_ip = instance.networks[
        FLAGS.openstack_private_network][0]

  @os_utils.retry_authorization(max_retries=4)
  def _Delete(self):
    try:
      self.client.servers.delete(self.id)
      time.sleep(5)
    except os_utils.NotFound:
      logging.info('Instance already deleted')

    self.public_network.release(self.floating_ip)

  @os_utils.retry_authorization(max_retries=4)
  def _Exists(self):
    try:
      if self.client.servers.findall(name=self.name):
        return True
      else:
        return False
    except os_utils.NotFound:
      return False

  @vm_util.Retry(log_errors=False, poll_interval=1)
  def WaitForBootCompletion(self):
    # Do one longer sleep, then check at shorter intervals.
    if self.boot_wait_time is None:
      self.boot_wait_time = 15
    time.sleep(self.boot_wait_time)
    self.boot_wait_time = 5
    resp, _ = self.RemoteCommand('hostname', retries=1)
    if self.bootable_time is None:
      self.bootable_time = time.time()
    if self.hostname is None:
      self.hostname = resp[:-1]

  def CreateScratchDisk(self, disk_spec):
    name = '%s-scratch-%s' % (self.name, len(self.scratch_disks))
    scratch_disk = os_disk.OpenStackDisk(disk_spec, name, self.zone,
                                         self.project)
    self.scratch_disks.append(scratch_disk)

    scratch_disk.Create()
    scratch_disk.Attach(self)

    self.FormatDisk(scratch_disk.GetDevicePath())
    self.MountDisk(scratch_disk.GetDevicePath(), disk_spec.mount_point)

  def _CreateDependencies(self):
    self.ImportKeyfile()

  def _DeleteDependencies(self):
    self.DeleteKeyfile()

  def ImportKeyfile(self):
    if not (self.client.keypairs.findall(name=self.key_name)):
      cat_cmd = ['cat', vm_util.GetPublicKeyPath()]
      key_file, _ = vm_util.IssueRetryableCommand(cat_cmd)
      pk = self.client.keypairs.create(self.key_name,
                                       public_key=key_file)
    else:
      pk = self.client.keypairs.findall(name=self.key_name)[0]
    self.pk = pk

  @os_utils.retry_authorization(max_retries=4)
  def DeleteKeyfile(self):
    try:
      self.client.keypairs.delete(self.pk)
    except os_utils.NotFound:
      logging.info("Deleting key doesn't exists")


class DebianBasedOpenStackVirtualMachine(OpenStackVirtualMachine,
                                         linux_virtual_machine.DebianMixin):
  DEFAULT_IMAGE = UBUNTU_IMAGE
Python
0
@@ -5240,16 +5240,33 @@ .format( +%0A self.flo
0e2548637d9726dc549b13abc3a6b38c51e300bd
not count , and . values in allele count
franklin/snv/readers.py
franklin/snv/readers.py
'''
Created on 2011 aza 21

@author: peio
'''


class VcfParser(object):
    'A vcf reader'
    def __init__(self, fpath):
        'Class initiator'
        self._fpath = fpath
        self.header = None
        self._get_header()
        self._index = None

    def _get_version(self):
        'version of the vcf'
        version_unformat = self.header['format']
        return version_unformat.split('v')[1]
    version = property(_get_version)

    def _get_header(self):
        'it returns the header'
        if self.header is not None:
            return self.header
        headers = {}
        for line in open(self._fpath):
            if not line.startswith('#'):
                break
            if line.startswith('##'):
                line = line.strip()
                line = line.lstrip('##')
                kind, value = line.split('=', 1)
                if kind == 'FILTER':
                    if kind not in headers:
                        headers[kind] = {}
                    filter_type, filter_info = value.split(',', 1)
                    headers[kind][filter_type] = filter_info.strip('"')
                elif kind in ('FORMAT', 'INFO'):
                    if kind not in headers:
                        headers[kind] = {}
                    name, example, type_, desc = value.split(',')
                    headers[kind][name] = {'type':type_, 'example':example,
                                           'description':desc}
                else:
                    headers[kind] = value
            else:
                line = line.lstrip('#')
                headers['colnames'] = line.split()
        self.header = headers

    def _get_vcfs(self):
        'vcf generator'
        colnames = self.header['colnames']
        for line in open(self._fpath):
            if line.startswith('#'):
                continue
            yield self._parse_vcf_line(line, colnames)
    vcfs = property(_get_vcfs)

    def _parse_vcf_line(self, line, colnames):
        '''It parses the cvf svn line'''
        vcf_items = line.split()
        vcf = dict(zip(colnames[:9], vcf_items[:9]))
        # reformat FILTER
        vcf['FILTER'] = vcf['FILTER'].split(';')
        # REformat INFO
        info = vcf['INFO']
        vcf['INFO'] = {}
        for info_ in info.split(';'):
            info_key, info_value = info_.split('=')
            vcf['INFO'][info_key] = info_value
        # reformat FORMAT
        format_string = vcf['FORMAT']
        vcf['FORMAT'] = {}
        for format_ in format_string.split(';'):
            format_key, format_value = format_.split(':')
            vcf['FORMAT'][format_key] = format_value

        vcf['samples'] = {}
        for samples in zip(colnames[9:], vcf_items[9:]):
            allele_count = {}
            alleles, values = samples[1].split(':')
            for index , allele in enumerate(alleles.split('|')):
                allele = vcf['REF'] if allele == 0 else vcf['ALT']
                try:
                    count_ = int(values[index])
                except ValueError:
                    continue
                allele_count[allele] = count_
            vcf['samples'][samples[0]] = allele_count
        return vcf

    def _make_index(self):
        '''it makes an index of the vcf file.

        It takes the vcf position (chrom, position) as index'''
        if self._index is not None:
            return self._index
        index = {}
        fhand = open(self._fpath, 'rt')
        rawline = 'filled'
        while len(rawline) != 0:
            prior_tell = fhand.tell()
            rawline = fhand.readline()
            if rawline and rawline[0] == '#':
                continue
            index[tuple(rawline.split()[:2])] = prior_tell
        self._index = index

    def get_snv(self, position):
        'It returns an snv giving it position'
        colnames = self.header['colnames']
        if self._index is None:
            self._make_index()
        fhand = open(self._fpath)
        file_position = self._index[position]
        fhand.seek(file_position)
        return self._parse_vcf_line(fhand.readline(), colnames)
Python
0.999987
@@ -3016,16 +3016,27 @@ t(values +.split(',') %5Bindex%5D)