Dataset columns:
- commit: string, 40 characters
- subject: string, 1 to 3.25k characters
- old_file: string, 4 to 311 characters
- new_file: string, 4 to 311 characters
- old_contents: string, 0 to 26.3k characters
- lang: categorical, 3 values
- proba: float64, 0 to 1
- diff: string, 0 to 7.82k characters
6fb6fdee06410d1d051134f0b9dcb47ad2ac1885
Simplify code around re-raising an error.
azurectl/setup_account_task.py
azurectl/setup_account_task.py
# Copyright (c) 2015 SUSE Linux GmbH. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # """ usage: azurectl setup account -h | --help azurectl setup account list azurectl setup account remove --name=<configname> azurectl setup account add --name=<configname> --publish-settings-file=<file> --storage-account-name=<storagename> --container-name=<containername> [--subscription-id=<subscriptionid>] azurectl setup account help commands: list list configured account sections remove remove specified account section add add a new account section to the config file help show manual page for config command options: --name=<configname> section name to identify this account --publish-settings-file=<file> path to the Microsoft Azure account publish settings file --storage-account-name=<storagename> storage account name to use by default --container-name=<containername> container name for storage account to use by default --subscription-id=<subscriptionid> subscription id, if more than one subscription is included in your publish settings file. """ import sys # project from cli_task import CliTask from logger import log from help import Help from account_setup import AccountSetup from data_collector import DataCollector from data_output import DataOutput from azurectl_exceptions import AzureAccountLoadFailed from config_file_path import ConfigFilePath class SetupAccountTask(CliTask): """ Process setup config commands """ def __init__(self, load_config=True): """ Override CliTask's init, gracefully handle the case where config file does not exist, so a new one may be added. """ try: CliTask.__init__(self, load_config) self.config_file = self.config.config_file except AzureAccountLoadFailed: t, v, tb = sys.exc_info() if self.command_args['add']: self.config_file = ConfigFilePath().default_new_config() else: raise t, v, tb def process(self): self.manual = Help() if self.__help(): return self.setup = AccountSetup(self.config_file) self.result = DataCollector() self.out = DataOutput( self.result, self.global_args['--output-format'], self.global_args['--output-style'] ) if self.command_args['list']: self.__list() elif self.command_args['remove']: self.__remove() elif self.command_args['add']: self.__add() def __help(self): if self.command_args['help']: self.manual.show('azurectl::setup::account') else: return False return self.manual def __add(self): if self.setup.add( self.command_args['--name'], self.command_args['--publish-settings-file'], self.command_args['--storage-account-name'], self.command_args['--container-name'], self.command_args['--subscription-id'] ): log.info('Added Account %s', self.command_args['--name']) def __list(self): account_info = self.setup.list() if not account_info: log.info('There are no accounts configured') else: self.result.add('accounts', account_info) self.out.display() def __remove(self): if self.setup.remove( self.command_args['--name'] ): log.info('Removed Account %s', self.command_args['--name'])
Python
0.999998
@@ -2477,46 +2477,8 @@ ed:%0A - t, v, tb = sys.exc_info()%0A @@ -2630,17 +2630,8 @@ aise - t, v, tb %0A%0A
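The diff above drops Python 2's three-argument re-raise in favour of a bare `raise`, which re-raises the active exception with its original traceback. A minimal sketch of the equivalence (names are illustrative, not from azurectl):

    def risky():
        raise ValueError("boom")

    try:
        try:
            risky()
        except ValueError:
            # A bare raise re-raises the active exception with its
            # traceback intact -- no need to unpack sys.exc_info() as in
            # Python 2's "t, v, tb = sys.exc_info(); raise t, v, tb".
            raise
    except ValueError as e:
        print("caught again: %s" % e)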
e12f0d753817c4774af9e59aff81a6fa6b98b119
Version devel
src/robotide/version.py
src/robotide/version.py
# Automatically generated by `pavement.py`. VERSION = '1.4.1'
Python
0
@@ -52,13 +52,24 @@ = ' -1.4.1 +devel-2015-08-13 '%0A
03cae330bcad76fc01ea591e527790bc35139490
Fix itypes param typo
client/movie_rec_app.py
client/movie_rec_app.py
from appdata import AppData import predictionio import sys from app_config import APP_KEY, API_URL ENGINE_NAME = 'movie-rec' SIM_ENGINE_NAME = 'movie-sim' class App: def __init__(self): self._app_data = AppData() self._client = predictionio.Client(APP_KEY, 1, API_URL) def run(self): state = "[Main Menu]" prompt = "\n"\ "%s\n"\ "%s\n"\ "Please input selection:\n"\ " 0: Quit application.\n"\ " 1: Get personalized recommendation.\n"\ " 2: Display user's data.\n"\ " 3: Display movie data.\n" % (state, '-'*len(state)) while True: print prompt choice = raw_input().lower() if choice == '0': print "\nGood Bye!\n" break elif choice == '1': self.recommend_task(state) elif choice == '2': self.display_user_task(state) elif choice == '3': self.get_similar_movies_task(state) else: print '[Error] \'%s\' is not a valid selection.' % choice self._client.close() def recommend_task(self, prev_state): state = prev_state + " / [Get Recommendations]" prompt = "\n"\ "%s\n"\ "%s\n"\ "Please enter user id:" % (state, '-'*len(state)) while True: print prompt choice = raw_input().lower() u = self._app_data.get_user(choice) if u: n = 10 print "[Info] Getting top %s item recommendations for user %s..." % (n, u.uid) try: self._client.identify(u.uid) rec = self._client.get_itemrec_topn(ENGINE_NAME, n) u.rec = rec['pio_iids'] self.display_items(u.rec) except predictionio.ItemRecNotFoundError: print "[Info] Recommendation not found" print "[Info] Go back to previous menu..." break else: print "[Error] invalid user id %s. Go back to previous menu..." % choice break def get_similar_movies_task(self, prev_state): state = prev_state + "/ [Get Similar Movies]" prompt = "\n"\ "%s\n"\ "%s\n"\ "Please enter movie id:" % (state, '-'*len(state)) while True: print prompt choice = raw_input().lower() i = self._app_data.get_item(choice) if i: n = 10 self.display_items((i.iid,), all_info=False) print "\n[Info] People who liked this may also liked..." try: rec = self._client.get_itemsim_topn(SIM_ENGINE_NAME, i.iid, n, { 'pio_itypes' : v.genres }) self.display_items(rec['pio_iids'], all_info=False) except predictionio.ItemSimNotFoundError: print "[Info] Similar movies not found" print "[Info] Go back to previous menu..." break else: print "[Error] invalid item id %s. Go back to previous menu..." % choice break def display_user_task(self, prev_state): state = prev_state + " / [Display User]" prompt = "\n"\ "%s\n"\ "%s\n"\ "Please enter user id:" % (state, '-'*len(state)) while True: print prompt choice = raw_input().lower() u = self._app_data.get_user(choice) if u: print "[Info] User %s:" % u.uid n = 10 topn_rate_actions = self._app_data.get_top_rate_actions(u.uid, n) print "\n[Info] Top %s movies rated by this user:" % n self.display_rate_actions(topn_rate_actions) print "\n[Info] Movies recommended to this user:" self.display_items(u.rec) self.wait_for_ack() print "\n[Info] Go back to previous menu..." break else: print "[Error] invalid user id %s. Go back to previous menu..." % choice break def display_items(self, iids, all_info=False): """print item info for each iid in the list """ if iids: for iid in iids: item = self._app_data.get_item(iid) if item: if all_info: print "[Info] %s" % item else: print "[Info] (%s) %s %s" % (item.iid, item.name, item.release_date.strftime("%d-%b-%Y")) else: print "[Error] Invalid item id %s" % iid else: print "[Info] Empty." 
def display_rate_actions(self, actions): """print iid and rating """ if actions: for a in actions: item = self._app_data.get_item(a.iid) if item: print "[Info] %s, rating = %s" % (item.name, a.rating) else: print "[Error] Invalid item id %s" % a.iid else: print "[Info] Empty." def wait_for_ack(self): prompt = "\nPress enter to continue..." print prompt choice = raw_input().lower() if __name__ == '__main__': print "\nWelcome To PredictionIO Python-SDK Demo App!" print "============================================\n" my_app = App() my_app.run()
Python
0.000895
@@ -2248,17 +2248,17 @@ ypes' : -v +i .genres
202cec13f135988e5e73764790f1d10427d6f139
fix import path for InstagramException; it had been causing exceptions when running examples
InstagramAPI/src/InstagramRegistration.py
InstagramAPI/src/InstagramRegistration.py
import hmac import json import pycurl import re import urllib from collections import OrderedDict from InstagramAPI import InstagramException try: from StringIO import StringIO as BytesIO except ImportError: from io import BytesIO from Constants import Constants from Utils import * class InstagramRegistration(object): def __init__(self, debug=False, IGDataPath=None): self.debug = None self.IGDataPath = None self.username = None self.uuid = None self.userAgent = None self.proxy = None # Proxy self.proxy_auth = None # Proxy Auth self.username = '' self.debug = debug self.uuid = self.generateUUID(True) if IGDataPath is not None: self.IGDataPath = IGDataPath else: self.IGDataPath = os.path.join( os.path.join(os.path.dirname(os.path.realpath(__file__)), 'data'), '' ) self.userAgent = 'Instagram ' + Constants.VERSION + ' Android (18/4.3; 320dpi; 720x1280; Xiaomi; HM 1SW; armani; qcom; en_US)' def setProxy(self, proxy, port=None, username=None, password=None): """ Set the proxy. :type proxy: str :param proxy: Full proxy string. Ex: user:pass@192.168.0.0:8080 Use $proxy = "" to clear proxy :type port: int :param port: Port of proxy :type username: str :param username: Username for proxy :type password: str :param password: Password for proxy :raises: InstagramException """ if proxy == "": self.proxy = "" return proxy = parse_url(proxy) if port and isinstance(port, int): proxy['port'] = int(port) if username and password: proxy['user'] = username proxy['pass'] = password if proxy['host'] and proxy['port'] and isinstance(proxy['port'], int): self.proxy = proxy['host'] + ':' + proxy['port'] else: raise InstagramException('Proxy host error. Please check ip address and port of proxy.') if proxy['user'] and proxy['pass']: self.proxy_auth = proxy['user'] + ':' + proxy['pass'] def checkUsername(self, username): """ Checks if the username is already taken (exists). :type username: str :param username: :rtype: object :return: Username availability data """ data = json.dumps( OrderedDict([ ('_uuid', self.uuid), ('username', username), ('_csrftoken', 'missing'), ]) ) return self.request('users/check_username/', self.generateSignature(data))[1] def createAccount(self, username, password, email): """ Register account. 
:type username: str :param username: :type password: str :param password: :type email: str :param email: :rtype: object :return: Registration data """ data = json.dumps( OrderedDict([ ('phone_id', self.uuid), ('_csrftoken', 'missing'), ('username', username), ('first_name', ''), ('guid', self.uuid), ('device_id', self.generateDeviceId(hashlib.md5(username + password).hexdigest())), ('email', email), ('force_sign_up_code', ''), ('qs_stamp', ''), ('password', password), ]) ) result = self.request('accounts/create/', self.generateSignature(data)) if 'account_created' in result[1] and result[1]['account_created'] == True: self.username_id = result[1]['created_user']['pk'] file_put_contents(self.IGDataPath + username + "-userId.dat", self.username_id) match = re.search(r'^Set-Cookie: csrftoken=([^;]+)', result[0], re.MULTILINE) token = match.group(1) if match else '' self.username = username file_put_contents(self.IGDataPath + username + "-token.dat", token) os.rename(self.IGDataPath + 'cookies.dat', self.IGDataPath + username + "-cookies.dat") return result def generateDeviceId(self, seed): return 'android-'+hashlib.md5(seed).hexdigest()[16:] def generateSignature(self, data): hash_var_renamed = hmac.new(Constants.IG_SIG_KEY, data, hashlib.sha256).hexdigest() # todo renamed variable hash return 'ig_sig_key_version=' + Constants.SIG_KEY_VERSION + '&signed_body=' + hash_var_renamed + '.' + urllib.quote_plus( data) def generateUUID(self, type): ##todo finish mt_rand uuid = '%04x%04x-%04x-%04x-%04x-%04x%04x%04x' % ( mt_rand(0, 0xffff), mt_rand(0, 0xffff), mt_rand(0, 0xffff), mt_rand(0, 0x0fff) | 0x4000, mt_rand(0, 0x3fff) | 0x8000, mt_rand(0, 0xffff), mt_rand(0, 0xffff), mt_rand(0, 0xffff) ) return uuid if type else uuid.replace('-', '') def request(self, endpoint, post=None): buffer = BytesIO() ch = pycurl.Curl() ch.setopt(pycurl.URL, Constants.API_URL + endpoint) ch.setopt(pycurl.USERAGENT, self.userAgent) ch.setopt(pycurl.WRITEFUNCTION, buffer.write) ch.setopt(pycurl.FOLLOWLOCATION, True) ch.setopt(pycurl.HEADER, True) ch.setopt(pycurl.VERBOSE, False) if os.path.isfile(self.IGDataPath + self.username + "-cookies.dat"): ch.setopt(pycurl.COOKIEFILE, self.IGDataPath + self.username + "-cookies.dat") ch.setopt(pycurl.COOKIEJAR, self.IGDataPath + self.username + "-cookies.dat") else: ch.setopt(pycurl.COOKIEFILE, self.IGDataPath + 'cookies.dat') ch.setopt(pycurl.COOKIEJAR, self.IGDataPath + 'cookies.dat') if post is not None: ch.setopt(pycurl.POST, True) ch.setopt(pycurl.POSTFIELDS, post) if self.proxy: ch.setopt(pycurl.PROXY, self.proxy) if self.proxy_auth: ch.setopt(pycurl.PROXYUSERPWD, self.proxy_auth) ch.perform() resp = buffer.getvalue() header_len = ch.getinfo(pycurl.HEADER_SIZE) header = resp[0: header_len] body = resp[header_len:] ch.close() if self.debug: print "REQUEST: " + endpoint if post is not None: if not isinstance(post, list): print "DATA: " + str(post) print "RESPONSE: " + body return [header, json.loads(body)]
Python
0
@@ -109,16 +109,20 @@ agramAPI +.src import
8b927f90c7416cfe1ba50ac1ad073e859206410d
increase max number length from 9 to 12
plugins/conversion.py
plugins/conversion.py
import logging log = logging.getLogger(__name__) import collections import datetime import json import re import urllib.error import botologist.http import botologist.plugin def format_number(number): if not isinstance(number, int): number = float(number) if number % 1 == 0.0: number = int(number) if isinstance(number, int): f_number = '{:,}'.format(number) else: f_number = '{:,.2f}'.format(float(number)) if len(f_number) > 9: f_number = '{:.2}'.format(number) return f_number def get_duckduckgo_data(url): response = botologist.http.get(url) content = response.read().decode() return json.loads(content) def get_conversion_result(*args): query = ' '.join([str(arg) for arg in args]) params = collections.OrderedDict([ ('q', query), ('format', 'json'), ('no_html', 1) ]) qs = urllib.parse.urlencode(params) url = 'http://api.duckduckgo.com/?' + qs.lower() try: data = get_duckduckgo_data(url) except urllib.error.URLError: log.warning('DuckDuckGo request failed', exc_info=True) return False if data['AnswerType'] == 'conversions' and data['Answer']: return data['Answer'] def get_currency_data(): url = 'http://www.ecb.europa.eu/stats/eurofxref/eurofxref-daily.xml' try: response = botologist.http.get(url) content = response.read().decode() except urllib.error.URLError: log.warning('ECB exchange data request failed', exc_info=True) return {} matches = re.findall(r'<Cube currency=["\']([A-Za-z]{3})["\'] rate=["\']([\d.]+)["\']/>', content) currency_data = {} for currency, exchange_rate in matches: currency_data[currency.upper()] = float(exchange_rate) log.info('Found %d currencies', len(currency_data)) return currency_data class Currency: last_fetch = None currency_data = None aliases = {'NIS': 'ILS', 'EURO': 'EUR'} @classmethod def currencies(cls): cls.load() return cls.currency_data.keys() @classmethod def convert(cls, amount, from_cur, to_cur): cls.load() try: amount = float(amount) except ValueError: return None from_cur = from_cur.upper() to_cur = to_cur.upper() if from_cur in cls.aliases: from_cur = cls.aliases[from_cur] if to_cur in cls.aliases: to_cur = cls.aliases[to_cur] if from_cur == 'EUR': if to_cur not in cls.currency_data: return None return amount * cls.currency_data[to_cur] if to_cur == 'EUR': if from_cur not in cls.currency_data: return None return amount / cls.currency_data[from_cur] if from_cur in cls.currency_data and to_cur in cls.currency_data: amount = amount / cls.currency_data[from_cur] return amount * cls.currency_data[to_cur] return None @classmethod def load(cls): now = datetime.datetime.now() if cls.last_fetch: diff = now - cls.last_fetch if not cls.last_fetch or diff.seconds > 3600: cls.currency_data = get_currency_data() cls.last_fetch = now class ConversionPlugin(botologist.plugin.Plugin): amount_pattern = r'((?:[\d][\d,. ]*?|[\.][\d]*?)[km]??)' unit_pattern = r'((?:(?:square|cubic) )?[a-z.]+)' pattern = re.compile(amount_pattern + r' ?' 
+ unit_pattern + r' (into|in|to) ' + unit_pattern, re.I) @botologist.plugin.reply() def convert(self, msg): match = self.pattern.search(msg.message) if not match: return amount = match.group(1).lower().replace(' ', '').replace(',', '') try: if amount.endswith('k'): real_amount = float(amount[:-1]) * 1000 elif amount.endswith('m'): real_amount = float(amount[:-1]) * 1000000 else: real_amount = float(amount) except ValueError: return if real_amount % 1 == 0.0: real_amount = int(real_amount) conv_from = match.group(2) conv_to = match.group(4) result = Currency.convert(real_amount, conv_from, conv_to) if result: format_amount = format_number(real_amount) format_result = format_number(result) return '{} {} = {} {}'.format(format_amount, conv_from, format_result, conv_to) result = get_conversion_result(real_amount, conv_from, match.group(3), conv_to) if result: return result
Python
0.999874
@@ -446,9 +446,10 @@ ) %3E -9 +12 :%0A%09%09
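The widened cutoff means numbers up to twelve grouped characters keep their thousands separators before the code falls back to scientific notation. A standalone sketch of the same rule (the float() cast in the fallback is an addition here, guarding against a precision-with-integer ValueError):

    def format_number(number):
        if not isinstance(number, int):
            number = float(number)
            if number % 1 == 0.0:
                number = int(number)  # collapse 5.0 -> 5
        if isinstance(number, int):
            f_number = '{:,}'.format(number)
        else:
            f_number = '{:,.2f}'.format(float(number))
        if len(f_number) > 12:  # was 9 before this commit
            f_number = '{:.2}'.format(float(number))
        return f_number

    print(format_number(123456789))       # 123,456,789
    print(format_number(12345678901234))  # 1.2e+13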
bd98a1b1119b34a5435855478d733ca582ebcf0c
Update version to dev
powerpool/__init__.py
powerpool/__init__.py
__version__ = "0.6.2" __version_info__ = (0, 6, 2)
Python
0
@@ -12,17 +12,21 @@ = %220.6. -2 +3-dev %22%0A__vers @@ -49,7 +49,7 @@ 6, -2 +3 )%0A
9814f3565690f11d0d40c5470c1afdbe05037499
Use `type(self)` as a constructor
ppb_vector/vector2.py
ppb_vector/vector2.py
import typing import collections from math import acos, atan2, cos, degrees, hypot, isclose, radians, sin from numbers import Real from collections.abc import Sequence __all__ = 'Vector2', VectorLike = typing.Union[ 'Vector2', typing.List[Real], # TODO: Length 2 typing.Tuple[Real, Real], typing.Dict[str, Real], # TODO: Length 2, keys 'x', 'y' ] def is_vector_like(value: typing.Any) -> bool: return isinstance(value, (Vector2, list, tuple, dict)) _fakevector = collections.namedtuple('_fakevector', ['x', 'y']) def _mkvector(value, *, castto=_fakevector): if isinstance(value, Vector2): return value # FIXME: Allow all types of sequences elif isinstance(value, (list, tuple)) and len(value) == 2: return castto(value[0], value[1]) # FIXME: Allow all types of mappings elif isinstance(value, dict) and 'x' in value and 'y' in value and len(value) == 2: return castto(value['x'], value['y']) else: raise ValueError(f"Cannot use {value} as a vector-like") class Vector2(Sequence): def __init__(self, x: Real, y: Real): self.x = x self.y = y self.length = hypot(x, y) @classmethod def convert(cls, value: VectorLike) -> 'Vector2': """ Constructs a vector from a vector-like. """ return _mkvector(value, castto=type(cls)) def __len__(self) -> int: return 2 def __add__(self, other: VectorLike) -> 'Vector2': try: other = _mkvector(other) except ValueError: return NotImplemented rtype = type(other) if isinstance(other, Vector2) else type(self) return rtype(self.x + other.x, self.y + other.y) def __sub__(self, other: VectorLike) -> 'Vector2': try: other = _mkvector(other) except ValueError: return NotImplemented rtype = type(other) if isinstance(other, Vector2) else type(self) return rtype(self.x - other.x, self.y - other.y) def __mul__(self, other: VectorLike) -> 'Vector2': if is_vector_like(other): try: other = _mkvector(other) except ValueError: return NotImplemented return self.x * other.x + self.y * other.y elif isinstance(other, Real): return Vector2(self.x * other, self.y * other) else: return NotImplemented def __rmul__(self, other: VectorLike) -> 'Vector2': return self.__mul__(other) def __xor__(self, other: VectorLike) -> Real: """ Computes the magnitude of the cross product """ other = _mkvector(other) return self.x * other.y - self.y * other.x def __getitem__(self, item: typing.Union[str, int]) -> Real: if hasattr(item, '__index__'): item = item.__index__() if isinstance(item, str): if item == 'x': return self.x elif item == 'y': return self.y else: raise KeyError elif isinstance(item, int): if item == 0: return self.x elif item == 1: return self.y else: raise IndexError else: raise TypeError def __repr__(self) -> str: return f"{type(self).__name__}({self.x}, {self.y})" def __eq__(self, other: VectorLike) -> bool: if is_vector_like(other): other = _mkvector(other) return self.x == other.x and self.y == other.y else: return False def __ne__(self, other: VectorLike) -> bool: if is_vector_like(other): other = _mkvector(other) return self.x != other.x or self.y != other.y else: return True def __iter__(self) -> typing.Iterator[Real]: yield self.x yield self.y def __neg__(self) -> 'Vector2': return self * -1 def angle(self, other: VectorLike) -> Real: other = _mkvector(other, castto=Vector2) rv = degrees( atan2(other.x, -other.y) - atan2(self.x, -self.y) ) # This normalizes the value to (-180, +180], which is the opposite of # what Python usually does but is normal for angles if rv <= -180: rv += 360 elif rv > 180: rv -= 360 return rv def isclose(self, other: 'Vector2', *, rel_tol: float=1e-06, abs_tol: 
float=1e-3): """ Determine whether two vectors are close in value. rel_tol maximum difference for being considered "close", relative to the magnitude of the input values abs_tol maximum difference for being considered "close", regardless of the magnitude of the input values Return True if self is close in value to other, and False otherwise. For the values to be considered close, the difference between them must be smaller than at least one of the tolerances. """ diff = (self - other).length return ( diff < rel_tol * max(self.length, other.length) or diff < abs_tol ) def rotate(self, degrees: Real) -> 'Vector2': r = radians(degrees) r_cos = cos(r) r_sin = sin(r) x = self.x * r_cos - self.y * r_sin y = self.x * r_sin + self.y * r_cos return Vector2(x, y) def normalize(self) -> 'Vector2': return self.scale(1) def truncate(self, max_length: Real) -> 'Vector2': if self.length > max_length: return self.scale(max_length) return self def scale(self, length: Real) -> 'Vector2': try: scale = length / self.length except ZeroDivisionError: scale = 1 return self * scale def reflect(self, surface_normal: VectorLike) -> 'Vector2': """ Calculate the reflection of the vector against a given surface normal """ surface_normal = _mkvector(surface_normal, castto=Vector2) if not (0.99999 < surface_normal.length < 1.00001): raise ValueError("Reflection requires a normalized vector.") return self - (2 * (self * surface_normal) * surface_normal)
Python
0.000058
@@ -2343,23 +2343,26 @@ return -Vector2 +type(self) (self.x @@ -5502,23 +5502,26 @@ return -Vector2 +type(self) (x, y)%0A%0A
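Returning `type(self)(...)` instead of a hard-coded `Vector2(...)` keeps arithmetic closed under subclassing: methods inherited by a subclass then return instances of that subclass. A minimal illustration of the pattern:

    from math import hypot

    class Vector2:
        def __init__(self, x, y):
            self.x, self.y = x, y
            self.length = hypot(x, y)

        def scale(self, factor):
            # type(self) resolves to the caller's class at runtime,
            # so subclasses get back their own type.
            return type(self)(self.x * factor, self.y * factor)

    class Velocity(Vector2):
        pass

    v = Velocity(3, 4).scale(2)
    print(type(v).__name__, v.x, v.y)  # Velocity 6 8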
49cf5af6c62bb23c8fce660f4b649bb0775ecdbc
494. Target Sum
problems/test_0494.py
problems/test_0494.py
import unittest from typing import List import utils # O(len(nums) * sum(nums)) time. O(len(nums) * sum(nums)) space. DP, 0-1 knapsack. class Solution: def findTargetSumWays(self, nums: List[int], S: int) -> int: sum_nums = sum(nums) if not (-sum_nums <= S <= sum_nums): return 0 max_num = max(nums) # dp[i][j]: how many ways to assign symbols to make sum of nums[:i] equal to target j dp = [[0] * ((sum_nums + max_num) * 2 + 1) for _ in range(len(nums) + 1)] dp[0][0] = 1 for i in range(1, len(nums) + 1): num = nums[i - 1] for j in range(-sum_nums - num, sum_nums + num + 1): dp[i][j] = dp[i - 1][j - num] + dp[i - 1][j + num] return dp[len(nums)][S] class Test(unittest.TestCase): def test(self): cases = utils.load_test_json(__file__).test_cases for case in cases: args = str(case.args) actual = Solution().findTargetSumWays(**case.args.__dict__) self.assertEqual(case.expected, actual, msg=args) if __name__ == '__main__': unittest.main()
Python
0.999999
@@ -57,32 +57,33 @@ # O(len(nums) * +( sum(nums)) time. @@ -75,16 +75,29 @@ um(nums) + + max(nums)) ) time. @@ -110,16 +110,17 @@ nums) * +( sum(nums @@ -120,16 +120,29 @@ um(nums) + + max(nums)) ) space. @@ -249,24 +249,80 @@ -sum_nums +if not nums:%0A return 1 if S == 0 else 0%0A%0A sum_ = sum(n @@ -347,20 +347,16 @@ t (-sum_ -nums %3C= S %3C= @@ -360,20 +360,16 @@ %3C= sum_ -nums ):%0A @@ -397,19 +397,16 @@ max_ -num = max(n @@ -409,16 +409,75 @@ ax(nums) +%0A bound = sum_ + max_%0A range_ = bound * 2 + 1 %0A%0A @@ -588,38 +588,14 @@ %5D * -((sum_nums + max_num) * 2 + 1) +range_ for @@ -752,34 +752,14 @@ sum_ -nums - num, sum_nums + num +, sum_ + 1
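The diff tightens the table bounds and adds a guard for empty input (zero symbols reach sum 0 in exactly one way). The same counting can be written without the offset arithmetic by keying a dict on the signed sums themselves; a sketch:

    from collections import Counter

    def find_target_sum_ways(nums, S):
        ways = Counter({0: 1})  # one way to reach 0 with no numbers
        for num in nums:
            nxt = Counter()
            for total, count in ways.items():
                nxt[total + num] += count  # assign '+'
                nxt[total - num] += count  # assign '-'
            ways = nxt
        return ways[S]

    print(find_target_sum_ways([1, 1, 1, 1, 1], 3))  # 5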
6da626bf1a999101af188d3d20710a6dddc8dbae
shell=True
ide.py
ide.py
# NOTE: pass -d to this to print debugging info when the server crashes. from flask import Flask, render_template, url_for, request from subprocess import Popen, PIPE, check_call import sys, os, string, glob, logging app = Flask(__name__) app.logger.addHandler(logging.StreamHandler(sys.stdout)) app.logger.setLevel(logging.ERROR) def compileO(): r = check_call(['gcc', 'o.c', '-DIDE', '-o', 'o-ide', '-lm']) print("o-ide: " + "".join(glob.glob("o-ide*"))) if r != 0: print("O code could not be compile. Error: " + r) @app.route('/', methods=['GET', 'POST']) def index(): url_for('static', filename='logo.ico') if request.method == 'POST': #Check files that start with 'o-ide*' files = glob.glob("o-ide*") print(files) #Check if C was compiled if len(files) < 1: print("Compiling O...") compileO() #Run code code = request.form['code'] input = request.form['input'].replace('\r\n', '\n') print('Got code:', code, 'input:', input) print('Running O code...') p = Popen(['o-ide', '-e', code], stdout=PIPE, stderr=PIPE, stdin=PIPE, universal_newlines=True) output, error = p.communicate(input) #Output to IDE print('Output:', output, 'error:', error) if p.returncode: return render_template('error.html', code=code, input=input, error=error) else: return render_template('code.html', code=code, input=input, output=output, stack=error or '[]') else: return render_template('primary.html') @app.route('/link/') @app.route('/link/<link>') def link(link='code="Error in linking code"o&input='): url_for('static', filename='logo.ico') print('Link:', link) return render_template('link.html', link=link) if __name__ == '__main__': print('Compiling O...') compileO() print('Starting server...') app.run(debug='-d' in sys.argv[1:])
Python
0.999989
@@ -1188,16 +1188,28 @@ nes=True +, shell=True )%0A
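Note that `shell=True` changes how the first argument is interpreted: a single string is handed to the shell, while an argv list combined with `shell=True` is platform-dependent (on POSIX only the first element becomes the command). A short contrast, POSIX assumed:

    from subprocess import Popen, PIPE

    # Default shell=False: the argv list goes straight to the program.
    p = Popen(['echo', 'hello'], stdout=PIPE, universal_newlines=True)
    print(p.communicate()[0].strip())  # hello

    # shell=True: pass one string and let the shell parse it.
    p = Popen('echo hello from the shell', shell=True, stdout=PIPE,
              universal_newlines=True)
    print(p.communicate()[0].strip())  # hello from the shell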
ae7ffdaf67e156922b3af3fd21f82c41a7aca764
fix data
log.py
log.py
#!/usr/bin/env python # -*- coding: utf-8 -*- from __future__ import print_function import time from datetime import datetime from struct import * import adafruit_dht import board import csv import paho.mqtt.client as mqtt import json import sqlite3 as lite # sudo apt install postgresql-client libpq-dev # sudo pip install psycopg2 import psycopg2 as pg ROOTDIR = "/home/henry/pyLogger" millis = lambda: int(round(time.time() * 1000)) octlit = lambda n:int(n, 8) start_time = time.time() runtime = lambda: '{0:0.2f}s'.format(time.time()-start_time) log_lastupdate = millis()-60001 ext_temperature = None int_humidity={ 'value': None, 'timestamp': None, 'unit': '%' } int_temperature={ 'value': None, 'timestamp': None, 'unit': '°C' } # set up csv logger #logwriter = None #csvfile = open('/home/henry/pyLogger/data.csv', 'ab') #if csvfile: # logwriter = csv.writer(csvfile, delimiter=';') con = None cur = None try: con = lite.connect(ROOTDIR+'/temperature.db', detect_types=lite.PARSE_DECLTYPES) cur = con.cursor() except lite.Error as e: print("Error {}:".format(e.args[0])) sys.exit(1) pg_con = None pg_cur = None try: pg_con = pg.connect("dbname='home' user='postgres' host='omv4.fritz.box' password='postgres'") pg_cur = pg_con.cursor() except Exception as e: print("Postgres Error {}:".format(e)) #setup mqtt stuff # The callback for when the client receives a CONNACK response from the server. def on_connect(client, userdata, flags, rc): print("Connected to mqtt broker with result code "+str(rc)) # Subscribing in on_connect() means that if we lose the connection and # reconnect then subscriptions will be renewed. client.subscribe("home/out/temp") # The callback for when a PUBLISH message is received from the server. def on_message(client, userdata, msg): global ext_temperature # print(msg.topic+" "+msg.payload) ext_temperature = json.loads(msg.payload) ext_temperature["timestamp"] = int(ext_temperature["timestamp"]) #print(" "+str(ext_temperature)) client = mqtt.Client("pyLogger") client.on_connect = on_connect client.on_message = on_message client.connect("omv4.fritz.box") client.loop_start() dhtDevice = adafruit_dht.DHT22(board.D4) try: while 1: # save received data if (millis() - log_lastupdate) > 60000: log_lastupdate = millis() try: int_humidity["value"] = dhtDevice.temperature int_temperature["value"] = dhtDevice.humidity except: pass int_humidity["value"] = int(int_humidity["value"] - 0) # offset correction int_temperature["value"] = round(int_temperature["value"], 1) int_temperature["timestamp"] = int(time.time()) int_humidity["timestamp"] = int(time.time()) client.publish("home/in/temp", json.dumps(int_temperature), retain=True) client.publish("home/in/temp/value", '{0:0.1f}'.format(int_temperature["value"]), retain=True) client.publish("home/in/hum", json.dumps(int_humidity), retain=True) client.publish("home/in/hum/value", int_humidity["value"], retain=True) cur.execute("INSERT INTO livingroom(time, temp, hum) VALUES(?, ?, ?)", (datetime.now(), int_temperature["value"], int_humidity["value"])) pg_cur.execute("INSERT INTO livingroom(timestamp, temperature, humidity) VALUES(%s, %s, %s)", (datetime.utcnow(), int_temperature["value"], int_humidity["value"])) if (ext_temperature == None) or (( (millis()/1000 - ext_temperature["timestamp"]) > 600) and (ext_temperature["timestamp"] > 0)) : pass else: cur.execute("INSERT INTO outside(time, temp, hum) VALUES(?, ?, ?)", (datetime.now(), ext_temperature["value"], None)) pg_cur.execute("INSERT INTO outside(timestamp, temperature, humidity) VALUES(%s, %s, 
%s)", (datetime.utcnow(), ext_temperature["value"], None)) con.commit() pg_con.commit() # if (ext_temperature == None) or (( (millis()/1000 - ext_temperature["timestamp"]) > 600) and (ext_temperature["timestamp"] > 0)) : # logwriter.writerow([datetime.now().strftime("%Y-%m-%d %H:%M:%S"), '{0:0.1f}'.format(int_temperature["value"]), '{0:0.0f}'.format(int_humidity["value"]), '-']) # else: # logwriter.writerow([datetime.now().strftime("%Y-%m-%d %H:%M:%S"), '{0:0.1f}'.format(int_temperature["value"]), '{0:0.0f}'.format(int_humidity["value"]), '{0:0.1f}'.format(ext_temperature["value"])]) # csvfile.flush() time.sleep(1) except KeyboardInterrupt: pass finally: if con: con.close()
Python
0.000176
@@ -2457,32 +2457,35 @@ int_ -humidity +temperature %5B%22value%22%5D = @@ -2522,35 +2522,32 @@ int_ -temperature +humidity %5B%22value%22%5D =
ff99addf5ac6589b4ee2c53ef1debf4e9c07b47d
Bump version 0.2 Stable
__tryton__.py
__tryton__.py
#This file is part of Tryton. The COPYRIGHT file at the top level of #this repository contains the full copyright notices and license terms. { 'name': 'Nereid Catalog', 'version': '2.0.0.1', 'author': 'Openlabs Technologies & Consulting (P) LTD', 'email': 'info@openlabs.co.in', 'website': 'http://www.openlabs.co.in/', 'description': '''Nereid Catalog''', 'depends': [ 'product', 'nereid', ], 'xml': [ 'product.xml', 'urls.xml', ], 'translation': [ ], }
Python
0.000001
@@ -193,9 +193,9 @@ 0.0. -1 +2 ',%0A
d826e54996bc504d245286b50f7ab5671f1999ae
Update solution.py
data_structures/linked_list/problems/find_pattern_in_linked_list/py/solution.py
data_structures/linked_list/problems/find_pattern_in_linked_list/py/solution.py
import LinkedList # Problem description: Find a pattern represented as a linked list in a target linked list. # Solution time complexity: O(n^2) # Comments: A brute force solution w/o any optimizations. Simply traverse a list looking for the pattern. # If the node traversing the "pattern" list ever reaches the end (i.e. pnode == null), it is in # the list. The case where a pnode may be equal to null due to the pattern being null, is ruled # out by a test at the beginning of the function. # Linked List Node inside the LinkedList module is defined as: # # class Node: # def __init__(self, val, nxt=None): # self.val = val # self.nxt = nxt # def FindPatternInLinkedList(head: LinkedList.Node, pattern: LinkedList.Node) -> int: if head == None or pattern == None: return -1 index = 0 tslow = head pnode = pattern while tslow != None: if tslow.val == pattern.val: tfast = tslow pnode = pattern while tfast != None and pnode != None: if tfast.val == pnode.val: tfast = tfast.nxt pnode = pnode.nxt else: break if pnode == None: return index tslow = tslow.nxt index += 1 return -1
Python
0.000001
@@ -47,16 +47,23 @@ Find a +string pattern
1fe7a69eb4d40684ef4100a578620593727470c5
fix one error, add a split-large-ip-range function, change output
sgi.py
sgi.py
#!/usr/bin/python # -*- coding: utf-8 -*- # Scan Google IPs import argparse import multiprocessing import os import re import subprocess __author__ = 'gino' IGNORE_IP = ['64.18.0.0/20', '72.14.192.0/18', '74.125.0.0/16', '173.194.0.0/16', '66.102.0.0/20', '66.249.80.0/20', '216.58.192.0/19', '216.239.32.0/19', '207.126.144.0/20', '209.85.128.0/17']#'64.233.160.0/19' EXTRA_IP = [] # '87.245.192.0/18', # Get Google ip range def get_google_ip_range(): cmd = os.popen('nslookup -q=TXT _netblocks.google.com 8.8.8.8') output = cmd.read() pattern = re.compile(r'ip4:(.*?) ') ip_list = pattern.findall(output) for item in IGNORE_IP: ip_list.remove(item) ip_list.extend(EXTRA_IP) return ip_list # Split large ip range into small for multiprocessing def split_large_ip_range(ip_list): final_list = [] for item in ip_list: if int(item.split('/')[1]) == 16: print('split ip range %s to: ' % item) new_item = item.replace('/16', '/18') final_list.append(new_item) print('\t %s' % new_item) for x in xrange(4): if x == 0: continue final_list.append(new_item.replace('.0.', '.%s.' % str(64*x))) print('\t %s' % new_item.replace('.0.', '.%s.' % str(64*x))) else: final_list.append(item) return final_list # nmap process scan port 433 class ScanProcess(multiprocessing.Process): def __init__(self, ip_add, outfile, lock): multiprocessing.Process.__init__(self) self.ip_add = ip_add self.outfile = outfile self.lock = lock def run(self): cmd = 'nmap -Pn -T5 -p443 --host-timeout 1000 ' + self.ip_add print(cmd) pipe = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE) result = pipe.communicate()[0] self.lock.acquire() print(cmd + ' write into file!') self.outfile.write(result) self.outfile.flush() self.lock.release() # scan ip range def scan_ip_range(ranges, mnum): output = open('raw_output', 'w') lock = multiprocessing.Lock() processes = [] for item in ranges: processes.append(ScanProcess(item, output, lock)) print('%d items will be checked.' % len(processes)) import datetime print('start: %s' % datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")) # initial runtemp gourp runtemp = [] for i in xrange(mnum) if len(ranges) > mnum else xrange(len(ranges)): item = processes.pop() item.start() runtemp.append(item) while True: for i in xrange(len(runtemp)): runtemp[i].join(1) if not runtemp[i].is_alive() and processes: runtemp[i] = processes.pop() runtemp[i].start() elif not runtemp[i].is_alive() and not processes: runtemp.pop(i) break if not runtemp: break print('end: %s' % datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")) output.close() def parse_args(): parser = argparse.ArgumentParser(description='Number of multiprocess') parser.add_argument('integers', metavar='Num', type=int, nargs='?', help='an integer for the number of multiprocess', default=4) return parser.parse_args().integers if __name__ == '__main__': arg_num = parse_args() ip_range = get_google_ip_range() final_ip_range = split_large_ip_range(ip_range) scan_ip_range(final_ip_range, arg_num)
Python
0.000003
@@ -171,245 +171,9 @@ = %5B -'64.18.0.0/20', '72.14.192.0/18', '74.125.0.0/16',%0A '173.194.0.0/16', '66.102.0.0/20',%0A '66.249.80.0/20', '216.58.192.0/19', '216.239.32.0/19',%0A '207.126.144.0/20', '209.85.128.0/17'%5D#'64.233.160.0/19' +%5D %0A%0AEX
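The split_large_ip_range() shown above rewrites the third octet by hand to turn each /16 into four /18 blocks. On Python 3 the stdlib ipaddress module expresses the same subnetting directly; a sketch:

    import ipaddress

    def split_large_ip_range(ip_list):
        final_list = []
        for item in ip_list:
            net = ipaddress.ip_network(item)
            if net.prefixlen == 16:
                # every /16 splits into exactly four /18 subnets
                final_list.extend(str(n) for n in net.subnets(new_prefix=18))
            else:
                final_list.append(item)
        return final_list

    print(split_large_ip_range(['74.125.0.0/16', '66.102.0.0/20']))
    # ['74.125.0.0/18', '74.125.64.0/18', '74.125.128.0/18',
    #  '74.125.192.0/18', '66.102.0.0/20']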
7ceaec12381e8bc7f597b1cc32d50655d30d9843
use inplace installs (#5865)
nox.py
nox.py
# Copyright 2017, Google LLC All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from __future__ import absolute_import import os import nox LOCAL_DEPS = ( os.path.join('..', 'api_core'), ) @nox.session def default(session): """Default unit test session. This is intended to be run **without** an interpreter set, so that the current ``python`` (on the ``PATH``) or the version of Python corresponding to the ``nox`` binary on the ``PATH`` can run the tests. """ # Install all test dependencies, then install this package in-place. session.install('mock', 'pytest', 'pytest-cov', *LOCAL_DEPS) session.install('-e', '.') # Run py.test against the unit tests. session.run( 'py.test', '--quiet', '--cov=google.cloud.container', '--cov=google.cloud.container_v1', '--cov=tests.unit', '--cov-append', '--cov-config=.coveragerc', '--cov-report=', os.path.join('tests', 'unit'), *session.posargs ) @nox.session @nox.parametrize('py', ['2.7', '3.5', '3.6', '3.7']) def unit(session, py): """Run the unit test suite.""" session.interpreter = 'python{}'.format(py) session.virtualenv_dirname = 'unit-' + py default(session) @nox.session @nox.parametrize('py', ['2.7', '3.6']) def system(session, py): """Run the system test suite.""" # Sanity check: Only run system tests if the environment variable is set. if not os.environ.get('GOOGLE_APPLICATION_CREDENTIALS', ''): session.skip('Credentials must be set via environment variable.') # Run the system tests against latest Python 2 and Python 3 only. session.interpreter = 'python{}'.format(py) # Set the virtualenv dirname. session.virtualenv_dirname = 'sys-' + py # Use pre-release gRPC for system tests. session.install('--pre', 'grpcio') # Install all test dependencies, then install this package into the # virtualenv's dist-packages. session.install('mock', 'pytest') session.install('../test_utils/') session.install('.') # Run py.test against the system tests. session.run('py.test', '--quiet', 'tests/system/') @nox.session def lint_setup_py(session): """Verify that setup.py is valid (including RST check).""" session.interpreter = 'python3.6' session.install('docutils', 'pygments') session.run('python', 'setup.py', 'check', '--restructuredtext', '--strict')
Python
0
@@ -1057,28 +1057,29 @@ install -this +local package in-plac @@ -1070,16 +1070,17 @@ package +s in-plac @@ -1136,21 +1136,82 @@ cov' -, *LOCAL_DEPS +)%0A for local_dep in LOCAL_DEPS:%0A session.install('-e', local_dep )%0A @@ -2612,32 +2612,112 @@ t')%0A -session.install( +for local_dep in LOCAL_DEPS:%0A session.install('-e', local_dep)%0A session.install('-e', '../test @@ -2746,16 +2746,22 @@ install( +'-e', '.')%0A%0A
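The change replaces one bulk `session.install(..., *LOCAL_DEPS)` with per-dependency editable installs, so local edits to sibling packages are picked up without reinstalling. A trimmed sketch of the resulting session (paths illustrative):

    import os
    import nox

    LOCAL_DEPS = (os.path.join('..', 'api_core'),)

    @nox.session
    def default(session):
        session.install('mock', 'pytest', 'pytest-cov')
        # "-e" (editable/in-place) installs link to the source tree
        # instead of copying it into site-packages.
        for local_dep in LOCAL_DEPS:
            session.install('-e', local_dep)
        session.install('-e', '.')
        session.run('py.test', '--quiet', os.path.join('tests', 'unit'))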
7c527f486e2e129861915f73e0625ec00388e15e
Fix failing MPI tests
test/hoomd_script/test_multiple_contexts.py
test/hoomd_script/test_multiple_contexts.py
# -*- coding: iso-8859-1 -*- from hoomd_script import * import hoomd_script; context.initialize() import unittest import os # unit test to run a simple polymer system with pair and bond potentials class multi_context(unittest.TestCase): def test_run(self): self.c1 = context.SimulationContext() self.c2 = context.SimulationContext() with self.c1: init.create_random(N=500, phi_p=0.2) lj = pair.lj(r_cut=3.0) lj.pair_coeff.set('A', 'A', epsilon=1.0, sigma=1.0) integrate.mode_standard(dt=0.005) integrate.nvt(group=group.all(), T=1.2, tau=0.5) with self.c2: init.create_random(N=200, phi_p=0.02) lj = pair.lj(r_cut=3.0) lj.pair_coeff.set('A', 'A', epsilon=1.0, sigma=1.0) integrate.mode_standard(dt=0.005) integrate.nvt(group=group.all(), T=1.2, tau=0.5) with self.c1: run(10) with self.c2: run(10) if __name__ == '__main__': unittest.main(argv = ['test.py', '-v'])
Python
0.000013
@@ -404,17 +404,18 @@ andom(N= -5 +20 00, phi_ @@ -685,17 +685,18 @@ andom(N= -2 +10 00, phi_
67351bb6cf56d8ccf4e78e809ec9f2a6fc67146a
Fix sanitization test to fit new exit codes
test/integration/command_line_sanitizing.py
test/integration/command_line_sanitizing.py
#!/usr/bin/python from test_common import * # returns (return_value, stdout, stderr) or None in case the process did not terminate within timeout seconds # run_rethinkdb() automatically fills in -f, -s, -c and -p if not specified in flags def run_rethinkdb(opts, flags = [], timeout = 10): executable_path = get_executable_path(opts, "rethinkdb") db_data_dir = os.path.join(make_test_dir(), "db_data") if not os.path.isdir(db_data_dir): os.mkdir(db_data_dir) db_data_path = os.path.join(db_data_dir, "data_file") has_c_flag = False has_s_flag = False has_f_flag = False has_p_flag = False for flag in flags: has_c_flag = has_c_flag or flag == "-c" has_s_flag = has_s_flag or flag == "-s" has_f_flag = has_f_flag or flag == "-f" has_p_flag = has_p_flag or flag == "-p" if not has_c_flag: flags += ["-c", str(opts["cores"])] if not has_s_flag: flags += ["-s", str(opts["slices"])] if not has_f_flag: flags += ["-f", db_data_path] if not has_p_flag: flags += ["-p", str(find_unused_port())] command_line = [executable_path] + flags print "Executing " + subprocess.list2cmdline(command_line) # Redirect output to temporary files to circumvent all the problems with pipe buffers filling up stdout_path = os.path.join(make_test_dir(), "rethinkdb_stdout.txt") stderr_path = os.path.join(make_test_dir(), "rethinkdb_stderr.txt") rethinkdb_stdout = file(stdout_path, "w") rethinkdb_stderr = file(stderr_path, "w") server = subprocess.Popen(command_line, stdout = rethinkdb_stdout, stderr = rethinkdb_stderr); return_value = wait_with_timeout(server, timeout) dead = return_value is not None if not dead: # did not terminate return None else: return (return_value, open(stdout_path, 'r').read(), open(stderr_path, 'r').read()) # expected_return_value = None means we expect server to not terminate def check_rethinkdb_flags(opts, flags, expected_return_value): assert opts["database"] == "rethinkdb" create_db_timeout = 15 rethinkdb_check_timeout = 10 # Create an empty database file create_result = run_rethinkdb(opts, ["--create", "--force"], create_db_timeout) if create_result is None: raise ValueError("Server took longer than %d seconds to create database." % server_create_time) if create_result[0] != 0: raise ValueError("Server failed to create database.") # Run RethinkDB with the specified flags rethinkdb_result = run_rethinkdb(opts, flags, rethinkdb_check_timeout) if expected_return_value is None and rethinkdb_result is not None: raise ValueError("RethinkDB did exit with a return value of %i, although it was not expected to. Flags were: %s" % (rethinkdb_result[0], flags)) else: if expected_return_value > 127: expected_return_value = 128 - expected_return_value # We have to wrap over it seems if rethinkdb_result[0] != expected_return_value: raise ValueError("RethinkDB gave a return value of %i, expected a value of %i. 
Flags were: %s" % (rethinkdb_result[0], expected_return_value, flags)) def test_function(opts, mc): exit_code_expected_on_error = 134 # That's what we get when abort() is called for (flags, expected_return_value) in [ (["-c", "string"], exit_code_expected_on_error), (["-c", "128"], exit_code_expected_on_error), (["-c", "1.5"], exit_code_expected_on_error), (["-c", "-1"], exit_code_expected_on_error), (["-m", "-1"], exit_code_expected_on_error), (["--active-data-extents", "65"], exit_code_expected_on_error), (["--active-data-extents", "0"], exit_code_expected_on_error), (["--active-data-extents", "-1"], exit_code_expected_on_error), (["--active-data-extents", "5.5"], exit_code_expected_on_error), (["--active-data-extents", "nonumber"], exit_code_expected_on_error), (["--flush-threshold", "-1"], exit_code_expected_on_error), (["--flush-threshold", "100"], exit_code_expected_on_error), (["--flush-threshold", "nonumber"], exit_code_expected_on_error), (["-p", "-1"], exit_code_expected_on_error), (["-p", "65536"], exit_code_expected_on_error), (["-p", "string"], exit_code_expected_on_error), (["-p", "5.5"], exit_code_expected_on_error), (["-f", "/"], exit_code_expected_on_error), (["-s", "-1"], exit_code_expected_on_error), (["-s", "0"], exit_code_expected_on_error), (["-s", "string"], exit_code_expected_on_error), (["-s", "5.5"], exit_code_expected_on_error), (["--block-size", "4096000000", "--extent-size", "4096000000"], exit_code_expected_on_error)]: print "Testing command line flags %s, expecting a return value of %i" % (flags, expected_return_value) check_rethinkdb_flags(opts, flags, expected_return_value) # TODO: Also check -l behvior on missing permissions and slashes in file name if __name__ == "__main__": op = make_option_parser() opts = op.parse(sys.argv) opts["mclib"] = "memcache" opts["auto"] = False # Never start server automatically simple_test_main(test_function, opts, timeout = 5)
Python
0.000001
@@ -2904,144 +2904,8 @@ se:%0A - if expected_return_value %3E 127:%0A expected_return_value = 128 - expected_return_value # We have to wrap over it seems%0A @@ -3197,55 +3197,57 @@ r = -134 # That's what +255 # we -get when abort() is called +return -1 given a bad command line input %0A%0A
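The rewritten test expects 255 rather than wrapping values above 127: Unix keeps only the low 8 bits of an exit status, so a program that returns -1 is observed as 255, while an abort() surfaces as signal 6 (134 = 128 + 6 in a shell). A quick demonstration, POSIX assumed:

    import subprocess
    import sys

    # sys.exit(-1) in the child becomes status 255 to the parent,
    # because the OS truncates the exit code to its low 8 bits.
    code = subprocess.call(
        [sys.executable, '-c', 'import sys; sys.exit(-1)'])
    print(code)  # 255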
988ed4be9152632b2844e962e225adac63d869db
Fix crash on windows
spd.py
spd.py
#!/usr/bin/env python3 import os import re import sys from subprocess import call from urllib.request import Request, urlopen def getWebPage(url): print("\ngetting: "+url) h = Request(url) h.add_header('User-Agent', 'SPD/1.0') webpage = urlopen(h).read() return str(webpage) # convert from bytes to string def getSubmittedPage(userName): return getWebPage('https://www.reddit.com/user/' + userName + '/submitted/') def downloadImage(link): print('downloading: ' + link) # open wget in the background call(['wget', '-b', '-N', '-o', '/dev/null', link]) def downloadImageGallery(link): webpage = getWebPage(link) link = link.replace('http:', 'https:') if re.search(r"gfycat\.com/", link): link = link.replace('gfycat', 'giant.gfycat') + '.gif' downloadImage(link) elif re.search(r"imgur\.com/", link): for image in getAllImageURLs(webpage): downloadImage(image) pass def isGallery(link): if re.match(r"https://(?:imgur\.com/|gfycat\.com/)", link): return True return False def getAllImageURLs(webpage): urlList = re.findall( r'src="//(i\.imgur\.com/[a-zA-Z0-9]{7}\.(?:[a-z]{3,4})(?:\?[0-9]+?)?)"', webpage) return urlList def getAllImages(webpage): for link in re.findall( "<a class=\"title may-blank ?\" href=\"(https?://" + "(?:gfycat\\.com/[a-zA-Z]+|" + "imgur\\.com/(?:[a-zA-Z0-9]{7}|a/[a-zA-Z0-9]{5})|" + "i\\.imgur\\.com/[a-zA-Z0-9]{7}\\.(?:[a-z]{3,4})(?:\?[0-9]+?)?))", webpage): link = link.replace('http:', 'https:') if isGallery(link): downloadImageGallery(link) else: downloadImage(link) def pageGetNextPage(webpage, userName): nextPage = re.findall( "(https?://www\\.reddit\\.com/user/" + userName + "/submitted/\\?count=[0-9]{2,4}&amp;after=t[0-9]_[a-z0-9]{6})", webpage) if not nextPage == []: return getWebPage(nextPage[0].replace('amp;', '')) else: return None userName = sys.argv[1] if not os.path.exists("~/Pictures/SPD/" + userName): os.makedirs("~/Pictures/SPD/" + userName) os.chdir("~/Pictures/SPD/" + userName) userSubmitted = getSubmittedPage(userName) getAllImages(userSubmitted) while True: userSubmitted = pageGetNextPage(userSubmitted, userName) if userSubmitted is None: break getAllImages(userSubmitted)
Python
0
@@ -49,16 +49,63 @@ rt sys%0A%0A +from platform import system as operatingSystem%0A from sub @@ -625,16 +625,149 @@ kground%0A + if operatingSystem() == 'Windows':%0A # NUL ~ /dev/null%0A call(%5B'wget', '-b', '-N', '-o', 'NUL', link%5D)%0A else:%0A call @@ -2343,15 +2343,77 @@ %5B1%5D%0A -%0A if -not +len(sys.argv) %3E 2:%0A basePath = sys.argv%5B2%5D%0Aelse:%0A basePath = os. @@ -2423,12 +2423,16 @@ h.ex -ists +panduser (%22~/ @@ -2437,32 +2437,66 @@ ~/Pictures/SPD/%22 +)%0A%0A%0Aif not os.path.exists(basePath + userName):%0A @@ -2509,33 +2509,24 @@ akedirs( -%22~/Pictures/SPD/%22 +basePath + userN @@ -2539,33 +2539,24 @@ s.chdir( -%22~/Pictures/SPD/%22 +basePath + userN
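The patch branches on the platform to choose between /dev/null and NUL and expands '~' before building paths. Both concerns also have portable stdlib spellings, sketched here (wget invocation copied in spirit from the file):

    import os
    from subprocess import call

    def download_image(link):
        # os.devnull is '/dev/null' on POSIX and 'nul' on Windows,
        # which removes the need for a platform branch.
        call(['wget', '-b', '-N', '-o', os.devnull, link])

    # os.makedirs/os.chdir do not expand '~' on their own;
    # expanduser() resolves it to the real home directory first.
    base_path = os.path.expanduser('~/Pictures/SPD/')
    print(base_path)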
1e8094a187284961a380bea94bbc806aa4430a3d
fix to t2m.py
t2m.py
t2m.py
#! /usr/bin/env python # Requires python 2.7 import sys import socket import datetime import json import logging if os.path.exists('/vagrant'): logfilename = '/vagrant/.t2m.log' else: logfilename = '/tmp/.t2m.log' logging.basicConfig( filename=logfilename, level=logging.DEBUG) BUFSIZE = 1024 DEBUG = True def main(): if len(sys.argv) < 2: usage() try: server() except Exception, e: logging.critical("server() exited with exception %s" % str(e)) def usage(): sys.stdout = sys.stderr print('Usage: %s listen_ip udp_port' % sys.argv[0]) sys.exit(2) def debug(msg): if DEBUG: logging.debug(msg) def s_to_datetime(dts): try: dtobj = datetime.datetime.strptime(dts, "%d/%m/%Y %H:%M") return dtobj except ValueError: return False def parse_data(data): """ Quick n dirty data parsing. string -> datetime, string """ dtobj = None msg = None if ']' not in data: return dtobj, msg try: dts, msg = data.split(']') dts = dts.strip('[') if msg[0] != ' ': return dtobj, msg msg = msg[1:] dtobj = s_to_datetime(dts) except ValueError: pass return dtobj, msg def server(): if len(sys.argv) > 2: host = sys.argv[1] port = eval(sys.argv[2]) s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) s.bind((host, port)) debug('udp server ready on %s:%s' % (host,port)) while 1: data, addr = s.recvfrom(BUFSIZE) debug('server received %r from %r' % (data, addr)) dt, msg = parse_data(data) if dt and msg: #send data back datadict = { "timestamp": (dt - datetime.datetime(1970,1,1)).total_seconds(), "message": msg } jsondata = json.dumps(datadict) debug(jsondata) print(jsondata) s.sendto(jsondata, addr) else: debug('invalid message received.') if __name__ == '__main__': main()
Python
0.999996
@@ -108,16 +108,27 @@ ogging%0A%0A +import os%0A%0A if os.pa
4557568d940aff86d269f7418b3519b5cd9ae322
Fix for import error: Deadline exceeded while waiting for HTTP response #58
scoutnet.py
scoutnet.py
# -*- coding: utf-8 -*- # cannot use requests library, doesn't work in gae! #import requests import urllib import urllib2 import cookielib import codecs import logging import sys import json from flask import render_template from google.appengine.api import mail from google.appengine.runtime import apiproxy_errors from google.appengine.ext.webapp.mail_handlers import BounceNotificationHandler from data import * def GetScoutnetMembersAPIJsonData(groupid, api_key): request = urllib2.Request('https://www.scoutnet.se/api/group/memberlist?id=' + groupid + '&key=' + api_key) response = urllib2.urlopen(request) # "let it throw, let it throw, let it throw..." return response.read() def GetValueFromJsonObject(p, key, value_name='value'): if key in p: return p[key][value_name] return '' def GetScoutnetDataListJson(json_data): j = json.loads(json_data) result = [] for pid in j['data']: p = j['data'][pid] m = {} m["group"] = GetValueFromJsonObject(p, 'group') m["group_id"] = GetValueFromJsonObject(p, 'group', 'raw_value') m["troop"] = GetValueFromJsonObject(p, 'unit') m["troop_id"] = GetValueFromJsonObject(p, 'unit', 'raw_value') m["id"] = int(pid) # must be int m["firstname"] = GetValueFromJsonObject(p, 'first_name') m["lastname"] = GetValueFromJsonObject(p, 'last_name') m["personnr"] = GetValueFromJsonObject(p, 'ssno') sex = GetValueFromJsonObject(p, 'sex') if sex == "Annat": continue # ignore non-persons m["female"] = sex != 'Man' m["patrool"] = GetValueFromJsonObject(p, 'patrol') m["active"] = GetValueFromJsonObject(p, 'status') == 'Aktiv' m["email"] = GetValueFromJsonObject(p, 'email') phone = FixCountryPrefix(GetValueFromJsonObject(p, 'contact_home_phone')) if phone == "": phone = FixCountryPrefix(GetValueFromJsonObject(p, 'contact_telephone_home')) # scoutnet has both "Telefon hem" and "Hemtelefon" pick one! 
m["phone"] = phone m["mobile"] = FixCountryPrefix(GetValueFromJsonObject(p, 'contact_mobile_phone')) m["street"] = GetValueFromJsonObject(p, 'address_1') m["zip_code"] = GetValueFromJsonObject(p, 'postcode') m["zip_name"] = GetValueFromJsonObject(p, 'town') m["troop_roles"] = filter(None, GetValueFromJsonObject(p, 'unit_role').lower().split(',')) m["group_roles"] = filter(None, GetValueFromJsonObject(p, 'group_role').lower().split(',')) result.append(m) return result def FixCountryPrefix(phone): if len(phone) > 8 and phone[0] != '0': # "46 31 123456" return '+' + phone return phone class ScoutnetException(Exception): pass class ContactFields(): Mobiltelefon=1 Hemtelefon=2 Alternativ_epost=9 Anhorig_1_namn=14 Anhorig_1_e_post=33 Anhorig_1_mobiltelefon=38 Anhorig_1_hemtelefon=43 Anhorig_2_namn=16 Anhorig_2_e_post=34 Anhorig_2_mobiltelefon=39 Anhorig_2_hemtelefon=44 def AddPersonToWaitinglist(scoutgroup, firstname, lastname, personnummer, emailaddress, address_line1, zip_code, zip_name, phone, mobile, troop): form = {} form['profile[first_name]']=firstname form['profile[last_name]']=lastname form['profile[ssno]']=personnummer form['profile[email]']=emailaddress form['profile[date_of_birth]']=personnummer[0:4] + '-' + personnummer[4:6] + '-' + personnummer[6:8] form['profile[sex]']='1' if int(personnummer[-2])&1 == 1 else '2' form['address_list[addresses][address_1][address_line1]']=address_line1 form['address_list[addresses][address_1][zip_code]']=zip_code form['address_list[addresses][address_1][zip_name]']=zip_name form['address_list[addresses][address_1][address_type]']=0 # 0=Hemadress, 1=Tillfällig adress form['address_list[addresses][address_1][country_code]']=752 # Sweden form['address_list[addresses][address_1][is_primary]']=1 form['profile[product_subscription_8]'] = 1 # Medlemstidningen form['contact_list[contacts][contact_1][details]']=mobile form['contact_list[contacts][contact_1][contact_type_id]']=ContactFields.Mobiltelefon form['contact_list[contacts][contact_2][details]']=phone form['contact_list[contacts][contact_2][contact_type_id]']=ContactFields.Hemtelefon form['membership[status]']=1 logging.info('Adding %s %s to waitinglist' % (firstname, lastname)) url = 'https://www.scoutnet.se/api/organisation/register/member?id=' + scoutgroup.scoutnetID + '&key=' + scoutgroup.apikey_waitinglist + '&' + urllib.urlencode(form) logging.info(url) request = urllib2.Request(url) try: response = urllib2.urlopen(request) except urllib2.HTTPError as e: result_json = e.read() logging.error("Failed to add person, code=%d, msg=%s", e.code, result_json) # Typical responses: """{"profile":[{"key":"ssno","value":null,"msg":"Personnumret \u00e4r redan registrerat p\u00e5 medlem '######'. Kontakta Scouternas kansli p\u00e5 scoutnet@scouterna.se f\u00f6r att f\u00e5 hj\u00e4lp."}]}""" """{"contact_list":[{"key":"contact_17","value":"karin.modig-pallin@vgregion.se","subkey":"contact_type_id","msg":"Invalid. 
Please choose contact type"}]}""" #j = json.loads(result_json) raise ScoutnetException(result_json.decode('unicode_escape')) # display the raw json message return False if 200 <= response.getcode() <= 201: result_json = response.read() logging.info("Added person: " + result_json) sendRegistrationQueueInformationEmail(scoutgroup) return True def sendRegistrationQueueInformationEmail(scoutgroup): try: message = mail.EmailMessage( sender="noreply@skojjt.appspotmail.com", subject=u"Ny person i scoutnets kölista", body=render_template("email_queueinfo.txt", scoutgroup=scoutgroup) ) user=UserPrefs.current() message.to=user.getemail() message.send() except apiproxy_errors.OverQuotaError as message: # Log the error. logging.error(message)
Python
0
@@ -607,16 +607,28 @@ (request +, timeout=25 ) # %22let
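The one-line fix passes an explicit timeout so the blocking urlopen() cannot outlive App Engine's request deadline. In the same urllib2 style as the file (placeholder id and key, error handling abbreviated):

    import urllib2

    url = 'https://www.scoutnet.se/api/group/memberlist?id=GROUP&key=KEY'
    request = urllib2.Request(url)
    try:
        # An explicit timeout bounds the wait on the remote server,
        # keeping the call under the platform's request deadline.
        response = urllib2.urlopen(request, timeout=25)
        print(response.read())
    except urllib2.HTTPError as e:
        print('HTTP error %d' % e.code)
    except urllib2.URLError as e:
        print('network error: %s' % e.reason)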
4a5a20835e75a0096287064487ab4885e7c03d9f
fix overlapping name, cmdline test
bin/tools/amount_to_text_en.py
bin/tools/amount_to_text_en.py
# -*- coding: utf-8 -*- ############################################################################## # # OpenERP, Open Source Management Solution # Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>). # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## #------------------------------------------------------------- #ENGLISH #------------------------------------------------------------- from tools.translate import _ to_19 = ( 'Zero', 'One', 'Two', 'Three', 'Four', 'Five', 'Six', 'Seven', 'Eight', 'Nine', 'Ten', 'Eleven', 'Twelve', 'Thirteen', 'Fourteen', 'Fifteen', 'Sixteen', 'Seventeen', 'Eighteen', 'Nineteen' ) tens = ( 'Twenty', 'Thirty', 'Forty', 'Fifty', 'Sixty', 'Seventy', 'Eighty', 'Ninety') denom = ( '', 'Thousand', 'Million', 'Billion', 'Trillion', 'Quadrillion', 'Quintillion', 'Sextillion', 'Septillion', 'Octillion', 'Nonillion', 'Decillion', 'Undecillion', 'Duodecillion', 'Tredecillion', 'Quattuordecillion', 'Sexdecillion', 'Septendecillion', 'Octodecillion', 'Novemdecillion', 'Vigintillion' ) # convert a value < 100 to English. def _convert_nn(val): if val < 20: return to_19[val] for (dcap, dval) in ((k, 20 + (10 * v)) for (v, k) in enumerate(tens)): if dval + 10 > val: if val % 10: return dcap + '-' + to_19[val % 10] return dcap # convert a value < 1000 to english, special cased because it is the level that kicks # off the < 100 special case. The rest are more general. This also allows you to # get strings in the form of 'forty-five hundred' if called directly. 
def _convert_nnn(val): word = '' (mod, rem) = (val % 100, val // 100) if rem > 0: word = to_19[rem] + ' Hundred' if mod > 0: word = word + ' ' if mod > 0: word = word + _convert_nn(mod) return word def english_number(val): if val < 100: return _convert_nn(val) if val < 1000: return _convert_nnn(val) for (didx, dval) in ((v - 1, 1000 ** v) for v in range(len(denom))): if dval > val: mod = 1000 ** didx l = val // mod r = val - (l * mod) ret = _convert_nnn(l) + ' ' + denom[didx] if r > 0: ret = ret + ', ' + english_number(r) return ret def amount_to_text(number, currency): number = '%.2f' % number units_name = currency list = str(number).split('.') start_word = english_number(int(list[0])) end_word = english_number(int(list[1])) cents_number = int(list[1]) cents_name = (cents_number > 1) and 'Cents' or 'Cent' final_result = start_word +' '+units_name+' and ' + end_word +' '+cents_name return final_result #------------------------------------------------------------- # Generic functions #------------------------------------------------------------- _translate_funcs = {'en' : amount_to_text} #TODO: we should use the country AND language (ex: septante VS soixante dix) #TODO: we should use en by default, but the translation func is yet to be implemented def amount_to_text(nbr, lang='en', currency='euro'): """ Converts an integer to its textual representation, using the language set in the context if any. Example: 1654: thousands six cent cinquante-quatre. """ import netsvc # if nbr > 10000000: # netsvc.Logger().notifyChannel('translate', netsvc.LOG_WARNING, _("Number too large '%d', can not translate it")) # return str(nbr) if not _translate_funcs.has_key(lang): netsvc.Logger().notifyChannel('translate', netsvc.LOG_WARNING, _("no translation function found for lang: '%s'" % (lang,))) #TODO: (default should be en) same as above lang = 'en' return _translate_funcs[lang](abs(nbr), currency) if __name__=='__main__': from sys import argv lang = 'nl' if len(argv) < 2: for i in range(1,200): print i, ">>", int_to_text(i, lang) for i in range(200,999999,139): print i, ">>", int_to_text(i, lang) else: print int_to_text(int(argv[1]), lang)
Python
0.000603
@@ -3136,24 +3136,25 @@ rn ret%0A%0Adef +_ amount_to_te @@ -3155,16 +3155,19 @@ _to_text +_en (number, @@ -3728,16 +3728,17 @@ %7B'en' : +_ amount_t @@ -3743,16 +3743,19 @@ _to_text +_en %7D%0A %0A# @@ -4789,33 +4789,36 @@ print i, %22%3E%3E%22, -i +amou nt_to_text(i, la @@ -4888,17 +4888,20 @@ , %22%3E%3E%22, -i +amou nt_to_te @@ -4936,17 +4936,20 @@ print -i +amou nt_to_te
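Decoded, this diff renames the English converter to _amount_to_text_en so it no longer shadows the generic dispatcher of the same name, and fixes the command-line test to call amount_to_text instead of the undefined int_to_text. A stripped-down sketch of the resulting structure (the converter body here is a stand-in):

# The language-specific converter gets a private name...
def _amount_to_text_en(number, currency):
    return '%s %s' % (number, currency)  # stand-in for the real English converter

_translate_funcs = {'en': _amount_to_text_en}

# ...so the public name is free for the generic dispatcher.
def amount_to_text(nbr, lang='en', currency='euro'):
    return _translate_funcs.get(lang, _amount_to_text_en)(abs(nbr), currency)

if __name__ == '__main__':
    print(amount_to_text(1654))  # the cmdline test now calls the dispatcher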
96f93e39ad12d893d8672dd2bb2abea4e6020799
update the file name in the send file to uri
bin/utils/sftp_transactions.py
bin/utils/sftp_transactions.py
import pysftp as sftp import sys import os from email_transactions import email_transactions import paramiko # This addresses the issues with relative paths file_dir = os.path.dirname(os.path.realpath(__file__)) goal_dir = os.path.join(file_dir, "../") proj_root = os.path.abspath(goal_dir)+'/' sys.path.insert(0, proj_root+'bin') class sftp_transactions: """A class for handling the sftp transactions. This class contains functions for getting a file from sftp server and putting a file to a sftp server""" def __init__(self): self.data = [] def send_file_to_uri(self, site_URI, uname, password, remotepath, localpath, contact_email): '''This function puts the specified file to the given uri. Authentication is done using the uname and password remotepath - the path where the file needs to be put localpath - the path where the file is picked from contact_email - the email of the concerned authority to mail to incase of failed transaction ''' # make a connection with uri and credentials bridge = paramiko.Transport((site_URI, 22)) bridge.connect(username = uname, password = password) connect = paramiko.SFTPClient.from_transport(bridge) # import here to eliminate circular dependancy try: connect.chdir(remotepath) except IOError: connect.mkdir(remotepath) connect.chdir(remotepath) try: # put the file at the designated location in the server connect.put(localpath, remotepath+'smi.xml') # close the connection connect.close() except Exception, e: # closing the connection incase there is any exception connect.close() '''Report should be sent to the concerned authority with the error message ''' print 'Error sending file to '+site_URI print 'Check the credentials/remotepath/localpath/Server URI' email_transactions().send_mail('please-do-not-reply@ufl.edu', contact_email, str(e)) print str(e) pass def get_file_from_uri(self, site_URI, uname, password, remotepath, localpath, contact_email): '''This function gets the specified file to the given uri. Authentication is done using the uname and password remotepath - the path where the file needs to be put localpath - the path where the file is picked from contact_email - the email of the concerned authority to mail to incase of failed transaction ''' # make a connection with uri and credentials connect = sftp.Connection(host=site_URI, username=uname, password=password) try: # get the file from the designated location in the server connect.get(remotepath, localpath) # close the connection connect.close() except Exception, e: # closing the connection incase there is any exception connect.close() '''Report should be sent to the concerned authority with the error message ''' email_transactions().send_mail('please-do-not-reply@ufl.edu', contact_email, str(e)) print str(e) pass
Python
0.000001
@@ -564,20 +564,16 @@ ta = %5B%5D%0A - %0A def @@ -626,32 +626,43 @@ ord, remotepath, + file_name, localpath, cont @@ -1025,33 +1025,25 @@ transaction%0A - %0A + '''%0A @@ -1270,25 +1270,17 @@ bridge)%0A - %0A + @@ -1608,17 +1608,17 @@ ath+ -'smi.xml' +file_name )%0A @@ -2637,17 +2637,9 @@ ion%0A - %0A + @@ -3330,28 +3330,28 @@ print str(e)%0A - pass @@ -3330,28 +3330,29 @@ print str(e)%0A pass +%0A
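This diff parameterizes the remote file name in send_file_to_uri, which previously hard-coded 'smi.xml'. A self-contained sketch of the new call shape, with the SFTP connection faked so the snippet runs on its own:

class FakeSFTP(object):
    def put(self, localpath, remotepath):
        print('put %s -> %s' % (localpath, remotepath))

def send_file_to_uri(connect, remotepath, file_name, localpath):
    # Callers now choose the destination name; nothing is hard-coded.
    connect.put(localpath, remotepath + file_name)

send_file_to_uri(FakeSFTP(), '/incoming/', 'smi.xml', '/tmp/smi.xml')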
0d4c041d239e7d7ed234f359ae483523b05e367b
correct the 'View Credentials' icon
openstack_dashboard/dashboards/project/access_and_security/api_access/tables.py
openstack_dashboard/dashboards/project/access_and_security/api_access/tables.py
# Copyright 2012 Nebula, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from django.template.defaultfilters import title # noqa from django.utils.translation import ugettext_lazy as _ from openstack_auth import utils from horizon import tables from openstack_dashboard import api from openstack_dashboard.dashboards.project.access_and_security.api_access \ import forms as project_forms from openstack_dashboard import policy def pretty_service_names(name): name = name.replace('-', ' ') if name in ['ec2', 's3']: name = name.upper() else: name = title(name) return name class DownloadEC2(tables.LinkAction): name = "download_ec2" verbose_name = _("Download EC2 Credentials") verbose_name_plural = _("Download EC2 Credentials") icon = "download" url = "horizon:project:access_and_security:api_access:ec2" policy_rules = (("compute", "compute_extension:certificates"),) def allowed(self, request, datum=None): return api.base.is_service_enabled(request, 'ec2') class DownloadOpenRC(tables.LinkAction): name = "download_openrc" verbose_name = _("Download OpenStack RC File v3") verbose_name_plural = _("Download OpenStack RC File v3") icon = "download" url = "horizon:project:access_and_security:api_access:openrc" def allowed(self, request, datum=None): return utils.get_keystone_version() >= 3 class DownloadOpenRCv2(tables.LinkAction): name = "download_openrc_v2" verbose_name = _("Download OpenStack RC File v2.0") verbose_name_plural = _("Download OpenStack RC File v2.0") icon = "download" url = "horizon:project:access_and_security:api_access:openrcv2" class ViewCredentials(tables.LinkAction): name = "view_credentials" verbose_name = _("View Credentials") classes = ("ajax-modal", ) icon = "plus" url = "horizon:project:access_and_security:api_access:view_credentials" class RecreateCredentials(tables.LinkAction): name = "recreate_credentials" verbose_name = _("Recreate EC2 Credentials") classes = ("ajax-modal",) icon = "refresh" url = \ "horizon:project:access_and_security:api_access:recreate_credentials" policy_rules = (("compute", "compute_extension:certificates")) action_type = "danger" def allowed(self, request, datum=None): try: target = {"target.credential.user_id": request.user.id} if (api.base.is_service_enabled(request, 'ec2') and project_forms.get_ec2_credentials(request) and policy.check((("identity", "identity:ec2_create_credential"), ("identity", "identity:ec2_delete_credential")), request, target=target)): return True except Exception: pass return False class EndpointsTable(tables.DataTable): api_name = tables.Column('type', verbose_name=_("Service"), filters=(pretty_service_names,)) api_endpoint = tables.Column('public_url', verbose_name=_("Service Endpoint")) class Meta(object): name = "endpoints" verbose_name = _("API Endpoints") multi_select = False table_actions = (DownloadOpenRCv2, DownloadOpenRC, DownloadEC2, ViewCredentials, RecreateCredentials)
Python
0.000003
@@ -2383,12 +2383,11 @@ = %22 -plus +eye %22%0A
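The entire change is one attribute: the ViewCredentials action swaps its glyph from "plus" (add) to "eye" (view), which matches a read-only modal. The patched class, as implied by the record and diff (requires horizon and Django):

from django.utils.translation import ugettext_lazy as _
from horizon import tables

class ViewCredentials(tables.LinkAction):
    name = "view_credentials"
    verbose_name = _("View Credentials")
    classes = ("ajax-modal", )
    icon = "eye"   # was "plus"
    url = "horizon:project:access_and_security:api_access:view_credentials"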
402e9515419f0db7f449eac9f810389a4608b878
Comment out venue for now, since we don't have one yet
settings.py
settings.py
# -*- encoding: utf-8 -*- import os from wafer.settings import * try: from localsettings import * except ImportError: pass from django.utils.translation import ugettext_lazy as _ from django.core.urlresolvers import reverse_lazy pyconzadir = os.path.dirname(__file__) STATICFILES_DIRS = ( os.path.join(pyconzadir, 'static'), os.path.join(pyconzadir, 'bower_components'), ) TEMPLATE_DIRS = ( os.path.join(pyconzadir, 'templates'), ) + TEMPLATE_DIRS WAFER_MENUS += ( {"menu": "about", "label": _("About"), "items": []}, {"name": "venue", "label": _("Venue"), "url": reverse_lazy("wafer_page", args=("venue",))}, {"menu": "sponsors", "label": _("Sponsors"), "items": [ {"name": "sponsors", "label": _("Our sponsors"), "url": reverse_lazy("wafer_sponsors")}, {"name": "packages", "label": _("Sponsorship packages"), "url": reverse_lazy("wafer_sponsorship_packages")}, ]}, {"menu": "talks", "label": _("Talks"), "items": [ {"name": "schedule", "label": _("Schedule"), "url": reverse_lazy("wafer_full_schedule")}, {"name": "schedule-next-up", "label": _("Next up"), # Once conference has started use: # "url": reverse_lazy("wafer_current")}, "url": "/schedule/current/?day=2015-10-01&time=08:30"}, # {"name": "accepted-talks", "label": _("Accepted Talks"), # "url": reverse_lazy("wafer_users_talks")}, ]}, {"menu": "events", "label": _("News"), "items": []}, {"menu": "previous-pycons", "label": _("Past PyConZAs"), "items": [ {"name": "pyconza2012", "label": _("PyConZA 2012"), "url": "http://2012.za.pycon.org/"}, {"name": "pyconza2013", "label": _("PyConZA 2013"), "url": "http://2013.za.pycon.org/"}, {"name": "pyconza2014", "label": _("PyConZA 2014"), "url": "http://2014.za.pycon.org/"}, ]}, {"name": "twitter", "label": "Twitter", "image": "/static/img/twitter.png", "url": "https://twitter.com/pyconza"}, {"name": "googleplus", "label": "Google+", "image": "/static/img/googleplus.png", "url": "https://plus.google.com/u/0/events/cpnt1h6tf6m59k8i4uvhmrvguis"}, {"name": "facebook", "label": "Facebook", "image": "/static/img/facebook.png", "url": "https://www.facebook.com/events/1467196980209896/"}, ) CRISPY_TEMPLATE_PACK = 'bootstrap3' MARKITUP_FILTER = ('markdown.markdown', { 'safe_mode': False, 'extensions': ['outline'], }) # Use HTTPS jquery URL so it's accessible on HTTPS pages (e.g. editing a talk) JQUERY_URL = 'https://ajax.googleapis.com/ajax/libs/jquery/2.0.3/jquery.min.js'
Python
0
@@ -545,32 +545,33 @@ tems%22: %5B%5D%7D,%0A +# %7B%22name%22: %22venue%22 @@ -593,24 +593,25 @@ enue%22),%0A +# %22url%22: reve
39586322784382e9dfd4a961bda4253bb27bca5f
Add support for the django debug toolbar
settings.py
settings.py
# Django settings for authentic project. import os DEBUG = True TEMPLATE_DEBUG = DEBUG PROJECT_PATH = os.path.dirname(os.path.abspath(__file__)) ADMINS = ( # ('Your Name', 'your_email@domain.com'), ) MANAGERS = ADMINS DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', # Add 'postgresql_psycopg2', 'postgresql', 'mysql', 'sqlite3' or 'oracle'. 'NAME': 'authentic.db', # Or path to database file if using sqlite3. 'USER': '', # Not used with sqlite3. 'PASSWORD': '', # Not used with sqlite3. 'HOST': '', # Set to empty string for localhost. Not used with sqlite3. 'PORT': '', # Set to empty string for default. Not used with sqlite3. } } # Local time zone for this installation. Choices can be found here: # http://en.wikipedia.org/wiki/List_of_tz_zones_by_name # although not all choices may be available on all operating systems. # On Unix systems, a value of None will cause Django to use the same # timezone as the operating system. # If running in a Windows environment this must be set to the same as your # system time zone. TIME_ZONE = 'Europe/Paris' # Language code for this installation. All choices can be found here: # http://www.i18nguy.com/unicode/language-identifiers.html LANGUAGE_CODE = 'en' SITE_ID = 1 # If you set this to False, Django will make some optimizations so as not # to load the internationalization machinery. USE_I18N = True # If you set this to False, Django will not format dates, numbers and # calendars according to the current locale USE_L10N = True # Absolute path to the directory that holds media. # Example: "/home/media/media.lawrence.com/" MEDIA_ROOT = '' # URL that handles the media served from MEDIA_ROOT. Make sure to use a # trailing slash if there is a path component (optional in other cases). # Examples: "http://media.lawrence.com", "http://example.com/media/" MEDIA_URL = '' # URL prefix for admin media -- CSS, JavaScript and images. Make sure to use a # trailing slash. # Examples: "http://foo.com/media/", "/media/". ADMIN_MEDIA_PREFIX = '/media/' # Make this unique, and don't share it with anybody. SECRET_KEY = '0!=(1kc6kri-ui+tmj@mr+*0bvj!(p*r0duu2n=)7@!p=pvf9n' # List of callables that know how to import templates from various sources. TEMPLATE_LOADERS = ( 'django.template.loaders.filesystem.Loader', 'django.template.loaders.app_directories.Loader', # 'django.template.loaders.eggs.Loader', ) MIDDLEWARE_CLASSES = ( 'django.middleware.common.CommonMiddleware', 'django.contrib.csrf.middleware.CsrfMiddleware', 'django.contrib.sessions.middleware.SessionMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', ) ROOT_URLCONF = 'authentic.urls' TEMPLATE_DIRS = ( os.path.join(PROJECT_PATH, 'templates'), ) INSTALLED_APPS = ( 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'django.contrib.messages', 'django.contrib.admin', 'authentic.idp' )
Python
0
@@ -59,16 +59,41 @@ = True%0A +USE_DEBUG_TOOLBAR = True%0A TEMPLATE @@ -3260,8 +3260,182 @@ .idp'%0A)%0A +%0AINTERNAL_IPS = ('127.0.0.1',)%0A%0Aif USE_DEBUG_TOOLBAR:%0A MIDDLEWARE_CLASSES += ('debug_toolbar.middleware.DebugToolbarMiddleware',)%0A INSTALLED_APPS += ('debug_toolbar',)%0A
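Decoded, this diff gates the Django debug toolbar behind a settings flag and whitelists localhost for it. The added lines, with the surrounding settings stubbed to empty tuples so the snippet stands alone:

DEBUG = True
USE_DEBUG_TOOLBAR = True   # new flag added by the commit

MIDDLEWARE_CLASSES = ()    # stubs; the real settings define these fully
INSTALLED_APPS = ()

INTERNAL_IPS = ('127.0.0.1',)

if USE_DEBUG_TOOLBAR:
    MIDDLEWARE_CLASSES += ('debug_toolbar.middleware.DebugToolbarMiddleware',)
    INSTALLED_APPS += ('debug_toolbar',)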
0a019a6d23b88a49dce081c743cfb46fe568ba6d
Remove brute force protection
settings.py
settings.py
# Django settings for Sal project. from system_settings import * from settings_import import ADMINS, TIME_ZONE, LANGUAGE_CODE, ALLOWED_HOSTS, DISPLAY_NAME, DEFAULT_MACHINE_GROUP_KEY,DEBUG, BRUTE_COOLOFF, BRUTE_LIMIT from os import path import saml2 from saml2.saml import NAMEID_FORMAT_PERSISTENT import logging import sys SAML_DJANGO_USER_MAIN_ATTRIBUTE = 'email' SAML_USE_NAME_ID_AS_USERNAME = True SAML_CREATE_UNKNOWN_USER = True SAML_ATTRIBUTE_MAPPING = { 'uid': ('username', ), 'mail': ('email', ), 'cn': ('first_name', ), 'sn': ('last_name', ), } if DEBUG == True: root = logging.getLogger() root.setLevel(logging.DEBUG) ch = logging.StreamHandler(sys.stdout) ch.setLevel(logging.DEBUG) formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s') ch.setFormatter(formatter) root.addHandler(ch) INSTALLED_APPS += ('djangosaml2',) AUTHENTICATION_BACKENDS = ( 'django.contrib.auth.backends.ModelBackend', 'djangosaml2.backends.Saml2Backend', ) LOGIN_URL = '/saml2/login/' SESSION_EXPIRE_AT_BROWSER_CLOSE = True DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', # Add 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'. 'NAME': os.path.join(PROJECT_DIR, 'db/sal.db'), # Or path to database file if using sqlite3. 'USER': '', # Not used with sqlite3. 'PASSWORD': '', # Not used with sqlite3. 'HOST': '', # Set to empty string for localhost. Not used with sqlite3. 'PORT': '', # Set to empty string for default. Not used with sqlite3. } } if os.environ.has_key('MEMCACHED_PORT_11211_TCP_ADDR'): CACHES = { 'default': { 'BACKEND': 'django.core.cache.backends.memcached.MemcachedCache', 'LOCATION': [ '%s:%s' % (os.environ['MEMCACHED_PORT_11211_TCP_ADDR'], os.environ['MEMCACHED_PORT_11211_TCP_PORT']), ] } } if BRUTE_PROTECT==True: INSTALLED_APPS+= ('axes',) MIDDLEWARE_CLASSES+=('axes.middleware.FailedLoginMiddleware',) # PG Database host = None port = None if os.environ.has_key('DB_HOST'): host = os.environ.get('DB_HOST') port = os.environ.get('DB_PORT') elif os.environ.has_key('DB_PORT_5432_TCP_ADDR'): host = os.environ('DB_PORT_5432_TCP_ADDR') port = os.environ.get('DB_PORT_5432_TCP_PORT', '5432') if host and port: DATABASES = { 'default': { 'ENGINE': 'django.db.backends.postgresql_psycopg2', 'NAME': os.environ['DB_NAME'], 'USER': os.environ['DB_USER'], 'PASSWORD': os.environ['DB_PASS'], 'HOST': host, 'PORT': port, } } BASEDIR = path.dirname(path.abspath(__file__)) SAML_CONFIG = { # full path to the xmlsec1 binary programm 'xmlsec_binary': '/usr/bin/xmlsec1', # your entity id, usually your subdomain plus the url to the metadata view 'entityid': 'http://YOU/saml2/metadata/', # directory with attribute mapping 'attribute_map_dir': path.join(BASEDIR, 'attributemaps'), # this block states what services we provide 'service': { # we are just a lonely SP 'sp' : { 'authn_requests_signed': False, "allow_unsolicited": True, 'want_assertions_signed': True, 'allow_unknown_attributes': True, 'name': 'Federated Django sample SP', 'name_id_format': NAMEID_FORMAT_PERSISTENT, 'endpoints': { # url and binding to the assetion consumer service view # do not change the binding or service name 'assertion_consumer_service': [ ('http://YOU/saml2/acs/', saml2.BINDING_HTTP_POST), ], # url and binding to the single logout service view # do not change the binding or service name 'single_logout_service': [ ('http://YOU/saml2/ls/', saml2.BINDING_HTTP_REDIRECT), ('http://YOU/saml2/ls/post', saml2.BINDING_HTTP_POST), ], }, # attributes that this project need to identify a user 'required_attributes': 
['uid'], # attributes that may be useful to have but not required # 'optional_attributes': ['eduPersonAffiliation'], # in this section the list of IdPs we talk to are defined 'idp': { # we do not need a WAYF service since there is # only an IdP defined here. This IdP should be # present in our metadata # the keys of this dictionary are entity ids 'https://YOURID': { 'single_sign_on_service': { saml2.BINDING_HTTP_REDIRECT: 'https://YOURSSOURL', }, 'single_logout_service': { saml2.BINDING_HTTP_REDIRECT: 'https://YOURSLOURL', }, }, }, }, }, # where the remote metadata is stored 'metadata': { 'local': [path.join(BASEDIR, 'metadata.xml')], }, # set to 1 to output debugging information 'debug': 1, # certificate # 'key_file': path.join(BASEDIR, 'mycert.key'), # private part # 'cert_file': path.join(BASEDIR, 'mycert.pem'), # public part # own metadata settings # 'contact_person': [ # {'given_name': 'Lorenzo', # 'sur_name': 'Gil', # 'company': 'Yaco Sistemas', # 'email_address': 'lgs@yaco.es', # 'contact_type': 'technical'}, # {'given_name': 'Angel', # 'sur_name': 'Fernandez', # 'company': 'Yaco Sistemas', # 'email_address': 'angel@yaco.es', # 'contact_type': 'administrative'}, # ], # you can set multilanguage information here # 'organization': { # 'name': [('Someone', 'en'), # 'display_name': [('Someone', 'en')], # 'url': [('http://www.someone.com', 'en')], # }, 'valid_for': 24, # how long is our metadata valid }
Python
0.000054
@@ -184,36 +184,8 @@ EBUG -, BRUTE_COOLOFF, BRUTE_LIMIT %0Afro @@ -2017,131 +2017,8 @@ %7D%0A%0A -if BRUTE_PROTECT==True:%0A INSTALLED_APPS+= ('axes',)%0A MIDDLEWARE_CLASSES+=('axes.middleware.FailedLoginMiddleware',)%0A%0A # PG
145c31a283bb458bdce72169ea16f07040236ee5
add comment about settings.py
settings.py
settings.py
SECRET_KEY = 'x' DATABASES = { 'default': { 'ENGINE': 'django.contrib.gis.db.backends.postgis', 'NAME': 'travis_ci_test', } } INSTALLED_APPS=( 'django.contrib.admin', 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.gis', 'boundaries', )
Python
0
@@ -1,8 +1,95 @@ +%22%22%22%0ATo run %60django-admin.py syncdb --settings settings --noinput%60 before testing.%0A%22%22%22%0A%0A SECRET_K
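Decoded, the whole commit prepends a module docstring explaining how the test settings are meant to be used:

"""
To run `django-admin.py syncdb --settings settings --noinput` before testing.
"""

SECRET_KEY = 'x'   # the rest of the file is unchanged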
e2f8175fd53893b4af47f40e02a3f721b1882cd1
Fix bug in calling function
tests/convergence_tests/convergence_lspr.py
tests/convergence_tests/convergence_lspr.py
import re import os import sys import time import numpy import pickle import datetime try: import pycuda except ImportError: ans = input('PyCUDA not found. Regression tests will take forever. Do you want to continue? [y/n] ') if ans in ['Y', 'y']: pass else: sys.exit() from pygbe.main import main as pygbe mesh = ['500', '2K', '8K', '32K'] mesh_multiple = ['500-100', '2K-500', '8K-2K', '32K-8K'] def picklesave(test_outputs): with open('tests.pickle','wb') as f: pickle.dump(test_outputs, f, 2) def pickleload(): with open('tests.pickle', 'rb') as f: test_outputs = pickle.load(f) return test_outputs def mesh_ratio(N): """ Calculates the mesh refinement ratio between consecutive meshes. Arguments: ---------- N: list, Number of elements / avg_density in test (depends on what applies). Returns: -------- mesh_ratio: list of float, mesh refinement ratio between consequtive meshes. """ mesh_ratio = [] for i in range(len(N)-1): mesh_ratio.append(N[i+1]/N[i]) return mesh_ratio def report_results(error, N, expected_rate, iterations, Cext_0, total_time, analytical=None, rich_extra=None, avg_density=None, test_name=None): """ Prints out information for the convergence tests. Inputs: ------- error : list of float, error of the calculation per mesh case. N : list , Number of elements in test. expected_rate: float, expected error rate acording to mesh refinement. iterations : list of int, Number of iterations to converge. Cext_0 : float, Cross extinction section of the main sphere. total_time : list of float, total wall time of run i. analytical : float, analytical solution of the Cross extinction section (when applicable). rich_extra : float, richardson extrapolation solution of the Cross extinction section (when applicable). avg_density : list, avegerage density per mesh, N_total/total_Area (when applicable). test_name : str, name you want to assign to the test in report. """ with open('convergence_test_results', 'a') as f: print('-' * 60, file=f) print('{:-^60}'.format('Running: ' + test_name), file=f) print('-' * 60, file=f) print(datetime.datetime.now(), file=f) flag = 0 for i in range(len(error)-1): rate = error[i]/error[i+1] if abs(rate-expected_rate)>0.4: flag = 1 print('Bad convergence for mesh {} to {}, with rate {}'. format(i, i+1, rate), file=f) if flag==0: print('Passed convergence test!', file=f) print('\nNumber of elements : {}'.format(N), file=f) if avg_density: print('Average density, elem/nm^2 : {}'.format(avg_density), file=f) print('Number of iteration: {}'.format(iterations), file=f) print('Cross extinction section surface 0 nm^2: {}'.format(Cext_0), file=f) if analytical: print('Analytical solution: {} nm^2'.format(analytical), file=f) if rich_extra: print('Richardson extrapolation solution: {} nm^2'.format(rich_extra), file=f) print('Error : {}'.format(error), file=f) print('Total time : {}'.format(total_time), file=f) def run_convergence(mesh, test_name, problem_folder, param, total_Area=None): """ Runs convergence tests over a series of mesh sizes Inputs: ------ mesh : array of mesh suffixes problem_folder: str, name of folder containing meshes, etc... param : str, name of param file total_Area : float, total area of the meshes involved. (Provide when avg_density needed for convergence test) Returns: ------- N : len(mesh) array, elements of problem. iterations: len(mesh) array, number of iterations to converge. Cext_0 : len(mesh) array of float, Cross extinction section of the main sphere. 
Time : len(mesh) array of float, time to solution (wall-time) """ print('Runs lspr case of silver sphere in water medium') N = numpy.zeros(len(mesh)) iterations = numpy.zeros(len(mesh)) avg_density = ['NA']*4 Cext_0 = numpy.zeros(len(mesh)) Time = numpy.zeros(len(mesh)) for i in range(len(mesh)): try: print('Start run for mesh '+mesh[i]) results = pygbe(['', '-p', '{}'.format(param), '-c', '{}_{}.config'.format(test_name, mesh[i]), '-o', 'output_{}_{}'.format(test_name, mesh[i]), '-g', './', '{}'.format(problem_folder),], return_results_dict=True) N[i] = results['total_elements'] if total_Area: avg_density[i] = results['total_elements']/total_Area iterations[i] = results['iterations'] Cext_0[i] = results.get('Cext_0') #We do convergence analysis in the main sphere Time[i] = results['total_time'] except (pycuda._driver.MemoryError, pycuda._driver.LaunchError) as e: print('Mesh {} failed due to insufficient memory.' 'Skipping this test, but convergence test should still complete'.format(mesh[i])) time.sleep(4) if total_Area: mesh_rate = mesh_ratio(avg_density) expected_rate = 0 else: mesh_rate = mesh_ratio(N) expected_rate = 0 if all(ratio==mesh_rate[0] for ratio in mesh_rate): expected_rate = mesh_rate[0] else: print('Mesh ratio inconsistency. \nCheck that the mesh ratio' 'remains constant along refinement' 'Convergence test report will bad convergence for this reason') return(N, avg_density, iterations, expected_rate, Cext_0, Time) def richardson_extrapolation_lspr(test_result): """ Performs an estimate of the exact solution using Richardson extrapolation, given by f_ex = (f_1 * f_3 - f_2^2) / (f_3 - 2*f_2+f_1) where f_1 is a result from the finest grid and f_3 is from the coarsest. The grids f_1, f_2, f_3 should have the same refinement ratio (e.g. 2 -> 4 -> 8) Arguments: ---------- test_result: Returns: -------- f_ex : float, richardson_extrapolation estimated exact solution. """ #We perform the richardson extrapolation in the main body. The body we #meassure try: Cext_0 = compiled_results['Cext_0'] except KeyError: print('No results found for main body cross extinction section \n' 'Something has gone wrong.') sys.exit() # assuming 4 runs f1 = Cext_0[3] f2 = Cext_0[2] f3 = Cext_0[1] f_ex = (f1 * f3 - f2**2) / (f3 - 2 * f2 + f1) return f_ex
Python
0.000004
@@ -6848,24 +6848,19 @@ 0 = -compiled +test _result -s %5B'Ce
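The bug was a stale variable name: richardson_extrapolation_lspr read from an undefined compiled_results instead of its test_result argument. The corrected core, with a toy input whose errors shrink by an exact factor of 4 per grid so the extrapolation recovers the true value 1.0:

def richardson_extrapolation_lspr(test_result):
    Cext_0 = test_result['Cext_0']   # was: compiled_results['Cext_0']
    f1, f2, f3 = Cext_0[3], Cext_0[2], Cext_0[1]   # finest ... coarsest grid
    # f_ex = (f1*f3 - f2**2) / (f3 - 2*f2 + f1)
    return (f1 * f3 - f2 ** 2) / (f3 - 2 * f2 + f1)

print(richardson_extrapolation_lspr({'Cext_0': [0.0, 1.4, 1.1, 1.025]}))  # -> 1.0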
4a3aa9e539d1ebad025f3d5978f6b8185c37a7ae
Use built-in assertions
tests/e2e/data/services/test_bq_datasets.py
tests/e2e/data/services/test_bq_datasets.py
import unittest import pandas import geopandas from shapely import wkt import uuid from cartoframes.data.services import BQUserDataset, BQJob from io import StringIO EXPECTED_CSV_SAMPLE = """state_fips_code,county_fips_code,geo_id,tract_name,internal_point_geo 60,10,60010950100,9501.0,POINT (-170.5618796 -14.2587411) 60,10,60010950200,9502.0,POINT (-170.5589852 -14.2859572) 60,10,60010950300,9503.0,POINT (-170.6310985 -14.2760947) 60,10,60010950500,9505.0,POINT (-170.6651925 -14.2713653) 60,10,60010950600,9506.0,POINT (-170.701028 -14.252446) """ class TestBQDataset(unittest.TestCase): def test_can_upload_from_dataframe(self): sample = StringIO(EXPECTED_CSV_SAMPLE) df = pandas.read_csv(sample) unique_table_name = 'cf_test_table_' + str(uuid.uuid4()).replace('-', '_') BQUserDataset.name(unique_table_name).upload(df) def test_can_upload_from_file_object(self): file_object = StringIO(EXPECTED_CSV_SAMPLE) unique_table_name = 'cf_test_table_' + str(uuid.uuid4()).replace('-', '_') BQUserDataset.name(unique_table_name).upload_file_object(file_object) # TODO: it needs the create_dataset method to be able to import a datase from GCS to BQ def test_can_import_a_dataset(self): file_object = StringIO(EXPECTED_CSV_SAMPLE) unique_table_name = 'cf_test_table_' + str(uuid.uuid4()).replace('-', '_') BQUserDataset.name(unique_table_name).upload_file_object(file_object) job = BQUserDataset.name(unique_table_name).import_dataset() self.assertIsInstance(job, BQJob) # TODO: it needs the create_dataset method to be able to import a datase from GCS to BQ def test_can_get_status_from_import(self): file_object = StringIO(EXPECTED_CSV_SAMPLE) unique_table_name = 'cf_test_table_' + str(uuid.uuid4()).replace('-', '_') BQUserDataset.name(unique_table_name).upload_file_object(file_object) job = BQUserDataset.name(unique_table_name).import_dataset() status = job.status() self.assertIn(status, ['done', 'running', 'waiting', 'failed']) # TODO: it needs the create_dataset method to be able to import a datase from GCS to BQ def test_can_wait_for_job_completion(self): file_object = StringIO(EXPECTED_CSV_SAMPLE) unique_table_name = 'cf_test_table_' + str(uuid.uuid4()).replace('-', '_') BQUserDataset.name(unique_table_name).upload_file_object(file_object) job = BQUserDataset.name(unique_table_name).import_dataset() status = job.result() self.assertEqual(status, 'done') def test_can_upload_a_dataframe_and_wait_for_completion(self): sample = StringIO(EXPECTED_CSV_SAMPLE) df = pandas.read_csv(sample) unique_table_name = 'cf_test_table_' + str(uuid.uuid4()).replace('-', '_') status = BQUserDataset.name(unique_table_name).upload_dataframe(df) self.assertEqual(status, 'done') def test_can_download_to_dataframe(self): result = BQUserDataset.name('census_tracts_american_samoa').download_stream() df = pandas.read_csv(result) self.assertEqual(df.shape, (18, 13)) # do some checks on the contents sample = pandas.DataFrame( df.head(), columns=( 'state_fips_code', 'county_fips_code', 'geo_id', 'tract_name', 'internal_point_geo' ) ) sample['internal_point_geo'] = df['internal_point_geo'].apply(wkt.loads) geosample = geopandas.GeoDataFrame(sample, geometry='internal_point_geo') self.assertEqual(geosample.to_csv(index=False), EXPECTED_CSV_SAMPLE)
Python
0.999306
@@ -2570,37 +2570,34 @@ self.assert -Equal +In (status, 'done') @@ -2581,38 +2581,50 @@ ssertIn(status, +%5B 'done' +, 'failed'%5D )%0A%0A def test_ @@ -2939,21 +2939,18 @@ f.assert -Equal +In (status, @@ -2950,22 +2950,34 @@ status, +%5B 'done' +, 'failed'%5D )%0A%0A d
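This diff relaxes the terminal-status assertions so a job that legitimately finishes in 'failed' no longer aborts the e2e run. In unittest terms (status here is a stand-in for job.result()):

#   self.assertEqual(status, 'done')            # before: only success passes
#   self.assertIn(status, ['done', 'failed'])   # after: any terminal state passes
status = 'failed'
assert status in ['done', 'failed']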
417ab5241c852cdcd072143bc2444f20f2117623
Update capture profiler to new spec of providing board instead of string.
tests/execution_profiles/capture_profile.py
tests/execution_profiles/capture_profile.py
import cProfile from pqhelper import capture def main(): cProfile.run('test_solution(catapult)') def test_solution(board_string): print capture.capture(board_string) skeleton = ''' ..*..*.. .gm..mg. .ms..sm. .rs..sr. .ggmmgg. .rsggsr. .rsrrsr. ssgssgss''' giant_rat = ''' ...mm... ..mrym.. .mgyrgm. mygrygym ryxssxyr rxgbbgxr xygssgyx rybssbyr''' griffon = ''' .r..s... .b.sy... .b.yys.. .r.yxg.g .g.x*b.r .g.xxb.r rybyxygy ygyxybyr''' catapult = ''' ........ ........ ..mbgm.. .mxgbrm. my*gb*gm yrrxrxxg ymxyyrmg ssxssrss''' easy = ''' ........ ........ ........ ........ .......x ....xx.r ....rr.r ..rryyry''' if __name__ == '__main__': main()
Python
0
@@ -132,16 +132,56 @@ tring):%0A + board = capture.Board(board_string)%0A prin @@ -203,23 +203,16 @@ re(board -_string )%0A%0Askele
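Decoded, the profiler now wraps the board string in a capture.Board before calling capture(), matching the library's new spec. The updated function, per the diff (needs the pqhelper package; the board strings are defined in the record above):

from pqhelper import capture

def test_solution(board_string):
    board = capture.Board(board_string)   # new: construct a Board first
    print capture.capture(board)          # capture() now takes the Board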
5829afb4345d09a04fca61e51624f580a95c408d
remove None.appspot.com from settings.PLAYGROUND_HOSTS
settings.py
settings.py
"""Module containing global playground constants and functions.""" import os from google.appengine.api import app_identity from google.appengine.api import backends import appids import secret DEBUG = True # user content hostname prefix USER_CONTENT_PREFIX = 'user-content' # RFC1113 formatted 'Expires' to prevent HTTP/1.0 caching LONG_AGO = 'Mon, 01 Jan 1990 00:00:00 GMT' # 10 minutes TEMPLATE_MEMCACHE_TIME = 3600 # owners of template projects PUBLIC_PROJECT_TEMPLATE_OWNER = 'PUBLIC_TEMPLATE' MANUAL_PROJECT_TEMPLATE_OWNER = 'MANUAL_TEMPLATE' PROJECT_TEMPLATE_OWNERS = [ PUBLIC_PROJECT_TEMPLATE_OWNER, MANUAL_PROJECT_TEMPLATE_OWNER ] # whether or not we're running in the dev_appserver _DEV_MODE = os.environ['SERVER_SOFTWARE'].startswith('Development/') # namespace for playground specific data PLAYGROUND_NAMESPACE = '_playground' # template projects location TEMPLATE_PROJECT_DIR = 'repos/' # project access_key query parameter name ACCESS_KEY_SET_COOKIE_PARAM_NAME = 'set_access_key_cookie' ACCESS_KEY_HTTP_HEADER = 'X-Cloud-Playground-Access-Key' ACCESS_KEY_COOKIE_NAME = 'access_key' ACCESS_KEY_COOKIE_ARGS = { 'httponly': True, 'secure': not _DEV_MODE, } # name for the session cookie SESSION_COOKIE_NAME = 'session' SESSION_COOKIE_ARGS = { 'httponly': True, 'secure': not _DEV_MODE, } XSRF_COOKIE_ARGS = { 'httponly': False, 'secure': not _DEV_MODE, } WSGI_CONFIG = { 'webapp2_extras.sessions': { 'secret_key': secret.GetSecret('webapp2_extras.sessions', entropy=128), 'cookie_args': SESSION_COOKIE_ARGS, } } # One hour MIN_EXPIRATION_SECONDS = 3600 # One week DEFAULT_EXPIRATION_SECONDS = 604800 # Extensions to exclude when creating template projects SKIP_EXTENSIONS = ('swp', 'pyc', 'svn') if _DEV_MODE: PLAYGROUND_HOSTS = ('localhost:8080', '127.0.0.1:8080', # port 7070 for karma e2e test 'localhost:7070', '127.0.0.1:7070', app_identity.get_default_version_hostname()) # PLAYGROUND_USER_CONTENT_HOST = backends.get_hostname('user-content-backend') PLAYGROUND_USER_CONTENT_HOST = None MIMIC_HOST = backends.get_hostname('exec-code-backend') else: PLAYGROUND_HOSTS = ('{}.appspot.com'.format(appids.PLAYGROUND_APP_ID), '{}.appspot.com'.format(appids.PLAYGROUND_APP_ID_ALIAS)) # PLAYGROUND_USER_CONTENT_HOST = ('{0}-dot-{1}.appspot.com' # .format(USER_CONTENT_PREFIX, # appids.PLAYGROUND_APP_ID)) PLAYGROUND_USER_CONTENT_HOST = None MIMIC_HOST = '{0}.appspot.com'.format(appids.MIMIC_APP_ID)
Python
0.000005
@@ -1815,17 +1815,17 @@ HOSTS = -( +%5B 'localho @@ -2028,17 +2028,17 @@ stname() -) +%5D %0A # PLA @@ -2234,17 +2234,17 @@ HOSTS = -( +%5B '%7B%7D.apps @@ -2284,20 +2284,108 @@ _APP_ID) -, +%5D %0A +if appids.PLAYGROUND_APP_ID_ALIAS:%0A PLAYGROUND_HOSTS.append('%7B%7D.appspot.com'%0A @@ -2396,32 +2396,16 @@ -'%7B%7D.appspot.com' .format(
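Decoded, this diff turns PLAYGROUND_HOSTS into a list so the alias host can be appended only when the alias is actually set, keeping a bogus 'None.appspot.com' entry out. The guard pattern, with placeholder app ids:

PLAYGROUND_APP_ID = 'try-appengine'   # placeholder values for the sketch
PLAYGROUND_APP_ID_ALIAS = None

PLAYGROUND_HOSTS = ['{}.appspot.com'.format(PLAYGROUND_APP_ID)]
if PLAYGROUND_APP_ID_ALIAS:
    PLAYGROUND_HOSTS.append('{}.appspot.com'.format(PLAYGROUND_APP_ID_ALIAS))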
08edcd99f379962cbd761c743a727e86095b7a48
Convert to list in case it is not
setup.in.py
setup.in.py
# # Copyright 2012-2019 CNRS-UM LIRMM, CNRS-AIST JRL # from __future__ import print_function try: from setuptools import setup from setuptools import Extension except ImportError: from distutils.core import setup from distutils.extension import Extension from Cython.Build import cythonize import hashlib import numpy import os import subprocess import sys win32_build = os.name == 'nt' this_path = os.path.dirname(os.path.realpath(__file__)) with open(this_path + '/sch/__init__.py', 'w') as fd: fd.write('from .sch import *\n') sha512 = hashlib.sha512() src_files = ['sch/c_sch_private.pxd', 'sch/sch.pyx', 'sch/c_sch.pxd', 'sch/sch.pxd', 'include/sch_wrapper.hpp'] src_files = [ '{}/{}'.format(this_path, f) for f in src_files ] for f in src_files: chunk = 2**16 with open(f, 'r') as fd: while True: data = fd.read(chunk) if data: sha512.update(data.encode('ascii')) else: break version_hash = sha512.hexdigest()[:7] class pkg_config(object): def __init__(self): self.compile_args = [ '-D' + x for x in '@COMPILE_DEFINITIONS@'.split(';') if len(x) ] self.compile_args += ['-std=c++11'] if win32_build: self.compile_args.append('-DWIN32') self.include_dirs = [ x for x in '$<TARGET_PROPERTY:SpaceVecAlg::SpaceVecAlg,INTERFACE_INCLUDE_DIRECTORIES>;$<TARGET_PROPERTY:sch-core::sch-core,INCLUDE_DIRECTORIES>'.split(';') if len(x) ] self.include_dirs.append('@Boost_INCLUDE_DIR@') self.include_dirs.append(this_path + '/include') self.include_dirs = filter(len, self.include_dirs) self.library_dirs = [ x for x in '$<TARGET_PROPERTY:sch-core::sch-core,LINK_FLAGS>'.split(';') if len(x) ] location = '$<TARGET_PROPERTY:sch-core::sch-core,LOCATION_$<CONFIGURATION>>' self.library_dirs.append(os.path.dirname(location) + "/../lib/") if "$<CONFIGURATION>".lower() == "debug": self.libraries = ['sch-core_d'] else: self.libraries = ['sch-core'] configs = pkg_config() def GenExtension(name, pkg, ): pyx_src = name.replace('.', '/') cpp_src = pyx_src + '.cpp' pyx_src = pyx_src + '.pyx' ext_src = pyx_src return Extension(name, [ext_src], extra_compile_args = pkg.compile_args, include_dirs = pkg.include_dirs + [numpy.get_include()], library_dirs = pkg.library_dirs, libraries = pkg.libraries) extensions = [ GenExtension('sch.sch', configs) ] extensions = [ x for x in extensions if x is not None ] packages = ['sch'] data = ['__init__.py', 'c_sch.pxd', 'sch.pxd'] cython_packages = [ x for x in packages if any([ext.name.startswith(x) for ext in extensions]) ] extensions = cythonize(extensions) setup( name = 'sch', version='@PROJECT_VERSION@-{}'.format(version_hash), ext_modules = extensions, packages = packages, package_data = { 'sch': data } )
Python
1
@@ -2136,16 +2136,58 @@ pyx_src%0A + pkg.include_dirs=list(pkg.include_dirs)%0A return
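The added line, pkg.include_dirs = list(pkg.include_dirs), most likely works around filter() returning a lazy iterator on Python 3 (the script earlier does include_dirs = filter(len, ...)), since an iterator cannot be concatenated with a list. The failure and fix in miniature:

include_dirs = filter(len, ['/usr/include', '', '/opt/include'])
# On Python 3, include_dirs + ['/numpy/include'] would raise TypeError here.
include_dirs = list(include_dirs)            # the commit's fix, in essence
print(include_dirs + ['/numpy/include'])     # safe on both Python 2 and 3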
a12d61e9a9b85c436d5b21b39862497b7e3ed903
update tpp.py for gen3
tpp.py
tpp.py
#This script will show updates to the Twitch Plays Pokemon live feed on reddit. #You can only show important updates by passing the --important flag when you run the script #This could be easily adapted for other live feeds (or totally generic) but for now #it is hardcoded for the TPP feed. #python-requests is required to run this. #Install using: #pip install requests from __future__ import print_function import requests import time import argparse import sys _parser = argparse.ArgumentParser(description="Live Twitch Plays Pokemon updates in your console.") _parser.add_argument("--important", action="store_true") _args = _parser.parse_args() _api_url = "http://api.reddit.com/live/m5n42gvbpyi6hdisciytwamw3a" _headers = {"User-Agent": "TTPConsole/1.1 by sc00ty"} _timeout = 60 #1 Minute _last_id = "" while True: try: #Request the JSON data for the live feed payload = {"before": _last_id} feed = requests.get(_api_url, params=payload, headers=_headers).json() #Iterate backwards through the list, making it so items are shown chronologically for feed_item in feed["data"]["children"][::-1]: #Store the last seen id _last_id = feed_item["data"]["name"] body_text = feed_item["data"]["body"] #If all text should be shown OR only important stuff, and this is important... show the update! if not _args.important or ("**" in body_text and _args.important): print("%s\n" % (body_text,)) #Progress towards next update. for i in range (0, _timeout): print("Checking for update in %ss.\r" % (_timeout - i), end="") sys.stdout.flush() time.sleep(1) except KeyboardInterrupt: break except Exception: print("Encountered an error while retrieving data. Exiting...")
Python
0
@@ -691,34 +691,34 @@ ive/ -m5n42gvbpy +nawsz3vn7u i6hd -i s +g ciytw -amw3a +cxadi %22%0A_h
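The commit only swaps the reddit live-thread id embedded in _api_url (the gen-3 TPP feed). Purely as an illustrative refactoring, not something the commit itself does, keeping the id as its own constant would make such swaps one-line and obvious:

_FEED_ID = 'nawsz3vn7ui6hdsgciytwcxadi'   # id taken from the diff
_api_url = 'http://api.reddit.com/live/' + _FEED_ID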
c81eb510c72511c1f692f02f7bb63ef4caa51d27
Add management functions for migration
apps/concept/management/commands/update_concept_totals.py
apps/concept/management/commands/update_concept_totals.py
from optparse import make_option import sys from django.core.management.base import BaseCommand from education.models import Concept class Command(BaseCommand): args = "" help = "Update concept total_question counts (post db import)" def handle(self, *args, **options): for concept in Concept.objects.all(): concept.total_questions = concept.question_set.count() concept.save()
Python
0
@@ -98,17 +98,15 @@ rom -education +concept .mod
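Decoded, the whole fix is the import path: the Concept model lives in the concept app, not education. The corrected line:

from concept.models import Concept   # was: from education.models import Concept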
6a39a514ae82f412c107dd87944cdb17b6a9d036
remove isinstance assert in test_remove_site_packages_64bit
tests/test_server32_remove_site_packages.py
tests/test_server32_remove_site_packages.py
import os import sys try: import pytest except ImportError: # the 32-bit server does not need pytest installed class Mark(object): @staticmethod def skipif(condition, reason=None): def func(function): return function return func class pytest(object): mark = Mark from msl.loadlib import Server32, Client64, IS_MAC from msl.examples.loadlib import EXAMPLES_DIR class Site32(Server32): def __init__(self, host, port, **kwargs): super(Site32, self).__init__( os.path.join(kwargs['ex_dir'], 'cpp_lib32'), 'cdll', host, port ) def remove(self): return self.remove_site_packages_64bit() @staticmethod def contains(path): return path in sys.path class Site64(Client64): def __init__(self): super(Site64, self).__init__(__file__, ex_dir=EXAMPLES_DIR) def remove(self): return self.request32('remove') def contains(self, path): return self.request32('contains', path) @pytest.mark.skipif(IS_MAC, reason='the 32-bit server for macOS does not exist') def test_remove_site_packages_64bit(): s = Site64() path = s.remove() assert isinstance(path, str) assert path in sys.path assert not s.contains(path)
Python
0.000007
@@ -1215,41 +1215,8 @@ e()%0A - assert isinstance(path, str)%0A
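A plausible reason for dropping the assert (not stated in the record): under Python 2 the returned path can be unicode rather than str, so a strict type check fails even though the remaining 'path in sys.path' assertions still hold. If a type check were wanted, a version-agnostic one might look like:

import sys

text_types = (str,) if sys.version_info[0] >= 3 else (str, unicode)
path = u'/some/site-packages'   # stand-in for s.remove()
assert isinstance(path, text_types)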
7d47ab3aed3fb1c591966fe1a84e7c5f8d4ce909
Print usage if only an optional option sent
qos.py
qos.py
#!/usr/bin/python # Author: Anthony Ruhier # Set QoS rules import os import subprocess import argparse import sys import logging try: from config import DEBUG except ImportError: DEBUG = False import tools def run_as_root(): """ Restart the script as root """ # Need to be root if os.geteuid() != 0: print("You need to be root to run this script. Relaunching with " "sudo...\n") subprocess.call(["sudo", sys.executable] + sys.argv) exit() def apply_qos(): run_as_root() # Clean old rules reset_qos() # Setting new rules print("Setting new rules") setup_qos() def reset_qos(): run_as_root() print("Removing tc rules") ifnames = get_ifnames() tools.qdisc_del(ifnames, "htb", stderr=subprocess.DEVNULL) return def show_qos(): ifnames = get_ifnames() print("\n\t QDiscs details\n\t================\n") tools.qdisc_show(ifnames, "details") print("\n\t QDiscs stats\n\t==============\n") tools.qdisc_show(ifnames, "details") def set_debug(level): if level or DEBUG: log_level = logging.DEBUG else: log_level = logging.WARNING logging.basicConfig( stream=sys.stderr, format="[%(levelname)s] %(message)s (%(filename)s:%(lineno)d) ", level=log_level ) if __name__ == '__main__': # Set all arguments possible for this script parser = argparse.ArgumentParser( description="Script to set, show or delete QoS rules with TC" ) # Start/Stop/Show command sp_action = parser.add_subparsers() sp_start = sp_action.add_parser("start", help="set QoS rules") sp_stop = sp_action.add_parser("stop", help="Remove all QoS rules") sp_show = sp_action.add_parser("show", help="Show QoS rules") # Set function to call for each options sp_start.set_defaults(func=apply_qos) sp_stop.set_defaults(func=reset_qos) sp_show.set_defaults(func=show_qos) # Debug option parser.add_argument('-d', '--debug', help="Set the debug level", dest="debug", action="store_true") # Different ways to create QoS parser_group = parser.add_mutually_exclusive_group() # Use class rules parser_group.add_argument('-c', '--class', help="Use class rules (default)", dest="class", action="store_true") # Use tree rules parser_group.add_argument('-t', '--tree', help="Use tree rules", dest="tree", action="store_true") # If no argument provided show help if len(sys.argv) == 1: parser.print_help() sys.exit(1) # Parse argument args = parser.parse_args() # Set debug mode set_debug(args.debug) if args.tree: from rules_parser import setup_qos, get_ifnames else: from class_parser import setup_qos, get_ifnames # Execute correct function args.func()
Python
0.000005
@@ -2959,21 +2959,126 @@ tion -%0A args.func( +, or print usage%0A try:%0A args.func()%0A except AttributeError:%0A parser.print_usage()%0A sys.exit(1 )%0A
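Decoded, the diff wraps args.func() so that invoking the script with only an optional flag (e.g. just -d, which selects no subcommand and therefore sets no func default under Python 3's optional subparsers) prints usage instead of crashing with AttributeError. A self-contained reduction:

import argparse
import sys

parser = argparse.ArgumentParser()
parser.add_argument('-d', '--debug', action='store_true')
sp_start = parser.add_subparsers().add_parser('start')

def apply_qos():
    print('setting rules')

sp_start.set_defaults(func=apply_qos)

args = parser.parse_args()
try:
    args.func()                # no subcommand -> no .func -> AttributeError
except AttributeError:
    parser.print_usage()
    sys.exit(1)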
6fc065dc2f88c0c59037f5a6efa89738d963977e
Support XML-RPC marshalling of mx.DateTime.
rpc.py
rpc.py
import xmlrpclib import traceback from cStringIO import StringIO allowed = ('package_releases', 'package_urls', 'package_data', 'search', 'list_packages', 'release_urls', 'release_data', 'updated_releases', 'changelog', 'post_cheesecake_for_release') def handle_request(webui_obj): webui_obj.handler.send_response(200, 'OK') webui_obj.handler.send_header('Content-type', 'text/xml') webui_obj.handler.send_header('charset', 'UTF-8' ); webui_obj.handler.end_headers() try: methodArgs, methodName = xmlrpclib.loads(webui_obj.handler.rfile.read()) if methodName in allowed: response = globals()[methodName](webui_obj.store, *methodArgs) else: raise KeyError, "Method %r does not exist" % (methodName,) if response is None: response = '' # xmlrpclib.dumps encodes Unicode as UTF-8 xml = xmlrpclib.dumps((response,), methodresponse=True, allow_none=True) webui_obj.handler.wfile.write(xml) except: out = StringIO() traceback.print_exc(file=out) result = xmlrpclib.dumps(xmlrpclib.Fault(1, out.getvalue()), methodresponse=True) webui_obj.handler.wfile.write(result) def list_packages(store): result = store.get_packages() return [row['name'] for row in result] def package_releases(store, package_name, show_hidden=False): if show_hidden: hidden = None else: hidden = False result = store.get_package_releases(package_name, hidden=hidden) return [row['version'] for row in result] def release_urls(store, package_name, version): result = [] for file in store.list_files(package_name, version): info = file.as_dict() info['url'] = store.gen_file_url(info['python_version'], package_name, info['filename']) result.append(info) # TODO do something with release_urls when there is something to do #info = store.get_package(package_name, version) #if info['download_url']: # result.append({'url': info['download_url']}) return result package_urls = release_urls # "deprecated" def release_data(store, package_name, version): info = store.get_package(package_name, version).as_dict() del info['description_html'] for col in ('requires', 'provides', 'obsoletes'): rows = store.get_release_relationships(package_name, version, col) info[col] = [row['specifier'] for row in rows] classifiers = [r[0] for r in store.get_release_classifiers(package_name, version)] info['classifiers' ] = classifiers return info package_data = release_data # "deprecated" def search(store, spec, operator='and'): spec['_pypi_hidden'] = 'FALSE' return [row.as_dict() for row in store.query_packages(spec, operator)] def updated_releases(store, since): result = store.updated_releases(since) return [(row['name'], row['version']) for row in result] def changelog(store, since): result = store.changelog(since) return [(row['name'],row['version'], int(row['submitted_date'].gmticks()), row['action']) for row in result] def post_cheesecake_for_release(store, name, version, score_data, password): if password != store.config.cheesecake_password: raise ValuError("Bad password.") store.save_cheesecake_score(name, version, score_data) store.commit()
Python
0
@@ -254,16 +254,333 @@ ease')%0A%0A +# monkey-patch xmlrpclib to marshal mx.DateTime correctly.%0Aimport mx.DateTime%0Adef dump_DateTime(self, value, write):%0A write(%22%3Cvalue%3E%3CdateTime.iso8601%3E%22)%0A write(value.strftime(%22%25Y%25m%25dT%25H:%25M:%25S%22))%0A write(%22%3C/dateTime.iso8601%3E%3C/value%3E%5Cn%22)%0Axmlrpclib.Marshaller.dispatch%5Bmx.DateTime.DateTimeType%5D = dump_DateTime%0A%0A def hand
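Decoded from the diff, the commit registers a custom marshaller on xmlrpclib's dispatch table, since xmlrpclib does not know how to serialize mx.DateTime values. The added block (requires the egenix mx package; xmlrpclib is already imported by the module):

import xmlrpclib
import mx.DateTime

def dump_DateTime(self, value, write):
    # Emit the value as an XML-RPC dateTime.iso8601 element.
    write("<value><dateTime.iso8601>")
    write(value.strftime("%Y%m%dT%H:%M:%S"))
    write("</dateTime.iso8601></value>\n")

xmlrpclib.Marshaller.dispatch[mx.DateTime.DateTimeType] = dump_DateTime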
d72171899b75eaf4675e5fd3d8586aa300b04583
Fix APU object construction with id_name
apsuite/commissioning_scripts/measure_id_integrals.py
apsuite/commissioning_scripts/measure_id_integrals.py
""".""" from threading import Thread as _Thread, Event as _Event import time as _time import numpy as _np from siriuspy.devices import SOFB, APU, Tune from epics import PV from pymodels import si from .base import BaseClass class IDParams: """.""" def __init__(self, phases=None, meas_type='static'): """.""" self.phases = phases self.meas_type = meas_type if self.meas_type == 'static': self.phase_speed = 0.5 self.sofb_mode = 'SlowOrb' self.sofb_buffer = 20 self.wait_sofb = 1 self.wait_to_move = 0 elif self.meas_type == 'dynamic': self.phase_speed = 0.5 self.sofb_mode = 'Monit1' self.sofb_buffer = 1 self.wait_sofb = 10 self.wait_to_move = 1 def __str__(self): """.""" ftmp = '{0:26s} = {1:9.6f} {2:s}\n'.format stmp = '{0:35s}: {1:} {2:s}\n'.format dtmp = '{0:26s} = {1:9d} {2:s}\n'.format stg = ftmp('phase_speed', self.phase_speed, '') stg += stmp('sofb_mode', self.sofb_mode, '') stg += dtmp('sofb_buffer', self.sofb_buffer, '') return stg class MeasIDIntegral(BaseClass): """.""" def __init__(self, model, id_name=None, phases=None, meas_type='static'): """.""" super().__init__() self.model = model self.famdata = si.get_family_data(model) self.params = IDParams(phases, meas_type) self.id_name = id_name self.devices['apu'] = APU(APU.DEVICES.APU22_09SA) self.devices['tune'] = Tune(Tune.DEVICES.SI) self.devices['sofb'] = SOFB(SOFB.DEVICES.SI) self.devices['study_event'] = PV('AS-RaMO:TI-EVG:StudyExtTrig-Cmd') self.devices['current_info'] = PV('SI-Glob:AP-CurrInfo:Current-Mon') self.ph_dyn_tstamp = [] self.ph_dyn_mon = [] self.data['measure'] = dict() self._stopevt = _Event() if self.params.meas_type == 'static': self._meas_func = self._meas_integral_static elif self.params.meas_type == 'dynamic': self._meas_func = self._meas_integral_dynamic self._thread = _Thread( target=self._meas_func, daemon=True) def start(self): """.""" if self._thread.is_alive(): return self._stopevt.clear() self._thread = _Thread(target=self._meas_func, daemon=True) self._thread.start() def stop(self): """.""" self._stopevt.set() @property def ismeasuring(self): """.""" return self._thread.is_alive() def get_orbit(self): """.""" # reset SOFB buffer and wait for filling sofb = self.devices['sofb'] sofb.cmd_reset() sofb.wait_buffer() # get orbit orb = _np.vstack((sofb.orbx, sofb.orby)) return orb def get_mt_traj(self): """.""" # reset SOFB buffer and wait for filling sofb = self.devices['sofb'] sofb.cmd_reset() sofb.wait_buffer() # get trajectory traj = _np.vstack((sofb.mt_trajx, sofb.mt_trajy)) return traj, sofb.mt_time def get_tunes(self): """.""" return self.devices['tune'].tunex, self.devices['tune'].tuney def get_stored_curr(self): """.""" return self.devices['current_info'].value def apu_move(self, phase, phase_speed): """.""" print('- moving to phase {} ... 
'.format(phase), end='') apu = self.devices['apu'] apu.phase_speed = phase_speed apu.phase = phase apu.cmd_move() apu.wait_move() print('ok') def cmd_trigger_study(self): """.""" self.devices['study_event'].value = 1 def _meas_integral_static(self): ph_spd = self.params.phase_speed # sending to initial phase self.apu_move(self.params.phases[0], ph_spd) orb0 = self.get_orbit() nux, nuy = self.get_tunes() curr = self.get_stored_curr() orb = [] phs_rb = [] for phs in self.params.phases: _time.sleep(self.params.wait_to_move) self.apu_move(phs, ph_spd) _time.sleep(self.params.wait_sofb) orb.append(self.get_orbit()) phs_rb.append(self.devices['apu'].phase) _time.sleep(self.params.wait_sofb) orbf = self.get_orbit() meas = dict() meas['initial_orbit'] = orb0 meas['final_orbit'] = orbf meas['tunex'] = nux meas['tuney'] = nuy meas['stored_current'] = curr meas['phases'] = phs_rb meas['orbits'] = orb self.data['measure'] = meas print('finished!') def _meas_integral_dynamic(self): ph_spd = self.params.phase_speed # sending to initial phase self.apu_move(self.params.phases[0], ph_spd) apu_phase_mon = self.devices['apu'].pv_object('Phase-Mon') ph_mon = [] ph_tstamp = [] def phase_cb(**kwargs): nonlocal ph_mon, ph_tstamp ph_mon.append(kwargs['value']) ph_tstamp.append(kwargs['timestamp']) nux, nuy = self.get_tunes() curr = self.get_stored_curr() self.cmd_trigger_study() apu_phase_mon.add_callback(phase_cb) _time.sleep(self.params.wait_sofb) traj0, tstamp0 = self.get_mt_traj() self.cmd_trigger_study() _time.sleep(self.params.wait_to_move) self.apu_move(self.params.phases[-1], ph_spd) _time.sleep(self.params.wait_sofb) traj, tstamp = self.get_mt_traj() self.cmd_trigger_study() _time.sleep(self.params.wait_sofb) trajf, tstampf = self.get_mt_traj() apu_phase_mon.clear_callbacks() meas = dict() meas['initial_traj'] = traj0 meas['initial_timestamp'] = tstamp0 meas['final_traj'] = trajf meas['final_timestamp'] = tstampf meas['tunex'] = nux meas['tuney'] = nuy meas['stored_current'] = curr meas['phases'] = ph_mon meas['phases_timestamp'] = ph_tstamp meas['traj'] = traj meas['timestamp'] = tstamp self.data['measure'] = meas print('finished!')
Python
0.000001
@@ -1584,30 +1584,20 @@ APU( -APU.DEVICES.APU22_09SA +self.id_name )%0A
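The fix builds the APU device from the id_name handed to MeasIDIntegral instead of a hard-coded APU22 at 09SA. A runnable illustration with stand-in classes (the real ones come from siriuspy, and the device name below is hypothetical):

class APU(object):                       # stand-in for siriuspy.devices.APU
    def __init__(self, devname):
        self.devname = devname

class MeasIDIntegral(object):
    def __init__(self, id_name):
        self.id_name = id_name
        # was: APU(APU.DEVICES.APU22_09SA); now whichever ID was requested
        self.devices = {'apu': APU(self.id_name)}

meas = MeasIDIntegral('SI-09SA:ID-APU22')   # hypothetical device name
print(meas.devices['apu'].devname)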
79ef0fe21b136b80889a8e6e06339074ac73a1f1
Comment out section
run.py
run.py
__author__ = 'matt' import datetime import blockbuster blockbuster.app.debug = blockbuster.config.debug_mode blockbuster.bb_logging.logger.info("@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@") blockbuster.bb_logging.logger.info("@@@@@@@@@@@@@@@@@@ BlockBuster " + blockbuster.__version__ + " " "@@@@@@@@@@@@@@@@@@") blockbuster.bb_logging.logger.info("@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@") blockbuster.bb_logging.logger.info("=== Application startup - " + str(datetime.datetime.now()) + " ====") blockbuster.bb_logging.logger.info( '================Time restriction disabled================') \ if not blockbuster.config.timerestriction else blockbuster.bb_logging.logger.info( '================Time restriction enabled================') blockbuster.bb_logging.logger.info("@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@") if blockbuster.config.debug_mode: blockbuster.bb_logging.logger.info("========= APPLICATION IS RUNNING IN DEBUG MODE ==========") # This section only applies when you are running run.py directly if __name__ == '__main__': blockbuster.bb_logging.logger.info("Running http on port 5000") blockbuster.app.run(host='0.0.0.0', debug=True)
Python
0
@@ -14,16 +14,18 @@ 'matt'%0A%0A +# import d @@ -32,16 +32,16 @@ atetime%0A - import b @@ -52,16 +52,18 @@ buster%0A%0A +# blockbus @@ -108,17 +108,20 @@ ug_mode%0A -%0A +#%0A# blockbus @@ -200,32 +200,34 @@ @@@@@@@@@@@@@%22)%0A +# blockbuster.bb_l @@ -311,16 +311,17 @@ _ + %22 %22%0A +# @@ -416,16 +416,17 @@ + %22@@@@@@@ @@ -431,32 +431,34 @@ @@@@@@@@@@@@@%22)%0A +# blockbuster.bb_l @@ -537,16 +537,18 @@ @@@@@%22)%0A +# blockbus @@ -646,16 +646,18 @@ ====%22)%0A +# blockbus @@ -676,32 +676,34 @@ ng.logger.info(%0A +# '=========== @@ -753,16 +753,18 @@ ===') %5C%0A +# if n @@ -842,16 +842,18 @@ r.info(%0A +# '=== @@ -908,17 +908,20 @@ =====')%0A -%0A +#%0A# blockbus @@ -1008,17 +1008,20 @@ @@@@@%22)%0A -%0A +#%0A# if block @@ -1046,16 +1046,18 @@ g_mode:%0A +# bloc
35028b84d4757e1343a97da653670db049ac5e8d
replace default handler with static handler
web.py
web.py
#!/usr/bin/env python2 import tornado import log import magic from tornado import web, httpserver _http_server = None _https_server = None _html_root = './' _log = None _magic = None class DefaultHandler(tornado.web.RequestHandler): def get(self, match): _log.info("incoming request: {}".format(self.request)) _log.info("matched default match: {}".format(match)) self.set_header("Cache-Control", "no-store, no-cache, must-revalidate, max-age=0") self.set_header("Connection", "close") if match: fname = _html_root + '/' + match else: fname = _html_root + '/index.html' _log.info("fname: {}".format(fname)) try: with open(fname, 'rb') as fd: content = fd.read() mime_type = _magic.file(fname) self.set_header("Content-type", mime_type) self.finish(content) except: self.set_status(404) self.finish("Not found: {}".format(match)) _app = tornado.web.Application([ ('/(.*)', DefaultHandler) ]) # TODO: SSL needs this # ssl_options['certfile'] - server certificate # ssl_options['keyfile'] - server key # ssl_options['ca_certs'] - CA certificate def run_server(ssl_options = {}, http_port = 80, https_port = 443, log_facility = None, html_root = './'): global _http_server global _https_server global _log global _magic global _html_root # http://www.zak.co.il/tddpirate/2013/03/03/the-python-module-for-file-type-identification-called-magic-is-not-standardized/ try: _magic = magic.open(magic.MAGIC_MIME_TYPE) _magic.load() except AttributeError,e: _magic = magic.Magic(mime=True) _magic.file = _magic.from_file if log_facility: _log = log_facility else: _log = log.TrivialLogger() _html_root = html_root _log.info("creating servers") _http_server = tornado.httpserver.HTTPServer(_app, no_keep_alive = False) _https_server = tornado.httpserver.HTTPServer(_app, no_keep_alive = False, ssl_options = ssl_options) _log.info("setting up TCP ports") _http_server.listen(http_port) _https_server.listen(https_port) _log.info("starting server loop") tornado.ioloop.IOLoop.instance().start() _log.info("server loop exited")
Python
0
@@ -47,21 +47,8 @@ log%0A -import magic%0A from @@ -155,1612 +155,391 @@ one%0A -_magic = None%0A%0Aclass DefaultHandler(tornado.web.RequestHandler):%0A def get(self, match):%0A _log.info(%22incoming request: %7B%7D%22.format(self.request))%0A _log.info(%22matched default match: %7B%7D%22.format(match))%0A self.set_header(%22Cache-Control%22, %22no-store, no-cache, must-revalidate, max-age=0%22)%0A self.set_header(%22Connection%22, %22close%22)%0A if match:%0A fname = _html_root + '/' + match%0A else:%0A fname = _html_root + '/index.html'%0A _log.info(%22fname: %7B%7D%22.format(fname))%0A try:%0A with open(fname, 'rb') as fd:%0A content = fd.read()%0A mime_type = _magic.file(fname)%0A self.set_header(%22Content-type%22, mime_type)%0A self.finish(content)%0A except:%0A self.set_status(404)%0A self.finish(%22Not found: %7B%7D%22.format(match))%0A%0A_app = tornado.web.Application(%5B%0A ('/(.*)', DefaultHandler)%0A%5D)%0A%0A# TODO: SSL needs this%0A# ssl_options%5B'certfile'%5D - server certificate%0A# ssl_options%5B'keyfile'%5D - server key%0A# ssl_options%5B'ca_certs'%5D - CA certificate%0A%0Adef run_server(ssl_options = %7B%7D, http_port = 80, https_port = 443, log_facility = None, html_root = './'):%0A global _http_server%0A global _https_server%0A global _log%0A global _magic%0A global _html_root%0A%0A # http://www.zak.co.il/tddpirate/2013/03/03/the-python-module-for-file-type-identification-called-magic-is-not-standardized/%0A try:%0A _magic = magic.open(magic.MAGIC_MIME_TYPE)%0A _magic.load()%0A except AttributeError,e:%0A _magic = magic.Magic(mime=True)%0A _magic.file = _magic.from_file +%0A# TODO: SSL needs this%0A# ssl_options%5B'certfile'%5D - server certificate%0A# ssl_options%5B'keyfile'%5D - server key%0A# ssl_options%5B'ca_certs'%5D - CA certificate%0A%0Adef run_server(ssl_options = %7B%7D, http_port = 80, https_port = 443, log_facility = None, html_root = './'):%0A global _http_server%0A global _https_server%0A global _log%0A%0A # list handlers for REST calls here%0A handlers = %5B%5D %0A%0A @@ -634,34 +634,127 @@ r()%0A +%0A -_html_root = html_root +handlers.append(('/(.*)', web.StaticFileHandler, %7B'path': html_root%7D))%0A app = tornado.web.Application(handlers) %0A @@ -825,33 +825,32 @@ rver.HTTPServer( -_ app, no_keep_ali @@ -911,17 +911,16 @@ PServer( -_ app, no_
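Decoded, the replacement hunk drops the hand-written DefaultHandler and has run_server() build the application from a handler list backed by Tornado's built-in static file handler. A runnable sketch of the patched registration, with indentation and context inferred from the hunk (html_root is hard-coded here, while in the patch it is run_server()'s argument):

import tornado.web
from tornado import web

html_root = './'
handlers = []  # "list handlers for REST calls here", per the added comment
handlers.append(('/(.*)', web.StaticFileHandler, {'path': html_root}))
app = tornado.web.Application(handlers)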
d3f03d6e2cf48929f8233e52720b07242ccd64da
Put tweets back
web.py
web.py
""" Heroku/Python Quickstart: https://blog.heroku.com/archives/2011/9/28/python_and_django""" import os import random import requests from flask import Flask import tweepy import settings app = Flask(__name__) @app.route('/') def home_page(): return 'Hello from the SPARK learn-a-thon!' def get_instagram_image(): instagram_api_url = 'https://api.instagram.com/v1/tags/spark/media/recent?client_id={}'.format(settings.CLIENT_ID) data = requests.get(instagram_api_url).json()['data'] number_of_images = choose_number_of_images() images_returned = [] for image in number_of_images: images_returned.append(random.choice(data)['images']['low_resolution']['url']) return images_returned def get_tweets(): auth = tweepy.OAuthHandler(settings.CONSUMER_KEY, settings.CONSUMER_SECRET) auth.set_access_token(settings.ACCESS_KEY, settings.ACCESS_SECRET) api = tweepy.API(auth) number_of_tweets = choose_number_of_tweets() return tweets.items(limit=number_of_tweets) def choose_number_of_images(): number = 3 return number def choose_number_of_tweets(): number = 3 return number if __name__ == '__main__': # port = int(os.environ.get("PORT", 5000)) # app.run(host='0.0.0.0', port=port)
Python
0
@@ -968,24 +968,75 @@ f_tweets()%0A%0A + tweets = tweepy.Cursor(api.search, q='#spark')%0A return t
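Decoded, the hunk reinstates the cursor that get_tweets() referenced but never defined, which is exactly what the subject line says. A sketch with a stand-in signature (the real function takes no arguments and builds api itself):

import tweepy

def get_spark_tweets(api, number_of_tweets):
    # Decoded addition: define the `tweets` cursor the old body used
    # without defining ("Put tweets back").
    tweets = tweepy.Cursor(api.search, q='#spark')
    return tweets.items(limit=number_of_tweets)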
686c01757c80dbca3181b74fa49c9c22fb5040e5
fix cache
web.py
web.py
from functools import wraps

import datetime
import json
import os
import urlparse

from flask import Flask, flash, g, redirect, request, render_template, Response
from flaskext.babel import Babel

import postmark
import pymongo
import requests

LANGUAGES = ('en', 'es')

EMPTY_BLOCK = """<br><br>"""

POSTMARK_KEY = os.environ.get('POSTMARK_KEY', '')

app = Flask(__name__)
app.secret_key = os.environ.get('FLASK_SECRETKEY', '1234567890')
babel = Babel(app)


class CrappyCache(dict):

    MINUTE = 60
    HOUR = 60 * 60
    DAY = 60 * 60 * 24

    def __init__(self, *args, **kwargs):
        self.expiration = kwargs.pop("expiration", None)
        super(CrappyCache, self).__init__(*args, **kwargs)

    def __setitem__(self, key, value):
        now = datetime.datetime.utcnow()
        super(CrappyCache, self).__setitem__(key, (now, value))

    def __getitem__(self, key):
        if key in self:
            (then, val) = super(CrappyCache, self).__getitem__(key)
            if self.expiration is None:
                return val
            now = datetime.datetime.utcnow()
            delta = now - then
            if delta.seconds < self.expiration:
                return val
            del self[key]

SCARY_CACHE = CrappyCache(expiration=CrappyCache.MINUTE * 5)

#
# authentication stuff
#

def check_auth(username, password):
    return username == 'admin' and password == os.environ.get('ADMIN_PASSWORD', '')


def authenticate():
    msg = "This site is not yet available to the public. Please login."
    return Response(msg, 401, {'WWW-Authenticate': 'Basic realm="Login Required"'})


def requires_auth(f):
    @wraps(f)
    def decorated(*args, **kwargs):
        auth = request.authorization
        if not auth or not check_auth(auth.username, auth.password):
            return authenticate()
        return f(*args, **kwargs)
    return decorated

#
# locale and babel goodness
#

@babel.localeselector
def get_locale():
    if 'lang' in request.args:
        lang = request.args['lang']
        if lang in LANGUAGES:
            return lang
    return request.accept_languages.best_match(LANGUAGES)

#
# request lifecycle
#

@app.before_request
def before_request():
    mongo_uri = os.environ.get('MONGOHQ_URL')
    if mongo_uri:
        conn = pymongo.Connection(mongo_uri)
        g.db = conn[os.environ.get('MONGOHQ_DB')]
    else:
        conn = pymongo.Connection()
        g.db = conn['openingparliament']


@app.teardown_request
def teardown_request(exception):
    if hasattr(g, 'db'):
        g.db.connection.disconnect()


@app.context_processor
def inject_content():
    doc = g.db.blocks.find_one({'path': request.path})
    return {'content': doc.get('content') or EMPTY_BLOCK if doc else EMPTY_BLOCK}


@app.context_processor
def inject_admin():
    print request.authorization
    return {'admin': True if request.authorization else False}

#
# the good, meaty url handlers
#

@app.route('/')
def index():
    return render_template('index.html')


@app.route('/contact', methods=['GET', 'POST'])
def contact():
    if request.method == 'POST':
        msg = "%s <%s>\n" % (request.form['name'], request.form['email'])
        if request.form['organization']:
            msg += "%s\n" % request.form['organization']
        msg += "\n%s\n" % request.form['message']
        kwargs = {
            'api_key': POSTMARK_KEY,
            'sender': 'contact@sunlightfoundation.com',
            'reply_to': '%s' % request.form['email'],
            'to': 'johnwonderlich@gmail.com, amandelbaum@ndi.org, dswislow@ndi.org, psecchi@directoriolegislativo.org, melissa@fundar.org.mx',
            'bcc': 'jcarbaugh@sunlightfoundation.com',
            'subject': '[OpeningParliament.org] contact: %s <%s>' % (request.form['name'], request.form['email']),
            'text_body': msg,
        }
        postmark.PMMail(**kwargs).send()
        flash('Your message has been sent. Thank you for contacting us!')
        return redirect('/contact')
    return render_template('contact.html')


@app.route('/declaration')
def declaration():
    return render_template('declaration.html')


@app.route('/networking')
def networking():
    return render_template('networking.html')


@app.route('/organizations')
def organizations():
    return render_template('organizations.html')


@app.route('/press')
def press():
    return render_template('press.html')


@app.route('/export')
def export():
    docs = g.db.blocks.find()
    content = {
        'pages': [{'path': d['path'], 'content': d['content']} for d in docs],
    }
    return Response(json.dumps(content), content_type='application/json')


@app.route('/login')
@requires_auth
def login():
    return redirect('/')


@app.route('/save', methods=['POST'])
@requires_auth
def save():
    content = request.form.get('content', '').strip()
    path = request.form.get('path')
    if not path:
        referrer = request.environ.get('HTTP_REFERER')
        path = urlparse.urlparse(referrer).path
    doc = {
        'path': path,
        'content': content,
    }
    g.db.blocks.update({'path': path}, {"$set": doc}, upsert=True)
    return content

#
# scary RSS proxy method
#

@app.route('/rss')
def rss():
    url = "http://blog.openingparliament.org/rss"

    doc = SCARY_CACHE.get(url)
    if not doc:
        resp = requests.get(url)
        doc = resp.text
        SCARY_CACHE[url] = doc

    return Response(doc, content_type="text/xml")

#
# the "magic" as they call it
#

if __name__ == '__main__':
    DEBUG = True
    app.run(debug=DEBUG, port=8000)
Python
0.000004
@@ -733,16 +733,17 @@ value):%0A +%0A @@ -2794,40 +2794,8 @@ ():%0A - print request.authorization%0A @@ -5233,16 +5233,47 @@ g/rss%22%0A%0A + if url in SCARY_CACHE:%0A doc @@ -5289,32 +5289,22 @@ ACHE -.get(url) +%5Burl%5D %0A -if not doc +else :%0A
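The decoded fix explains the subject line: dict.get() is a C-level method that never routes through the overridden __getitem__, so rss() was getting back the raw (timestamp, value) tuple and the expiry logic never ran. A stand-in sketch of the patched lookup (function name and parameters are mine, not the app's):

import requests

def fetch_rss(url, cache):
    # Decoded shape of the fix: membership test plus indexing go through
    # the CrappyCache subclass, unlike dict.get(), which bypassed
    # __getitem__ and returned the stored (timestamp, value) tuple.
    # (An entry that expires exactly at lookup time still comes back as
    # None; the patch keeps that quirk.)
    if url in cache:
        return cache[url]
    doc = requests.get(url).text
    cache[url] = doc
    return doc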
c772951ffbe06be23ff56d0281b78d7b9eac456b
Add option to generate executable name from the current branch
pycket/entry_point.py
pycket/entry_point.py
#! /usr/bin/env python
# -*- coding: utf-8 -*-
#
from pycket.expand import load_json_ast_rpython, expand_to_ast, PermException
from pycket.interpreter import interpret_one, ToplevelEnv, interpret_module, GlobalConfig
from pycket.error import SchemeException
from pycket.option_helper import parse_args, ensure_json_ast
from pycket.values import W_String

from rpython.rlib import jit

# _____ Define and setup target ___

def entry_point(argv):
    try:
        return actual_entry(argv)
    except SchemeException, e:
        print "ERROR:", e.msg
        raise  # to see interpreter-level traceback

def actual_entry(argv):
    jit.set_param(None, "trace_limit", 20000)

    config, names, args, retval = parse_args(argv)
    if retval != 0 or config is None:
        return retval
    args_w = [W_String(arg) for arg in args]
    module_name, json_ast = ensure_json_ast(config, names)
    if json_ast is None:
        ast = expand_to_ast(module_name)
    else:
        ast = load_json_ast_rpython(json_ast)
    GlobalConfig.load(ast)
    env = ToplevelEnv()
    env.commandline_arguments = args_w
    env.module_env.add_module(module_name, ast)
    val = interpret_module(ast, env)
    return 0

def target(driver, args):
    if driver.config.translation.jit:
        driver.exe_name = 'pycket-%(backend)s'
    else:
        driver.exe_name = 'pycket-%(backend)s-nojit'

    return entry_point, None
Python
0.000001
@@ -1218,16 +1218,212 @@ args):%0A + if %22--with-branch%22 in args:%0A import subprocess%0A base_name = subprocess.check_output(%5B%22git%22, %22rev-parse%22, %22--abbrev-ref%22, %22HEAD%22%5D).strip()%0A else:%0A base_name = 'pycket-'%0A if d @@ -1470,39 +1470,45 @@ iver.exe_name = -'pycket +base_name + ' -%25(backend)s'%0A @@ -1541,23 +1541,29 @@ _name = -'pycket +base_name + ' -%25(backe
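Decoded, the hunk adds a --with-branch path to target() that shells out to git for the executable's base name. The unambiguous core of the addition, runnable inside any git checkout (on Python 3 check_output returns bytes; the commit predates that concern):

import subprocess

# Decoded core of the new --with-branch path: name the executable after
# the current git branch.
base_name = subprocess.check_output(
    ["git", "rev-parse", "--abbrev-ref", "HEAD"]).strip()
print(base_name)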
96a61255bb813544c496a5ee2c3bc645cd97567c
Make note of inefficiency
pycket/hash/simple.py
pycket/hash/simple.py
from pycket import values
from pycket.cont import label
from pycket.error import SchemeException
from pycket.hash.base import (
    W_MutableHashTable, W_ImmutableHashTable, w_missing, get_dict_item)
from pycket.hash.persistent_hash_map import make_persistent_hash_type

from rpython.rlib.objectmodel import compute_hash, r_dict, specialize
from rpython.rlib.rarithmetic import r_uint

@specialize.call_location()
def make_simple_mutable_table(cls, keys=None, vals=None):
    data = r_dict(cls.cmp_value, cls.hash_value, force_non_null=True)
    if keys is not None and vals is not None:
        assert len(keys) == len(vals)
        for i, k in enumerate(keys):
            data[k] = vals[i]
    return cls(data)

@specialize.call_location()
def make_simple_mutable_table_assocs(cls, assocs, who):
    if not assocs.is_proper_list():
        raise SchemeException("%s: not given proper list" % who)
    data = r_dict(cls.cmp_value, cls.hash_value, force_non_null=True)
    while isinstance(assocs, values.W_Cons):
        entry, assocs = assocs.car(), assocs.cdr()
        if not isinstance(entry, values.W_Cons):
            raise SchemeException("%s: expected list of pairs" % who)
        key, val = entry.car(), entry.cdr()
        data[key] = val
    return cls(data)

@specialize.call_location()
def make_simple_immutable_table(cls, keys=None, vals=None):
    table = cls.EMPTY
    if keys is not None and vals is not None:
        assert len(keys) == len(vals)
        for i, k in enumerate(keys):
            table = table.assoc(k, vals[i])
    return table

@specialize.call_location()
def make_simple_immutable_table_assocs(cls, assocs, who):
    if not assocs.is_proper_list():
        raise SchemeException("%s: not given proper list" % who)
    table = cls.EMPTY
    while isinstance(assocs, values.W_Cons):
        entry, assocs = assocs.car(), assocs.cdr()
        if not isinstance(entry, values.W_Cons):
            raise SchemeException("%s: expected list of pairs" % who)
        key, val = entry.car(), entry.cdr()
        table = table.assoc(key, val)
    return table

class W_SimpleMutableHashTable(W_MutableHashTable):
    _attrs_ = ['data']
    _immutable_fields_ = ["data"]

    @staticmethod
    def hash_value(v):
        raise NotImplementedError("abstract method")

    @staticmethod
    def cmp_value(a, b):
        raise NotImplementedError("abstract method")

    def __init__(self, data):
        self.data = data

    def make_copy(self):
        raise NotImplementedError("abstract method")

    def hash_items(self):
        return self.data.items()

    def tostring(self):
        lst = [values.W_Cons.make(k, v).tostring() for k, v in self.data.iteritems()]
        return "#hash(%s)" % " ".join(lst)

    @label
    def hash_set(self, k, v, env, cont):
        from pycket.interpreter import return_value
        self.data[k] = v
        return return_value(values.w_void, env, cont)

    @label
    def hash_remove_inplace(self, k, env, cont):
        from pycket.interpreter import return_value
        del self.data[k]
        return return_value(values.w_void, env, cont)

    @label
    def hash_ref(self, k, env, cont):
        from pycket.interpreter import return_value
        return return_value(self.data.get(k, w_missing), env, cont)

    def length(self):
        return len(self.data)

class W_EqvMutableHashTable(W_SimpleMutableHashTable):

    def make_empty(self):
        return make_simple_mutable_table(W_EqvMutableHashTable)

    def make_copy(self):
        return W_EqvMutableHashTable(self.data.copy(), immutable=False)

    @staticmethod
    def hash_value(k):
        return k.hash_eqv()

    @staticmethod
    def cmp_value(a, b):
        return a.eqv(b)

    def get_item(self, i):
        return get_dict_item(self.data, i)

class W_EqMutableHashTable(W_SimpleMutableHashTable):

    def make_copy(self):
        return W_EqMutableHashTable(self.data.copy())

    def make_empty(self):
        return make_simple_mutable_table(W_EqMutableHashTable)

    @staticmethod
    def hash_value(k):
        if isinstance(k, values.W_Fixnum):
            return compute_hash(k.value)
        if isinstance(k, values.W_Character):
            return ord(k.value)
        else:
            return compute_hash(k)

    @staticmethod
    def cmp_value(a, b):
        from pycket.prims.equal import eqp_logic
        return eqp_logic(a, b)

    def get_item(self, i):
        return get_dict_item(self.data, i)

W_EqvImmutableHashTable = make_persistent_hash_type(
    super=W_ImmutableHashTable,
    name="W_EqvImmutableHashTable",
    hashfun=lambda x: r_uint(W_EqvMutableHashTable.hash_value(x)),
    equal=W_EqvMutableHashTable.cmp_value)
W_EqvImmutableHashTable.EMPTY = W_EqvImmutableHashTable(0, None)

W_EqImmutableHashTable = make_persistent_hash_type(
    super=W_ImmutableHashTable,
    name="W_EqImmutableHashTable",
    hashfun=lambda x: r_uint(W_EqMutableHashTable.hash_value(x)),
    equal=W_EqMutableHashTable.cmp_value)
W_EqImmutableHashTable.EMPTY = W_EqvImmutableHashTable(0, None)

class __extend__(W_EqvImmutableHashTable):

    def length(self):
        return len(self)

    def make_copy(self):
        return self

    def make_empty(self):
        return W_EqvImmutableHashTable.EMPTY

    def get_item(self, index):
        i = 0
        for item in iter(self):
            if i == index:
                return item
            i += 1
        raise IndexError

    def tostring(self):
        entries = [None] * len(self)
        i = 0
        for k, v in iter(self):
            entries[i] = "(%s . %s)" % (k.tostring(), v.tostring())
            i += 1
        return "#hasheqv(%s)" % " ".join(entries)

class __extend__(W_EqImmutableHashTable):

    def length(self):
        return len(self)

    def make_copy(self):
        return self

    def make_empty(self):
        return W_EqImmutableHashTable(0, None)

    def get_item(self, index):
        i = 0
        for item in iter(self):
            if i == index:
                return item
            i += 1
        raise IndexError

    def tostring(self):
        entries = [None] * len(self)
        i = 0
        for i, (k, v) in iter(self):
            entries[i] = "(%s . %s)" % (k.tostring(), v.tostring())
            i += 1
        return "#hasheq(%s)" % " ".join(entries)
Python
0.000051
@@ -5421,32 +5421,58 @@ m(self, index):%0A + # XXX Inefficient%0A i = 0%0A @@ -6080,32 +6080,58 @@ m(self, index):%0A + # XXX Inefficient%0A i = 0%0A
423af3c8fbd7e1614ec24dcd8ef842ae9dfcc5d0
version bump for ensembl85
pyensembl/__init__.py
pyensembl/__init__.py
# Copyright (c) 2015. Mount Sinai School of Medicine # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from __future__ import print_function, division, absolute_import from .memory_cache import MemoryCache from .download_cache import DownloadCache from .ensembl_release import EnsemblRelease from .ensembl_release_versions import check_release_number, MAX_ENSEMBL_RELEASE from .exon import Exon from .genome import Genome from .gene import Gene from .gtf import GTF from .locus import Locus from .search import find_nearest_locus from .sequence_data import SequenceData from .species import ( find_species_by_name, find_species_by_reference, which_reference, check_species_object, normalize_reference_name, normalize_species_name, ) from .transcript import Transcript __version__ = '0.9.5' _cache = {} def cached_release(version, species="human"): """Cached construction of EnsemblRelease objects. It's desirable to reuse the same EnsemblRelease object since each one will store a lot of cached annotation data in-memory. """ version = check_release_number(version) species = check_species_object(species) key = (version, species) if key not in _cache: ensembl = EnsemblRelease(version, species=species) _cache[key] = ensembl return _cache[key] def genome_for_reference_name(reference_name): reference_name = normalize_reference_name(reference_name) species = find_species_by_reference(reference_name) (_, max_ensembl_release) = species.reference_assemblies[reference_name] return cached_release(max_ensembl_release, species=species) ensembl_grch36 = ensembl54 = cached_release(54) # last release for GRCh36/hg18 ensembl_grch37 = ensembl75 = cached_release(75) # last release for GRCh37/hg19 ensembl_grch38 = cached_release(MAX_ENSEMBL_RELEASE) # most recent for GRCh38 __all__ = [ "MemoryCache", "DownloadCache", "EnsemblRelease", "MAX_ENSEMBL_RELEASE", "cached_release", "Gene", "Transcript", "Exon", "SequenceData", "find_nearest_locus", "find_species_by_name", "find_species_by_reference", "which_reference", "check_species_object", "normalize_reference_name", "normalize_species_name", "Genome", "GTF", "Locus", "Exon", "ensembl_grch36", "ensembl_grch37", "ensembl_grch38", ]
Python
0
@@ -1303,17 +1303,17 @@ = '0.9. -5 +6 '%0A%0A_cach
17fe4613518def551e637764e644c5d58b1665d9
Add BodeAnalyser instrument to instrument table
pymoku/instruments.py
pymoku/instruments.py
import sys

from . import _instrument
from . import _oscilloscope
from . import _waveform_generator
from . import _phasemeter
from . import _specan
from . import _lockinamp
from . import _datalogger
from . import _bodeanalyser
from . import _stream_instrument
from . import _frame_instrument
from . import _input_instrument

'''
Preferred import point. Aggregates the separate instruments and helper classes
to flatten the import heirarchy (e.g. pymoku.instruments.Oscilloscope rather
than pymoku.instruments._oscilloscope.Oscilloscope)
'''

InstrumentData = _frame_instrument.InstrumentData
VoltsData = _oscilloscope.VoltsData
SpectrumData = _specan.SpectrumData

MokuInstrument = _instrument.MokuInstrument

Oscilloscope = _oscilloscope.Oscilloscope
WaveformGenerator = _waveform_generator.WaveformGenerator
Phasemeter = _phasemeter.Phasemeter
SpectrumAnalyser = _specan.SpectrumAnalyser
LockInAmp = _lockinamp.LockInAmp
Datalogger = _datalogger.Datalogger
BodeAnalyser = _bodeanalyser.BodeAnalyser

id_table = {
	1: Oscilloscope,
	2: SpectrumAnalyser,
	3: Phasemeter,
	4: WaveformGenerator,
	5: None,
	6: None,
	7: Datalogger,
	8: LockInAmp,
	9: None,
	10: None,
	11: None,
	12: None,
	13: None,
	14: None
}
Python
0
@@ -1150,20 +1150,28 @@ mp,%0A%099: -None +BodeAnalyser ,%0A%0910: N
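Decoded, the hunk flips entry 9 of id_table from None to the newly imported class. A self-contained sketch of the patched neighborhood (the two names below are stand-ins so the snippet runs on its own, not the real pymoku classes):

# Decoded change to id_table: slot 9 now maps to the new instrument class.
LockInAmp, BodeAnalyser = object(), object()

id_table = {8: LockInAmp, 9: BodeAnalyser, 10: None}  # 9 was None before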
5a506ff7005f09b14faa4d6036563e0125ca00f4
Fix indent
pymystrom/__init__.py
pymystrom/__init__.py
""" Copyright (c) 2015-2017 Fabian Affolter <fabian@affolter-engineering.ch> Licensed under MIT. All rights reserved. """ import requests from . import exceptions class MyStromPlug(object): """A class for a myStrom switch.""" def __init__(self, host): """Initialize the switch.""" self.resource = 'http://{}'.format(host) self.timeout = 5 self.data = None self.state = None self.consumption = 0 def set_relay_on(self): """Turn the relay on.""" if not self.get_relay_state(): try: request = requests.get('{}/relay'.format(self.resource), params={'state': '1'}, timeout=self.timeout) if request.status_code == 200: self.data['relay'] = True except requests.exceptions.ConnectionError: raise exceptions.MyStromConnectionError() def set_relay_off(self): """Turn the relay off.""" if self.get_relay_state(): try: request = requests.get('{}/relay'.format(self.resource), params={'state': '0'}, timeout=self.timeout) if request.status_code == 200: self.data['relay'] = False except requests.exceptions.ConnectionError: raise exceptions.MyStromConnectionError() def get_status(self): """Get the details from the switch.""" try: request = requests.get('{}/report'.format(self.resource), timeout=self.timeout) self.data = request.json() return self.data except (requests.exceptions.ConnectionError, ValueError): raise exceptions.MyStromConnectionError() def get_relay_state(self): """Get the relay state.""" self.get_status() try: self.state = self.data['relay'] except TypeError: self.state = False return bool(self.state) def get_consumption(self): """Get current power consumption in mWh.""" self.get_status() try: self.consumption = self.data['power'] except TypeError: self.consumption = 0 return self.consumption
Python
0.000854
@@ -159,16 +159,17 @@ ptions%0A%0A +%0A class My @@ -598,32 +598,53 @@ = requests.get( +%0A '%7B%7D/relay'.forma @@ -664,47 +664,8 @@ ce), -%0A par @@ -684,35 +684,16 @@ : '1'%7D,%0A - @@ -1084,24 +1084,45 @@ equests.get( +%0A '%7B%7D/relay'.f @@ -1146,47 +1146,8 @@ ce), -%0A par @@ -1166,35 +1166,16 @@ : '0'%7D,%0A - @@ -1538,16 +1538,33 @@ sts.get( +%0A '%7B%7D/repo @@ -1593,43 +1593,8 @@ ce), -%0A tim
c034282423d47a6530ed0bb77c54e133de72115b
add more verbose output to PushwooshClient when debug=True
pypushwoosh/client.py
pypushwoosh/client.py
import logging

import requests

from .base import PushwooshBaseClient

log = logging.getLogger('pypushwoosh.client.log')


class PushwooshClient(PushwooshBaseClient):
    """
    Implementation of the Pushwoosh API Client.
    """
    headers = {'User-Agent': 'PyPushwooshClient',
               'Content-Type': 'application/json',
               'Accept': 'application/json'}

    def __init__(self, timeout=None):
        PushwooshBaseClient.__init__(self)
        self.timeout = timeout

    def path(self, command):
        return '{}://{}/'.format(self.scheme, self.hostname) + '/'.join((self.endpoint, self.version, command.command_name))

    def invoke(self, command):
        PushwooshBaseClient.invoke(self, command)

        if self.debug:
            log.debug('Client: %s' % self.__class__.__name__)
            log.debug('Command: %s' % command.render())
            log.debug('Request URL: %s://%s%s' % (self.scheme, self.hostname, self.path(command)))
            log.debug('Request method: %s' % self.method)
            log.debug('Request headers: %s' % self.headers)

        r = requests.post(self.path(command), data=command.render(), headers=self.headers, timeout=self.timeout)

        if self.debug:
            log.debug('Response version: %s' % r.raw.version)
            log.debug('Response code: %s' % r.status_code)
            log.debug('Response phrase: %s' % r.reason)
            log.debug('Response headers: %s' % r.headers)

        return r.json()
Python
0.003164
@@ -794,16 +794,84 @@ command) +%0A url = self.path(command)%0A payload = command.render() %0A%0A @@ -987,32 +987,23 @@ : %25s' %25 -command.render() +payload )%0A @@ -1038,67 +1038,15 @@ : %25s -://%25s%25s' %25 (self.scheme, self.hostname, self.path(command)) +' %25 url )%0A @@ -1192,49 +1192,25 @@ ost( -self.path(command), data=command.render() +url, data=payload , he @@ -1448,24 +1448,24 @@ %25 r.reason)%0A - @@ -1501,32 +1501,89 @@ %25s' %25 r.headers) +%0A log.debug('Response payload: %25s' %25 r.json()) %0A%0A return
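Decoded, the hunk hoists the URL and payload into locals inside invoke(), logs them, and additionally logs the parsed response body. A stand-in sketch (a free function instead of the real method; `client` carries what `self` would):

import logging
import requests

log = logging.getLogger('pypushwoosh.client.log')

def invoke(client, command):
    # Stand-in for the patched PushwooshClient.invoke(): url and payload
    # are computed once, logged, and the response JSON is logged too.
    url = client.path(command)
    payload = command.render()
    log.debug('Command: %s' % payload)
    log.debug('Request URL: %s' % url)
    r = requests.post(url, data=payload, headers=client.headers,
                      timeout=client.timeout)
    log.debug('Response payload: %s' % r.json())
    return r.json()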
b7ed71cc0b620f460a0d24eeef7891e9770fc39e
Modify & Access time in pyls.
pysh/shell/builtin.py
pysh/shell/builtin.py
import collections import csv import os import pwd import StringIO from pysh.shell.pycmd import register_pycmd from pysh.shell.pycmd import pycmd from pysh.shell.pycmd import IOType from pysh.shell.table import Table def file_to_array(f): return map(lambda line: line.rstrip('\r\n'), f.readlines()) class Permission(int): def __init__(self, val): int.__init__(self, val) def __str__(self): return ''.join((self.__to_rwx(self >> 6), self.__to_rwx(self >> 3), self.__to_rwx(self >> 0))) def __to_rwx(self, rwx): result = ['-'] * 3 if rwx & (1 << 2): result[0] = 'r' if rwx & (1 << 1): result[1] = 'w' if rwx & (1 << 0): result[2] = 'x' return ''.join(result) @pycmd(name='echo', inType=IOType.No) def pycmd_echo(args, input): line = [] for arg in args[1:]: if not isinstance(arg, basestring) and ( isinstance(arg, collections.Iterable)): if line: yield ' '.join(line) line = [] for e in arg: yield e else: line.append(str(arg)) if line: yield ' '.join(line) @pycmd(name='map') def pycmd_map(args, input): assert len(args) == 2 if isinstance(input, file): input = file_to_array(input) f = args[1] assert callable(f) return (f(x) for x in input) @pycmd(name='filter') def pycmd_filter(args, input): assert len(args) == 2 if isinstance(input, file): input = file_to_array(input) cond = args[1] assert callable(cond) for x in input: if cond(x): yield x @pycmd(name='reduce') def pycmd_reduce(args, input): assert len(args) == 2 if isinstance(input, file): input = file_to_array(input) f = args[1] assert callable(f) return [reduce(f, input)] @pycmd(name='readcsv') def pycmd_readcsv(args, input): return csv.reader(input) @pycmd(name='pyls') def pycmd_pls(args, input): table = Table(['mode', 'user', 'group', 'path']) for arg in args[1:]: stat = os.stat(arg) user = pwd.getpwuid(stat.st_uid).pw_name group = pwd.getpwuid(stat.st_gid).pw_name permission = stat.st_mode & 0777 table.add_row([Permission(permission), user, group, arg]) return table @pycmd(name='where') def pycmd_pls(args, input): assert len(args) == 2 row = list(input)[0] table = row.table() return table.where(args[1]) @pycmd(name='orderby') def pycmd_pls(args, input): assert len(args) == 2 row = list(input)[0] table = row.table() return table.orderby(args[1]) @pycmd(name='cd', inType=IOType.No, outType=IOType.No) def pycmd_cd(args, input): assert len(args) == 2 or len(args) == 1 if len(args) == 2: dir = args[1] else: dir = os.environ.get('HOME', '') if dir: os.chdir(dir) return ()
Python
0
@@ -23,16 +23,32 @@ ort csv%0A +import datetime%0A import o @@ -1965,16 +1965,34 @@ 'group', + 'mtime', 'atime', 'path'%5D @@ -1993,16 +1993,16 @@ path'%5D)%0A - for ar @@ -2168,16 +2168,134 @@ & 0777%0A + mtime = datetime.datetime.fromtimestamp(stat.st_mtime)%0A atime = datetime.datetime.fromtimestamp(stat.st_atime)%0A tabl @@ -2341,16 +2341,30 @@ , group, + mtime, atime, arg%5D)%0A @@ -2378,16 +2378,17 @@ table%0A%0A +%0A @pycmd(n
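Decoded, the hunk imports datetime, adds 'mtime' and 'atime' columns to the pyls table, and fills them from the stat result. The core conversion as a stand-in helper (the real code appends the values to a Table row):

import datetime
import os

def modify_access_times(path):
    # Decoded core of the pyls change: derive modify/access datetimes
    # from the stat result.
    stat = os.stat(path)
    mtime = datetime.datetime.fromtimestamp(stat.st_mtime)
    atime = datetime.datetime.fromtimestamp(stat.st_atime)
    return mtime, atime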
933e3193bbd1ceb45d33a9b2dc37f3bb80b5bc7b
fix in broadcasting code
broadcast/broadcast_service.py
broadcast/broadcast_service.py
#!/usr/bin/python #broadcast_service.py # # <<<COPYRIGHT>>> # # # # """ .. module:: broadcast_service @author: Veselin """ #------------------------------------------------------------------------------ _Debug = True _DebugLevel = 6 #------------------------------------------------------------------------------ # This is used to be able to execute this module directly from command line. if __name__ == '__main__': import sys, os.path as _p sys.path.insert(0, _p.abspath(_p.join(_p.dirname(_p.abspath(sys.argv[0])), '..'))) #------------------------------------------------------------------------------ import datetime import random import string import json #------------------------------------------------------------------------------ from logs import lg from lib import packetid from crypt import signed from crypt import key from p2p import commands from userid import my_id #------------------------------------------------------------------------------ def prepare_broadcast_message(owner, payload): tm = datetime.datetime.utcnow().strftime('%Y%m%d%H%M%S') rnd = ''.join(random.choice(string.ascii_uppercase) for _ in range(4)) msgid = '%s:%s:%s' % (tm, rnd, owner) msg = [ ('owner', owner), ('started', tm), ('id', msgid), ('payload', payload), ] # owner_sign = key.Sign(key.Hash(str(msg))) msg = {k:v for k, v in msg} # msg['owner_sign'] = owner_sign return msg def read_message_from_packet(newpacket): try: msg = json.loads(newpacket.Payload) except: lg.exc() return None # TODO verify owner signature and creator ID return msg def packet_for_broadcaster(broadcaster_idurl, json_data): if 'broadcaster' not in json_data: json_data['broadcaster'] = broadcaster_idurl return signed.Packet(commands.Broadcast(), json_data['owner'], json_data['owner'], json_data['id'], json.dumps(json_data), broadcaster_idurl,) def packet_for_listener(listener_idurl, json_data): if 'broadcaster' not in json_data: json_data['broadcaster'] = my_id.getLocalID() return signed.Packet(commands.Broadcast(), json_data['owner'], my_id.getLocalID(), json_data['id'], json.dumps(json_data), listener_idurl,) #------------------------------------------------------------------------------ def send_broadcast_message(payload): from broadcast import broadcaster_node from broadcast import broadcast_listener msg = prepare_broadcast_message(my_id.getLocalID(), payload) if broadcaster_node.A(): broadcaster_node.A('new-outbound-message', (msg, None)) elif broadcast_listener.A(): if broadcast_listener.A().state == 'OFFLINE': broadcast_listener.A('connect') broadcast_listener.A('outbound-message', msg) else: lg.warn('nor broadcaster_node(), nor broadcast_listener() exist') return None return msg #------------------------------------------------------------------------------ def on_incoming_broadcast_message(json_msg): lg.out(2, 'service_broadcasting._on_incoming_broadcast_message : %r' % json_msg) #------------------------------------------------------------------------------ def main(): pass #------------------------------------------------------------------------------ if __name__ == '__main__': main()
Python
0.00014
@@ -1953,34 +1953,34 @@ -json_data%5B'owner'%5D +my_id.getLocalID() , %0A
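Decoded, the hunk replaces one json_data['owner'] argument with my_id.getLocalID(); judging by the character offset it is the creator slot of the packet built in packet_for_broadcaster, though the hunk alone cannot prove which occurrence. A commented sketch of what the patched call appears to be:

#     return signed.Packet(commands.Broadcast(),
#                          json_data['owner'],   # owner: message author
#                          my_id.getLocalID(),   # creator: local node (the fix)
#                          json_data['id'],
#                          json.dumps(json_data),
#                          broadcaster_idurl,)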
fda17a416790a04bd265946a5554cf5d0639d442
Add identifiers URL for refseq
indra/databases/__init__.py
indra/databases/__init__.py
import logging

logger = logging.getLogger(__name__)


def get_identifiers_url(db_name, db_id):
    """Return an identifiers.org URL for a given database name and ID.

    Parameters
    ----------
    db_name : str
        An internal database name: HGNC, UP, CHEBI, etc.
    db_id : str
        An identifier in the given database.

    Returns
    -------
    url : str
        An identifiers.org URL corresponding to the given database name and ID.
    """
    identifiers_url = 'http://identifiers.org/'
    bel_scai_url = 'https://arty.scai.fraunhofer.de/artifactory/bel/namespace/'
    if db_name == 'UP':
        url = identifiers_url + 'uniprot/%s' % db_id
    elif db_name == 'HGNC':
        url = identifiers_url + 'hgnc/HGNC:%s' % db_id
    elif db_name == 'IP':
        url = identifiers_url + 'interpro/%s' % db_id
    elif db_name == 'IPR':
        url = identifiers_url + 'interpro/%s' % db_id
    elif db_name == 'CHEBI':
        url = identifiers_url + 'chebi/%s' % db_id
    elif db_name == 'NCIT':
        url = identifiers_url + 'ncit/%s' % db_id
    elif db_name == 'GO':
        if db_id.startswith('GO:'):
            url = identifiers_url + 'go/%s' % db_id
        else:
            url = identifiers_url + 'go/GO:%s' % db_id
    elif db_name in ('PUBCHEM', 'PCID'):
        # Assuming PCID = PubChem compound ID
        if db_id.startswith('PUBCHEM:'):
            db_id = db_id[8:]
        elif db_id.startswith('PCID:'):
            db_id = db_id[5:]
        url = identifiers_url + 'pubchem.compound/%s' % db_id
    elif db_name == 'PF':
        url = identifiers_url + 'pfam/%s' % db_id
    elif db_name == 'MIRBASEM':
        url = identifiers_url + 'mirbase.mature/%s' % db_id
    elif db_name == 'MIRBASE':
        url = identifiers_url + 'mirbase/%s' % db_id
    elif db_name == 'MESH':
        url = identifiers_url + 'mesh/%s' % db_id
    elif db_name == 'EGID':
        url = identifiers_url + 'ncbigene/%s' % db_id
    elif db_name == 'HMDB':
        url = identifiers_url + 'hmdb/%s' % db_id
    elif db_name == 'LINCS':
        if db_id.startswith('LSM-'):
            # Lincs Small Molecule ID
            url = identifiers_url + 'lincs.smallmolecule/%s' % db_id
        elif db_id.startswith('LCL-'):
            # Lincs Cell Line ID
            url = identifiers_url + 'lincs.cell/%s' % db_id
        else:
            # Assume LINCS Protein
            url = identifiers_url + 'lincs.protein/%s' % db_id
    elif db_name == 'HMS-LINCS':
        url = 'http://lincs.hms.harvard.edu/db/sm/%s-101' % db_id
    # Special cases with no identifiers entry
    elif db_name == 'SCHEM':
        url = bel_scai_url + 'selventa-legacy-chemicals/' + \
            'selventa-legacy-chemicals-20150601.belns'
    elif db_name == 'SCOMP':
        url = bel_scai_url + 'selventa-named-complexes/' + \
            'selventa-named-complexes-20150601.belns'
    elif db_name == 'SFAM':
        url = bel_scai_url + 'selventa-protein-families/' + \
            'selventa-protein-families-20150601.belns'
    elif db_name == 'FPLX':
        url = 'http://identifiers.org/fplx/%s' % db_id
    elif db_name == 'LNCRNADB':
        if db_id.startswith('ENSG'):
            url = 'http://www.lncrnadb.org/search/?q=%s' % db_id
        else:
            # Assmuing HGNC symbol
            url = 'http://www.lncrnadb.org/%s/' % db_id
    elif db_name == 'NXPFA':
        url = 'https://www.nextprot.org/term/FA-%s' % db_id
    elif db_name in ('UN', 'WDI', 'FAO'):
        url = 'https://github.com/clulab/eidos/wiki/JSON-LD#Grounding/%s' % \
            db_id
    elif db_name == 'HUME':
        url = ('https://github.com/BBN-E/Hume/blob/master/resource/ontologies/'
               'hume_ontology/%s' % db_id)
    elif db_name == 'CWMS':
        url = 'http://trips.ihmc.us/%s' % db_id
    elif db_name == 'SIGNOR':
        # Assuming db_id == Primary ID
        url = 'https://signor.uniroma2.it/relation_result.php?id=%s' % db_id
    elif db_name == 'SOFIA':
        url = 'http://cs.cmu.edu/sofia/%s' % db_id
    elif db_name == 'CHEMBL':
        if not db_id.startswith('CHEMBL'):
            db_id = 'CHEMBL%s' % db_id
        url = identifiers_url + 'chembl.compound/%s' % db_id
    elif db_name == 'NONCODE':
        url = 'http://www.noncode.org/show_gene.php?id=NONHSAG%s' % db_id
    elif db_name == 'TEXT':
        return None
    else:
        logger.warning('Unhandled name space %s' % db_name)
        url = None
    return url
Python
0.000001
@@ -4240,32 +4240,126 @@ HSAG%25s' %25 db_id%0A + elif db_name == 'REFSEQ_PROT':%0A url = '%09https://identifiers.org/refseq:%25s' %25 db_id%0A elif db_name
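Decoded, the hunk adds a REFSEQ_PROT branch. Note the %09 in the added string: it is a literal tab embedded in the URL, apparently exactly as committed. A small sketch (the ID below is just an example value):

def refseq_url(db_id):
    # Decoded branch for db_name == 'REFSEQ_PROT'; the leading \t
    # reproduces the literal tab (%09) the hunk embeds in the string.
    return '\thttps://identifiers.org/refseq:%s' % db_id

print(repr(refseq_url('NP_000537')))  # note the embedded tab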
b2d016d1a1eb7bf36d94b57af99cd627a065c173
use getattr_static to avoid executing code
python/bncompleter.py
python/bncompleter.py
""" This file is a modified version of rlcompleter.py from the Python project under the Python Software Foundation License 2: https://github.com/python/cpython/blob/master/Lib/rlcompleter.py https://github.com/python/cpython/blob/master/LICENSE The only changes made were to modify the regular expression in attr_matches and all code that relied on GNU readline (the later more for readability as it wasn't required). -------------- Word completion for GNU readline. The completer completes keywords, built-ins and globals in a selectable namespace (which defaults to __main__); when completing NAME.NAME..., it evaluates (!) the expression up to the last dot and completes its attributes. It's very cool to do "import sys" type "sys.", hit the completion key (twice), and see the list of names defined by the sys module! Tip: to use the tab key as the completion key, call readline.parse_and_bind("tab: complete") Notes: - Exceptions raised by the completer function are *ignored* (and generally cause the completion to fail). This is a feature -- since readline sets the tty device in raw (or cbreak) mode, printing a traceback wouldn't work well without some complicated hoopla to save, reset and restore the tty state. - The evaluation of the NAME.NAME... form may cause arbitrary application defined code to be executed if an object with a __getattr__ hook is found. Since it is the responsibility of the application (or the user) to enable this feature, I consider this an acceptable risk. More complicated expressions (e.g. function calls or indexing operations) are *not* evaluated. - When the original stdin is not a tty device, GNU readline is never used, and this module (and the readline module) are silently inactive. """ import atexit import __main__ import inspect import sys __all__ = ["Completer"] def fnsignature(obj): if sys.version_info[0:2] >= (3, 5): try: sig = str(inspect.signature(obj)) except: sig = "()" return sig else: try: args = inspect.getargspec(obj).args args.remove('self') sig = "(" + ','.join(args) + ")" except: sig = "()" return sig class Completer: def __init__(self, namespace = None): """Create a new completer for the command line. Completer([namespace]) -> completer instance. If unspecified, the default namespace where completions are performed is __main__ (technically, __main__.__dict__). Namespaces should be given as dictionaries. Completer instances should be used as the completion mechanism of readline via the set_completer() call: readline.set_completer(Completer(my_namespace).complete) """ if namespace and not isinstance(namespace, dict): raise TypeError('namespace must be a dictionary') # Don't bind to namespace quite yet, but flag whether the user wants a # specific namespace or to use __main__.__dict__. This will allow us # to bind to __main__.__dict__ at completion time, not now. if namespace is None: self.use_main_ns = 1 else: self.use_main_ns = 0 self.namespace = namespace def complete(self, text, state): """Return the next possible completion for 'text'. This is called successively with state == 0, 1, 2, ... until it returns None. The completion should begin with 'text'. """ if self.use_main_ns: self.namespace = __main__.__dict__ if not text.strip(): if state == 0: return '\t' else: return None if state == 0: if "." 
in text: self.matches = self.attr_matches(text) else: self.matches = self.global_matches(text) try: return self.matches[state] except IndexError: return None def _callable_postfix(self, val, word): if callable(val) and not inspect.isclass(val): word = word + fnsignature(val) return word def global_matches(self, text): """Compute matches when text is a simple name. Return a list of all keywords, built-in functions and names currently defined in self.namespace that match. """ import keyword matches = [] seen = {"__builtins__"} n = len(text) for word in keyword.kwlist: if word[:n] == text: seen.add(word) if word in {'finally', 'try'}: word = word + ':' elif word not in {'False', 'None', 'True', 'break', 'continue', 'pass', 'else'}: word = word + ' ' matches.append(word) #Not sure why in the console builtins becomes a dict but this works for now. if hasattr(__builtins__, '__dict__'): builtins = __builtins__.__dict__ else: builtins = __builtins__ for nspace in [self.namespace, builtins]: for word, val in nspace.items(): if word[:n] == text and word not in seen: seen.add(word) matches.append(self._callable_postfix(val, word)) return matches def attr_matches(self, text): """Compute matches when text contains a dot. Assuming the text is of the form NAME.NAME....[NAME], and is evaluable in self.namespace, it will be evaluated and its attributes (as revealed by dir()) are used as possible completions. (For class instances, class members are also considered.) WARNING: this can still invoke arbitrary C code, if an object with a __getattr__ hook is evaluated. """ import re m = re.match(r"([\w\[\]]+(\.[\w\[\]]+)*)\.([\w\[\]]*)", text) if not m: return [] expr, attr = m.group(1, 3) try: thisobject = eval(expr, self.namespace) except Exception: return [] # get the content of the object, except __builtins__ words = set(dir(thisobject)) words.discard("__builtins__") if hasattr(thisobject, '__class__'): words.add('__class__') words.update(get_class_members(thisobject.__class__)) matches = [] n = len(attr) if attr == '': noprefix = '_' elif attr == '_': noprefix = '__' else: noprefix = None while True: for word in words: if (word[:n] == attr and not (noprefix and word[:n+1] == noprefix)): match = "%s.%s" % (expr, word) try: val = getattr(thisobject, word) except Exception: pass # Include even if attribute not set else: match = self._callable_postfix(val, match) matches.append(match) if matches or not noprefix: break if noprefix == '_': noprefix = '__' else: noprefix = None matches.sort() return matches def get_class_members(klass): ret = dir(klass) if hasattr(klass,'__bases__'): for base in klass.__bases__: ret = ret + get_class_members(base) return ret
Python
0.000001
@@ -5945,24 +5945,31 @@ al = getattr +_static (thisobject,
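Decoded, the call in attr_matches becomes getattr_static(thisobject, word), presumably inspect.getattr_static, which matches the subject line: it looks attributes up without firing descriptors or __getattr__ hooks. (The visible hunk adds no import for the bare name, so how it is bound is not shown.) A runnable illustration of the difference:

import inspect

class Demo:
    @property
    def expensive(self):  # a descriptor with side effects
        raise RuntimeError("evaluated!")

# getattr(Demo(), 'expensive') would run the property body and raise;
# inspect.getattr_static reports the attribute without executing it:
print(inspect.getattr_static(Demo(), 'expensive'))  # -> the property object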
d5245c08e96b374d67bf35c319c8076c41c2feb1
Reset auth
Htpc.py
Htpc.py
#!/usr/bin/env python
# -*- coding: utf-8 -*-A
"""
This is the main executable of HTPC Manager. It parses the command line
arguments, sets globals variables and calls the start function to start
the server.
"""
import os
import sys
import htpc


def parse_arguments():
    """ Get variables from commandline """
    import argparse
    parser = argparse.ArgumentParser()
    parser.add_argument('--datadir', default=None,
                        help='Set the datadirectory')
    parser.add_argument('--db', default=None,
                        help='Use a custom database')
    parser.add_argument('--host', default=None,
                        help='Use a specific host/IP')
    parser.add_argument('--port', type=int,
                        help='Use a specific port')
    parser.add_argument('--shell', action='store_true', default=False,
                        help='WARNING! DO NOT USE UNLESS YOU KNOW WHAT .POPEN CAN BE USED FOR (LIKE WIPEING YOUR HARDDRIVE).')
    parser.add_argument('--daemon', action='store_true', default=False,
                        help='Daemonize process')
    parser.add_argument('--pid', default=False,
                        help='Generate PID file at location')
    parser.add_argument('--debug', action='store_true', default=False,
                        help='Print debug text')
    parser.add_argument('--webdir', default=None,
                        help='Use a custom webdir')
    parser.add_argument('--loglevel', default='info',
                        help='Set a loglevel. Allowed values: debug, info, warning, error, critical')

    return parser.parse_args()


def load_modules():
    """ Import the system modules """
    from htpc.root import Root
    htpc.ROOT = Root()

    from htpc.settings import Settings
    htpc.ROOT.settings = Settings()

    from htpc.log import Log
    htpc.ROOT.log = Log()

    from htpc.updater import Updater
    htpc.ROOT.update = Updater()

    # Import all modules.
    from modules.xbmc import Xbmc
    htpc.ROOT.xbmc = Xbmc()

    from modules.sabnzbd import Sabnzbd
    htpc.ROOT.sabnzbd = Sabnzbd()

    from modules.couchpotato import Couchpotato
    htpc.ROOT.couchpotato = Couchpotato()

    from modules.sickbeard import Sickbeard
    htpc.ROOT.sickbeard = Sickbeard()

    from modules.transmission import Transmission
    htpc.ROOT.transmission = Transmission()

    from modules.deluge import Deluge
    htpc.ROOT.deluge = Deluge()

    from modules.squeezebox import Squeezebox
    htpc.ROOT.squeezebox = Squeezebox()

    from modules.search import Search
    htpc.ROOT.search = Search()

    from modules.utorrent import UTorrent
    htpc.ROOT.utorrent = UTorrent()

    from modules.nzbget import NZBGet
    htpc.ROOT.nzbget = NZBGet()

    from modules.qbittorrent import qbittorrent
    htpc.ROOT.qbittorrent = qbittorrent()

    from modules.stats import Stats
    htpc.ROOT.stats = Stats()

    from modules.tvheadend import TVHeadend
    htpc.ROOT.tvheadend = TVHeadend()


def main():
    """ Main function is called at startup. """

    # Parse runtime arguments
    args = parse_arguments()

    # Set root and insert bundled libraies into path
    htpc.RUNDIR = os.path.dirname(os.path.abspath(sys.argv[0]))
    sys.path.insert(0, os.path.join(htpc.RUNDIR, 'libs'))

    # Set datadir, create if it doesn't exist and exit if it isn't writable.
    htpc.DATADIR = os.path.join(htpc.RUNDIR, 'userdata/')
    if args.datadir:
        htpc.DATADIR = args.datadir

    if not os.path.isdir(htpc.DATADIR):
        os.makedirs(htpc.DATADIR)

    if not os.access(htpc.DATADIR, os.W_OK):
        sys.exit("No write access to userdata folder")

    from mako.lookup import TemplateLookup

    # Enable debug mode if needed
    htpc.DEBUG = args.debug

    # Set loglevel
    htpc.LOGLEVEL = args.loglevel.lower()

    # Set default database and overwrite if supplied through commandline
    htpc.DB = os.path.join(htpc.DATADIR, 'database.db')
    if args.db:
        htpc.DB = args.db

    # Load settings from database
    from htpc.settings import Settings
    htpc.settings = Settings()

    # Check for SSL
    htpc.SSLCERT = htpc.settings.get('app_ssl_cert')
    htpc.SSLKEY = htpc.settings.get('app_ssl_key')

    htpc.WEBDIR = htpc.settings.get('app_webdir', '/')
    if args.webdir:
        htpc.WEBDIR = args.webdir
    if not(htpc.WEBDIR.endswith('/')):
        htpc.WEBDIR += '/'

    # Inititialize root and settings page
    load_modules()
    htpc.TEMPLATE = os.path.join(htpc.RUNDIR, 'interfaces/',
                                 htpc.settings.get('app_template', 'default'))
    htpc.LOOKUP = TemplateLookup(directories=[os.path.join(htpc.TEMPLATE, 'html/')])

    # Overwrite host setting if supplied through commandline
    htpc.HOST = htpc.settings.get('app_host', '0.0.0.0')
    if args.host:
        htpc.HOST = args.host

    # Overwrite port setting if supplied through commandline
    htpc.PORT = int(htpc.settings.get('app_port', 8085))
    if args.port:
        htpc.PORT = args.port

    htpc.USERNAME = htpc.settings.get('app_username')
    htpc.PASSWORD = htpc.settings.get('app_password')

    #Select if you want to controll processes and popen from HTPC-Manager
    htpc.SHELL = args.shell

    # Select wether to run as daemon
    htpc.DAEMON = args.daemon

    # Set Application PID
    htpc.PID = args.pid

    # Start the server
    from htpc.server import start
    start()


if __name__ == '__main__':
    main()
Python
0.000011
@@ -38,17 +38,17 @@ tf-8 -*- -A +%0A %0A%22%22%22%0AThi @@ -1419,16 +1419,172 @@ ebdir')%0A + parser.add_argument('--resetauth', action='store_true', default=False,%0A help='Resets the username and password to HTPC-Manager')%0A pars @@ -5263,24 +5263,224 @@ password')%0A%0A + # Resets the htpc manager password and username%0A if args.resetauth:%0A htpc.USERNAME = htpc.settings.set('app_username', '')%0A htpc.PASSWORD = htpc.settings.set('app_password', '')%0A%0A #Select
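Decoded, the hunks fix the stray A on the coding line, register a --resetauth flag, and add a reset block to main(). A sketch of the flag plus the decoded block, shown in comments because htpc.settings is the app's own store:

import argparse

# Decoded reset block added to main():
#
#     if args.resetauth:
#         htpc.USERNAME = htpc.settings.set('app_username', '')
#         htpc.PASSWORD = htpc.settings.set('app_password', '')
parser = argparse.ArgumentParser()
parser.add_argument('--resetauth', action='store_true', default=False,
                    help='Resets the username and password to HTPC-Manager')
print(parser.parse_args(['--resetauth']).resetauth)  # True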
f56cc7acae3c3b295febafec384bcfdf3b2dcee0
code for python
Koda.py
Koda.py
from csv import DictReader

import pandas as ps


def fileReaderSmucNesrece():
    fp = open("evidencanesrecnasmuciscihV1.csv", "rt", encoding=" utf -8 ")
    reader = DictReader(fp)
    return [line for line in reader] #branje



SmucNes = fileReaderSmucNesrece()
SmucNes = ps.DataFrame(SmucNes) # uporaba pandas
print(SmucNes)
Python
0.999999
@@ -1,8 +1,183 @@ +%0Aimport csv%0A%0Aimport numpy as np%0Aimport matplotlib.pyplot as plt%0Afrom datetime import datetime%0Afrom scipy.stats import multivariate_normal as mvn%0Afrom scipy.stats import beta%0A%0A from csv @@ -213,17 +213,17 @@ das as p -s +a %0A%0Adef fi @@ -389,18 +389,8 @@ der%5D - #branje %0A%0A%0A%0A @@ -435,17 +435,17 @@ cNes = p -s +a .DataFra @@ -459,26 +459,1251 @@ Nes) - # uporaba pandas%0A +%0Atitles = %5B%5D%0Afor i in SmucNes:%0A titles.append(i)%0Adef tf(t):%0A st = 0%0A for i in SmucNes%5Bt%5D:%0A if i == 'Da':%0A SmucNes.set_value(st, t, %22True%22)%0A elif i == %22Ne%22:%0A SmucNes.set_value(st, t, %22False%22)%0A else:%0A SmucNes.set_value(st, t, %22%22)%0A st += 1%0Adef tfmake():%0A num = %5B3, 4, 5, 13, 14, 15%5D%0A for i in num:%0A tf(titles%5Bi%5D)%0A num2 = %5B9, 16, 17, 19, 20 %5D%0A for i in num2:%0A udA(titles%5Bi%5D)%0Adef udA(t):%0A dic = %7B%7D%0A temp=%5B%5D%0A st = 0%0A for i in SmucNes%5Bt%5D:%0A if len(i)%3E0:%0A temp = i.split(%22 %22, 1)%0A if temp%5B0%5D not in dic:%0A if temp%5B0%5D == 25:%0A dic%5B25%5D = 'ostalo'%0A elif temp%5B0%5D == 'NO%C3%88NA':%0A dic%5B7%5D = 'NO%C3%88NA'%0A else:%0A if len(temp) %3E 1:%0A dic%5Btemp%5B0%5D%5D = temp%5B1%5D%0A%0A if temp%5B0%5D.isdigit():%0A SmucNes.set_value(st, t, temp%5B0%5D)%0A else:%0A for i in dic:%0A if temp%5B0%5D == dic%5Bi%5D:%0A SmucNes.set_value(st, t, i)%0A SmucNes.set_value(st, t, %22%22)%0A else:%0A SmucNes.set_value(st, t, %22%22)%0A%0A st += 1%0A%0A%0Atfmake() %0Apri
ac2f32bc48a1763cf8f7b57094990329da621a59
Fix long line of code in Main.py
Main.py
Main.py
"""Main Module of PDF Splitter""" import argparse import logging import os from PyPDF2 import PdfFileWriter from Util import all_pdf_files_in_directory, split_on, concat_pdf_pages, merge_with_next, is_landscape, write_pdf_file, \ add_pages # Get default logger logging.basicConfig(level=logging.INFO) log = logging.getLogger(__name__) parser = \ argparse.ArgumentParser( description='Split all the pages of multiple PDF files in a directory by document number' ) parser.add_argument( 'directory', metavar='PATH', type=str, help='path to a directory' ) parser.add_argument( '-r', '--rotate-back', choices=['ccw', 'cw', 'no-op'], default='no-op', help='how to correct the rotation of the first PDF page' ) parser.add_argument( '-m', '--merge', action='store_true', help='merge all the output files into another PDF file' ) parser.add_argument( '-d', '--double-sided', action='store_true', default=False, help='the input PDF files are double sided scans' ) parser.add_argument( '-e', '--even-pages', dest='even_pages', action='store_true', default=False, help='Append blank page to make page number an even number' ) parser.add_argument( '-v', '--verbose', action='store_true', help='increase output verbosity' ) # A dictionary that contains function that corrects the rotation of a PDF page rotation_correctors = { 'cw': lambda page: page.rotateClockwise(90), 'ccw': lambda page: page.rotateCounterClockwise(90), 'no-op': lambda page: page # Do nothing } # A dictionary that contains tuple of two functions, the first detect whether a chunk needs to be merged # with the next chunk, the second function decides how chunks should be merged merge_configs = { # Merge when there is only one page in the chunk (happens with double-side scan) and # merge the two chunks leaving out only the back-side of the header page 'double_sided': (lambda pages: len(pages) == 1, lambda chunk1, chunk2: chunk1 + chunk2[1:]), 'single_sided': (lambda pages: False, None) # Never merge page } def main(): # Get to directory with PDF files to work on args = parser.parse_args() if args.verbose: log.parent.setLevel(logging.DEBUG) directory = args.directory merge_config = 'double_sided' if args.double_sided else 'single_sided' output_files = pdf_split( directory, rotation_correctors[args.rotate_back], args.even_pages, merge_configs[merge_config] ) if args.merge: merge_output(output_files) def pdf_split(directory, correct_rotation, even_pages, merge_config): """Split all the PDF files in a certain directory. Optionally correct the rotation of the header page, make page chunks have even number of pages and merge page chunks before writing to output files.""" log.info('Working on PDF files in %s', directory) output_filenames = [] # Open the PDF files all_pdf_files = [os.path.join(directory, filename) for filename in all_pdf_files_in_directory(directory)] log.info('Found the following PDF files\n %s', '\n '.join(all_pdf_files)) opened_files = [open(path, 'rb') for path in all_pdf_files] # Take all the pages in all the PDF files into a generator all_pages = concat_pdf_pages(opened_files) def make_pagenum_even(writer): """Helper function that append a blank page if the number of page is an odd number, in order to make the page number even.""" if writer.getNumPages() % 2 == 1: log.info(' Adding a blank page') writer.addBlankPage() # TODO: Fix blank page orientation bug # Problem: if the only page in this writer was originally in landscape orientation, a new blank page will # be in landscape orientation too. 
# But it should be rare that a writer has only one page in it, thus not a big problem. # Solution: takes the rotation of the only page in the PDF writer into account, or have a predefined page # width & height. # First split pages into chunks when a page in landscape orientation is detected page_chunks1 = split_on(all_pages, predicate=is_landscape) # Next merge adjacent chunks that meets certain condition with a merger function # this is used to handle situation where the scan is double sided page_chunks2 = merge_with_next(page_chunks1, predicate=merge_config[0], merger=merge_config[1]) # For all pages that belongs to the same document ID for idx, pages_to_write in enumerate(page_chunks2, start=1): # Create a PDF writer instance pdf_writer = PdfFileWriter() # Correct the rotation of the first page in file first_page = pages_to_write[0] # If this is the first page of the first PDF file, it might not be in landscape view # so we check for that if is_landscape(first_page): log.debug('Correction rotation on the first page=%s', repr(first_page)) # Correct the rotation the way the user specifies correct_rotation(first_page) # Put those pages into a writer add_pages(pdf_writer, pages_to_write) # Conditionally make the output PDF file have an even number of pages, which makes printing the PDF file easier if even_pages: make_pagenum_even(pdf_writer) output_filename = '{0:05}.pdf'.format(idx) output_filenames.append(output_filename) # And write those pages to a single PDF file log.info('Writing PDF pages to %s', output_filename) write_pdf_file(output_filename, pdf_writer) # Make sure to close all the files that were opened log.debug('Closing all opened files') for opened_file in opened_files: opened_file.close() return output_filenames def merge_output(pdf_files, output_filename='all.pdf'): """Merge all the output PDF files into a single PDF files to make printing easier. The output filename defaults to 'all.pdf'.""" log.info('Merging output files\n %s', '\n '.join(pdf_files)) opened_files = [open(path, 'rb') for path in pdf_files] pdf_writer = PdfFileWriter() # Write all the pages in all the output PDF files into PDFWriter add_pages(pdf_writer, concat_pdf_pages(opened_files)) # And write those pages to a single PDF file log.info('Writing PDF pages to %s', output_filename) write_pdf_file(output_filename, pdf_writer) # Make sure to close all the files that were opened log.debug('Closing all opened files') for opened_file in opened_files: opened_file.close() if __name__ == '__main__': main() logging.shutdown()
Python
0.999065
@@ -206,16 +206,22 @@ ndscape, + %5C%0A write_p @@ -228,22 +228,16 @@ df_file, - %5C%0A add_pag
f503d49a2753d3a92a480a2923c6b955d7f45348
Include rendered (instead of raw) body in tourney match comments JSON
byceps/blueprints/tourney/match/views.py
byceps/blueprints/tourney/match/views.py
""" byceps.blueprints.tourney.match.views ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ :Copyright: 2006-2018 Jochen Kupperschmidt :License: Modified BSD, see LICENSE for details. """ from flask import abort, g, jsonify, request, url_for from ....services.tourney import match_service from ....util.framework.blueprint import create_blueprint from ....util.framework.templating import templated from ....util.views import respond_created, respond_no_content from ...authentication.decorators import api_token_required from . import signals blueprint = create_blueprint('tourney_match', __name__) # -------------------------------------------------------------------- # # match comments @blueprint.route('/<uuid:match_id>/comments') @templated def comments_view(match_id): """Render the comments on a match.""" match = _get_match_or_404(match_id) comments = match_service.get_comments(match.id, g.party_id) return { 'comments': comments, } @blueprint.route('/<uuid:match_id>/comments.json') def comments_view_as_json(match_id): """Render the comments on a match as JSON.""" match = _get_match_or_404(match_id) comments = match_service.get_comments(match.id, g.party_id) comment_dtos = list(map(_comment_to_json, comments)) return jsonify({ 'comments': comment_dtos, }) def _comment_to_json(comment): creator = comment.creator return { 'comment_id': str(comment.id), 'match_id': str(comment.match_id), 'created_at': comment.created_at.isoformat(), 'creator': { 'user_id': creator.id, 'screen_name': creator.screen_name, 'suspended': creator.suspended, 'deleted': creator.deleted, 'avatar_url': creator.avatar_url, 'is_orga': creator.is_orga, }, 'body': comment.body, 'hidden': comment.hidden, 'hidden_at': comment.hidden_at, 'hidden_by_id': comment.hidden_by_id, } blueprint.add_url_rule('/<uuid:match_id>/comments/<uuid:comment_id>', endpoint='comment_view', build_only=True) @blueprint.route('/<uuid:match_id>/comments', methods=['POST']) @respond_created def comment_create(match_id): """Create a comment on a match.""" if not g.current_user.is_active: abort(403) match = _get_match_or_404(match_id) body = request.form['body'].strip() comment = match_service.create_comment(match_id, g.current_user.id, body) signals.match_comment_created.send(None, comment_id=comment.id) return url_for('.comment_view', match_id=match.id, comment_id=comment.id) @blueprint.route('/<uuid:match_id>/comments/<uuid:comment_id>/flags/hidden', methods=['POST']) @api_token_required @respond_no_content def comment_hide(match_id, comment_id): """Hide the match comment.""" initiator_id = request.form.get('initiator_id') if not initiator_id: abort(400, 'Initiator ID missing') match_service.hide_comment(comment_id, initiator_id) @blueprint.route('/<uuid:match_id>/comments/<uuid:comment_id>/flags/hidden', methods=['DELETE']) @api_token_required @respond_no_content def comment_unhide(match_id, comment_id): """Un-hide the match comment.""" initiator_id = request.form.get('initiator_id') if not initiator_id: abort(400, 'Initiator ID missing') match_service.unhide_comment(comment_id, initiator_id) def _get_match_or_404(match_id): match = match_service.find_match(match_id) if match is None: abort(404) return match
Python
0
@@ -1857,16 +1857,25 @@ ent.body +_rendered ,%0A
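Decoded, the hunk above swaps the raw comment body for its rendered form inside _comment_to_json; a minimal sketch of the affected return value (the other keys are unchanged and abridged here):

def _comment_to_json(comment):
    return {
        # ... comment_id, match_id, created_at, creator as in old_contents ...
        'body': comment.body_rendered,  # was: comment.body
        'hidden': comment.hidden,
        'hidden_at': comment.hidden_at,
        'hidden_by_id': comment.hidden_by_id,
    }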
dd526ef40d3eb13681dca602b82390d66363783f
fix FlipDimension for LinearDimension
src/Mod/Draft/draftguitools/gui_dimension_ops.py
src/Mod/Draft/draftguitools/gui_dimension_ops.py
# *************************************************************************** # * (c) 2009, 2010 Yorik van Havre <yorik@uncreated.net> * # * (c) 2009, 2010 Ken Cline <cline@frii.com> * # * (c) 2020 Eliud Cabrera Castillo <e.cabrera-castillo@tum.de> * # * * # * This file is part of the FreeCAD CAx development system. * # * * # * This program is free software; you can redistribute it and/or modify * # * it under the terms of the GNU Lesser General Public License (LGPL) * # * as published by the Free Software Foundation; either version 2 of * # * the License, or (at your option) any later version. * # * for detail see the LICENCE text file. * # * * # * FreeCAD is distributed in the hope that it will be useful, * # * but WITHOUT ANY WARRANTY; without even the implied warranty of * # * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * # * GNU Library General Public License for more details. * # * * # * You should have received a copy of the GNU Library General Public * # * License along with FreeCAD; if not, write to the Free Software * # * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 * # * USA * # * * # *************************************************************************** """Provides tools to modify Draft dimensions. For example, a tool to flip the direction of the text in the dimension as the normal is sometimes not correctly calculated automatically. """ ## @package gui_dimension_ops # \ingroup DRAFT # \brief Provides tools to modify Draft dimensions. from PySide.QtCore import QT_TRANSLATE_NOOP import FreeCADGui as Gui import draftutils.utils as utils import draftguitools.gui_base as gui_base from draftutils.translate import _tr class FlipDimension(gui_base.GuiCommandNeedsSelection): """The Draft FlipDimension command definition. Flip the normal direction of the selected dimensions. It inherits `GuiCommandNeedsSelection` to set up the document and other behavior. See this class for more information. """ def __init__(self): super(Draft_FlipDimension, self).__init__(name=_tr("Flip dimension")) def GetResources(self): """Set icon, menu and tooltip.""" _tip = ("Flip the normal direction of the selected dimensions " "(linear, radial, angular).\n" "If other objects are selected they are ignored.") return {'Pixmap': 'Draft_FlipDimension', 'MenuText': QT_TRANSLATE_NOOP("Draft_FlipDimension", "Flip dimension"), 'ToolTip': QT_TRANSLATE_NOOP("Draft_FlipDimension", _tip)} def Activated(self): """Execute when the command is called.""" super(Draft_FlipDimension, self).Activated() for o in Gui.Selection.getSelection(): if utils.get_type(o) in ("Dimension", "AngularDimension"): self.doc.openTransaction("Flip dimension") _cmd = "App.activeDocument()." + o.Name + ".Normal" _cmd += " = " _cmd += "App.activeDocument()." + o.Name + ".Normal.negative()" Gui.doCommand(_cmd) self.doc.commitTransaction() self.doc.recompute() Draft_FlipDimension = FlipDimension Gui.addCommand('Draft_FlipDimension', FlipDimension())
Python
0
@@ -3530,16 +3530,72 @@ ension%22, +%0A %22LinearDimension%22, %22Angula
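The hunk inserts "LinearDimension" into the type tuple checked in Activated, so the flip now also covers linear dimensions; a reconstructed sketch of the check (loop body abridged):

for o in Gui.Selection.getSelection():
    if utils.get_type(o) in ("Dimension",
                             "LinearDimension", "AngularDimension"):
        # negate the Normal property, as in old_contents
        ...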
c3802c77e3e6dc5171fc31152314e804967986ee
Document response JSON for login
authentication/authentication.py
authentication/authentication.py
import os from flask import Flask, jsonify, request from flask.ext.bcrypt import Bcrypt from flask.ext.login import ( LoginManager, login_required, login_user, logout_user, UserMixin, ) from flask.ext.sqlalchemy import SQLAlchemy from requests import codes db = SQLAlchemy() class User(db.Model, UserMixin): """ A user has an email and password. """ email = db.Column(db.String, primary_key=True) password_hash = db.Column(db.String) def get_id(self): """ Return the email address to satify Flask-Login's requirements. This is used in conjunction with ``load_user`` for session management """ return self.email def create_app(database_uri): """ Create an application with a database in a given location. :param database_uri: The location of the database for the application. :type database_uri: string :return: An application instance. :rtype: ``Flask`` """ app = Flask(__name__) app.config['SQLALCHEMY_DATABASE_URI'] = database_uri app.config['SECRET_KEY'] = 'secret' app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = True db.init_app(app) with app.app_context(): db.create_all() return app SQLALCHEMY_DATABASE_URI = os.environ.get('SQLALCHEMY_DATABASE_URI', 'sqlite:///:memory:') app = create_app(database_uri=SQLALCHEMY_DATABASE_URI) bcrypt = Bcrypt(app) login_manager = LoginManager() login_manager.init_app(app) @login_manager.user_loader def load_user(userid): """ Flask-Login user_loader callback. The user_loader function asks this function to get a User object based on the userid. If there is no user with the current userid (where userid is the result of ``User.get_id``), return None. The userid was stored in the session environment by Flask-Login. user_loader stores the returned User object in current_user during every flask request. """ return User.query.filter_by(email=userid).first() @app.route('/login', methods=['POST']) def login(): """ Login API endpoint. :param email: An email address to log in as. :type email: string :param password: A password associated with the given ``email`` address. :type password: string :resheader Content-Type: application/json :status 200: A ``User`` with the given ``email`` has been logged in. :status 404: No user can be found with the given ``email``. :status 401: The given ``password`` is incorrect. """ email = request.form['email'] password = request.form['password'] existing_users = User.query.filter_by(email=email) if not existing_users.count(): return jsonify({}), codes.NOT_FOUND user = existing_users.first() if not bcrypt.check_password_hash(user.password_hash, password): return jsonify({}), codes.UNAUTHORIZED login_user(user, remember=True) response_content = {'email': email, 'password': password} return jsonify(response_content), codes.OK @app.route('/logout', methods=['POST']) @login_required def logout(): """ Log the current user out. """ logout_user() return jsonify({}), codes.OK @app.route('/signup', methods=['POST']) def signup(): """ Signup API endpoint. Return an OK status code and user details if a user with the given email and password does not exist, else give an appropriate error code. 
""" email = request.form['email'] password = request.form['password'] if load_user(email) is not None: return jsonify({}), codes.CONFLICT password_hash = bcrypt.generate_password_hash(password) user = User(email=email, password_hash=password_hash) db.session.add(user) db.session.commit() response_content = {'email': email, 'password': password} return jsonify(response_content), codes.CREATED if __name__ == '__main__': # pragma: no cover # Specifying 0.0.0.0 as the host tells the operating system to listen on # all public IPs. This makes the server visible externally. # See http://flask.pocoo.org/docs/0.10/quickstart/#a-minimal-application app.run(host='0.0.0.0')
Python
0
@@ -2349,16 +2349,176 @@ on/json%0A + :resjson string email: The email address which has been logged in.%0A :resjson string password: The password of the user which has been logged%0A in.%0A :sta
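Decoded, the hunk adds two :resjson directives to the login endpoint's docstring, directly after the :resheader line:

    :resheader Content-Type: application/json
    :resjson string email: The email address which has been logged in.
    :resjson string password: The password of the user which has been logged
        in.
    :status 200: A ``User`` with the given ``email`` has been logged in.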
3b41b94b4ad7b249a2ff1040d6bf2d4759d48b14
revise task error handling (exceptions bubble up now)
ape/main.py
ape/main.py
import argparse import inspect import importlib import sys import os from ape import tasks, TaskNotFound, FeatureNotFound, EnvironmentIncomplete from featuremonkey import get_features_from_equation_file def get_task_parser(task): ''' construct an ArgumentParser for task this function returns a tuple (parser, proxy_args) if task accepts varargs only, proxy_args is True. if task accepts only positional and explicit keyword args, proxy args is False. ''' args, varargs, keywords, defaults = inspect.getargspec(task) defaults = defaults or [] parser = argparse.ArgumentParser( prog='ape ' + task.__name__, add_help=False, description = task.__doc__, formatter_class=argparse.ArgumentDefaultsHelpFormatter ) posargslen = len(args) - len(defaults) if varargs is None and keywords is None: for idx, arg in enumerate(args): if idx < posargslen: parser.add_argument(arg) else: default = defaults[idx - posargslen] parser.add_argument('--' + arg, default=default) return parser, False elif not args and varargs and not keywords and not defaults: return parser, True else: raise def invoke_task(task, args): ''' invoke task with args ''' parser, proxy_args = get_task_parser(task) if proxy_args: task(*args) else: pargs = parser.parse_args(args) task(**vars(pargs)) def run(args, features=None): ''' composes task modules of the selected features and calls the task given by args ''' features = features or [] for feature in features: try: feature_module = importlib.import_module(feature) except ImportError: raise FeatureNotFound(feature) try: tasks_module = importlib.import_module(feature + '.tasks') tasks.superimpose(tasks_module) except ImportError: #No tasks module in feature ... skip it pass if len(args) < 2 or (len(args) == 2 and args[1] == 'help'): tasks.help() else: taskname = args[1] try: task = tasks.get_task(taskname, include_helpers=False) except TaskNotFound: print 'Task "%s" not found! Use "ape help" to get usage information.' % taskname else: remaining_args = args[2:] if len(args) > 2 else [] invoke_task(task, remaining_args) def main(): ''' entry point when used via command line features are given using the environment variable PRODUCT_EQUATION. If it is not set, PRODUCT_EQUATION_FILENAME is tried: if it points to an existing equation file that selection is used. If that fails ``ape.EnvironmentIncomplete`` is raised. ''' #check APE_PREPEND_FEATURES features = os.environ.get('APE_PREPEND_FEATURES', '').split() #features can be specified inline in PRODUCT_EQUATION inline_features = os.environ.get('PRODUCT_EQUATION', '').split() if inline_features: #append inline features features += inline_features else: #fallback: features are specified in equation file feature_file = os.environ.get('PRODUCT_EQUATION_FILENAME', '') if feature_file: #append features from equation file features += get_features_from_equation_file(feature_file) else: if not features: print ( 'Error running ape:\n' 'Either the PRODUCT_EQUATION or ' 'PRODUCT_EQUATION_FILENAME environment ' 'variable needs to be set!' ) sys.exit(1) #run ape with features selected run(sys.argv, features=features) if __name__ == '__main__': main()
Python
0
@@ -62,16 +62,33 @@ port os%0A +import traceback%0A from ape @@ -461,17 +461,16 @@ rd args, - %0A pro @@ -2594,20 +2594,16 @@ nd line%0A - %0A fea @@ -2649,16 +2649,18 @@ ariable +%60%60 PRODUCT_ @@ -2667,16 +2667,18 @@ EQUATION +%60%60 .%0A If @@ -2693,16 +2693,18 @@ ot set, +%60%60 PRODUCT_ @@ -2720,16 +2720,18 @@ FILENAME +%60%60 is trie @@ -2807,25 +2807,120 @@ ed.%0A - If that fails +%0A (if %60%60APE_PREPEND_FEATURES%60%60 is given, those features are prepended)%0A%0A If the list of features is empty, %60%60a @@ -3625,22 +3625,43 @@ -print +raise EnvironmentIncomplete (%0A @@ -3881,36 +3881,8 @@ ) -%0A sys.exit(1) %0A%0A @@ -3984,15 +3984,105 @@ _':%0A -main( +try:%0A main()%0A except Exception as e:%0A traceback.print_exc()%0A sys.exit(1 )%0A
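Applied, the diff imports traceback, replaces the printed error plus sys.exit(1) with a raised EnvironmentIncomplete (already imported in old_contents), and catches whatever bubbles up at the entry point; a reconstructed sketch of the tail of main.py:

        if not features:
            raise EnvironmentIncomplete(
                'Error running ape:\n'
                'Either the PRODUCT_EQUATION or '
                'PRODUCT_EQUATION_FILENAME environment '
                'variable needs to be set!'
            )

    # run ape with features selected
    run(sys.argv, features=features)


if __name__ == '__main__':
    try:
        main()
    except Exception as e:
        traceback.print_exc()
        sys.exit(1)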
17474a74a8c382c1cc5923c0e2128e4a4e776553
Add method I am yet to use
eva/util/nutil.py
eva/util/nutil.py
import numpy as np def to_rgb(pixels): return np.repeat(pixels, 3 if pixels.shape[2] == 1 else 1, 2) def binarize(arr, generate=np.random.uniform): return (generate(size=arr.shape) < arr).astype('i')
Python
0
@@ -204,8 +204,112 @@ pe('i')%0A +%0Adef quantisize(arr, levels):%0A return (np.digitize(arr, np.arange(levels) / levels) - 1).astype('i')%0A
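The diff appends a single function; decoded verbatim, it buckets array values into integer levels (np.digitize returns 1-based bin indices, hence the -1):

def quantisize(arr, levels):
    return (np.digitize(arr, np.arange(levels) / levels) - 1).astype('i')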
57773e149ae2c7634e262b103a10cc35f6e138b2
Ids are strings.
src/scim/schema/core.py
src/scim/schema/core.py
# -*- coding: utf-8 -*- from . import attributes, types class Metadata(attributes.Base): """A complex attribute containing resource metadata. """ #! The DateTime the Resource was added to the Service Provider. created = attributes.Singular(types.DateTime) #! The most recent DateTime the details of this Resource were updated at #! the Service Provider. If this Resource has never been modified since #! its initial creation, the value MUST be the same as the value of #! created. last_modified = attributes.Singular(types.DateTime) #! The URI of the resource being returned. #! #! This value MUST be the same as the Location HTTP response header. location = attributes.Singular(types.String) #! The version of the Resource being returned. #! #! This value must be the same as the ETag HTTP response header. version = attributes.Singular(types.String) #! The names of the attributes to remove during a PATCH operation. attributes = attributes.List(types.String) class Base(attributes.Base): """Defines the base SCIM schema (v1.1 § 5.5). Contains common attributes that all data models in the SCIM schema have. """ class Meta: schema = 'urn:scim:schemas:core:1.0' #! Unique identifier for the SCIM Resource as defined by the #! Service Provider. #! #! Each representation of the Resource MUST include a non-empty id value. #! This identifier MUST be unique across the Service Provider's entire #! set of Resources. It MUST be a stable, non-reassignable identifier #! that does not change when the same Resource is returned in #! subsequent requests. #! #! The value of the id attribute is always issued by the Service Provider #! and MUST never be specified by the Service Consumer. id = attributes.Singular(types.Integer, required=True) #! An identifier for the Resource as defined by the Service Consumer. #! #! The externalId may simplify identification of the Resource between #! Service Consumer and Service provider by allowing the Consumer to #! refer to the Resource with its own identifier, obviating the need to #! store a local mapping between the local identifier of the Resource and #! the identifier used by the Service Provider. external_id = attributes.Singular(types.Integer) #! A complex attribute containing resource metadata. meta = attributes.Complex(Metadata, last=True)
Python
0.999944
@@ -1866,23 +1866,22 @@ r(types. -Integer +String , requir @@ -2372,15 +2372,14 @@ pes. -Integer +String )%0A%0A
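Both identifier attributes switch from types.Integer to types.String, matching the commit subject; the reconstructed declarations:

    id = attributes.Singular(types.String, required=True)
    # ...
    external_id = attributes.Singular(types.String)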
fb2cfe4759fb98de644932af17a247428b2cc0f5
Fix Auth API key check causing error 500s
api/auth.py
api/auth.py
from django.http import HttpResponseForbidden from django.contrib.auth.models import AnonymousUser from api.models import AuthAPIKey class APIKeyAuthentication(object): def is_authenticated(self, request): params = {} for key,value in request.GET.items(): params[key.lower()] = value if params['apikey']: try: keyobj = AuthAPIKey.objects.get(key=params['apikey']) except: keyobj = None if keyobj and keyobj.active: request.user = AnonymousUser() return True return False def challenge(self): return HttpResponseForbidden('Access Denied, use a API Key')
Python
0
@@ -324,31 +324,24 @@ if -params%5B 'apikey' %5D:%0A @@ -332,17 +332,26 @@ 'apikey' -%5D + in params :%0A
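The error 500s came from indexing a query parameter that may be absent; decoded, the fix is a membership test in place of the lookup:

        if 'apikey' in params:  # was: if params['apikey']: which raises KeyError when the key is missing
            try:
                keyobj = AuthAPIKey.objects.get(key=params['apikey'])
            except:
                keyobj = None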
1d67f755ea0f638c3cabef9e9359665d5b50ff86
Clean up BeamConstellation
cactusbot/services/beam/constellation.py
cactusbot/services/beam/constellation.py
"""Interact with Beam Constellation.""" from logging import getLogger import re import json import asyncio from .. import WebSocket class BeamConstellation(WebSocket): """Interact with Beam Constellation.""" URL = "wss://constellation.beam.pro" RESPONSE_EXPR = re.compile(r'^(\d+)(.+)?$') INTERFACE_EXPR = re.compile(r'^([a-z]+):\d+:([a-z]+)') def __init__(self, channel, user): super().__init__(self.URL) self.logger = getLogger(__name__) assert isinstance(channel, int), "Channel ID must be an integer." self.channel = channel assert isinstance(user, int), "User ID must be an integer." self.user = user async def read(self, handle): """Read packets from the Constellation WebSocket.""" packet = await self.parse(await self.receive()) await super().read(handle) async def initialize(self, *interfaces): """Subscribe to Constellation interfaces.""" if not interfaces: interfaces = [ "channel:{channel}:update", "channel:{channel}:status", "channel:{channel}:followed", "channel:{channel}:subscribed", "channel:{channel}:hosted", "user:{user}:followed", "user:{user}:subscribed", "user:{user}:achievement" ] interfaces = list( interface.format(channel=self.channel, user=self.user) for interface in interfaces ) packet = { "type": "method", "method": "livesubscribe", "params": { "events": interfaces }, "id": 1 } self.websocket.send_str(json.dumps(packet)) self.logger.info( "Successfully subscribed to Constellation interfaces.")
Python
0.000041
@@ -38,39 +38,8 @@ %22%22%0A%0A -from logging import getLogger%0A%0A impo @@ -61,24 +61,8 @@ on%0A%0A -import asyncio%0A%0A from @@ -394,51 +394,8 @@ L)%0A%0A - self.logger = getLogger(__name__)%0A%0A @@ -594,197 +594,8 @@ er%0A%0A - async def read(self, handle):%0A %22%22%22Read packets from the Constellation WebSocket.%22%22%22%0A%0A packet = await self.parse(await self.receive())%0A%0A await super().read(handle)%0A%0A @@ -741,17 +741,17 @@ faces = -%5B +( %0A @@ -1105,17 +1105,18 @@ -%5D +)%0A %0A @@ -1137,13 +1137,9 @@ s = -list( +%5B %0A @@ -1254,33 +1254,33 @@ ces%0A -) +%5D %0A%0A packet @@ -1511,16 +1511,45 @@ packet)) +%0A await self.receive() %0A%0A @@ -1636,8 +1636,450 @@ aces.%22)%0A +%0A async def parse(self, packet):%0A %22%22%22Parse a chat packet.%22%22%22%0A%0A try:%0A packet = json.loads(packet)%0A except (TypeError, ValueError):%0A self.logger.exception(%22Invalid JSON: %25s.%22, packet)%0A return None%0A else:%0A if packet.get(%22error%22) is not None:%0A self.logger.error(packet)%0A else:%0A self.logger.debug(packet)%0A return packet%0A
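Besides dropping the asyncio and getLogger imports and the read() override, the diff awaits the subscription response and appends a parse() method; the method still references self.logger, which presumably now comes from the WebSocket base class. The new method, decoded:

    async def parse(self, packet):
        """Parse a chat packet."""

        try:
            packet = json.loads(packet)
        except (TypeError, ValueError):
            self.logger.exception("Invalid JSON: %s.", packet)
            return None
        else:
            if packet.get("error") is not None:
                self.logger.error(packet)
            else:
                self.logger.debug(packet)
            return packet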
948dadbd4aa262c86e561c56e7cd7748cdefa18b
Extend teacher column for institute courses
data_center/models.py
data_center/models.py
# -*- coding: utf-8 -*- from datetime import datetime from django.db import models class Course(models.Model): """Course database schema""" no = models.CharField(max_length=20, blank=True) code = models.CharField(max_length=20, blank=True) eng_title = models.CharField(max_length=200, blank=True) chi_title = models.CharField(max_length=200, blank=True) note = models.TextField(blank=True) objective = models.CharField(max_length=80, blank=True) time = models.CharField(max_length=80, blank=True) time_token = models.CharField(max_length=80, blank=True) teacher = models.CharField(max_length=80, blank=True) # Only save Chinese room = models.CharField(max_length=80, blank=True) credit = models.IntegerField(blank=True, null=True) limit = models.IntegerField(blank=True, null=True) prerequisite = models.BooleanField(default=False, blank=True) clas = models.CharField(max_length=10, blank=True) dept = models.CharField(max_length=10, blank=True) serial = models.CharField(max_length=20, blank=True) ge = models.CharField(max_length=80, blank=True) hit = models.IntegerField(default=0) syllabus = models.TextField(blank=True) # A html div def __str__(self): return self.no class Department(models.Model): dept_name = models.CharField(max_length=20, blank=True) required_course = models.ManyToManyField(Course, blank=True) def __unicode__(self): return self.dept_name class Announcement(models.Model): TAG_CHOICE = ( ('Info', '公告'), ('Bug', '已知問題'), ('Fix', '問題修復'), ) content = models.TextField(blank=True) time = models.DateTimeField(default=datetime.now) tag = models.CharField(max_length=10, choices=TAG_CHOICE, default='Info') def __unicode__(self): return '%s|%s' % (self.time, self.tag)
Python
0
@@ -620,33 +620,34 @@ ield(max_length= -8 +12 0, blank=True)
1e3ea59bb631bb78dd0525dcf92a96a6a39053d8
fix hooks that may not be assigned #148
py12306/helpers/request.py
py12306/helpers/request.py
import requests from requests.exceptions import * from py12306.helpers.func import * from requests_html import HTMLSession, HTMLResponse requests.packages.urllib3.disable_warnings() class Request(HTMLSession): """ 请求处理类 """ # session = {} def save_to_file(self, url, path): response = self.get(url, stream=True) with open(path, 'wb') as f: for chunk in response.iter_content(chunk_size=1024): f.write(chunk) return response @staticmethod def _handle_response(response, **kwargs) -> HTMLResponse: """ 扩充 response :param response: :param kwargs: :return: """ response = HTMLSession._handle_response(response, **kwargs) expand_class(response, 'json', Request.json) return response def add_response_hook(self, hook): exist_hooks = self.hooks['response'] if not isinstance(exist_hooks, list): hooks = [exist_hooks] hooks.append(hook) self.hooks['response'] = hooks return self def json(self, default={}): """ 重写 json 方法,拦截错误 :return: """ from py12306.app import Dict try: result = self.old_json() return Dict(result) except: return Dict(default) def request(self, *args, **kwargs): # 拦截所有错误 try: if not 'timeout' in kwargs: from py12306.config import Config kwargs['timeout'] = Config().TIME_OUT_OF_REQUEST response = super().request(*args, **kwargs) return response except RequestException as e: from py12306.log.common_log import CommonLog if e.response: response = e.response else: response = HTMLResponse(HTMLSession) # response.status_code = 500 expand_class(response, 'json', Request.json) response.reason = response.reason if response.reason else CommonLog.MESSAGE_RESPONSE_EMPTY_ERROR return response def cdn_request(self, url: str, cdn=None, method='GET', **kwargs): from py12306.helpers.api import HOST_URL_OF_12306 from py12306.helpers.cdn import Cdn if not cdn: cdn = Cdn.get_cdn() url = url.replace(HOST_URL_OF_12306, cdn) return self.request(method, url, headers={'Host': HOST_URL_OF_12306}, verify=False, **kwargs)
Python
0
@@ -877,22 +877,16 @@ -exist_ hooks = @@ -934,22 +934,16 @@ nstance( -exist_ hooks, l @@ -947,16 +947,28 @@ , list): +%0A hooks = @@ -973,14 +973,8 @@ = %5B -exist_ hook
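The bug behind the subject: the old code only bound hooks inside the isinstance branch, so the later append failed with a NameError whenever self.hooks['response'] was already a list. Decoded, the fixed method binds it unconditionally:

    def add_response_hook(self, hook):
        hooks = self.hooks['response']
        if not isinstance(hooks, list):
            hooks = [hooks]
        hooks.append(hook)
        self.hooks['response'] = hooks
        return self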
89aa3cbc62a947b3623380f7d1fe631bdf070b98
fix the need for admin rights to run
homeassistant/components/influxdb.py
homeassistant/components/influxdb.py
""" homeassistant.components.influxdb ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ InfluxDB component which allows you to send data to an Influx database. For more details about this component, please refer to the documentation at https://home-assistant.io/components/influxdb/ """ import logging import homeassistant.util as util from homeassistant.helpers import validate_config from homeassistant.const import (EVENT_STATE_CHANGED, STATE_ON, STATE_OFF, STATE_UNLOCKED, STATE_LOCKED, STATE_UNKNOWN) from homeassistant.components.sun import (STATE_ABOVE_HORIZON, STATE_BELOW_HORIZON) _LOGGER = logging.getLogger(__name__) DOMAIN = "influxdb" DEPENDENCIES = [] DEFAULT_HOST = 'localhost' DEFAULT_PORT = 8086 DEFAULT_DATABASE = 'home_assistant' REQUIREMENTS = ['influxdb==2.10.0'] CONF_HOST = 'host' CONF_PORT = 'port' CONF_DB_NAME = 'database' CONF_USERNAME = 'username' CONF_PASSWORD = 'password' def setup(hass, config): """ Setup the InfluxDB component. """ from influxdb import InfluxDBClient, exceptions if not validate_config(config, {DOMAIN: ['host']}, _LOGGER): return False conf = config[DOMAIN] host = conf[CONF_HOST] port = util.convert(conf.get(CONF_PORT), int, DEFAULT_PORT) database = util.convert(conf.get(CONF_DB_NAME), str, DEFAULT_DATABASE) username = util.convert(conf.get(CONF_USERNAME), str) password = util.convert(conf.get(CONF_PASSWORD), str) try: influx = InfluxDBClient(host=host, port=port, username=username, password=password, database=database) databases = [i['name'] for i in influx.get_list_database()] except exceptions.InfluxDBClientError: _LOGGER.error("Database host is not accessible. " "Please check your entries in the configuration file.") return False if database not in databases: _LOGGER.error("Database %s doesn't exist", database) return False def influx_event_listener(event): """ Listen for new messages on the bus and sends them to Influx. """ state = event.data.get('new_state') if state is None: return if state.state in (STATE_ON, STATE_LOCKED, STATE_ABOVE_HORIZON): _state = 1 elif state.state in (STATE_OFF, STATE_UNLOCKED, STATE_UNKNOWN, STATE_BELOW_HORIZON): _state = 0 else: _state = state.state try: _state = float(_state) except ValueError: pass measurement = state.attributes.get('unit_of_measurement', state.domain) json_body = [ { 'measurement': measurement, 'tags': { 'domain': state.domain, 'entity_id': state.object_id, }, 'time': event.time_fired, 'fields': { 'value': _state, } } ] try: influx.write_points(json_body) except exceptions.InfluxDBClientError: _LOGGER.exception('Error saving event to InfluxDB') hass.bus.listen(EVENT_STATE_CHANGED, influx_event_listener) return True
Python
0
@@ -1645,67 +1645,51 @@ -databases = %5Bi%5B'name'%5D for i in influx.get_list_database()%5D +influx.query(%22select * from /.*/ LIMIT 1;%22) %0A @@ -1718,32 +1718,39 @@ luxDBClientError + as exc :%0A _LOGGE @@ -1789,17 +1789,36 @@ cessible -. + due to '%25s', please %22%0A @@ -1839,15 +1839,8 @@ %22 -Please chec @@ -1883,127 +1883,87 @@ file -.%22)%0A return False%0A%0A if database not in databases:%0A _LOGGER.error(%22Database %25s doesn't exist%22, database + and that%22%0A %22 the database exists and is READ/WRITE.%22, exc )%0A @@ -2477,16 +2477,68 @@ e.state%0A + if _state == '':%0A return%0A @@ -3240,16 +3240,21 @@ ng event + %22%25s%22 to Infl @@ -3258,16 +3258,27 @@ nfluxDB' +, json_body )%0A%0A h
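Decoded, the setup probe no longer calls get_list_database() (which needs admin rights) and instead issues a plain query, with the existence check folded into a single error path:

    try:
        influx = InfluxDBClient(host=host, port=port, username=username,
                                password=password, database=database)
        influx.query("select * from /.*/ LIMIT 1;")
    except exceptions.InfluxDBClientError as exc:
        _LOGGER.error("Database host is not accessible due to '%s', please "
                      "check your entries in the configuration file and that"
                      " the database exists and is READ/WRITE.", exc)
        return False

The listener additionally returns early on empty states (if _state == '': return) and logs the failing json_body when a write raises.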
19161f3259237bb70904c7fb469e1831dd424fa6
Update test_Vulnerability.py
cairis/cairis/test/test_Vulnerability.py
cairis/cairis/test/test_Vulnerability.py
# Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. import unittest import os import json import BorgFactory from Borg import Borg from VulnerabilityParameters import VulnerabilityParameters from ARM import DatabaseProxyException class VulnerabilityTest(unittest.TestCase): def setUp(self): BorgFactory.initialise() f = open(os.environ['CAIRIS_SRC'] + '/test/vulnerabilities.json') d = json.load(f) f.close() self.iVuln = d['vulnerabilities'] def testStandardVulnerability(self): iVuln1 = VulnerabilityParameters(self.iVuln[0]["theName"],self.iVuln[0]["theType"],self.iVuln[0]["theDescription"],self.iVuln[0]["theEnvironmentProperties"][0][0],self.iVuln[0]["theEnvironmentProperties"][0][1]) iVuln2 = VulnerabilityParameters(self.iVuln[1]["theName"],self.iVuln[1]["theType"],self.iVuln[1]["theDescription"],self.iVuln[1]["theEnvironmentProperties"][0][0],self.iVuln[1]["theEnvironmentProperties"][0][1]) iVuln3 = VulnerabilityParameters(self.iVuln[2]["theName"],self.iVuln[2]["theType"],self.iVuln[2]["theDescription"],self.iVuln[2]["theEnvironmentProperties"][0][0],self.iVuln[2]["theEnvironmentProperties"][0][1]) b = Borg() b.dbProxy.addVulnerability(iVuln1) b.dbProxy.addVulnerability(iVuln2) b.dbProxy.addVulnerability(iVuln3) oVuln = b.dbProxy.getVulnerabilities() oVuln1 = oVuln[self.iVuln[0]["theName"]] self.assertEqual(iVuln1.name(), oVuln1.name()) self.assertEqual(iVuln1.theType(),oVuln1.theType()) self.assertEqual(iVuln1.description(),oVuln1.description()) self.assertEqual(iVuln1.theEnvironmentProperties()[0][0],oVuln1.theEnvironmentProperties()[0][0]) self.assertEqual(iVuln1.theEnvironmentProperties()[0][1],oVuln1.theEnvironmentProperties()[0][1]) oVuln2 = oVuln[self.iVuln[1]["theName"]] self.assertEqual(iVuln2.name(), oVuln2.name()) self.assertEqual(iVuln2.theType(),oVuln2.theType()) self.assertEqual(iVuln2.description(),oVuln2.description()) self.assertEqual(iVuln2.theEnvironmentProperties()[0][0],oVuln2.theEnvironmentProperties()[0][0]) self.assertEqual(iVuln2.theEnvironmentProperties()[0][1],oVuln2.theEnvironmentProperties()[0][1]) oVuln3 = oVuln[self.iVuln[2]["theName"]] self.assertEqual(iVuln3.name(), oVuln3.name()) self.assertEqual(iVuln3.theType(),oVuln3.theType()) self.assertEqual(iVuln3.description(),oVuln3.description()) self.assertEqual(iVuln3.theEnvironmentProperties()[0][0],oVuln3.theEnvironmentProperties()[0][0]) self.assertEqual(iVuln3.theEnvironmentProperties()[0][1],oVuln3.theEnvironmentProperties()[0][1]) b.dbProxy.deleteVulnerability(oVuln1.id()) b.dbProxy.deleteVulnerability(oVuln2.id()) b.dbProxy.deleteVulnerability(oVuln3.id()) def tearDown(self): b = Borg() b.dbProxy.close() if __name__ == '__main__': unittest.main()
Python
0
@@ -1406,35 +1406,32 @@ tProperties%22%5D%5B0%5D -%5B0%5D ,self.iVuln%5B0%5D%5B%22 @@ -1449,35 +1449,32 @@ entProperties%22%5D%5B -0%5D%5B 1%5D)%0A iVuln2 = @@ -1616,35 +1616,32 @@ tProperties%22%5D%5B0%5D -%5B0%5D ,self.iVuln%5B1%5D%5B%22 @@ -1659,35 +1659,32 @@ entProperties%22%5D%5B -0%5D%5B 1%5D)%0A iVuln3 = @@ -1834,19 +1834,16 @@ ies%22%5D%5B0%5D -%5B0%5D ,self.iV @@ -1877,19 +1877,16 @@ rties%22%5D%5B -0%5D%5B 1%5D)%0A @@ -2325,27 +2325,24 @@ perties()%5B0%5D -%5B0%5D ,oVuln1.theE @@ -2358,35 +2358,32 @@ tProperties()%5B0%5D -%5B0%5D )%0A self.asser @@ -2415,35 +2415,32 @@ entProperties()%5B -0%5D%5B 1%5D,oVuln1.theEnv @@ -2452,35 +2452,32 @@ entProperties()%5B -0%5D%5B 1%5D)%0A oVuln2 = @@ -2733,27 +2733,24 @@ perties()%5B0%5D -%5B0%5D ,oVuln2.theE @@ -2766,35 +2766,32 @@ tProperties()%5B0%5D -%5B0%5D )%0A self.asser @@ -2823,35 +2823,32 @@ entProperties()%5B -0%5D%5B 1%5D,oVuln2.theEnv @@ -2864,27 +2864,24 @@ roperties()%5B -0%5D%5B 1%5D)%0A oVul @@ -3145,19 +3145,16 @@ ies()%5B0%5D -%5B0%5D ,oVuln3. @@ -3182,19 +3182,16 @@ ies()%5B0%5D -%5B0%5D )%0A se @@ -3239,19 +3239,16 @@ rties()%5B -0%5D%5B 1%5D,oVuln @@ -3276,19 +3276,16 @@ rties()%5B -0%5D%5B 1%5D)%0A%0A
f0243e8ab8897d218bcf45af91a7cd03a3f83c5e
Add section comments.
cloudkitpy/container.py
cloudkitpy/container.py
# # container.py # CloudKitPy # # Created by James Barrow on 28/04/2016. # Copyright (c) 2013-2016 Pig on a Hill Productions. All rights reserved. # # !/usr/bin/env python class Container: public_cloud_database = None private_cloud_database = None container_identifier = None environment = None apns_environment = None def __init__( self, container_identifier, environment, apns_environment=None ): pass def fetch_user_info(self): """Fetch information about the current user asynchronously.""" pass def discover_user_info_with_email_address(self, email_address): """Fetch information about a single user. Based on the user's email address. """ pass def discover_user_info_with_user_record_name(self, record_name): """Fetch information about a single user using the record name.""" pass
Python
0
@@ -186,16 +186,64 @@ ainer:%0A%0A + # Getting the Public and Private Databases%0A%0A publ @@ -297,24 +297,71 @@ base = None%0A +%0A # Getting the Identifier and Environment%0A%0A containe @@ -558,32 +558,57 @@ :%0A pass%0A%0A + # Discovering Users%0A%0A def fetch_us
9274ec308974b0d6702e7f98a0b8a2c3be1cbe11
FIX #170 Throw Python34 compatible exception
autosklearn/util/dependencies.py
autosklearn/util/dependencies.py
from warnings import warn import pkg_resources import re from distutils.version import LooseVersion RE_PATTERN = re.compile('^(?P<name>[\w\-]+)((?P<operation>==|>=|>)(?P<version>(\d+\.)?(\d+\.)?(\d+)))?$') def verify_packages(packages): if not packages: return if isinstance(packages, str): packages = packages.splitlines() for package in packages: if not package: continue match = RE_PATTERN.match(package) if match: name = match.group('name') operation = match.group('operation') version = match.group('version') _verify_package(name, operation, version) else: raise ValueError('Unable to read requirement: %s' % package) def _verify_package(name, operation, version): try: module = pkg_resources.get_distribution(name) except pkg_resources.DistributionNotFound: raise MissingPackageError(name) from None if not operation: return required_version = LooseVersion(version) installed_version = LooseVersion(module.version) if operation == '==': check = required_version == installed_version elif operation == '>': check = installed_version > required_version elif operation == '>=': check = installed_version > required_version or \ installed_version == required_version else: raise NotImplementedError('operation \'%s\' is not supported' % operation) if not check: raise IncorrectPackageVersionError(name, installed_version, operation, required_version) class MissingPackageError(Exception): error_message = 'mandatory package \'{name}\' not found' def __init__(self, package_name): self.package_name = package_name super(MissingPackageError, self).__init__(self.error_message.format(name=package_name)) class IncorrectPackageVersionError(Exception): error_message = '\'{name} {installed_version}\' version mismatch ({operation}{required_version})' def __init__(self, package_name, installed_version, operation, required_version): self.package_name = package_name self.installed_version = installed_version self.operation = operation self.required_version = required_version message = self.error_message.format(name=package_name, installed_version=installed_version, operation=operation, required_version=required_version) super(IncorrectPackageVersionError, self).__init__(message)
Python
0
@@ -959,18 +959,8 @@ ame) - from None %0A%0A
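The only change is dropping the from None clause on the re-raise, which the commit subject ties to Python 3.4 compatibility; reconstructed:

    try:
        module = pkg_resources.get_distribution(name)
    except pkg_resources.DistributionNotFound:
        raise MissingPackageError(name)  # previously: raise MissingPackageError(name) from None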
c06800d2bd70c2cde6fb40de925450342f9c6d91
Update comment about timeout (no effect on SFTP/functions)
example/deploy.py
example/deploy.py
# pyinfra # File: example/deploy.py # Desc: example deploy script for the pyinfra CLI, targets: Ubuntu/Debian, CentOS & OpenBSD # Host represents the *current* server begin managed from pyinfra import host # Modules provide namespaced operations, which do the work from pyinfra.modules import server, apt, yum, files, python, git, pip, pkg, init, local # Ensure the state of a user server.user( 'pyinfra', home='/home/pyinfra', shell='/bin/bash', # Options available for all operations name='Ensure user pyinfra', sudo=True, sudo_user='root', ignore_errors=False, serial=False, run_once=False, timeout=30 # ignored for SFTP transfers ) # Ensure the state of files files.file( '/var/log/pyinfra.log', user='pyinfra', group='pyinfra', mode='644', sudo=True ) # Ensure the state of directories files.directory( host.data.env_dir, user='pyinfra', group='pyinfra', mode='755', recursive=True, sudo=True, serial=True ) files.directory( host.data.app_dir, user='pyinfra', group='pyinfra', sudo=True ) # Copy local files to remote host files.put( 'files/file.txt', '/home/vagrant/file.txt', mode='777' ) # and sync directories files.sync( 'files', '/home/pyinfra/example_files', user='pyinfra', group='pyinfra', delete=True, sudo=True ) # Generate files from local jinja2 templates files.template( 'templates/template.txt.jn2', '/home/vagrant/template.txt', # non-standard kwargs are passed to the template hostname=host.hostname ) # Work with facts about the remote host if host.os == 'Linux': if host.linux_distribution['name'] in ('Debian', 'Ubuntu'): # apt package manager apt.packages( ['git', 'python-pip'], sudo=True, update=True, update_time=3600, op='core_packages' # this and below binds these three operations to run as one ) elif host.linux_distribution['name'] == 'CentOS': # yum package manager yum.packages( ['git'], sudo=True, op='core_packages' # this and above/below binds these three operations to run as one ) # yum doesn't, by default, have pip if not host.file('/tmp/get-pip.py'): server.shell('wget https://bootstrap.pypa.io/get-pip.py -O /tmp/get-pip.py') server.shell( 'python /tmp/get-pip.py', sudo=True ) # Work with inventory groups elif 'bsd' in host.groups: # OpenBSD packages? pkg.packages( ['py-pip', 'git'], sudo=True, op='core_packages' # this and above binds these three operations to run as one ) # add_pkg does not automatically do this server.shell( 'ln -sf /usr/local/bin/pip-2.7 /usr/local/bin/pip', sudo=True ) # Execute arbitrary shell commands server.shell( 'echo "Shell command"', 'echo "And another!"' ) # and scripts server.script( 'files/test.sh' ) # Manage init systems init.service( 'crond', running=True, sudo=True, ignore_errors=True ) # Execute Python locally, mid-deploy def some_python(hostname, host, *args, **kwargs): print 'connecting hostname: {0}, actual: {1}'.format(hostname, host.hostname) python.execute(some_python, 'arg1', 'arg2', kwarg='hello world') # Ensure the state of git repositories git.repo( 'https://github.com/Fizzadar/pyinfra', host.data.app_dir, branch='develop', sudo=True, sudo_user='pyinfra' ) # Manage pip packages pip.packages( ['virtualenv'], sudo=True ) # Create a virtualenv server.shell( 'virtualenv {0}'.format(host.data.env_dir), sudo=True, sudo_user='pyinfra' ) # and manage pip within it pip.packages( ['ElasticQuery', 'JsonTest'], venv=host.data.env_dir, sudo=True, sudo_user='pyinfra' ) # Run things locally local.shell( 'echo "I am local!"', run_once=True ) # Wait for services server.wait( port=22, timeout=5 )
Python
0
@@ -653,34 +653,76 @@ 0 # -ignored for SFTP transfers +only applies to commands on the remote host (not SFTP, local Python) %0A)%0A%0A
07e767f9c19ece3c41e33cca24dd2b0317244292
Update the latest version
src/site/sphinx/conf.py
src/site/sphinx/conf.py
# -*- coding: utf-8 -*- import sys, os, re import xml.etree.ElementTree as etree from datetime import date from collections import defaultdict def etree_to_dict(t): t.tag = re.sub(r'\{[^\}]*\}', '', t.tag) d = {t.tag: {} if t.attrib else None} children = list(t) if children: dd = defaultdict(list) for dc in map(etree_to_dict, children): for k, v in dc.iteritems(): dd[k].append(v) d = {t.tag: {k:v[0] if len(v) == 1 else v for k, v in dd.iteritems()}} if t.attrib: d[t.tag].update(('@' + k, v) for k, v in t.attrib.iteritems()) if t.text: text = t.text.strip() if children or t.attrib: if text: d[t.tag]['#text'] = text else: d[t.tag] = text return d # Parse the Maven pom.xml. pom = etree_to_dict(etree.parse('../../../pom.xml').getroot())['project'] # Set the basic project information. project = pom['name'] project_short = pom['name'] copyright = str(date.today().year) + ', ' + pom['organization']['name'] # Set the project version and release. # Use the last known stable release if the current version ends with '-SNAPSHOT'. if re.match(r'^.*-SNAPSHOT$', pom['version']): release = '0.21.3.Final' version = '0.21' else: release = pom['version'] version = re.match(r'^[0-9]+\.[0-9]+', pom['version']).group(0) # Define some useful global substitutions. rst_epilog = '\n' rst_epilog += '.. |baseurl| replace:: http://line.github.io/armeria/\n' rst_epilog += '.. |jetty_alpnAgent_version| replace:: ' + pom['properties']['jetty.alpnAgent.version'] + '\n' rst_epilog += '.. |oss_parent_version| replace:: ' + pom['parent']['version'] + '\n' rst_epilog += '.. |logback_version| replace:: ' + pom['properties']['logback.version'] + '\n' rst_epilog += '.. |slf4j_version| replace:: ' + pom['properties']['slf4j.version'] + '\n' rst_epilog += '.. |tomcat_version| replace:: ' + pom['properties']['tomcat.version'] + '\n' rst_epilog += '\n' needs_sphinx = '1.0' extensions = ['sphinx.ext.autodoc'] templates_path = ['_templates'] source_suffix = '.rst' source_encoding = 'utf-8-sig' master_doc = 'index' exclude_trees = ['.build'] add_function_parentheses = True pygments_style = 'tango' master_doc = 'index' sys.path.append(os.path.abspath('_themes')) html_theme = 'sphinx_rtd_theme' html_theme_path = ['_themes'] html_short_title = project_short html_static_path = ['_static'] html_use_smartypants = True html_use_index = True html_show_sourcelink = False htmlhelp_basename = pom['artifactId']
Python
0
@@ -1252,9 +1252,9 @@ .21. -3 +4 .Fin
7965ce3036f98a9b880f19f688e7e282644e63cf
remove server_name
app/main.py
app/main.py
from flask import Flask, render_template app = Flask(__name__) app.config['DEBUG'] = True app.config['SERVER_NAME'] = "vcaen.com" @app.route('/') def show_about(): return render_template('aboutme.html') if __name__ == '__main__': app.run()
Python
0.000008
@@ -62,74 +62,8 @@ __)%0A -app.config%5B'DEBUG'%5D = True%0Aapp.config%5B'SERVER_NAME'%5D = %22vcaen.com%22 %0A%0A%0A@
8b6daea77356631d657feee438da41379a8f5f90
Version 0.46.0
cloudvolume/__init__.py
cloudvolume/__init__.py
from .connectionpools import ConnectionPool from .cloudvolume import CloudVolume from .lib import Bbox from .provenance import DataLayerProvenance from .skeletonservice import PrecomputedSkeleton, SkeletonEncodeError, SkeletonDecodeError from .storage import Storage from .threaded_queue import ThreadedQueue from .exceptions import EmptyVolumeException, EmptyRequestException, AlignmentError from .volumecutout import VolumeCutout from . import exceptions from . import secrets from . import txrx from . import viewer from .viewer import view, hyperview __version__ = '0.45.0'
Python
0
@@ -573,9 +573,9 @@ '0.4 -5 +6 .0'%0A
a1d9312e1ac6f66aaf558652d890ac2a6bd67e40
Add parent so we can track versions.
backend/loader/model/datafile.py
backend/loader/model/datafile.py
from dataitem import DataItem class DataFile(DataItem): def __init__(self, name, access, owner): super(DataFile, self).__init__(name, access, owner, "datafile") self.checksum = "" self.size = 0 self.location = "" self.mediatype = "" self.conditions = [] self.text = "" self.metatags = [] self.datadirs = []
Python
0
@@ -373,12 +373,37 @@ tadirs = %5B%5D%0A + self.parent = %22%22%0A
424b50960e7ca42c61ccc98864f9876e9688dcd4
remove empty elements
example/models.py
example/models.py
from django.db import models class Cake(models.Model): name = models.CharField(max_length=100) description = models.TextField() class Meta: verbose_name = 'Cake' verbose_name_plural = 'Cakes' def __unicode__(self): return unicode('{}'.format(self.name)) def get_summary_description(self): return self.name, self.description class Coffee(models.Model): name = models.CharField(max_length=100) rating = models.IntegerField() class Meta: verbose_name = 'Coffee' verbose_name_plural = 'Coffees' def __unicode__(self): return unicode('{}'.format(self.name)) def get_summary_description(self): return self.name, self.rating
Python
0.319064
@@ -342,34 +342,73 @@ return -self.name, +filter(None, (%0A self.name,%0A self.descri @@ -412,16 +412,28 @@ cription +,%0A )) %0A%0A%0Aclass @@ -757,27 +757,78 @@ urn -self.name, self.rating +filter(None, (%0A self.name,%0A self.rating,%0A )) %0A
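Decoded, both get_summary_description methods now wrap their tuple in filter(None, ...) so empty fields are dropped (note that under Python 3 filter would return a lazy iterator rather than a list):

    def get_summary_description(self):
        return filter(None, (
            self.name,
            self.description,
        ))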
bcc7692e14b7b695f08dfb39aaccf3dbfa67d857
Add safeGetInt to BMConfigParser
src/configparser.py
src/configparser.py
import ConfigParser from singleton import Singleton @Singleton class BMConfigParser(ConfigParser.SafeConfigParser): def set(self, section, option, value=None): if self._optcre is self.OPTCRE or value: if not isinstance(value, basestring): raise TypeError("option values must be strings") return ConfigParser.ConfigParser.set(self, section, option, value) def get(self, section, option, raw=False, vars=None): if section == "bitmessagesettings" and option == "timeformat": try: return ConfigParser.ConfigParser.get(self, section, option, raw, vars) except ConfigParser.InterpolationError: return ConfigParser.ConfigParser.get(self, section, option, True, vars) return ConfigParser.ConfigParser.get(self, section, option, True, vars) def safeGetBoolean(self, section, field): if self.has_option(section, field): try: return self.getboolean(section, field) except ValueError: return False return False def safeGet(self, section, option, default = None): if self.has_option(section, option): return self.get(section, option) else: return default def items(self, section, raw=False, vars=None): return ConfigParser.ConfigParser.items(self, section, True, vars)
Python
0.000001
@@ -1099,16 +1099,244 @@ False%0A%0A + def safeGetInt(self, section, field):%0A if self.has_option(section, field):%0A try:%0A return self.getint(section, field)%0A except ValueError:%0A return 0%0A return 0%0A%0A def
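The added method mirrors the existing safeGetBoolean, returning 0 for missing or unparsable options; decoded verbatim:

    def safeGetInt(self, section, field):
        if self.has_option(section, field):
            try:
                return self.getint(section, field)
            except ValueError:
                return 0
        return 0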
311459eef0178b5c3e337ff16c15a26d56f36471
Allow reverse proxy to set https
hoover/site/settings/docker_local.py
hoover/site/settings/docker_local.py
import os from pathlib import Path from urllib.parse import urlparse import re from logzero import logger as log from .common import * base_dir = Path(__file__).absolute().parent.parent.parent.parent SECRET_KEY = os.environ.get('SECRET_KEY') _hostname = os.environ.get('HOOVER_HOSTNAME') if _hostname: HOOVER_BASE_URL = 'https://' + _hostname ALLOWED_HOSTS = [_hostname] def bool_env(value): return (value or '').lower() in ['on', 'true'] DEBUG = bool_env(os.environ.get('DEBUG')) if DEBUG: log.warn('DEBUG mode on') if bool_env(os.environ.get('HOOVER_TWOFACTOR_ENABLED')): INSTALLED_APPS += ( 'hoover.contrib.twofactor', 'django_otp', 'django_otp.plugins.otp_totp', 'hoover.contrib.ratelimit', ) MIDDLEWARE_CLASSES += ( 'django_otp.middleware.OTPMiddleware', 'hoover.contrib.twofactor.middleware.AutoLogout', 'hoover.contrib.twofactor.middleware.RequireAuth', ) log.info("Enabling 2FA") _twofactor_invitation_valid = os.environ.get('HOOVER_TWOFACTOR_INVITATION_VALID') if _twofactor_invitation_valid: HOOVER_TWOFACTOR_INVITATION_VALID = int(_twofactor_invitation_valid) _twofactor_auto_logout = os.environ.get('HOOVER_TWOFACTOR_AUTOLOGOUT') if _twofactor_auto_logout: HOOVER_TWOFACTOR_AUTOLOGOUT = int(_twofactor_auto_logout) HOOVER_RATELIMIT_USER = (30, 60) # 30 per minute HOOVER_TWOFACTOR_RATELIMIT = (3, 60) # 3 per minute if os.environ.get('LIQUID_AUTH_CLIENT_ID'): INSTALLED_APPS += ( 'hoover.contrib.oauth2', ) LIQUID_AUTH_PUBLIC_URL = os.environ.get('LIQUID_AUTH_PUBLIC_URL') LIQUID_AUTH_INTERNAL_URL = os.environ.get('LIQUID_AUTH_INTERNAL_URL') LIQUID_AUTH_CLIENT_ID = os.environ.get('LIQUID_AUTH_CLIENT_ID') LIQUID_AUTH_CLIENT_SECRET = os.environ.get('LIQUID_AUTH_CLIENT_SECRET') log.info("Enabling Liquid OAuth2 at %s", LIQUID_AUTH_PUBLIC_URL) DATABASES = { 'default': { 'ENGINE': 'django.db.backends.postgresql_psycopg2', 'NAME': 'search', }, } # heroku-style db config _db = os.environ.get('HOOVER_DB') if _db: dbm = re.match( r'postgresql://(?P<user>[^:]+):(?P<password>[^@]+)' r'@(?P<host>[^:]+):(?P<port>\d+)/(?P<name>.+)', _db, ) if not dbm: raise RuntimeError("Can't parse HOOVER_DB value %r" % _db) DATABASES['default']['HOST'] = dbm.group('host') DATABASES['default']['PORT'] = dbm.group('port') DATABASES['default']['NAME'] = dbm.group('name') DATABASES['default']['USER'] = dbm.group('user') DATABASES['default']['PASSWORD'] = dbm.group('password') STATIC_ROOT = str(base_dir / 'static') HOOVER_UPLOADS_ROOT = str(base_dir / 'uploads') HOOVER_UI_ROOT = str(base_dir.parent / 'ui' / 'build') HOOVER_EVENTS_DIR = str(base_dir.parent / 'metrics' / 'users') HOOVER_ELASTICSEARCH_URL = os.environ.get('HOOVER_ES_URL') log.info('hoover-search configuration loaded')
Python
0.000001
@@ -2908,16 +2908,151 @@ _URL')%0A%0A +_secure_header = os.environ.get('SECURE_PROXY_SSL_HEADER')%0Aif _secure_header:%0A SECURE_PROXY_SSL_HEADER = (_secure_header, 'https')%0A%0A log.info
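Decoded, three lines are inserted ahead of the final log call, wiring Django's SECURE_PROXY_SSL_HEADER setting to an environment variable so a reverse proxy can mark forwarded requests as https:

_secure_header = os.environ.get('SECURE_PROXY_SSL_HEADER')
if _secure_header:
    SECURE_PROXY_SSL_HEADER = (_secure_header, 'https')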
5bce4bb123a086dd116abbd0932d34fa170a83cd
Update view to point to corrected template path
search/views.py
search/views.py
# GNU MediaGoblin -- federated, autonomous media hosting # Copyright (C) 2011, 2012 MediaGoblin contributors. See AUTHORS. # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. from mediagoblin import mg_globals from mediagoblin.db.base import Session from mediagoblin.db.models import (MediaEntry, MediaTag, Collection, CollectionItem, User) from mediagoblin.decorators import uses_pagination from mediagoblin.tools.response import render_to_response from mediagoblin.tools.pagination import Pagination from mediagoblin.plugins.search import forms as search_forms from mediagoblin.tools.translate import lazy_pass_to_ugettext as _ from mediagoblin.meddleware.csrf import csrf_exempt from sqlalchemy import and_, or_ import logging _log = logging.getLogger(__name__) @csrf_exempt @uses_pagination def search_results_view(request, page): media_entries = None pagination = None query = None form = search_forms.SearchForm( request.form) #if request.method == 'GET': if request.GET.get('query') != None: if request.GET.get('query') != '': query = '%' + request.GET.get('query') + '%' #cursor = MediaEntry.query.filter(MediaEntry.uploader==1).\ matches = MediaEntry.query.filter( and_( MediaEntry.state == u'processed', or_( MediaEntry.title.ilike(query), MediaEntry.description.ilike(query) ) )).order_by(MediaEntry.created.desc()) #_log.info(matches) pagination = Pagination(page, matches) media_entries = pagination() return render_to_response( request, 'search/results.html', {'media_entries': media_entries, 'pagination': pagination, 'form': form})
Python
0
@@ -2355,18 +2355,46 @@ request, - ' +%0A 'mediagoblin/plugins/ search/r
f9087a4d11db748586227829f44b5bb26994a67e
Adjust the class name in accordance with python3
pycep_correios/correios.py
pycep_correios/correios.py
# -*- coding: utf-8 -*- # ############################################################################# # The MIT License (MIT) # # Copyright (c) 2016 Michell Stuttgart # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. # ############################################################################# import xml.etree.cElementTree as Et import requests from pycep_correios.correios_exceptions import CorreiosCEPConnectionErrorException from pycep_correios.correios_exceptions import CorreiosCEPInvalidCEPException from pycep_correios.correios_exceptions import CorreiosTimeOutException from pycep_correios.correios_exceptions import CorreiosCEPTooManyRedirectsException class Correios(object): URL = 'https://apps.correios.com.br/SigepMasterJPA' \ '/AtendeClienteService/AtendeCliente?wsdl' HEADER = '<soap:Envelope ' \ 'xmlns:soap=\"http://schemas.xmlsoap.org/soap/envelope/\" ' \ 'xmlns:cli=\"http://cliente.bean.master.sigep.bsb.correios.com' \ '.br/\"><soap:Header/><soap:Body>' FOOTER = '</soap:Body></soap:Envelope>' @staticmethod def get_cep(cep: str) -> dict: """ Retorna dos dados do endereço de um dado cep, a saber: rua: logradouro do cep bairro: bairro do cep cidade: cidade do cep uf: Abreviacao do estado do cep complementento: informações adicionais sobre o cep outro: informações variadas sobre o cep como por exemplo o intervalo de numero de residência que o mesmo compreende. :param cep: string contendo o cep a ser consultado :return: dict contendo os dados do endereço do cep consultado. """ try: cep = cep.replace('-', '') cep = cep.replace('.', '') except AttributeError: raise CorreiosCEPInvalidCEPException('[ERRO] CEP deve ser do tipo string, ' 'mas o tipo encontrado foi %s!' % type(cep)) xml = Correios._mount_request(cep) try: response = requests.post(Correios.URL, data=xml, headers={'Content-type': 'text/xml'}, verify=False) except requests.exceptions.Timeout: raise CorreiosTimeOutException('Connection Timeout, please retry later') except requests.exceptions.TooManyRedirects: raise CorreiosCEPTooManyRedirectsException('Bad URL, check the formatting ' 'of your request and try again') except requests.ConnectionError: raise CorreiosCEPConnectionErrorException('Could not connect to the API. 
' 'Please check your connection') else: if not response.ok: msg = Correios._parse_error(response.text) raise CorreiosCEPInvalidCEPException(msg) address_data = Correios._parse_response(response.text) return address_data @staticmethod def _mount_request(cep): xml = Correios.HEADER xml += '<cli:consultaCEP>' xml += '<cep>%s</cep>' % cep xml += '</cli:consultaCEP>' xml += Correios.FOOTER return xml @staticmethod def _parse_response(xml): end = Et.fromstring(xml).find('.//return') response = { 'rua': end.findtext('end'), 'bairro': end.findtext('bairro'), 'cidade': end.findtext('cidade'), 'uf': end.findtext('uf'), 'complemento': end.findtext('complemento'), 'outro': end.findtext('complemento2') } return response @staticmethod def _parse_error(xml): return Et.fromstring(xml).findtext('.//faultstring')
Python
0
@@ -1354,38 +1354,24 @@ uests%0A%0Afrom -pycep_correios .correios_ex @@ -1423,38 +1423,24 @@ eption%0Afrom -pycep_correios .correios_ex @@ -1487,38 +1487,24 @@ eption%0Afrom -pycep_correios .correios_ex @@ -1553,22 +1553,8 @@ rom -pycep_correios .cor @@ -1634,16 +1634,8 @@ eios -(object) :%0A%0A
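Two mechanical changes, decoded: the exception imports drop the pycep_correios package prefix in favour of relative imports, and the class no longer inherits explicitly from object (implicit under Python 3); a sketch:

from .correios_exceptions import CorreiosCEPConnectionErrorException
from .correios_exceptions import CorreiosCEPInvalidCEPException
from .correios_exceptions import CorreiosTimeOutException
from .correios_exceptions import CorreiosCEPTooManyRedirectsException


class Correios:  # was: class Correios(object):
    ...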
d77777c2a011e77b284748d1dfbd3cd31e6c8565
make verifier regexing more robust
c_test_environment/verifier.py
c_test_environment/verifier.py
import re import sys def verify(testout, expected, ordered): test = ({}, []) expect = ({}, []) def addTuple(tc, t): if ordered: tcl = tc[1] tcl.append(t) else: tcs = tc[0] if t not in tcs: tcs[t] = 1 else: tcs[t]+=1 with open(testout, 'r') as file: for line in file.readlines(): if re.match(r'Materialized', line): tlist = [] for number in re.finditer(r'(\d+)', line): tlist.append(int(number.group(0))) t = tuple(tlist) addTuple(test, t) with open(expected, 'r') as file: for line in file.readlines(): tlist = [] for number in re.finditer(r'(\d+)', line): tlist.append(int(number.group(0))) t = tuple(tlist) addTuple(expect, t) print test print expect assert test == expect, "\n test: %s !=\n expect:%s" % (test, expect) print "pass" if __name__ == '__main__': testout=sys.argv[1] expected=sys.argv[2] ordered = False if len(sys.argv) > 3: if sys.argv[3] == 'o': ordered = True verify(testout, expected, ordered)
Python
0.000004
@@ -55,16 +55,93 @@ dered):%0A + numpat = re.compile(r'(%5Cd+)')%0A tuplepat = re.compile(r'Materialized')%0A test @@ -500,42 +500,50 @@ -if re.match(r'Materialized', line) +m = tuplepat.search(line)%0A if m :%0A @@ -589,34 +589,38 @@ for number in -re +numpat .finditer(r'(%5Cd+ @@ -609,38 +609,37 @@ at.finditer( -r'(%5Cd+)', line +line, m.end() ):%0A @@ -878,18 +878,22 @@ mber in -re +numpat .findite @@ -898,18 +898,8 @@ ter( -r'(%5Cd+)', line
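Decoded, the patterns are compiled once per verify() call, re.match becomes a search, and the digit scan starts only after the matched landmark, so numbers elsewhere on the line cannot leak into the tuple; a sketch of the changed region:

def verify(testout, expected, ordered):
    numpat = re.compile(r'(\d+)')
    tuplepat = re.compile(r'Materialized')
    ...
            m = tuplepat.search(line)
            if m:
                tlist = []
                for number in numpat.finditer(line, m.end()):
                    tlist.append(int(number.group(0)))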
82641a936b2215480e29896cdafed3872c2928c6
Remove xfails for newly passing tests in test_recipe_integration.py
test/test_recipes_integration.py
test/test_recipes_integration.py
import pytest import os import subprocess import json # Each test with recipe and appropriate parameters in one line # Using bracket annotation to set it optional (xfail) TEST_CASES = [ "activedata_usage", ["backout_rate"], ["code_coverage --path caps --rev 45715ece25fc"], "code_coverage_by_suite --path caps --rev 45715ece25fc", "config_durations", "files_with_coverage", ["intermittent_tests"], ["intermittent_test_data"], ["raw_coverage --path caps --rev 45715ece25fc"], "test_durations", ["tests_config_times -t test-windows10-64/opt-awsy-e10s"], ["tests_in_duration"], ["try_efficiency"], ["try_usage"], ["try_users"] ] def load_tests(tests): return [pytest.param(test[0], marks=pytest.mark.xfail) if isinstance(test, list) else test for test in tests] @pytest.mark.skipif(os.getenv("TRAVIS_EVENT_TYPE") != "cron", reason="Not run by cron job") @pytest.mark.parametrize("recipe", load_tests(TEST_CASES)) def test_recipe_integration(recipe): command = ['adr', '--format', 'json'] command.extend(recipe.split(" ")) data = subprocess.check_output(command, stderr=subprocess.STDOUT) result = json.loads(data) assert result assert len(result)
Python
0.000004
@@ -208,17 +208,16 @@ e%22,%0A -%5B %22backout @@ -222,17 +222,16 @@ ut_rate%22 -%5D ,%0A %5B%22 @@ -390,25 +390,24 @@ erage%22,%0A -%5B %22intermitten @@ -414,24 +414,22 @@ t_tests%22 -%5D ,%0A -%5B %22intermi @@ -444,17 +444,16 @@ st_data%22 -%5D ,%0A %5B%22 @@ -584,25 +584,24 @@ e10s%22%5D,%0A -%5B %22tests_in_du @@ -607,24 +607,22 @@ uration%22 -%5D ,%0A -%5B %22try_eff @@ -629,24 +629,22 @@ iciency%22 -%5D ,%0A -%5B %22try_usa @@ -646,17 +646,16 @@ y_usage%22 -%5D ,%0A %5B%22
4be67b6f46c5f4a7f8a2b89199cff2373dcc7a43
Fix casing
ca_qc_trois_rivieres/people.py
ca_qc_trois_rivieres/people.py
# coding: utf-8
from utils import CanadianScraper, CanadianPerson as Person

import re

COUNCIL_PAGE = 'http://www.v3r.net/a-propos-de-la-ville/vie-democratique/conseil-municipal/conseillers-municipaux'
MAYOR_URL = 'http://www.v3r.net/a-propos-de-la-ville/vie-democratique/mairie'


class TroisRivieresPersonScraper(CanadianScraper):
    def scrape(self):
        # mayor first, can't find email
        page = self.lxmlize(MAYOR_URL)
        photo_url = page.xpath('//img[contains(@alt, "Maire")]//@src')[0]
        name = page.xpath('//img/@alt[contains(., "Maire")]')[0]
        assert len(name), "missing mayor's name"
        name = re.sub(r'Maire', '', name, flags=re.I).strip()
        p = Person(primary_org='legislature', name=name, district="Trois-Rivières",
                   role="Maire", image=photo_url)
        p.add_source(MAYOR_URL)
        yield p

        page = self.lxmlize(COUNCIL_PAGE)
        members = page.xpath('//div[@class="photos_conseillers"]//figure')
        assert len(members), 'No councillors found'
        for member in members:
            photo_url = member.xpath('.//a//img/@src')[0]
            url = member.xpath('.//figcaption//a/@href')[0]
            email = self.lxmlize(url).xpath(
                '//div[@class="content-page"]//a[starts-with(@href, "mailto:")]/@href')[0]
            email = re.sub('^mailto:', '', email)
            name, district = map(
                lambda x: x.strip(),
                member.xpath('.//figcaption//text()'))
            if district.lower() not in ('des estacades', 'des plateaux', 'des terrasses', 'du sanctuaire'):
                district = re.sub('\A(?:de(?: la)?|des|du) ', '', district, flags=re.I)
            p = Person(primary_org='legislature', name=name, district=district,
                       role='Conseiller', image=photo_url)
            p.add_source(COUNCIL_PAGE)
            p.add_source(url)
            p.add_contact('email', email)
            yield p
Python
0.000006
@@ -1530,12 +1530,8 @@ er() - not in @@ -1639,16 +1639,156 @@ aire'):%0A + district = re.sub('%5CA(?:de(?: la)?%7Cdes%7Cdu) ', lambda match: match.group(0).lower(), district, flags=re.I)%0A else:%0A
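Decoded, the diff above flips the membership test and, for the four special districts, lowercases the leading particle instead of stripping it. A sketch of the patched branch (indentation inferred from the hunk context):

if district.lower() in ('des estacades', 'des plateaux', 'des terrasses', 'du sanctuaire'):
    # keep the particle but normalize its casing, e.g. 'Des Estacades' -> 'des Estacades'
    district = re.sub('\A(?:de(?: la)?|des|du) ', lambda match: match.group(0).lower(), district, flags=re.I)
else:
    district = re.sub('\A(?:de(?: la)?|des|du) ', '', district, flags=re.I)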
7471d1bcfc80864ee4e9ca9ad3b8ad10868a3cdc
use tempest img
ceilometer/tests/tempest/scenario/test_autoscaling.py
ceilometer/tests/tempest/scenario/test_autoscaling.py
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import os
import unittest

from gabbi import driver
from tempest import config
from tempest import test

from ceilometer.tests.tempest.service import client


class ClientManager(client.Manager):
    load_clients = [
        'image_client_v2',
    ]


class TestAutoscalingGabbi(test.BaseTestCase):
    credentials = ['admin']
    client_manager = ClientManager

    @classmethod
    def skip_checks(cls):
        super(TestAutoscalingGabbi, cls).skip_checks()
        for name in ["aodh_plugin", "gnocchi", "nova", "heat",
                     "ceilometer", "glance"]:
            cls._check_service(name)

    @classmethod
    def _check_service(cls, name):
        if not getattr(config.CONF.service_available, name, False):
            raise cls.skipException("%s support is required" %
                                    name.capitalize())

    @classmethod
    def resource_setup(cls):
        super(TestAutoscalingGabbi, cls).resource_setup()
        test_dir = os.path.join(os.path.dirname(__file__), '..', '..',
                                'integration', 'gabbi', 'gabbits-live')
        cls.tests = driver.build_tests(
            test_dir, unittest.TestLoader(),
            host='localhost', port='13245',
            test_loader_name='tempest.scenario.telemetry-autoscaling.test')

        auth = cls.os_admin.auth_provider.get_auth()
        os.environ["ADMIN_TOKEN"] = auth[0]
        os.environ["AODH_SERVICE_URL"] = cls._get_endpoint_for(
            auth, "alarming_plugin")
        os.environ["GNOCCHI_SERVICE_URL"] = cls._get_endpoint_for(
            auth, "metric")
        os.environ["HEAT_SERVICE_URL"] = cls._get_endpoint_for(
            auth, "orchestration")
        os.environ["NOVA_SERVICE_URL"] = cls._get_endpoint_for(auth, "compute")
        os.environ["GLANCE_SERVICE_URL"] = cls._get_endpoint_for(auth, "image")
        images = cls.os_admin.image_client_v2.list_images()["images"]
        for img in images:
            name = img["name"]
            # devstack or tempest format
            if ((name.startswith("cirros") and name.endswith("-uec")) or
                    name == 'scenario-img'):
                os.environ["GLANCE_IMAGE_NAME"] = name
                break

        else:
            cls.skipException("A cirros-.*-uec/cirros image is required")

    @staticmethod
    def clear_credentials():
        # FIXME(sileht): We don't want the token to be invalided, but
        # for some obcurs reason, clear_credentials is called before/during run
        # So, make the one used by tearDropClass a dump, and call it manually
        # in run()
        pass

    def run(self, result=None):
        self.setUp()
        try:
            self.tests.run(result)
        finally:
            super(TestAutoscalingGabbi, self).clear_credentials()
            self.tearDown()

    @staticmethod
    def _get_endpoint_for(auth, service):
        opt_section = getattr(config.CONF, service)
        endpoint_type = opt_section.endpoint_type
        is_keystone_v3 = 'catalog' in auth[1]

        if is_keystone_v3:
            if endpoint_type.endswith("URL"):
                endpoint_type = endpoint_type[:-3]
            catalog = auth[1]['catalog']
            endpoints = [e['endpoints'] for e in catalog
                         if e['type'] == opt_section.catalog_type]
            if not endpoints:
                raise Exception("%s endpoint not found" %
                                config.CONF.metric.catalog_type)
            endpoints = [e['url'] for e in endpoints[0]
                         if e['interface'] == endpoint_type]
            if not endpoints:
                raise Exception("%s interface not found for endpoint %s" %
                                (endpoint_type,
                                 config.CONF.metric.catalog_type))
            return endpoints[0]

        else:
            if not endpoint_type.endswith("URL"):
                endpoint_type += "URL"
            catalog = auth[1]['serviceCatalog']
            endpoints = [e for e in catalog
                         if e['type'] == opt_section.catalog_type]
            if not endpoints:
                raise Exception("%s endpoint not found" %
                                config.CONF.metric.catalog_type)
            return endpoints[0]['endpoints'][0][endpoint_type]

    @staticmethod
    def test_fake():
        # NOTE(sileht): A fake test is needed to have the class loaded
        # by the test runner
        pass
Python
0.000013
@@ -661,166 +661,32 @@ pest - import test%0A%0Afrom ceilometer.tests.tempest.service import client%0A%0A%0Aclass ClientManager(client.Manager):%0A load_clients = %5B%0A 'image_client_v2',%0A %5D +.scenario import manager %0A%0A%0Ac @@ -715,25 +715,28 @@ bbi( -test.BaseTestCase +manager.ScenarioTest ):%0A @@ -764,44 +764,20 @@ min' -%5D%0A client_manager = ClientManager +, 'primary'%5D %0A%0A @@ -2266,461 +2266,8 @@ ge%22) -%0A images = cls.os_admin.image_client_v2.list_images()%5B%22images%22%5D%0A for img in images:%0A name = img%5B%22name%22%5D%0A # devstack or tempest format%0A if ((name.startswith(%22cirros%22) and name.endswith(%22-uec%22)) or%0A name == 'scenario-img'):%0A os.environ%5B%22GLANCE_IMAGE_NAME%22%5D = name%0A break%0A%0A else:%0A cls.skipException(%22A cirros-.*-uec/cirros image is required%22) %0A%0A @@ -2600,24 +2600,24 @@ sult=None):%0A - self @@ -2621,24 +2621,93 @@ elf.setUp()%0A + os.environ%5B%22GLANCE_IMAGE_NAME%22%5D = self.glance_image_create()%0A try:
5ecab61cd66b821d70e73006f60d8f7908bfb403
Remove comment
capstone/mdp/fixed_game_mdp.py
capstone/mdp/fixed_game_mdp.py
from .mdp import MDP
from .game_mdp import GameMDP
from ..utils import utility


class FixedGameMDP(GameMDP):

    def __init__(self, game, opp_player, opp_idx):
        '''
        opp_player: the opponent player
        opp_idx: the idx of the opponent player in the game
        '''
        self._game = game
        self._opp_player = opp_player
        self._opp_idx = opp_idx
        self._agent_idx = opp_idx ^ 1
        self._states = {}

    #######
    # MDP #
    #######


    def reward(self, game, move, next_game):
        return utility(next_game, self._agent_idx) if next_game.is_over() else 0

    def start_state(self):
        new_game = self._game.copy()
        if not new_game.is_over() and new_game.cur_player() == self._opp_idx:
            chosen_move = self._opp_player.choose_move(new_game)
            new_game.make_move(chosen_move)
        return new_game

    def transitions(self, game, move):
        if game.is_over():
            return []
        new_game = game.copy().make_move(move)
        if not new_game.is_over() and new_game.cur_player() == self._opp_idx:
            chosen_move = self._opp_player.choose_move(new_game)
            new_game.make_move(chosen_move)
        return [(new_game, 1.0)]
Python
0
@@ -444,46 +444,8 @@ %7B%7D%0A%0A - #######%0A # MDP #%0A #######%0A%0A%0A
8b29dd1e3b343d2e7f6e20c88bf6d09bb65f5c5e
raise AttributeError instead of asserting
config_loader/loader.py
config_loader/loader.py
from __future__ import unicode_literals

import yaml

from .errors import ConfigError


class ConfigLoader(object):
    config_root_class = None

    def __init__(self, config_text, context={}, *args, **kwargs):
        assert self.config_root_class is not None

        self.config_text = config_text
        self.config_dict = None
        self.config_root = None
        self.variable_context = context
        self._errors = []

        self.load()

    def load(self):
        """
        - should not be empty
        - yaml itself should be valid (should we grab more than 1 yaml error at a time?)
        - we should have whitelist of fields the can set at each level, and start
          getting objects out of those, processing grammar if necessary, validating
          other settings errors coming all the way back up
        """
        if not self.config_text.strip():
            self._errors.append(ConfigError(title='YAML is empty', description='Your configuration file appears to be empty.'))
            return

        # simple way to check that yaml itself is valid
        try:
            self.config_dict = yaml.load(self.config_text)
        except yaml.YAMLError as e:
            error = ConfigError.create_from_yaml_error(e)
            self._errors.append(error)
            # could have more than 1 line, keep giong

        if self.config_dict:
            # we have valid yaml with data, so start checking the components
            node_tree = yaml.compose(self.config_text)
            # give it the parsed settings, and the node info
            self.config_root = self.config_root_class(value=self.config_dict,
                                                      value_node=node_tree,
                                                      context=self.variable_context)

    @property
    def errors(self):
        if self.config_root:
            return self._errors + self.config_root._get_all_errors()
        return self._errors

    def is_valid(self):
        return not self.errors

    def __getitem__(self, key):
        # just pass off as dict right now
        return self.config_dict[key]

    def as_dict(self):
        d = {
            'config_text': self.config_text,
            'config': self.config_root._as_dict() if self.config_root else None,
        }
        if self.errors:
            d['errors'] = [x.as_dict() for x in self.errors]
        return d

    def as_text(self, simple=False):
        if self.is_valid():
            return self.config_text
        else:
            output = []
            errored_lines = set([x.line for x in self.errors])

            for index, line in enumerate(self.config_text.splitlines()):
                if simple:
                    if index in errored_lines:
                        output.append('{} # FIXME <url>'.format(line))
                    else:
                        output.append(line)
                else:
                    output.append(line)
                    if index in errored_lines:
                        errors_on_line = [x for x in self.errors if x.line == index]

                        # If there is more than one error on a line, try to group them by title and place in a
                        # single comment block
                        if len(errors_on_line) > 1:
                            error_str = """# {line}\n# ^\n# --------\n""".format(line=line)
                            unique_titles = set([x.title for x in errors_on_line])
                            for t in sorted(unique_titles):
                                error_str += """# {title}\n""".format(title=t)
                                for d in sorted([x.description for x in errors_on_line if x.title == t]):
                                    error_str += """# - {description}\n""".format(description=d)
                            error_str += """# --------"""
                        else:
                            e = errors_on_line[0]
                            num_markers = 1 if not e.end_column else e.end_column - e.start_column
                            markers = '^' * num_markers
                            error_str = """\
# {line}
# {markers}
# --------
# {title}
# - {description}
# --------""".format(
                                line=line,
                                markers=markers.rjust(e.start_column + 1),
                                title=e.title,
                                description=e.description,
                            )

                        output.append(error_str)

            text = '\n'.join([str(x) for x in output])
            return text
Python
0
@@ -216,20 +216,30 @@ -assert +if not hasattr( self -. +, ' conf @@ -255,20 +255,108 @@ lass - is not None +'):%0A raise AttributeError('config_root_class must defined in subclasses of ConfigLoader') %0A%0A
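Decoded, the diff swaps the assert for an explicit attribute check that raises a clearer error. A sketch of the patched guard in __init__ (the message text, including its dropped "be", is taken verbatim from the hunk):

def __init__(self, config_text, context={}, *args, **kwargs):
    # replaces `assert self.config_root_class is not None`
    if not hasattr(self, 'config_root_class'):
        raise AttributeError('config_root_class must defined in subclasses of ConfigLoader')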
b8c3ad8c9eb4cdf2618839b425b8413181a443ff
Fix bug in writePrecisePathToSnapshot not bactracking prperly to the initial structure
AdaptivePELE/analysis/writePrecisePathToSnapshot.py
AdaptivePELE/analysis/writePrecisePathToSnapshot.py
""" Recreate the trajectory fragments to the led to the discovery of a snapshot, specified by the tuple (epoch, trajectory, snapshot) and write as a pdb file """ import os import sys import argparse import glob import itertools from AdaptivePELE.utilities import utilities def parseArguments(): """ Parse the command-line options :returns: :py:class:`.Clustering`, int, int, int, str -- Clustering object, number of trajectory, number of snapshot, number of epoch, output path where to write the files """ desc = "Write the information related to the conformation network to file\n" parser = argparse.ArgumentParser(description=desc) parser.add_argument("clusteringObject", type=str, help="Path to the clustering object") parser.add_argument("trajectory", type=int, help="Trajectory number") parser.add_argument("snapshot", type=int, help="Snapshot to select (in accepted steps)") parser.add_argument("epoch", type=str, help="Path to the epoch to search the snapshot") parser.add_argument("-o", type=str, default=None, help="Output path where to write the files") args = parser.parse_args() return args.clusteringObject, args.trajectory, args.snapshot, args.epoch, args.o if __name__ == "__main__": clusteringObject, trajectory, snapshot, epoch, outputPath = parseArguments() if outputPath is not None: outputPath = os.path.join(outputPath, "") if not os.path.exists(outputPath): os.makedirs(outputPath) else: outputPath = "" sys.stderr.write("Reading clustering object...\n") cl = utilities.readClusteringObject(clusteringObject) pathway = [] # Strip out trailing backslash if present pathPrefix, epoch = os.path.split(epoch.rstrip("/")) sys.stderr.write("Creating pathway...\n") while epoch != "0": filename = glob.glob(os.path.join(pathPrefix,epoch,"*traj*_%d.pdb" % trajectory)) snapshots = utilities.getSnapshots(filename[0]) snapshots = snapshots[:snapshot+1] pathway.insert(0, snapshots) procMapping = open(os.path.join(pathPrefix, epoch, "processorMapping.txt")).read().rstrip().split(':') epoch, trajectory, snapshot = map(int, procMapping[trajectory-1][1:-1].split(',')) epoch = str(epoch) sys.stderr.write("Writing pathway...\n") with open(outputPath+"pathway.pdb", "a") as f: f.write("ENDMDL\n".join(itertools.chain.from_iterable(pathway)))
Python
0
@@ -1857,20 +1857,12 @@ ile -epoch != %220%22 +True :%0A @@ -1912,22 +1912,24 @@ hPrefix, + epoch, + %22*traj*_ @@ -2087,16 +2087,210 @@ pshots)%0A + if epoch == '0':%0A # Once we get to epoch 0, we just need to append the trajectory%0A # where the cluster was found and we can break out of the loop%0A break%0A @@ -2301,17 +2301,16 @@ Mapping - = open(o
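Applying the diff, the loop now runs unconditionally and breaks only after appending the epoch-0 fragment, which is the backtracking fix the subject describes. An inferred sketch of the patched loop:

while True:
    filename = glob.glob(os.path.join(pathPrefix, epoch, "*traj*_%d.pdb" % trajectory))
    snapshots = utilities.getSnapshots(filename[0])
    snapshots = snapshots[:snapshot+1]
    pathway.insert(0, snapshots)
    if epoch == '0':
        # Once we get to epoch 0, we just need to append the trajectory
        # where the cluster was found and we can break out of the loop
        break
    procMapping = open(os.path.join(pathPrefix, epoch, "processorMapping.txt")).read().rstrip().split(':')
    epoch, trajectory, snapshot = map(int, procMapping[trajectory-1][1:-1].split(','))
    epoch = str(epoch)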
d5196874181ce55585770bbb72f6d8b8ef1df5a3
Add tests for joins and wildcard tables
test/test_sqlalchemy_bigquery.py
test/test_sqlalchemy_bigquery.py
from sqlalchemy.engine import create_engine
from sqlalchemy.schema import Table, MetaData, Column
from sqlalchemy import types, func, case
from sqlalchemy.sql import expression, select, literal_column
from sqlalchemy.exc import NoSuchTableError
from sqlalchemy.orm import sessionmaker

import pytest
import sqlalchemy
import datetime

ONE_ROW_CONTENTS = [
    588,
    datetime.datetime(2013, 10, 10, 11, 27, 16, tzinfo=datetime.timezone.utc),
    'W 52 St & 11 Ave',
    40.76727216,
    False,
    datetime.date(2013, 10, 10),
    datetime.datetime(2013, 10, 10, 11, 27, 16),
    datetime.time(11, 27, 16),
    b'\xef'
]

@pytest.fixture(scope='session')
def engine():
    engine = create_engine('bigquery://', echo=True)
    return engine

@pytest.fixture(scope='session')
def table(engine):
    return Table('test_pybigquery.sample', MetaData(bind=engine), autoload=True)

@pytest.fixture(scope='session')
def table_one_row(engine):
    return Table('test_pybigquery.sample_one_row', MetaData(bind=engine), autoload=True)

@pytest.fixture(scope='session')
def session(engine):
    Session = sessionmaker(bind=engine)
    session = Session()
    return session

@pytest.fixture(scope='session')
def query(table):
    col1 = literal_column("TIMESTAMP_TRUNC(timestamp, DAY)").label("timestamp_label")
    col2 = func.sum(table.c.integer)
    query = (
        select([
            col1,
            col2,
        ])
        .where(col1 < datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S'))
        .group_by(col1)
        .order_by(col2)
    )
    return query

def test_reflect_select(engine, table):
    assert len(table.c) == 9
    assert isinstance(table.c.integer, Column)
    assert isinstance(table.c.integer.type, types.Integer)
    assert isinstance(table.c.timestamp.type, types.TIMESTAMP)
    assert isinstance(table.c.string.type, types.String)
    assert isinstance(table.c.float.type, types.Float)
    assert isinstance(table.c.boolean.type, types.Boolean)
    assert isinstance(table.c.date.type, types.DATE)
    assert isinstance(table.c.datetime.type, types.DATETIME)
    assert isinstance(table.c.time.type, types.TIME)
    assert isinstance(table.c.bytes.type, types.BINARY)

    rows = table.select().execute().fetchall()
    assert len(rows) == 1000

def test_content_from_raw_queries(engine):
    rows = engine.execute('SELECT * FROM test_pybigquery.sample_one_row').fetchall()
    assert list(rows[0]) == ONE_ROW_CONTENTS

def test_content_from_reflect(engine, table_one_row):
    rows = table_one_row.select().execute().fetchall()
    assert list(rows[0]) == ONE_ROW_CONTENTS

def test_unicode(engine, table_one_row):
    unicode_str = "白人看不懂"
    returned_str = sqlalchemy.select(
        [expression.bindparam("好", unicode_str)],
        from_obj=table_one_row,
    ).scalar()
    assert returned_str == unicode_str

def test_reflect_select_shared_table(engine):
    one_row = Table('bigquery-public-data.samples.natality', MetaData(bind=engine), autoload=True)
    row = one_row.select().limit(1).execute().first()
    assert len(row) >= 1

def test_reflect_table_does_not_exist(engine):
    with pytest.raises(NoSuchTableError):
        table = Table('test_pybigquery.table_does_not_exist', MetaData(bind=engine), autoload=True)

    assert Table('test_pybigquery.table_does_not_exist', MetaData(bind=engine)).exists() is False

def test_reflect_dataset_does_not_exist(engine):
    with pytest.raises(NoSuchTableError):
        Table('dataset_does_not_exist.table_does_not_exist', MetaData(bind=engine), autoload=True)

def test_tables_list(engine):
    assert 'test_pybigquery.sample' in engine.table_names()
    assert 'test_pybigquery.sample_one_row' in engine.table_names()

def test_group_by(session, table):
    """labels in SELECT clause should be correclty formatted (dots are replaced with underscores)"""
    result = session.query(table.c.string, func.count(table.c.integer)).group_by(table.c.string).all()
    assert len(result) > 0

def test_session_query(session, table):
    col_concat = func.concat(table.c.string).label('concat')
    result = (
        session
        .query(
            table.c.string,
            col_concat,
            func.avg(table.c.integer),
            func.sum(case([(table.c.boolean == True, 1)], else_=0))
        )
        .group_by(table.c.string, col_concat)
        .having(func.avg(table.c.integer) > 10)
    ).all()
    assert len(result) > 0

def test_custom_expression(engine, query):
    """GROUP BY clause should use labels instead of expressions"""
    result = engine.execute(query).fetchall()
    assert len(result) > 0

def test_compiled_query_literal_binds(engine, query):
    compiled = query.compile(engine, compile_kwargs={"literal_binds": True})
    result = engine.execute(compiled).fetchall()
    assert len(result) > 0
Python
0
@@ -1564,16 +1564,17 @@ query%0A%0A +%0A def test @@ -4811,28 +4811,549 @@ %0A assert len(result) %3E 0%0A +%0A%0Adef test_joins(session, table, table_one_row):%0A result = (session.query(table.c.string, func.count(table_one_row.c.integer))%0A .join(table_one_row, table_one_row.c.string == table.c.string)%0A .group_by(table.c.string).all())%0A%0A assert len(result) %3E 0%0A%0A%0Adef test_querying_wildcard_tables(engine, query):%0A table = Table('bigquery-public-data.noaa_gsod.gsod*', MetaData(bind=engine), autoload=True)%0A rows = table.select().limit(1).execute().first()%0A assert len(rows) %3E 0
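The diff appends two new tests to the file; decoded from the escaped hunk they read approximately as follows (whitespace inferred):

def test_joins(session, table, table_one_row):
    result = (session.query(table.c.string, func.count(table_one_row.c.integer))
              .join(table_one_row, table_one_row.c.string == table.c.string)
              .group_by(table.c.string).all())

    assert len(result) > 0


def test_querying_wildcard_tables(engine, query):
    # BigQuery wildcard table: matches every table whose name starts with "gsod"
    table = Table('bigquery-public-data.noaa_gsod.gsod*', MetaData(bind=engine), autoload=True)
    rows = table.select().limit(1).execute().first()
    assert len(rows) > 0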
c072f88df49dada1c664c0f4d71e8084c3690449
Make the message optional in /ping.
horsefax/bot/modules/groups.py
horsefax/bot/modules/groups.py
from pony.orm import *
import pony.orm

from typing import cast, Optional

from horsefax.telegram.services.command import Command

from ..core import HorseFaxBot, ModuleTools, BaseModule, ChatService
from ..db import db
from .tracking import TelegramUser


class PingGroup(db.Entity):
    id = PrimaryKey(int, auto=True)
    name = Required(str, unique=True)
    added_by = pony.orm.Optional(int)
    members = Set(TelegramUser)


class CollectionModule(BaseModule):
    def __init__(self, bot: HorseFaxBot, tools: ModuleTools) -> None:
        self.bot = bot
        self.util = tools
        self.util.register_command("addgroup", self.add_group)
        self.util.register_command("joingroup", self.join_group)
        self.util.register_command("leavegroup", self.leave_group)
        self.util.register_command("removegroup", self.remove_group)
        self.util.register_command("listgroups", self.list_groups)
        self.util.register_command("showgroup", self.show_group)
        self.util.register_command("ping", self.ping_group)

    @db_session
    def add_group(self, command: Command) -> str:
        if len(command.args) != 1:
            return "Syntax: `/addgroup <group name>`"
        name = command.args[0].lower()
        if PingGroup.get(name=name) is None:
            group = PingGroup(name=name, added_by=command.message.sender.id)
            return f"Created group `{group.name}`. Join it using `/joingroup {group.name}`."
        else:
            return f"The group {name} already exists. Join it using `/joingroup {group.name}`."

    @db_session
    def join_group(self, command: Command) -> str:
        if len(command.args) != 1:
            return "Syntax: `/joingroup <group name>`"
        name = command.args[0].lower()
        group = PingGroup.get(name=name)
        if group is None:
            return f"No such group: `{name}`."
        user = TelegramUser[command.message.sender.id]
        if user in group.members:
            return f"You are already a member of the group `{name}`."
        if user.username is None:
            return "You cannot join a group unless you have a set a username in Telegram."
        group.members.add(user)
        return f"Joined {group.name}, which now has {len(group.members)} member(s)."

    @db_session
    def ping_group(self, command: Command) -> Optional[str]:
        if len(command.args) < 2:
            return "Syntax: `/ping <group name> <message>`"
        message = ' '.join(command.args[1:])
        group_name = command.args[0].lower()
        group = PingGroup.get(name=group_name)
        if group is None:
            return f"The group `{group_name}` does not exist."
        if len(group.members) == 0:
            return f"The group `{group.name}` has np members."
        output = f"{' '.join(f'@{x}' for x in group.members.username if x)}: {message}"
        self.bot.message(command.message.chat, output, parsing=ChatService.ParseMode.NONE)

    @db_session
    def leave_group(self, command: Command) -> str:
        if len(command.args) != 1:
            return "Syntax: `/leavegroup <group name>`"
        name = command.args[0].lower()
        group = PingGroup.get(name=name)
        if group is None:
            return f"No such group: `{name}`."
        user = TelegramUser[command.message.sender.id]
        if user not in group.members:
            return f"You are not a member of the group `{group.name}`."
        group.members.remove(user)
        return f"Removed you from {group.name}, which now has {len(group.members)} member(s)."

    @db_session
    def remove_group(self, command: Command) -> str:
        if len(command.args) != 1:
            return "Syntax: `/removegroup <group name>`"
        name = command.args[0].lower()
        group = PingGroup.get(name=name)
        if group is None:
            return f"No such group: `{name}`."
        member_count = len(group.members)
        group.delete()
        return f"Deleted group {name}, which had {member_count} member(s)."

    @db_session
    def list_groups(self, command: Command) -> str:
        return f"The following groups exist: " \
               f"{', '.join(f'`{x.name}`' for x in PingGroup.select().order_by(PingGroup.name))}"

    @db_session
    def show_group(self, command: Command) -> str:
        if len(command.args) != 1:
            return "Syntax: `/showgroup <group name>`"
        name = command.args[0].lower()
        group = PingGroup.get(name=name)
        if group is None:
            return f"The group `{group_name}` does not exist."
        if len(group.members) == 0:
            return f"The group `{group.name}` has np members."
        return f"`{name}`: {', '.join(x for x in group.members.username if x)}"
Python
0.000002
@@ -2379,9 +2379,9 @@ ) %3C -2 +1 :%0A @@ -2430,17 +2430,17 @@ me%3E -%3C +%5B message -%3E +%5D %60%22%0A @@ -2744,33 +2744,33 @@ oup.name%7D%60 has n -p +o members.%22%0A
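Decoded, the diff lowers the argument minimum from 2 to 1, rewrites the usage string to mark the message as optional, and fixes the "np members" typo, so /ping now works with just a group name. Sketch of the patched start of ping_group:

@db_session
def ping_group(self, command: Command) -> Optional[str]:
    if len(command.args) < 1:
        return "Syntax: `/ping <group name> [message]`"
    message = ' '.join(command.args[1:])
    # ... remainder unchanged, except the empty-group reply now reads "has no members."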
1549510fd9371818cff6644984896a5a9060cb36
Fix print statements with python3 syntax.
benchmarks/TSP/compare_to_BKS.py
benchmarks/TSP/compare_to_BKS.py
# -*- coding: utf-8 -*-
import json, sys, os
import numpy as np

# Compare a set of computed solutions to best known solutions on the
# same problems.

def s_round(v, d):
    if d == 0:
        return str(int(v))
    else:
        return str(round(v, d))

def log_comparisons(BKS, files):
    print ','.join(["Instance", "Jobs", "Vehicles", "Best known cost", "Solution cost", "Gap (%)", "Computing time (ms)"])

    jobs = []
    gaps = []
    computing_times = []

    for f in files:
        instance = f[0:f.rfind("_sol.json")]
        instance = instance[instance.rfind('/') + 1:]
        if instance not in BKS:
            continue

        indicators = BKS[instance]
        BK_cost = indicators['best_known_cost']
        nb_job = indicators['jobs']
        jobs.append(nb_job)

        line = [
            instance,
            nb_job,
            indicators['vehicles'],
            BK_cost
        ]

        with open(f, 'r') as sol_file:
            solution = json.load(sol_file)

        if solution['code'] != 0:
            continue

        cost = solution['summary']['cost']
        line.append(cost)

        gap = 100 * (float(cost) / BK_cost - 1)
        line.append(round(gap, 2))
        gaps.append(gap)

        computing_time = solution['summary']['computing_times']['loading'] + solution['summary']['computing_times']['solving']
        line.append(computing_time)
        computing_times.append(computing_time)

        print ','.join(map(lambda x: str(x), line))

    print ','
    print 'Average,' + s_round(np.mean(jobs), 1) + ',,,,' + s_round(np.mean(gaps), 2) + ',' + s_round(np.mean(computing_times), 0)

    # Percentiles
    print ','
    gaps_percentiles = np.percentile(gaps, [0, 10, 25, 50, 75, 90, 100])
    ct_percentiles = np.percentile(computing_times, [0, 10, 25, 50, 75, 90, 100])

    print ',Gaps,Computing times'
    titles = ['Min', 'First decile', 'Lower quartile', 'Median', 'Upper quartile', 'Ninth decile', 'Max']
    for i in range(len(titles)):
        print titles[i] + ',' + s_round(gaps_percentiles[i], 2) + ',' + s_round(ct_percentiles[i], 0)

if __name__ == "__main__":
    # First argument if the best known solution file.
    with open(sys.argv[1], 'r') as sol_file:
        bks = json.load(sol_file)

    # Remaining arguments are computed solution files to use.
    log_comparisons(bks, sys.argv[2:])
Python
0.000038
@@ -273,25 +273,25 @@ es):%0A print - +( ','.join(%5B%22I @@ -390,16 +390,17 @@ (ms)%22%5D) +) %0A%0A jobs @@ -1308,17 +1308,17 @@ print - +( ','.join @@ -1346,16 +1346,17 @@ , line)) +) %0A%0A prin @@ -1356,20 +1356,21 @@ %0A print - +( ',' +) %0A%0A prin @@ -1370,17 +1370,17 @@ %0A print - +( 'Average @@ -1491,16 +1491,17 @@ mes), 0) +) %0A%0A # Pe @@ -1517,20 +1517,21 @@ %0A print - +( ',' +) %0A gaps_ @@ -1681,17 +1681,17 @@ %0A print - +( ',Gaps,C @@ -1705,16 +1705,17 @@ g times' +) %0A title @@ -1851,17 +1851,17 @@ print - +( titles%5Bi @@ -1939,16 +1939,17 @@ s%5Bi%5D, 0) +) %0A%0Aif __n
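The diff simply wraps each Python 2 print statement in parentheses. The exact line wrapping of the longer calls in the new file is not recoverable from the hunks, but the calls become, for example:

print(','.join(["Instance", "Jobs", "Vehicles", "Best known cost",
                "Solution cost", "Gap (%)", "Computing time (ms)"]))
print(',')
print(titles[i] + ',' + s_round(gaps_percentiles[i], 2) + ',' + s_round(ct_percentiles[i], 0))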
63bd0d8905ea9392e56f501381c054ba3a4ed1a7
Update __init__.py
chainer/optimizers/__init__.py
chainer/optimizers/__init__.py
from chainer.optimizers.ada_delta import AdaDelta  # NOQA
from chainer.optimizers.ada_grad import AdaGrad  # NOQA
from chainer.optimizers.adam import Adam  # NOQA
from chainer.optimizers.momentum_sgd import MomentumSGD  # NOQA
from chainer.optimizers.msvag import MSVAG  # NOQA
from chainer.optimizers.nesterov_ag import NesterovAG  # NOQA
from chainer.optimizers.rmsprop import RMSprop  # NOQA
from chainer.optimizers.rmsprop_graves import RMSpropGraves  # NOQA
from chainer.optimizers.sgd import SGD  # NOQA
from chainer.optimizers.smorms3 import SMORMS3  # NOQA
Python
0.000072
@@ -1,28 +1,60 @@ +-# import classes and functions%0A from chainer.optimizers.ada_
f77b48036968ce49ad1094c9913b06d977b2d6e0
add year() type checker
arg_type.py
arg_type.py
'''type verifiers for argparse

each function either
- returns an argument parsed from a string (possible the string); OR
- raises argpare.ArgumentTypeError
'''

import argparse
import multiprocessing
import pdb


if False:
    pdb


def features(s):
    's is a name for a group of features'
    return _in_set(s, set('s', 'sw', 'swp', 'swpn'))


def features_hps(s):
    'return s or raise error'
    try:
        pieces = s.split('-')
        assert len(pieces) == 2, pieces
        # verify type of each piece
        maybe_features, maybe_hps = pieces
        features(maybe_features)
        hps(maybe_hps)
        return s
    except:
        raise argparse.ArgumentTypeError('%s is not a featuregroup-hps-yearmonth' % s)


def features_hps_month(s):
    'return s or raise error'
    try:
        pieces = s.split('-')
        assert len(pieces) == 3, pieces
        # verify type of each piece
        maybe_features, maybe_hps, maybe_month = pieces
        features(maybe_features)
        hps(maybe_hps)
        month(maybe_month)
        return s
    except:
        raise argparse.ArgumentTypeError('%s is not a featuregroup-hps-yearmonth' % s)


def features_hps_locality(s):
    'return s or raise error'
    try:
        pieces = s.split('-')
        assert len(pieces) == 3, pieces
        # verify type of each piece
        maybe_features, maybe_hps, maybe_locality = pieces
        features(maybe_features)
        hps(maybe_hps)
        locality(maybe_locality)
        assert pieces[2] in ['global', 'census', 'city', 'zip']
        return s
    except:
        raise argparse.ArgumentTypeError('%s is not a featuregroup-hps-locality' % s)


def features_hps_locality_month(s):
    'return s or raise error'
    try:
        pieces = s.split('-')
        pdb.set_trace()
        assert len(pieces) == 4, pieces
        # verify type of each piece
        features(pieces[0])
        hps(pieces[1])
        assert pieces[2] in ['global', 'census', 'city', 'zip']
        month(pieces[3])
        return s
    except:
        raise argparse.ArgumentTypeError('%s is not a featuregroup-hps-yearmonth-locality' % s)


def hps(s):
    's is the name of a group of hyperparameters'
    return in_set(s, set('all', 'best1'))


def _in_set(s, allowed):
    'return s or raise ArgumentTypeError'
    try:
        assert s in allowed
        return s
    except:
        raise argparse.ArgumentTypeError('s not in allowed values {%s}' (s, allowed))


def locality_choices(s):
    return set(['census', 'city', 'global', 'zip'])


model_choices = set(['en', 'gb', 'rf'])


def month(s):
    's is a string of the form YYYYMM'
    try:
        s_year = s[:4]
        s_month = s[4:]
        int_year = int(s_year)
        assert 0 <= int_year <= 2016
        int_month = int(s_month)
        assert 1 <= int_month <= 12
        return s
    except:
        raise argparse.ArgumentTypeError('%s is not a yearmonth of form YYYYMM' % s)


def neighborhood(s):
    's is global or a city name'
    # if a city name, replace _ by ' '
    if s == 'global':
        return s
    else:
        return s.replace('_', ' ')


def n_processes(s):
    'return int value of s, if it is valid for system we are running on'
    cpu_count = multiprocessing.cpu_count()
    try:
        result = int(s)
        assert 1 <= result <= cpu_count
        return result
    except:
        raise argparse.ArgumentTypeError('%s not an itteger in [1,%d]' % (s, cpu_count))


def positive_int(s):
    'convert s to a positive integer or raise exception'
    try:
        value = int(s)
        assert value > 0
        return value
    except:
        raise argparse.ArgumentTypeError('%s is not a positive integer' % s)


training_data_choices = set(['all', 'train'])
Python
0.000002
@@ -3744,8 +3744,228 @@ train'%5D) +%0A%0A%0Adef year(s):%0A 'convert s to integer that could be a year'%0A try:%0A assert len(s) == 4%0A value = int(s)%0A return value%0A except:%0A raise argparse.ArgumentTypeError('%25s is not a year' %25 s)
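Decoded from the hunk, the commit appends this checker after training_data_choices (verbatim apart from inferred whitespace):

def year(s):
    'convert s to integer that could be a year'
    try:
        assert len(s) == 4
        value = int(s)
        return value
    except:
        raise argparse.ArgumentTypeError('%s is not a year' % s)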
7a7661bd03c947212ee46ca598cae5cd316757c1
Fix flake8
chainercv/datasets/__init__.py
chainercv/datasets/__init__.py
from chainercv.datasets.camvid.camvid_dataset import camvid_ignore_label_color  # NOQA
from chainercv.datasets.camvid.camvid_dataset import camvid_label_colors  # NOQA
from chainercv.datasets.camvid.camvid_dataset import camvid_label_names  # NOQA
from chainercv.datasets.camvid.camvid_dataset import CamVidDataset  # NOQA
from chainercv.datasets.cityscapes.cityscapes_utils import cityscapes_labels  # NOQA
from chainercv.datasets.cityscapes.cityscapes_utils import cityscapes_label_names  # NOQA
from chainercv.datasets.cityscapes.cityscapes_utils import cityscapes_label_colors  # NOQA
from chainercv.datasets.cityscapes.cityscapes_semantic_segmentation_dataset import CityscapesSemanticSegmentationDataset  # NOQA
from chainercv.datasets.cub.cub_keypoint_dataset import CUBKeypointDataset  # NOQA
from chainercv.datasets.cub.cub_label_dataset import CUBLabelDataset  # NOQA
from chainercv.datasets.cub.cub_utils import cub_label_names  # NOQA
from chainercv.datasets.directory_parsing_classification_dataset import directory_parsing_label_names  # NOQA
from chainercv.datasets.directory_parsing_classification_dataset import DirectoryParsingClassificationDataset  # NOQA
from chainercv.datasets.online_products.online_products_dataset import OnlineProductsDataset  # NOQA
from chainercv.datasets.transform_dataset import TransformDataset  # NOQA
from chainercv.datasets.voc.voc_detection_dataset import VOCDetectionDataset  # NOQA
from chainercv.datasets.voc.voc_semantic_segmentation_dataset import VOCSemanticSegmentationDataset  # NOQA
from chainercv.datasets.voc.voc_utils import voc_detection_label_names  # NOQA
from chainercv.datasets.voc.voc_utils import voc_semantic_segmentation_ignore_label_color  # NOQA
from chainercv.datasets.voc.voc_utils import voc_semantic_segmentation_label_colors  # NOQA
from chainercv.datasets.voc.voc_utils import voc_semantic_segmentation_label_names  # NOQA
Python
0
@@ -308,32 +308,161 @@ Dataset # NOQA%0A +from chainercv.datasets.cityscapes.cityscapes_semantic_segmentation_dataset import CityscapesSemanticSegmentationDataset # NOQA%0A from chainercv.d @@ -520,16 +520,22 @@ es_label +_color s # NOQ @@ -705,144 +705,9 @@ abel -_colors # NOQA%0Afrom chainercv.datasets.cityscapes.cityscapes_semantic_segmentation_dataset import CityscapesSemanticSegmentationDataset +s #
b27398e4dd246d542c0a82ecc35da60911edc9fd
revert to dev version
regionmask/version.py
regionmask/version.py
version = "0.7.0"
Python
0
@@ -9,10 +9,14 @@ = %220.7.0 ++dev %22%0A
b318ced455f13477743a6d2d81b3556695b27374
Make to_factorized_noisy support args
chainerrl/links/noisy_chain.py
chainerrl/links/noisy_chain.py
import chainer
from chainer.links import Linear

from chainerrl.links.noisy_linear import FactorizedNoisyLinear


def to_factorized_noisy(link):
    """Add noisiness to components of given link

    Currently this function supports L.Linear (with and without bias)
    """
    _map_links(_func_to_factorized_noisy, link)


def _func_to_factorized_noisy(link):
    if isinstance(link, Linear):
        return FactorizedNoisyLinear(link)
    else:
        return link


def _map_links(func, link):
    if isinstance(link, chainer.Chain):
        children_names = link._children.copy()
        for name in children_names:
            child = getattr(link, name)
            new_child = func(child)
            if new_child is child:
                _map_links(func, child)
            else:
                delattr(link, name)
                with link.init_scope():
                    setattr(link, name, func(child))
    elif isinstance(link, chainer.ChainList):
        children = link._children
        for i in range(len(children)):
            child = children[i]
            new_child = func(child)
            if new_child is child:
                _map_links(func, child)
            else:
                # mimic ChainList.add_link
                children[i] = func(child)
                children[i].name = str(i)
Python
0.000001
@@ -131,24 +131,41 @@ d_noisy(link +, *args, **kwargs ):%0A %22%22%22Ad @@ -287,63 +287,17 @@ %22%22%22%0A +%0A -_map_links(_func_to_factorized_noisy, link)%0A%0A%0A def -_ func @@ -320,32 +320,36 @@ oisy(link):%0A + + if isinstance(li @@ -361,16 +361,20 @@ inear):%0A + @@ -406,18 +406,39 @@ ear(link +, *args, **kwargs )%0A + else @@ -431,32 +431,36 @@ )%0A else:%0A + return l @@ -464,16 +464,64 @@ n link%0A%0A + _map_links(func_to_factorized_noisy, link)%0A%0A %0Adef _ma
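Putting the hunks together, the patched function takes *args/**kwargs, nests the converter so it can close over them, and forwards them to FactorizedNoisyLinear. An inferred sketch:

def to_factorized_noisy(link, *args, **kwargs):
    """Add noisiness to components of given link

    Currently this function supports L.Linear (with and without bias)
    """
    def func_to_factorized_noisy(link):
        if isinstance(link, Linear):
            # extra arguments (e.g. noise scale) are forwarded to the wrapper
            return FactorizedNoisyLinear(link, *args, **kwargs)
        else:
            return link

    _map_links(func_to_factorized_noisy, link)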
af9006169d6f537d26f58926873334312bd6ed99
Add simple bounded cache decorator
pykit/utils/convenience.py
pykit/utils/convenience.py
# -*- coding: utf-8 -*-
from __future__ import print_function, division, absolute_import

try:
    import __builtin__ as builtins
except ImportError:
    import builtins

import string
import functools
import collections
from itertools import chain

map = lambda *args: list(builtins.map(*args))
invert = lambda d: dict((v, k) for k, v in d.items())

def nestedmap(f, args):
    """
    Map `f` over `args`, which contains elements or nested lists
    """
    result = []
    for arg in args:
        if isinstance(arg, list):
            result.append(list(map(f, arg)))
        else:
            result.append(f(arg))

    return result

def flatten(args):
    """Flatten nested lists (return as iterator)"""
    for arg in args:
        if isinstance(arg, list):
            for x in arg:
                yield x
        else:
            yield arg

def mutable_flatten(args):
    """Flatten nested lists (return as iterator)"""
    for arg in args:
        if isinstance(arg, list):
            for x in arg:
                yield x
        else:
            yield arg

def mergedicts(*dicts):
    """Merge all dicts into a new dict"""
    return dict(chain(*[d.items() for d in dicts]))

def listify(f):
    """Decorator to turn generator results into lists"""
    @functools.wraps(f)
    def wrapper(*args, **kwargs):
        return list(f(*args, **kwargs))
    return wrapper

@listify
def prefix(iterable, prefix):
    """Prefix each item from the iterable with a prefix"""
    for item in iterable:
        yield prefix + item

# ______________________________________________________________________
# Strings

def substitute(s, **substitutions):
    """Use string.Template to substitute placeholders in a string"""
    return string.Template(s).substitute(**substitutions)

# ______________________________________________________________________

def hashable(x):
    try:
        hash(x)
    except TypeError:
        return False
    else:
        return True

# ______________________________________________________________________

class ValueDict(object):
    """
    Use dict values as attributes.
    """

    def __init__(self, d):
        self.__getattr__ = d.__getitem__
        self.__setattr__ = d.__setitem__
        self.__detattr__ = d.__detitem__

# ______________________________________________________________________

def cached(f):
    """Cache the result of the function"""
    result = []
    def wrapper(*args, **kwargs):
        if len(result) == 0:
            ret = f(*args, **kwargs)
            result.append(ret)
        return result[0]
    return wrapper

call_once = cached

# ______________________________________________________________________

def make_temper():
    """Return a function that returns temporary names"""
    temps = collections.defaultdict(int)
    seen = set()

    def temper(input=""):
        name, dot, tail = input.rpartition('.')
        if tail.isdigit():
            varname = name
        else:
            varname = input
        count = temps[varname]
        temps[varname] += 1
        if varname and count == 0:
            result = varname
        else:
            result = "%s.%d" % (varname, count)
        assert result not in seen
        seen.add(result)
        return result

    return temper

# ______________________________________________________________________
Python
0
@@ -2353,20 +2353,23 @@ %0A%0Adef ca -ched +ll_once (f):%0A @@ -2404,16 +2404,47 @@ function +, so that it's called only once %22%22%22%0A @@ -2636,26 +2636,378 @@ er%0A%0A -call_once = cached +def cached(limit=1000):%0A %22%22%22Cache the result for the arguments just once%22%22%22%0A def decorator(f):%0A cache = %7B%7D%0A def wrapper(*args):%0A if args not in cache:%0A if len(cache) %3E limit:%0A cache.popitem()%0A cache%5Bargs%5D = f(*args)%0A return cache%5Bargs%5D%0A return wrapper%0A return decorator %0A%0A#
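Decoded, the diff renames the old single-result helper to call_once and introduces a bounded, argument-keyed cache in its place (whitespace inferred from the hunk):

def cached(limit=1000):
    """Cache the result for the arguments just once"""
    def decorator(f):
        cache = {}
        def wrapper(*args):
            if args not in cache:
                if len(cache) > limit:
                    # evict an arbitrary entry to keep the cache bounded
                    cache.popitem()
                cache[args] = f(*args)
            return cache[args]
        return wrapper
    return decorator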
c23169b4b6557ebf83a20888bb5f9408d5e4d34b
Remove ipdb in single_request
src/core/vkstalk.py
src/core/vkstalk.py
# -*- coding: utf-8 -*-

# Required modules
from __future__ import unicode_literals
from bs4 import BeautifulSoup
from datetime import datetime, timedelta
from helpers.h_logging import get_logger
from helpers.utils import clear_screen, as_client_tz, make_data_updates_string,\
    delta_minutes
from core.parser import Parser
from core.models import *
from config import settings

import time  # used for time.sleep()
import sys


class VKStalk:
    def __init__(self, user_id):
        get_logger('file').info('Initializing VKStalk')
        self.birth = datetime.now().strftime(settings.DATETIME_FORMAT)
        self.user = User.from_vk_id(user_id)
        self.logs_counter = 0
        clear_screen()

    def scrape(self):
        while True:
            self.single_request()
            time.sleep(settings.DATA_FETCH_INTERVAL)

    def single_request(self):
        try:
            get_logger('console').info('Fetching user data...')
            data = self.parse_user_data()
            self.store_user_data(data)
            self.console_log()
        except Exception, e:
            import ipdb; ipdb.set_trace()

    def parse_user_data(self):
        p = Parser(self.user.url)
        user_data = p.get_user_data()
        return user_data

    def store_user_data(self, user_data):
        try:
            db_session = Session()
            db_session.add(self.user)
            changes = {}
            changes['data'] = UserData.get_diff(self.user.data, UserData.from_dict(user_data))
            for key, val in changes['data'].items():
                if val['new']:
                    setattr(self.user.data, key, val['new'])
                else:
                    changes['data'].pop(key, None)

            activity_log = UserActivityLog.from_dict(user_data)
            if changes['data']:
                user_data_changes = make_data_updates_string(changes['data'])
                activity_log.updates = user_data_changes.strip()

            if self.user.activity_logs:
                changes['activity_log'] = UserActivityLog.get_diff(
                    self.user.activity_logs[-1],
                    activity_log
                )
            if is_change_valid(changes):
                self.user.activity_logs.append(activity_log)
                self.logs_counter += 1
        except Exception, e:
            func_name = sys._getframe().f_code.co_name
            message = "Error in '{0}. Exception: {1}'".format(func_name, e)
            get_logger('file').fatal(message)
            db_session.rollback()
            get_logger('file').info("Session changes were rolled back.")
            raise
        finally:
            db_session.commit()
            db_session.close()

    def console_log(self):
        log = self.generate_console_log()
        clear_screen()
        get_logger('console').info(log)

    def generate_console_log(self):
        db_session = Session()
        db_session.add(self.user)
        log_tmpl = "{0} -- {1}\nStatus: {2}\n\n"
        self.log = log_tmpl.format(self.user.data.name,
                                   self.user.last_visit_text,
                                   self.user.activity_logs[-1].status,
                                   )

        # Generating a timestamp and adding it to the log string
        dt_client_now = pytz.timezone(settings.SERVER_TZ).localize(datetime.now())
        dt_client_now = as_client_tz(dt_client_now)
        check_time = datetime.strftime(dt_client_now, settings.LOG_CHECKED_TMPL)

        dt_log_timestamp = self.user.activity_logs[-1].timestamp
        dt_log_timestamp = as_client_tz(dt_log_timestamp)
        log_time = datetime.strftime(dt_log_timestamp, settings.LOG_DATETIME_TMPL)
        self.log = check_time + log_time + self.log.rstrip()

        updates = self.user.activity_logs[-1].updates
        if updates:
            self.log += '\n' + updates
        self.log += '\n\n'

        console_log = settings.CONSOLE_LOG_TEMPLATE.format(
            self.birth,
            self.user.vk_id,
            self.user.data.name,
            self.logs_counter,
            self.log
        )
        db_session.close()
        return console_log


def is_change_valid(changes):
    # TBD. Remove this BS "Kostyl'".
    # It is a workaround for when user was last seen X mins ago, the last_visit
    # timestamp for appx an hour bounces with 1 minute delta.
    has_changes = False
    if changes['data']:
        return True
    if changes['activity_log']:
        has_changes = True
        keys = changes['activity_log'].keys()
        if "last_visit" in keys and len(keys) == 1:
            minutes = delta_minutes(changes['activity_log']['last_visit']['new'],
                                    changes['activity_log']['last_visit']['old'])
            has_changes = minutes > 1

    return has_changes
Python
0.000001
@@ -859,33 +859,16 @@ (self):%0A - try:%0A @@ -927,20 +927,16 @@ - data = s @@ -953,28 +953,24 @@ user_data()%0A - self @@ -996,28 +996,24 @@ ta)%0A - self.console @@ -1022,79 +1022,8 @@ og() -%0A except Exception, e:%0A import ipdb; ipdb.set_trace() %0A%0A
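After the diff strips the try/except wrapper (and with it the ipdb breakpoint), single_request reduces to the following; a sketch with indentation inferred from the hunk contexts:

def single_request(self):
    get_logger('console').info('Fetching user data...')
    data = self.parse_user_data()
    self.store_user_data(data)
    self.console_log()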
1fd876e82bc856d044f30903b74f19e2047009d9
improve certs error
betfairlightweight/baseclient.py
betfairlightweight/baseclient.py
import os
import time
import requests
import collections

from .exceptions import PasswordError, AppKeyError, CertsError
from .utils import default_user_agent

IDENTITY = "https://identitysso.betfair{tld}/api/"
IDENTITY_CERT = "https://identitysso-cert.betfair{tld}/api/"
API = "https://api.betfair.com/exchange/"
NAVIGATION = (
    "https://api.betfair{tld}/exchange/betting/rest/v1/{locale}/navigation/menu.json"
)
USER_AGENT = default_user_agent()


class BaseClient:
    """
    Base API client
    """

    IDENTITY_URLS = collections.defaultdict(
        lambda: IDENTITY.format(tld=".com"),
        spain=IDENTITY.format(tld=".es"),
        italy=IDENTITY.format(tld=".it"),
        romania=IDENTITY.format(tld=".ro"),
        sweden=IDENTITY.format(tld=".se"),
        australia=IDENTITY.format(tld=".com.au"),
    )

    IDENTITY_CERT_URLS = collections.defaultdict(
        lambda: IDENTITY_CERT.format(tld=".com"),
        spain=IDENTITY_CERT.format(tld=".es"),
        italy=IDENTITY_CERT.format(tld=".it"),
        romania=IDENTITY_CERT.format(tld=".ro"),
        sweden=IDENTITY_CERT.format(tld=".se"),
    )

    API_URLS = collections.defaultdict(lambda: API)

    NAVIGATION_URLS = collections.defaultdict(
        lambda: NAVIGATION.format(tld=".com", locale="en"),
        spain=NAVIGATION.format(tld=".es", locale="es"),
        italy=NAVIGATION.format(tld=".it", locale="it"),
    )

    SESSION_TIMEOUT = collections.defaultdict(lambda: 8 * 60 * 60, italy=20 * 60)

    def __init__(
        self,
        username: str,
        password: str = None,
        app_key: str = None,
        certs: str = None,
        locale: str = None,
        cert_files: list = None,
        lightweight: bool = False,
        session: requests.Session = None,
    ):
        """
        Creates base client for API operations.

        :param str username: Betfair username
        :param str password: Betfair password for supplied username, if None will look in .bashprofile
        :param str app_key: App Key for account, if None will look in .bashprofile
        :param str certs: Directory for certificates, if None will look in /certs
        :param str locale: Exchange to be used, defaults to international (.com) exchange
        :param list cert_files: Certificate and key files. If None will use `self.certs`
        :param bool lightweight: If True endpoints will return dict not a resource (22x faster)
        :param requests.Session session: Pass requests session object, defaults to a new request each request
        """
        self.username = username
        self.password = password
        self.app_key = app_key
        self.certs = certs
        self.locale = locale
        self.cert_files = cert_files
        self.lightweight = lightweight
        self.session = session if session else requests

        self._login_time = None
        self.session_token = None

        self.identity_uri = self.IDENTITY_URLS[locale]
        self.identity_cert_uri = self.IDENTITY_CERT_URLS[locale]
        self.api_uri = self.API_URLS[locale]
        self.navigation_uri = self.NAVIGATION_URLS[locale]
        self.session_timeout = self.SESSION_TIMEOUT[locale]

        self.get_password()
        self.get_app_key()

    def set_session_token(self, session_token: str) -> None:
        """
        Sets session token and new login time.

        :param str session_token: Session token from request.
        """
        self.session_token = session_token
        self._login_time = time.time()

    def get_password(self) -> str:
        """
        If password is not provided will look in environment
        variables for self.username+'password'.
        """
        if self.password is None:
            if os.environ.get(self.username + "password"):
                self.password = os.environ.get(self.username + "password")
            else:
                raise PasswordError(self.username)
        return self.password

    def get_app_key(self) -> str:
        """
        If app_key is not provided will look in environment
        variables for username.
        """
        if self.app_key is None:
            if os.environ.get(self.username):
                self.app_key = os.environ.get(self.username)
            else:
                raise AppKeyError(self.username)
        return self.app_key

    def client_logout(self) -> None:
        """
        Resets session token and login time.
        """
        self.session_token = None
        self._login_time = None

    @property
    def session_expired(self) -> bool:
        """
        Returns True if login_time not set or seconds since
        login time is greater half session timeout.
        """
        if not self._login_time or time.time() - self._login_time > (
            self.session_timeout / 2
        ):
            return True
        else:
            return False

    @property
    def cert(self) -> list:
        """
        The betfair certificates, by default it looks for the
        certificates in /certs/.

        :return: Path of cert files
        :rtype: str
        """
        if self.cert_files is not None:
            return self.cert_files

        certs = self.certs or "/certs/"
        ssl_path = os.path.join(os.pardir, certs)
        try:
            cert_path = os.listdir(ssl_path)
        except FileNotFoundError as e:
            raise CertsError(str(e))

        cert = None
        key = None
        for file in cert_path:
            ext = os.path.splitext(file)[-1]
            if ext in [".crt", ".cert"]:
                cert = os.path.join(ssl_path, file)
            elif ext == ".key":
                key = os.path.join(ssl_path, file)
        if cert is None or key is None:
            raise CertsError("Certificates not found in directory: '%s'" % ssl_path)
        return [cert, key]

    @property
    def login_headers(self) -> dict:
        return {
            "Accept": "application/json",
            "X-Application": self.app_key,
            "content-type": "application/x-www-form-urlencoded",
            "User-Agent": USER_AGENT,
        }

    @property
    def keep_alive_headers(self) -> dict:
        return {
            "Accept": "application/json",
            "X-Application": self.app_key,
            "X-Authentication": self.session_token,
            "content-type": "application/x-www-form-urlencoded",
            "User-Agent": USER_AGENT,
        }

    @property
    def request_headers(self) -> dict:
        return {
            "X-Application": self.app_key,
            "X-Authentication": self.session_token,
            "content-type": "application/json",
            "Accept-Encoding": "gzip, deflate",
            "Connection": "keep-alive",
            "User-Agent": USER_AGENT,
        }
Python
0.000016
@@ -5754,16 +5754,33 @@ tsError( +%0A %22Certifi @@ -5817,20 +5817,91 @@ '%25s' -%22 %25 ssl_path + (make sure .crt and .key file is present)%22%0A %25 ssl_path%0A )%0A
a5da284b70e3e04a919679475e9cf2e276430077
Fix "--attribution" option (doesn't need an argument)
renderchan/manager.py
renderchan/manager.py
__author__ = 'Konstantin Dmitriev'

from gettext import gettext as _
from optparse import OptionParser
import os.path

from renderchan.core import RenderChan
from renderchan.core import Attribution
from renderchan.file import RenderChanFile
from renderchan.project import RenderChanProject

def process_args():
    parser = OptionParser(
        usage=_("""
    %prog """))

    # The --freeze and --unfreeze options are temporary disabled, because this function should behave differently.
    #parser.add_option("--freeze", dest="freezeList",
    #        action="append",
    #        help=_("Freeze path."))
    #parser.add_option("--unfreeze", dest="unfreezeList",
    #        action="append",
    #        help=_("Un-freeze path."))
    parser.add_option("--lang", dest="setLanguage",
            action="store", nargs=1,
            help=_("Switch project language."))
    parser.add_option("--attribution", dest="getAttribution",
            action="store", nargs=1,
            help=_("Get attribution information from file."))

    options, args = parser.parse_args()
    return options, args

def main(argv):
    options, args = process_args()

    # Parse frozen parameters
    # The --freeze and --unfreeze options are temporary disabled, because this function should behave differently.
    #if options.freezeList or options.unfreezeList:
    #    renderchan = RenderChan()
    #    if not options.freezeList:
    #        options.freezeList=[]
    #    if not options.unfreezeList:
    #        options.unfreezeList=[]
    #    frozenListChanged=False
    #    for filename in options.freezeList:
    #        filename=os.path.abspath(filename)
    #        if not filename in options.unfreezeList:
    #            taskfile = RenderChanFile(filename, renderchan.modules, renderchan.projects)
    #            taskfile.setFrozen(True)
    #            frozenListChanged=True
    #    for filename in options.unfreezeList:
    #        filename=os.path.abspath(filename)
    #        if not filename in options.freezeList:
    #            taskfile = RenderChanFile(filename, renderchan.modules, renderchan.projects)
    #            taskfile.setFrozen(False)
    #            frozenListChanged=True
    #    if frozenListChanged:
    #        taskfile.project.saveFrozenPaths()

    if options.setLanguage:
        project = RenderChanProject(os.getcwd())
        project.switchLanguage(options.setLanguage)

    if options.getAttribution:
        filename=os.path.abspath(options.getAttribution)
        info = Attribution(filename)
        info.output()
Python
0.001461
@@ -967,34 +967,57 @@ ction=%22store -%22, nargs=1 +_true%22,%0A default=False ,%0A
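Decoded, the hunk turns --attribution into a flag, since the option takes no argument of its own. The patched option registration (continuation-line indentation inferred):

parser.add_option("--attribution", dest="getAttribution",
        action="store_true",
        default=False,
        help=_("Get attribution information from file."))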
629d8a699e0ab944d4775bd6a31709546d4ca839
add doc string
contacts/models/misc.py
contacts/models/misc.py
#!/usr/bin/python
#Django Imports
from django.db import models
from django.conf import settings
from django.contrib.auth.models import User
from django.core.exceptions import ObjectDoesNotExist

from jsonfield import JSONField

#Local Imports
import transports
from utils.models import TimeStampedModel,BaseQuerySet

class Connection(models.Model):

    class Meta:
        app_label = 'contacts'

    objects = BaseQuerySet.as_manager()

    identity = models.CharField(max_length=25,primary_key=True)
    contact = models.ForeignKey(settings.MESSAGING_CONTACT,blank=True,null=True)
    description = models.CharField(max_length=30,blank=True,null=True,help_text='Description of phone numbers relationship to contact')

    is_primary = models.BooleanField(default=False,verbose_name='Primary')

    def __unicode__(self):
        return "{} ({})".format(self.contact.study_id if self.contact else '',self.identity)

    def send_custom(self,text,translated_text='',languages='',**kwargs):
        return self.send_message(text,translation_status='cust',translated_text=translated_text,languages=languages,is_system=False,**kwargs)

    def send_message(self,text,**kwargs):
        # Send message over system transport
        try:
            msg_id, msg_success, external_data = transports.send(self.identity,text)
        except transports.TransportError as e:
            msg_id = ""
            msg_success = False
            external_data = {"error":str(e)}

        # Create new message
        new_message = self.message_set.create(
            text=text,
            connection=self,
            external_id=msg_id,
            external_success=msg_success,
            external_status="Sent" if msg_success else external_data.get("status","Failed"),
            external_data=external_data,
            **kwargs)

        return new_message

class PractitionerQuerySet(BaseQuerySet):

    def for_participant(self,participant):
        return self.filter(facility=participant.facility).exclude(user__first_name='').select_related('user').first()

class Practitioner(models.Model):
    '''
    User profile for nurse practitioners to link a User profile to a Facility
    '''

    class Meta:
        app_label = 'contacts'

    objects = PractitionerQuerySet.as_manager()

    user = models.OneToOneField(User)
    facility = models.CharField(max_length=15,choices=settings.FACILITY_CHOICES)
    password_changed = models.BooleanField(default=False)

    @property
    def username(self):
        return self.user.username

    def __str__(self):
        return '{0}'.format(self.user.username)

    def __repr__(self):
        return '<{0!s}> <{1}>'.format(self.facility,self.user.username)

class EventLog(TimeStampedModel):

    class Meta:
        app_label = 'contacts'

    objects = BaseQuerySet.as_manager()

    user = models.ForeignKey(User)
    event = models.CharField(max_length=25,help_text="Event Name")
    data = JSONField()
Python
0.000002
@@ -2701,16 +2701,17 @@ rname)%0A%0A +%0A class Ev @@ -2731,24 +2731,323 @@ ampedModel): +%0A %22%22%22%0A The basic idea behind this model is to keep track of which staff accounts take which actions.%0A%0A These are currently created in the %22visit seen%22 and %22attended DRF%22 end points, however%0A there is not currently any logic that accesses / uses the data anywhere in the codebase.%0A %22%22%22 %0A%0A class
6535755cfdc914efc5e1efc6a89ed9dca7c78b87
Correct docstrings of result_suite/sample.py
checker/result_suite/sample.py
checker/result_suite/sample.py
from checker.base import BakeryTestCase as TestCase


class SampleTest(TestCase):

    target = 'result'
    path = '.'

    def setUp(self):
        # read ttf
        # self.font = fontforge.open(self.path)
        pass

    def test_ok(self):
        """ This test failed """
        self.assertTrue(True)

    def test_failure(self):
        """ This test failed """
        self.assertTrue(False)

    def test_error(self):
        """ Unexpected error """
        1 / 0
        self.assertTrue(False)
Python
0.000002
@@ -252,38 +252,40 @@ %22%22%22 This test -fail +succe ed +s %22%22%22%0A sel @@ -358,18 +358,17 @@ est fail -ed +s %22%22%22%0A
1c88a828ee2df37148b9744500f700d20b512095
Fix for issue #1; devnull
dbbackup/commander.py
dbbackup/commander.py
""" Process the Backup or Restore commands. """ import copy, re, shlex, sys from datetime import datetime from django.conf import settings from django.core.management.base import CommandError from subprocess import Popen READ_FILE = '<READ_FILE>' WRITE_FILE = '<WRITE_FILE>' DATE_FORMAT = getattr(settings, 'DBBACKUP_DATE_FORMAT', '%Y-%m-%d-%H%M%S') SERVER_NAME = getattr(settings, 'DBBACKUP_SERVER_NAME', '') FILENAME_TEMPLATE = getattr(settings, 'DBBACKUP_FILENAME_TEMPLATE', '{databasename}-{servername}-{datetime}.{extension}') ################################## # MySQL Settings ################################## class MYSQL_SETTINGS: EXTENSION = getattr(settings, 'DBBACKUP_MYSQL_EXTENSION', 'mysql') BACKUP_COMMANDS = getattr(settings, 'DBBACKUP_MYSQL_BACKUP_COMMANDS', [ shlex.split('mysqldump -u{username} -p{password} {databasename} >'), ]) RESTORE_COMMANDS = getattr(settings, 'DBBACKUP_MYSQL_RESTORE_COMMANDS', [ shlex.split('mysql -u{username} -p{password} {databasename} <'), ]) ################################## # PostgreSQL Settings ################################## class POSTGRESQL_SETTINGS: EXTENSION = getattr(settings, 'DBBACKUP_POSTGRESQL_EXTENSION', 'psql') BACKUP_COMMANDS = getattr(settings, 'DBBACKUP_POSTGRESQL_BACKUP_COMMANDS', [ shlex.split('pg_dump {databasename} >'), ]) RESTORE_COMMANDS = getattr(settings, 'DBBACKUP_POSTGRESQL_RESTORE_COMMANDS', [ shlex.split('dropdb {databasename}'), shlex.split('createdb {databasename} --owner={username}'), shlex.split('psql -1 {databasename} <'), ]) ################################## # Sqlite Settings ################################## class SQLITE_SETTINGS: EXTENSION = getattr(settings, 'DBBACKUP_SQLITE_EXTENSION', 'sqlite') BACKUP_COMMANDS = getattr(settings, 'DBBACKUP_SQLITE_BACKUP_COMMANDS', [ [READ_FILE, '{databasename}'], ]) RESTORE_COMMANDS = getattr(settings, 'DBBACKUP_SQLITE_RESTORE_COMMANDS', [ [WRITE_FILE, '{databasename}'], ]) ################################## # Commander Class ################################## class Commander: """ Process the Backup or Restore commands. """ def __init__(self, database): self.database = database self.engine = self.database['ENGINE'].split('.')[-1] self.settings = self._get_settings() def _get_settings(self): """ Returns the proper settings dictionary. """ if self.engine == 'mysql': return MYSQL_SETTINGS elif self.engine == 'postgresql_psycopg2': return POSTGRESQL_SETTINGS elif self.engine == 'sqlite3': return SQLITE_SETTINGS def filename(self, servername=None, wildcard=None): """ Create a new backup filename. """ datestr = wildcard or datetime.now().strftime(DATE_FORMAT) filename = FILENAME_TEMPLATE.replace('{databasename}', self.database['NAME']) filename = filename.replace('{servername}', servername or SERVER_NAME) filename = filename.replace('{datetime}', datestr) filename = filename.replace('{extension}', self.settings.EXTENSION) filename = filename.replace('--', '-') return filename def filename_match(self, servername=None, wildcard='*'): """ Return the prefix for backup filenames. """ return self.filename(servername, wildcard) def filter_filepaths(self, filepaths, servername=None): """ Returns a list of backups file paths from the dropbox entries. """ regex = self.filename_match(servername, '.*?') return filter(lambda path: re.search(regex, path), filepaths) def translate_command(self, command): """ Translate the specified command. 
""" command = copy.copy(command) for i in range(len(command)): command[i] = command[i].replace('{username}', self.database['USER']) command[i] = command[i].replace('{password}', self.database['PASSWORD']) command[i] = command[i].replace('{databasename}', self.database['NAME']) return command def run_backup_commands(self, stdout): """ Translate and run the backup commands. """ return self.run_commands(self.settings.BACKUP_COMMANDS, stdout=stdout) def run_restore_commands(self, stdin): """ Translate and run the backup commands. """ stdin.seek(0) return self.run_commands(self.settings.RESTORE_COMMANDS, stdin=stdin) def run_commands(self, commands, stdin=None, stdout=None): """ Translate and run the specified commands. """ for command in commands: command = self.translate_command(command) if (command[0] == READ_FILE): self.read_file(command[1], stdout) elif (command[0] == WRITE_FILE): self.write_file(command[1], stdin) else: self.run_command(command, stdin, stdout) def run_command(self, command, stdin=None, stdout=None): """ Run the specified command. """ devnull = open('/dev/null', 'w') pstdin = stdin if command[-1] == '<' else None pstdout = stdout if command[-1] == '>' else devnull command = filter(lambda arg: arg not in ['<', '>'], command) print " Running: %s" % ' '.join(command) process = Popen(command, stdin=pstdin, stdout=pstdout) process.wait() devnull.close() if process.poll(): raise CommandError("Error running: %s" % command) def read_file(self, filepath, stdout): """ Read the specified file to stdout. """ print " Reading: %s" % filepath stdout.write(open(filepath, 'r').read()) def write_file(self, filepath, stdin): """ Write the specified file from stdin. """ print " Writing: %s" % filepath writehandle = open(filepath, 'w') writehandle.write(stdin.read()) writehandle.close()
Python
0
@@ -53,16 +53,20 @@ rt copy, + os, re, shl @@ -71,13 +71,8 @@ hlex -, sys %0Afro @@ -5020,19 +5020,18 @@ pen( -'/ +os. dev -/ null -' , 'w
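Decoded, the commit adds os to the import line (dropping the unused sys) and opens os.devnull instead of the hard-coded Unix path, presumably for portability. A sketch of the two patched spots:

import copy, os, re, shlex

# in Commander.run_command():
devnull = open(os.devnull, 'w')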