column        dtype          stats
commit        stringlengths  40–40
subject       stringlengths  1–3.25k
old_file      stringlengths  4–311
new_file      stringlengths  4–311
old_contents  stringlengths  0–26.3k
lang          stringclasses  3 values
proba         float64        0–1
diff          stringlengths  0–7.82k
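Each row below pairs a commit's pre-change file (old_contents) with a compact patch (diff). The hunk headers (@@ -a,b +c,d @@) and %0A-escaped newlines look like Google's diff-match-patch patch-text format; that format, and the file name commit_diffs.jsonl, are assumptions for this sketch, not something the dump states. A minimal sketch of rebuilding each row's post-commit contents:

import json

from diff_match_patch import diff_match_patch  # pip install diff-match-patch

dmp = diff_match_patch()

def apply_row_diff(row: dict) -> str:
    """Apply the row's patch to old_contents, failing loudly on a bad hunk."""
    patches = dmp.patch_fromText(row["diff"])        # parse @@ -a,b +c,d @@ hunks
    new_contents, hunk_ok = dmp.patch_apply(patches, row["old_contents"])
    if not all(hunk_ok):
        raise ValueError("patch did not apply cleanly: " + row["commit"])
    return new_contents

with open("commit_diffs.jsonl") as f:                # hypothetical file name
    for line in f:
        row = json.loads(line)
        rebuilt = apply_row_diff(row)                # the post-commit file
        print(row["commit"][:8], row["subject"])

patch_apply returns a per-hunk success flag, so rows whose old_contents were truncated or re-wrapped by this dump can be rejected instead of silently producing corrupt output files.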
8356da27e34b269052e57734b424f65a3c636b07
remove testing kludge
build-db.py
build-db.py
# usage: python build-db.py <wod ascii file name> <table name to append to> from wodpy import wod import sys, sqlite3 import util.main as main import util.dbutils as dbutils import numpy as np import qctests.CSIRO_wire_break if len(sys.argv) == 3: conn = sqlite3.connect('iquod.db', isolation_level=None) cur = conn.cursor() # Identify tests testNames = main.importQC('qctests') testNames.sort() # set up our table query = "DROP TABLE IF EXISTS " + sys.argv[2] + ";" cur.execute(query) query = "CREATE TABLE " + sys.argv[2] + """( raw text, truth BLOB, uid integer PRIMARY KEY, year integer, month integer, day integer, time real, lat real, long real, cruise integer, probe integer, training integer, """ for i in range(len(testNames)): query += testNames[i].lower() + ' BLOB' if i<len(testNames)-1: query += ',' else: query += ');' cur.execute(query) def assessProfile(p): 'decide whether this profile is acceptable for QC or not; False = skip this profile' # not interested in standard levels if int(p.primary_header['Profile type']) == 1: return False # no temperature data in profile if p.var_index() is None: return False # temperature data is in profile but all masked out if np.sum(p.t().mask == False) == 0: return False # all depths are less than 10 cm and there are at least two levels (ie not just a surface measurement) if np.sum(p.z() < 0.1) == len(p.z()) and len(p.z()) > 1: return False # no valid originator flag type if int(p.originator_flag_type()) not in range(1,15): return False temp = p.t() tempqc = p.t_level_qc(originator=True) for i in range(len(temp)): # don't worry about levels with masked temperature if temp.mask[i]: continue # if temperature isn't masked: # it had better be a float if not isinstance(temp.data[i], float): return False # needs to have a valid QC decision: if tempqc.mask[i]: return False if not isinstance(tempqc.data[i], int): return False if not tempqc.data[i] > 0: return False return True def encodeTruth(p): 'encode a per-level true qc array, with levels marked with 99 temperature set to qc code 99' truth = p.t_level_qc(originator=True) for i,temp in enumerate(p.t()): if temp > 99 and temp < 100: truth[i] = 99 return truth # populate table from wod-ascii data fid = open(sys.argv[1]) uids = [] good = 0 bad = 0 #while True: for qq in range(100): # extract profile as wodpy object and raw text start = fid.tell() profile = wod.WodProfile(fid) end = fid.tell() fid.seek(start) raw = fid.read(end-start) fid.seek(end) # set up dictionary for populating query string p = profile.npdict() p['raw'] = "'" + raw + "'" # check for duplicate profiles in raw data if p['uid'] in uids: if profile.is_last_profile_in_file(fid) == True: break else: continue uids.append(p['uid']) # skip pathological profiles isgood = assessProfile(profile) if not isgood and profile.is_last_profile_in_file(fid) == True: break elif not isgood: continue # encode temperature error codes into truth array truth = encodeTruth(profile) p['truth'] = main.pack_array(truth) # keep tabs on how many good and how many bad profiles have been added to db # nowire == index of first wire break level wireqc = qctests.CSIRO_wire_break.test(profile, {}) try: nowire = list(wireqc).index(True) except: nowire = len(truth) # flag only counts if its before the wire break: flagged = dbutils.summarize_truth(truth[0:nowire]) if flagged: bad += 1 else: good += 1 query = "INSERT INTO " + sys.argv[2] + " (raw, truth, uid, year, month, day, time, lat, long, cruise, probe) values (?,?,?,?,?,?,?,?,?,?,?);" values = (p['raw'], p['truth'], 
p['uid'], p['year'], p['month'], p['day'], p['time'], p['latitude'], p['longitude'], p['cruise'], p['probe_type']) main.dbinteract(query, values) if profile.is_last_profile_in_file(fid) == True: break conn.commit() print 'number of clean profiles written:', good print 'number of flagged profiles written:', bad else: print 'Usage: python build-db.py inputdatafile databasetable'
Python
0.000001
@@ -3041,17 +3041,16 @@ = 0%0A -# while Tr @@ -3057,34 +3057,8 @@ ue:%0A - for qq in range(100):%0A
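To make the hunk notation above concrete, here is a toy round-trip (a sketch with invented strings, not taken from the dataset) whose patch has the same shape as this first record's diff: it uncomments while True: and deletes the for qq in range(100): test loop, which is what the "remove testing kludge" subject describes.

from diff_match_patch import diff_match_patch

dmp = diff_match_patch()
old = "good = 0\nbad = 0\n#while True:\nfor qq in range(100):\n    step()\n"
new = "good = 0\nbad = 0\nwhile True:\n    step()\n"

patches = dmp.patch_make(old, new)
print(dmp.patch_toText(patches))     # @@ -a,b +c,d @@ hunks, newlines as %0A

restored, hunk_ok = dmp.patch_apply(patches, old)
assert restored == new and all(hunk_ok)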
100fa2f08656009e3fa2ee4fd66a85c5aca35f9d
Comment typo fix
sacad/rate_watcher.py
sacad/rate_watcher.py
""" This module provides a class with a context manager to help avoid overloading web servers. """ import collections import logging import os import sqlite3 import threading import time import urllib.parse import lockfile MIN_WAIT_TIME_S = 0.01 SUSPICIOUS_LOCK_AGE_S = 120 class WaitNeeded(Exception): """ Exception raised when access can not be granted without waiting. """ def __init__(self, wait_time_s): self.wait_s = wait_time_s class AccessRateWatcher: """ Access rate limiter, supporting concurrent access by threads and/or processes. """ thread_locks = collections.defaultdict(threading.Lock) thread_dict_lock = threading.Lock() def __init__(self, db_filepath, url, min_delay_between_accesses): self.domain = urllib.parse.urlsplit(url).netloc self.min_delay_between_accesses = min_delay_between_accesses self.connection = sqlite3.connect(db_filepath) with self.connection: self.connection.executescript("""PRAGMA journal_mode = MEMORY; PRAGMA synchronous = OFF; CREATE TABLE IF NOT EXISTS access_timestamp (domain TEXT PRIMARY KEY, timestamp FLOAT NOT NULL);""") self.lock_dir = os.path.join(os.path.dirname(db_filepath), "plocks") os.makedirs(self.lock_dir, exist_ok=True) def __enter__(self): self._raiseOrLock() self._access() def __exit__(self, exc_type, exc_value, traceback): self._releaseLock() def _access(self): """ Notify the watcher that the server is accessed. """ with self.connection: self.connection.execute("""INSERT OR REPLACE INTO access_timestamp (domain, timestamp) VALUES (?, ?)""", (self.domain, time.time(),)) def _raiseOrLock(self): """ Get lock or raise WaitNeeded exception. """ for try_lock in (True, False): with self.connection: last_access_time = self.connection.execute("""SELECT timestamp FROM access_timestamp WHERE domain = ?;""", (self.domain,)).fetchone() if last_access_time is not None: last_access_time = last_access_time[0] now = time.time() time_since_last_access = now - last_access_time if time_since_last_access < self.min_delay_between_accesses: time_to_wait = self.min_delay_between_accesses - time_since_last_access raise WaitNeeded(time_to_wait) if try_lock: locked = self._getLock() if locked: break else: raise WaitNeeded(MIN_WAIT_TIME_S) def _getLock(self): with __class__.thread_dict_lock: tlock = __class__.thread_locks[self.domain] if tlock.acquire(blocking=False): plock = lockfile.FileLock(os.path.join(self.lock_dir, self.domain)) try: plock.acquire(timeout=0) except (lockfile.LockTimeout, lockfile.AlreadyLocked): # detect and break locks of dead processes lock_age = time.time() - os.path.getmtime(plock.lock_file) if lock_age > SUSPICIOUS_LOCK_AGE_S: logging.getLogger().warning("Breaking suspicious lock '%s' created %.2f seconds ago" % (plock.lock_file, lock_age)) plock.break_lock() tlock.release() except: tlock.release() raise else: return True else: # lock not availale: wait for it, release it immediately and return as if locking fails # we do this to wait for the right amount of time but still re-read the cache with tlock: pass return False def _releaseLock(self): lockfile.FileLock(os.path.join(self.lock_dir, self.domain)).release() __class__.thread_locks[self.domain].release()
Python
0
@@ -2744,16 +2744,55 @@ break%0A + # loop again to find wait time%0A el @@ -3721,16 +3721,17 @@ t availa +b le: wait
e63c463a3200d9843bc5be6c1c3ee36fb267cbde
Update hyper space setting.
matchzoo/engine/param_table.py
matchzoo/engine/param_table.py
"""Parameters table class.""" import typing from matchzoo.engine import Param class ParamTable(object): """ Parameter table class. Example: >>> params = ParamTable() >>> params.add(Param('ham', 'Parma Ham')) >>> params.add(Param('egg', 'Over Easy')) >>> params['ham'] 'Parma Ham' >>> params['egg'] 'Over Easy' >>> print(params) ham Parma Ham egg Over Easy >>> params.add(Param('egg', 'Sunny side Up')) Traceback (most recent call last): ... ValueError: Parameter named egg already exists. To re-assign parameter egg value, use `params["egg"] = value` instead. """ def __init__(self): """Parameter table constrctor.""" self._params = {} def add(self, param: Param): """:param param: parameter to add.""" if not isinstance(param, Param): raise TypeError("Only accepts a Param instance.") if param.name in self._params: msg = f"Parameter named {param.name} already exists.\n" \ f"To re-assign parameter {param.name} value, " \ f"use `params[\"{param.name}\"] = value` instead." raise ValueError(msg) self._params[param.name] = param def get(self, key) -> Param: """:return: The parameter in the table named `key`.""" return self._params[key] def set(self, key, param: Param): """Set `key` to parameter `param`.""" if not isinstance(param, Param): raise ValueError self._params[key] = param @property def hyper_space(self) -> dict: """:return: Hyper space of the table, a valid `hyperopt` graph.""" return { param.name: param.hyper_space for param in self._params.values() if param.hyper_space is not None } def __getitem__(self, key: str) -> typing.Any: """:return: The value of the parameter in the table named `key`.""" return self._params[key].value def __setitem__(self, key: str, value: typing.Any): """ Set the value of the parameter named `key`. :param key: Name of the parameter. :param value: New value of the parameter to set. """ self._params[key].value = value def __str__(self): """:return: Pretty formatted parameter table.""" return '\n'.join(param.name.ljust(30) + str(param.value) for param in self._params.values()) def __iter__(self) -> typing.Iterator: """:return: A iterator that iterates over all parameter instances.""" yield from self._params.values() def completed(self) -> bool: """ :return: `True` if all params are filled, `False` otherwise. Example: >>> import matchzoo >>> model = matchzoo.models.NaiveModel() >>> model.params.completed() False >>> model.guess_and_fill_missing_params(verbose=0) >>> model.params.completed() True """ return all(param for param in self) def keys(self) -> typing.KeysView: """:return: Parameter table keys.""" return self._params.keys() def __contains__(self, item): """:return: `True` if parameter in parameters.""" return item in self._params
Python
0
@@ -72,16 +72,30 @@ rt Param +, hyper_spaces %0A%0A%0Aclass @@ -1821,16 +1821,50 @@ -return %7B +full_space = %7B%7D%0A for param in self: %0A @@ -1876,19 +1876,71 @@ +if param. -name: +hyper_space is not None:%0A param_space = par @@ -1970,97 +1970,204 @@ + -for param in self._params.values()%0A if param.hyper_space is not None%0A %7D + if isinstance(param_space, hyper_spaces.HyperoptProxy):%0A param_space = param_space(param.name)%0A full_space%5Bparam.name%5D = param_space%0A return full_space %0A%0A
4a6846b969746b79f1acd0e0615232d97ed54b1f
replace import-time cluster dependencies (#1544)
frameworks/template/tests/test_sanity.py
frameworks/template/tests/test_sanity.py
import pytest import sdk_install import sdk_utils from tests import config FOLDERED_SERVICE_NAME = sdk_utils.get_foldered_name(config.SERVICE_NAME) @pytest.fixture(scope='module', autouse=True) def configure_package(configure_security): try: sdk_install.uninstall(config.PACKAGE_NAME, FOLDERED_SERVICE_NAME) # note: this package isn't released to universe, so there's nothing to test_upgrade() with sdk_install.install( config.PACKAGE_NAME, FOLDERED_SERVICE_NAME, config.DEFAULT_TASK_COUNT, additional_options={"service": { "name": FOLDERED_SERVICE_NAME } }) yield # let the test session execute finally: sdk_install.uninstall(config.PACKAGE_NAME, FOLDERED_SERVICE_NAME) @pytest.mark.sanity @pytest.mark.smoke def test_install(): pass # package installed and appeared healthy!
Python
0
@@ -73,81 +73,8 @@ ig%0A%0A -FOLDERED_SERVICE_NAME = sdk_utils.get_foldered_name(config.SERVICE_NAME)%0A %0A@py @@ -215,33 +215,59 @@ CKAGE_NAME, -FOLDERED_ +sdk_utils.get_foldered_name(config. SERVICE_NAME @@ -267,16 +267,17 @@ CE_NAME) +) %0A%0A @@ -443,25 +443,51 @@ -FOLDERED_ +sdk_utils.get_foldered_name(config. SERVICE_ @@ -486,24 +486,25 @@ SERVICE_NAME +) ,%0A @@ -585,25 +585,51 @@ %22name%22: -FOLDERED_ +sdk_utils.get_foldered_name(config. SERVICE_ @@ -632,16 +632,17 @@ ICE_NAME +) %7D %7D)%0A%0A @@ -753,17 +753,43 @@ ME, -FOLDERED_ +sdk_utils.get_foldered_name(config. SERV @@ -797,16 +797,17 @@ CE_NAME) +) %0A%0A%0A@pyte
0eef0efbe716feb3dc02fb45a756496d5517966c
Update docs.
matchzoo/models/naive_model.py
matchzoo/models/naive_model.py
"""Naive model with a simplest structure for testing purposes.""" import keras from matchzoo import engine class NaiveModel(engine.BaseModel): """Naive model with a simplest structure for testing purposes.""" def build(self): """Build.""" x_in = self._make_inputs() x = keras.layers.concatenate(x_in) x_out = self._make_output_layer()(x) self._backend = keras.models.Model(inputs=x_in, outputs=x_out)
Python
0
@@ -147,16 +147,21 @@ %0A %22%22%22 +%0A Naive mo @@ -207,24 +207,159 @@ ng purposes. +%0A%0A Bare minimum functioning model. The best choice to get things rolling.%0A The worst choice to fit and evaluate performance.%0A %22%22%22%0A%0A def
0859bb58a4fa24f5e278e95da491a2b4409f0b2b
Tag 0.5.3
koordinates/__init__.py
koordinates/__init__.py
# -*- coding: utf-8 -*- """ Koordinates Python API Client Library :copyright: (c) Koordinates Limited. :license: BSD, see LICENSE for more details. """ __version__ = "0.5.0" from .exceptions import ( KoordinatesException, ClientError, ClientValidationError, InvalidAPIVersion, ServerError, BadRequest, AuthenticationError, Forbidden, NotFound, NotAllowed, Conflict, RateLimitExceeded, InternalServerError, ServiceUnvailable, ) from .client import Client from .layers import Layer, Table from .licenses import License from .metadata import Metadata from .publishing import Publish from .sets import Set from .sources import Source, UploadSource from .users import Group, User from .permissions import Permission from .exports import Export, CropLayer, DownloadError
Python
0.000001
@@ -166,17 +166,17 @@ = %220.5. -0 +3 %22%0A%0Afrom
b6554b00fdb0387a27671eeb39589dc7e7109f6e
Add collecter function
app/main.py
app/main.py
from flask import Flask app = Flask(__name__) app.config.update( DEBUG=True ) @app.route("/") def index(): return "Hello python" if __name__ == "__main__": app.run()
Python
0.000001
@@ -16,16 +16,100 @@ rt Flask +, request, jsonify%0Afrom urllib.request import urlopen%0Afrom bs4 import BeautifulSoup%0A %0Aapp = F @@ -197,68 +197,585 @@ -return %22Hello python%22%0A%0Aif __name__ == %22__main__%22:%0A app.run() +url = request.args.get('url', '')%0A res = collecter(url)%0A return jsonify(res)%0A%0Aif __name__ == %22__main__%22:%0A app.run()%0A%0A%0Adef collecter(url):%0A %22%22%22%0A %E7%94%BB%E5%83%8F%E3%81%AE%E3%82%B9%E3%82%AF%E3%83%AC%E3%82%A4%E3%83%94%E3%83%B3%E3%82%B0%E3%82%92%E8%A1%8C%E3%81%84%E3%80%81%E7%B5%90%E6%9E%9C%E3%82%92json%E3%81%A7%E8%BF%94%E3%81%99%0A @param url %E3%82%B9%E3%82%AF%E3%83%AC%E3%82%A4%E3%83%94%E3%83%B3%E3%82%B0%E3%81%97%E3%81%9F%E3%81%84URL%0A @return %E3%82%B9%E3%82%AF%E3%83%AC%E3%82%A4%E3%83%94%E3%83%B3%E3%82%B0%E7%B5%90%E6%9E%9C%E3%81%AEjson%0A %22%22%22%0A if(url == %22%22):%0A return%0A %0A count = 0%0A pic = %7B%7D%0A%0A html = urlopen(url)%0A soup = BeautifulSoup(html, %22html.parser%22)%0A for a in soup.find_all(%22a%22):%0A text = str(a.string)%0A if text.endswith(%22jpg%22) or text.endswith(%22png%22):%0A count += 1%0A pic.update(%7Bcount: text%7D)%0A%0A return pic %0A
7b58f59ec288dd055cf931dd47c4e8e59bb9ad1d
update atx-agent version
uiautomator2/version.py
uiautomator2/version.py
# coding: utf-8 # __apk_version__ = '1.1.5' # 1.1.5 waitForExists use UiObject2 method first then fallback to UiObject.waitForExists # 1.1.4 add ADB_EDITOR_CODE broadcast support, fix bug (toast捕获导致app闪退) # 1.1.3 use thread to make watchers.watched faster, try to fix input method type multi # 1.1.2 fix count error when have child && sync watched, to prevent watchers.remove error # 1.1.1 support toast capture # 1.1.0 update uiautomator-v18:2.1.2 -> uiautomator-v18:2.1.3 (This version fixed setWaitIdleTimeout not working bug) # 1.0.14 catch NullException, add gps mock support # 1.0.13 whatsinput suppoort, but not very well # 1.0.12 add toast support # 1.0.11 add auto install support # 1.0.10 fix service not started bug # 1.0.9 fix apk version code and version name # ERR: 1.0.8 bad version number. show ip on notification # ERR: 1.0.7 bad version number. new input method, some bug fix __atx_agent_version__ = '0.4.5' # 0.4.5 add http log, change atx-agent -d into atx-agent server -d # 0.4.4 this version is gone # 0.4.3 ignore sigint to prevent atx-agent quit # 0.4.2 hot fix, close upgrade-self # 0.4.1 fix app-download time.Timer panic error, use safe-time.Timer instead. # 0.4.0 add go-daemon lib. use safe-time.Timer to prevent panic error. this will make it run longer # 0.3.6 support upload zip and unzip, fix minicap rotation error when atx-agent is killed -9 # 0.3.5 hot fix for session # 0.3.4 fix session() sometimes can not get mainActivity error # 0.3.3 /shell support timeout # 0.3.2 fix dns resolve error when network changes # 0.3.0 use github.com/codeskyblue/heartbeat library instead of websocket, add /whatsinput # 0.2.1 support occupy /minicap connection # 0.2.0 add session support # 0.1.8 fix screenshot always the same image. (BUG in 0.1.7), add /shell/stream add timeout for /shell # 0.1.7 fix dns resolve error in /install # 0.1.6 change download logic. auto fix orientation # 0.1.5 add singlefight for minicap and minitouch, proxy dial-timeout change 30 to 10 # 0.1.4 phone remote control # 0.1.2 /download support # 0.1.1 minicap buildin
Python
0
@@ -918,18 +918,69 @@ = '0.4. -5' +6'%0A# 0.4.6 fix download dns resolve error (sometimes) %0A# 0.4.5
0c9accce7b3df8889ecf57b6df89a36628cb908c
add timeout for running scheduler
sbin/run_scheduler.py
sbin/run_scheduler.py
import subprocess import tempfile import time, os import re import sys # cd ~/.config/sublime-text-3/Packages/UnitTesting # python sbin/run_scheduler.py PACKAGE # script directory __dir__ = os.path.dirname(os.path.abspath(__file__)) version = int(subprocess.check_output(["subl","--version"]).decode('utf8').strip()[-4]) # sublime package directory if sys.platform == "darwin": sublime_package = os.path.expanduser("~/Library/Application Support/Sublime Text %d/Packages" % version) elif "linux" in sys.platform: sublime_package = os.path.expanduser("~/.config/sublime-text-%d/Packages" % version) sys.path.append(os.path.join(sublime_package, "UnitTesting")) from jsonio import * package = sys.argv[1] if len(sys.argv)>1 else "UnitTesting" outdir = os.path.join(sublime_package, "User", "UnitTesting", "tests_output") outfile = os.path.join(outdir, package) # remove output if os.path.exists(outfile): os.unlink(outfile) # add schedule jpath = os.path.join(sublime_package, "User", "UnitTesting", "schedule.json") j = jsonio(jpath) schedule = j.load() if not any([s['package']==package for s in schedule]): schedule.append({'package': package}) j.save(schedule) tasks = subprocess.check_output(['ps', 'xw']).decode('utf8') sublime_is_running = "Sublime" in tasks or "sublime_text" in tasks if sublime_is_running: subprocess.Popen(["subl", "-b", "--command", "unit_testing_run_scheduler"]) else: subprocess.Popen(["subl"]) # wait until the file has something while (not os.path.exists(outfile) or os.stat(outfile).st_size == 0): sys.stdout.write('.') sys.stdout.flush() time.sleep(1) print("\nstart to read output") # todo: use notification instead of polling with open(outfile, 'r') as f: while True: result = f.read() m = re.search("^(OK|FAILED|ERROR)", result, re.MULTILINE) # break when OK or Failed if m: break time.sleep(0.2) f.seek(0) result = f.read() print(result) success = m.group(0)=="OK" if not success: sys.exit(1)
Python
0.000001
@@ -1483,16 +1483,37 @@ mething%0A +startt = time.time()%0A while (n @@ -1623,16 +1623,125 @@ flush()%0A + if time.time()-startt %3E 60:%0A print(%22Timeout: Sublime Text is not responding%22)%0A sys.exit(1)%0A time
c9e676b22b6d57e14106df22eeeaa586a7c6d356
Revert r4157.
scipy/linalg/setup.py
scipy/linalg/setup.py
## Automatically adapted for scipy Oct 18, 2005 by #!/usr/bin/env python import os import sys import re from distutils.dep_util import newer_group, newer from glob import glob from os.path import join #------------------- # To skip wrapping single precision atlas/lapack/blas routines, set # the following flag to True: skip_single_routines = 0 # Some OS distributions (e.g. Redhat, Suse) provide a blas library that # is built using incomplete blas sources that come with lapack tar-ball. # In order to use such a library in scipy.linalg, the following flag # must be set to True: using_lapack_blas = 0 #-------------------- def needs_cblas_wrapper(info): """Returns true if needs c wrapper around cblas for calling from fortran.""" import re r_accel = re.compile("Accelerate") r_vec = re.compile("vecLib") res = False try: tmpstr = info['extra_link_args'] for i in tmpstr: if r_accel.search(i) or r_vec.search(i): res = True except KeyError: pass return res def configuration(parent_package='',top_path=None): from numpy.distutils.system_info import get_info, NotFoundError from numpy.distutils.misc_util import Configuration from interface_gen import generate_interface config = Configuration('linalg',parent_package,top_path) lapack_opt = get_info('lapack_opt') if not lapack_opt: raise NotFoundError,'no lapack/blas resources found' atlas_version = ([v[3:-3] for k,v in lapack_opt.get('define_macros',[]) \ if k=='ATLAS_INFO']+[None])[0] if atlas_version: print 'ATLAS version',atlas_version target_dir = '' skip_names = {'clapack':[],'flapack':[],'cblas':[],'fblas':[]} if skip_single_routines: target_dir = 'dbl' skip_names['clapack'].extend(\ 'sgesv sgetrf cgetrf sgetrs cgetrs sgetri cgetri'\ ' sposv cposv spotrf cpotrf spotrs cpotrs spotri cpotri'\ ' slauum clauum strtri ctrtri'.split()) skip_names['flapack'].extend(skip_names['clapack']) skip_names['flapack'].extend(\ 'sgesdd cgesdd sgelss cgelss sgeqrf cgeqrf sgeev cgeev'\ ' sgegv cgegv ssyev cheev slaswp claswp sgees cgees' ' sggev cggev'.split()) skip_names['cblas'].extend('saxpy caxpy'.split()) skip_names['fblas'].extend(skip_names['cblas']) skip_names['fblas'].extend(\ 'srotg crotg srotmg srot csrot srotm sswap cswap sscal cscal'\ ' csscal scopy ccopy sdot cdotu cdotc snrm2 scnrm2 sasum scasum'\ ' isamax icamax sgemv cgemv chemv ssymv strmv ctrmv'\ ' sgemm cgemm'.split()) if using_lapack_blas: target_dir = join(target_dir,'blas') skip_names['fblas'].extend(\ 'drotmg srotmg drotm srotm'.split()) if atlas_version=='3.2.1_pre3.3.6': target_dir = join(target_dir,'atlas321') skip_names['clapack'].extend(\ 'sgetri dgetri cgetri zgetri spotri dpotri cpotri zpotri'\ ' slauum dlauum clauum zlauum strtri dtrtri ctrtri ztrtri'.split()) elif atlas_version>'3.4.0' and atlas_version<='3.5.12': skip_names['clapack'].extend('cpotrf zpotrf'.split()) def generate_pyf(extension, build_dir): name = extension.name.split('.')[-1] target = join(build_dir,target_dir,name+'.pyf') if name[0]=='c' and atlas_version is None and newer(__file__,target): f = open(target,'w') f.write('python module '+name+'\n') f.write('usercode void empty_module(void) {}\n') f.write('interface\n') f.write('subroutine empty_module()\n') f.write('intent(c) empty_module\n') f.write('end subroutine empty_module\n') f.write('end interface\nend python module'+name+'\n') f.close() return target if newer_group(extension.depends,target): generate_interface(name, extension.depends[0], target, skip_names[name]) return target depends = ['generic_fblas.pyf', 'generic_fblas1.pyf', 'generic_fblas2.pyf', 
'generic_fblas3.pyf', 'interface_gen.py', join('src','fblaswrap_veclib_c.c'), join('src','fblaswrap.f'), ] # fblas: if needs_cblas_wrapper(lapack_opt): config.add_extension('fblas', sources = [generate_pyf, join('src','fblaswrap_veclib_c.c')], depends = depends, extra_info = lapack_opt ) else: config.add_extension('fblas', sources = [generate_pyf, join('src','fblaswrap.f')], depends = depends, extra_info = lapack_opt ) # cblas: config.add_extension('cblas', sources = [generate_pyf], depends = ['generic_cblas.pyf', 'generic_cblas1.pyf', 'interface_gen.py'], extra_info = lapack_opt ) # flapack: config.add_extension('flapack', sources = [generate_pyf], depends = ['generic_flapack.pyf', 'flapack_user_routines.pyf', 'interface_gen.py'], extra_info = lapack_opt ) # clapack: config.add_extension('clapack', sources = [generate_pyf], depends = ['generic_clapack.pyf', 'interface_gen.py'], extra_info = lapack_opt ) # _flinalg: config.add_extension('_flinalg', sources = [join('src','det.f'),join('src','lu.f')], extra_info = lapack_opt ) # calc_lwork: config.add_extension('calc_lwork', [join('src','calc_lwork.f')], extra_info = lapack_opt ) # atlas_version: config.add_extension('atlas_version', ['atlas_version.c'], extra_info = lapack_opt ) config.add_data_dir('tests') return config if __name__ == '__main__': from numpy.distutils.core import setup from linalg_version import linalg_version setup(version=linalg_version, **configuration(top_path='').todict())
Python
0.000061
@@ -1864,16 +1864,22 @@ 'sgesv +cgesv sgetrf c
42f4ed206a9c79799b9bb0b13b829c8cf9c979e4
write to file
scraper/parse_dump.py
scraper/parse_dump.py
#!/usr/bin/python # Simple script to parse the devpost dump and place results in a json import os import json from multiprocessing import Pool from bs4 import BeautifulSoup OUTPUT_FNAME="devpostdump.json" DUMP_DIR = "output/" projects = [os.path.join(DUMP_DIR, f) for f in os.listdir(DUMP_DIR)] # projects = projects[:100] projects_json = [] def process_project(inp): i, project = inp print "%d %s" % (i, project) proj_html = BeautifulSoup(open(project, 'r').read(), 'html.parser') proj_data = {} proj_data['name'] = proj_html.find(id='app-title').string proj_data['id'] = project[len(DUMP_DIR):] # Number of likes and comments num_likes = proj_html.find('span', { 'class' : 'ss-heart' }).next_sibling.next_sibling proj_data['num_likes'] = int(num_likes.string) if num_likes is not None else 0 num_comments = proj_html.find('span', { 'class' : 'ss-quote' }).next_sibling.next_sibling proj_data['num_comments'] = int(num_comments.string) if num_comments is not None else 0 # Length of the description proj_data['description_length'] = len(proj_html.find(id="app-details").get_text()) # Number of contributors proj_data['num_contributors'] = len(proj_html.find_all('li', { 'class' : 'software-team-member' })) # Tags proj_data['tags'] = sorted([tag.string for tag in proj_html.find_all('span', { 'class' : 'cp-tag' })]) # Hackathon details hackathon_deets = proj_html.find('div', { 'class' : 'software-list-content' }) if hackathon_deets: proj_data['hackathon_name'] = hackathon_deets.find('a').string proj_data['num_prizes'] = len(hackathon_deets.find_all('span', { 'class' : 'winner' })) return proj_data if __name__ == '__main__': num_cores = multiprocessing.cpu_count() p = Pool(num_cores) json = p.map(process_project, enumerate(projects[:1000]))
Python
0.000001
@@ -1820,19 +1820,16 @@ s)%0A j -son = p.map @@ -1873,10 +1873,111 @@ :1000%5D)) +%0A print %22Creating json file%22%0A with open(OUTPUT_FNAME, %22w+%22) as f:%0A f.write(json.dump(j)) %0A%0A
8dc3cf60a57d6cce644c619385985c781c60c638
use the begin of the line
commands.py
commands.py
import sublime import sublime_plugin import os from .textgetter import TextGetter, \ RTextGetter, \ PythonTextGetter, \ JuliaTextGetter, \ MarkDownTextGetter from .textsender import TextSender, PythonTextSender class SendTextPlusCommand(sublime_plugin.WindowCommand): def run(self, cmd=None, prog=None): self.window.active_view().run_command( "send_text_plus_internal", {"cmd": cmd, "prog": prog} ) class SendTextPlusInternalCommand(sublime_plugin.TextCommand): @staticmethod def escape_dquote(cmd): cmd = cmd.replace('\\', '\\\\') cmd = cmd.replace('"', '\\"') return cmd def resolve(self, cmd): view = self.view file = view.file_name() if file: file_name = os.path.basename(file) file_path = os.path.dirname(file) file_base_name, file_ext = os.path.splitext(file_name) cmd = cmd.replace("$file_path", self.escape_dquote(file_path)) cmd = cmd.replace("$file_name", self.escape_dquote(file_name)) cmd = cmd.replace("$file_base_name", self.escape_dquote(file_base_name)) cmd = cmd.replace("$file_extension", file_ext) cmd = cmd.replace("$file", self.escape_dquote(file)) pd = view.window().project_data() if pd and "folders" in pd and len(pd["folders"]) > 0: project_path = pd["folders"][0].get("path") if project_path: cmd = cmd.replace("$project_path", self.escape_dquote(project_path)) # resolve $project_path again if file and file_path: cmd = cmd.replace("$project_path", self.escape_dquote(file_path)) return cmd def run(self, edit, cmd=None, prog=None): view = self.view pt = view.sel()[0].begin() if len(view.sel()) > 0 else 0 if cmd: cmd = self.resolve(cmd) else: if view.score_selector(pt, "source.r"): getter = RTextGetter(view) elif view.score_selector(pt, "source.python"): getter = PythonTextGetter(view) elif view.score_selector(pt, "source.julia"): getter = JuliaTextGetter(view) elif view.score_selector(pt, "text.html.markdown"): getter = MarkDownTextGetter(view) else: getter = TextGetter(view) cmd = getter.get_text() if view.score_selector(pt, "source.python"): sender = PythonTextSender(view, prog) else: sender = TextSender(view, prog) sender.send_text(cmd) class SendTextPlusChooseProgramCommand(sublime_plugin.WindowCommand): def show_quick_panel(self, options, done): sublime.set_timeout(lambda: self.window.show_quick_panel(options, done), 10) def run(self): plat = sublime.platform() if plat == 'osx': self.app_list = ["[Defaults]", "Terminal", "iTerm", "R", "RStudio", "Chrome-RStudio", "Chrome-Jupyter", "Safari-RStudio", "Safari-Jupyter", "tmux", "screen", "SublimeREPL"] elif plat == "windows": self.app_list = ["[Defaults]", "Cmder", "Cygwin", "R32", "R64", "RStudio", "SublimeREPL"] elif plat == "linux": self.app_list = ["[Defaults]", "tmux", "screen", "SublimeREPL"] else: sublime.error_message("Platform not supported!") self.show_quick_panel(self.app_list, self.on_done) def on_done(self, action): if action == -1: return settings = sublime.load_settings('SendText+.sublime-settings') settings.set("prog", self.app_list[action] if action > 0 else None) sublime.save_settings('SendText+.sublime-settings') class SendTextPlusListener(sublime_plugin.EventListener): def on_query_context(self, view, key, operator, operand, match_all): if view.is_scratch() or view.settings().get('is_widget'): return if key == 'send_text_plus_keybinds': settings = sublime.load_settings('SendText+.sublime-settings') return settings.get("send_text_plus_keybinds", True)
Python
0.006477
@@ -1814,16 +1814,26 @@ pt = +view.line( view.sel @@ -1837,16 +1837,17 @@ sel()%5B0%5D +) .begin()
c924f2d4a0072299d477a05da675646ce306941a
fix host name
wallace/heroku/clock.py
wallace/heroku/clock.py
"""A clock process.""" from apscheduler.schedulers.blocking import BlockingScheduler from wallace import db import os import imp import inspect from psiturk.models import Participant from datetime import datetime from psiturk.psiturk_config import PsiturkConfig from boto.mturk.connection import MTurkConnection import requests import smtplib from email.mime.text import MIMEText import subprocess config = PsiturkConfig() config.load_config() # Specify the experiment. try: exp = imp.load_source('experiment', "wallace_experiment.py") classes = inspect.getmembers(exp, inspect.isclass) exps = [c for c in classes if (c[1].__bases__[0].__name__ in "Experiment")] this_experiment = exps[0][0] mod = __import__('wallace_experiment', fromlist=[this_experiment]) experiment = getattr(mod, this_experiment) except ImportError: print "Error: Could not import experiment." session = db.session aws_access_key_id = config.get('AWS Access', 'aws_access_key_id') aws_secret_access_key = config.get('AWS Access', 'aws_secret_access_key') conn = MTurkConnection(aws_access_key_id, aws_secret_access_key) scheduler = BlockingScheduler() @scheduler.scheduled_job('interval', minutes=0.25) def check_db_for_missing_notifications(): # get all participants with status < 100 participants = Participant.query.all() participants = [p for p in participants if p.status < 100] print "{} participants found".format(len(participants)) # get current time current_time = datetime.now() print "current time is {}".format(current_time) # get experiment duration in seconds duration = float(config.get('HIT Configuration', 'duration'))*60*60 print "hit duration is {}".format(duration) print "bhgkfbshkgf bsgkf bsgjk fbsh kg bfshjk gfhjks" data_string = '{"auto_recruit": "false"}' try: host = os.environ['HOST'] print host subprocess.call( 'curl -n -X PATCH https://api.heroku.com/apps/{}/config-vars \ -H "Accept: application/vnd.heroku+json; version=3" \ -H "Content-Type: application/json" \ -d {}'.format(host, data_string) ) except: import traceback traceback.print_exc() print "2574257429 y5742y5742 7546279 4567296 457296 754892" # for each participant, if current_time - start_time > duration + 5 mins emergency = False for p in participants: p_time = (current_time - p.beginhit).total_seconds if p_time > (duration + 300): emergency = True print "participant {} has been playing for too long and no notification has arrived - running emergency code".format(p) # get their assignment assignment_id = p.assignmentid # ask amazon for the status of the assignment try: assignment = conn.get_assignment(assignment_id) status = assignment.Assignment.AssignmentStatus except: import traceback traceback.print_exc() print "assignment status from AWS is {}".format(status) if status in ["Submitted", "Approved", "Rejected"]: # if it has been submitted then resend a submitted notification args = { 'Event.1.EventType': 'AssignmentSubmitted', 'Event.1.AssignmentId': assignment_id } requests.post("http://" + os.environ['HOST'] + '/notifications', data=args) # send the researcher an email to let them know username = os.getenv('wallace_email_username') fromaddr = username + "@gmail.com" email_password = os.getenv("wallace_email_key") toaddr = config.get('HIT Configuration', 'contact_email_on_error') msg = MIMEText("Dearest Friend,\n\nI am writing to let you know that at {}, during my regular (and thoroughly enjoyable) \ perousal of the most charming participant data table, I happened to notice that assignment \ {} has been taking longer than we were expecting. 
I recall you had suggested {} minutes as an upper limit \ for what was an acceptable length of time for each assignement, however this assignment had been underway \ for a shocking {} minutes, a full {} minutes over your allowance. I immediately dispatched a \ telegram to our mutual friends at AWS and they were able to assure me that although the notification \ had failed to be correctly processed, the assignment had in fact been completed. Rather than trouble you, \ I dealt with this myself and I can assure you there is no immediate cause for concern. \ Nonetheless, for my own peace of mind, I would appreciate you taking the time to look into this matter \ at your earliest convenience.\n\nI remain your faithful and obedient servant,\nAlfred R. Wallace\n\nP.S. Please do not respond to this message, \ I am busy with other matters.".format(datetime.now(), assignment_id, round(duration/60), round(p_time/60), round((p_time-duration)/60))) msg['Subject'] = "A matter of minor concern." server = smtplib.SMTP('smtp.gmail.com:587') server.starttls() server.login(username, email_password) server.sendmail(fromaddr, toaddr, msg.as_string()) server.quit() else: data_string = '{"auto_recruit": "false"' subprocess.call( "curl -n -X PATCH https://api.heroku.com/apps/{}/config-vars \ -H 'Accept: application/vnd.heroku+json; version=3' \ -H 'Content-Type: application/json' \ -d {}".format(os.environ['HOST'], data_string) ) # if it has not been submitted shut everything down pass # and send the researcher an email to let them know if emergency is False: print "No evidence of missing notifications :-)" scheduler.start()
Python
0.000357
@@ -1888,16 +1888,61 @@ 'HOST'%5D%0A + host = host%5B:-len(%22.herokuapp.com%22)%5D%0A @@ -2050,30 +2050,16 @@ ig-vars -%5C%0A -H %22Acce @@ -2101,30 +2101,16 @@ rsion=3%22 - %5C%0A -H %22Con @@ -2137,30 +2137,16 @@ on/json%22 - %5C%0A -d %7B%7D'.
94182e97ed1635e3aa4993f3db69c248e16b7600
Undo previous commit
unnaturalcode/ucUser.py
unnaturalcode/ucUser.py
# Copyright 2013, 2014 Joshua Charles Campbell # # This file is part of UnnaturalCode. # # UnnaturalCode is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # UnnaturalCode is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with UnnaturalCode. If not, see <http://www.gnu.org/licenses/>. from unnaturalcode.ucUtil import * from unnaturalcode.unnaturalCode import * from unnaturalcode.pythonSource import * from unnaturalcode.mitlmCorpus import * from unnaturalcode.sourceModel import * from unnaturalcode.genericSource import * import shutil class genericUser(object): def getHome(self): self.homeDir = os.path.expanduser("~") self.ucDir = os.getenv("UC_DATA", os.path.join(self.homeDir, ".unnaturalCode")) if not os.path.exists(self.ucDir): os.mknod(self.ucDir) assert os.access(self.ucDir, os.X_OK & os.R_OK & os.W_OK) assert os.path.isdir(self.ucDir) def __init__(self, ngram_order=10): self.getHome() self.readCorpus = os.path.join(self.ucDir, 'genericCorpus') if not os.path.exists(self.readCorpus): os.makedirs(self.readCorpus) self.logFilePath = os.path.join(self.ucDir, 'genericLogFile') self.lm = genericSource self.basicSetup(ngram_order) def basicSetup(self, ngram_order=10): self.uc = unnaturalCode(logFilePath=self.logFilePath) # Oiugh... thank you, dependecy injection. self.cm = mitlmCorpus(readCorpus=self.readCorpus, writeCorpus=self.readCorpus, uc=self.uc, order=ngram_order) self.sm = sourceModel(cm=self.cm, language=self.lm) def release(self): self.cm.release() def delete(self): # Ain't gotta do nothing if the file doesn't exist. if os.path.exists(self.readCorpus): replacementPath = self.readCorpus + '.bak' shutil.move(self.readCorpus, replacementPath) class pyUser(genericUser): def __init__(self, ngram_order=10): self.getHome() self.readCorpus = os.path.join(self.ucDir, 'pyCorpus') if not os.path.exists(self.readCorpus): os.mknod(self.readCorpus) self.logFilePath = os.path.join(self.ucDir, 'pyLogFile') self.lm = pythonSource self.basicSetup(ngram_order)
Python
0
@@ -1266,28 +1266,31 @@ os.m -knod +akedirs (self.ucDir) @@ -2673,12 +2673,15 @@ os.m -knod +akedirs (sel
c8a010e6e9a917c50843dd10303f8f9497b4687c
Bump version
waterbutler/__init__.py
waterbutler/__init__.py
__version__ = '0.2.2' __import__("pkg_resources").declare_namespace(__name__)
Python
0
@@ -12,17 +12,17 @@ = '0.2. -2 +3 '%0D__impo
d377c835f20e839dfd5e95c5ab14db98a441c6a8
Add product variation redirect test
shuup_tests/admin/test_product_variation.py
shuup_tests/admin/test_product_variation.py
# -*- coding: utf-8 -*- # This file is part of Shuup. # # Copyright (c) 2012-2019, Shoop Commerce Ltd. All rights reserved. # # This source code is licensed under the OSL-3.0 license found in the # LICENSE file in the root directory of this source tree. import pytest import six from django.forms import formset_factory from shuup.admin.modules.products.forms import ( SimpleVariationChildForm, SimpleVariationChildFormSet, VariableVariationChildrenForm ) from shuup.core.excs import ImpossibleProductModeException from shuup.testing.factories import create_product from shuup.utils.excs import Problem from shuup_tests.utils import printable_gibberish from shuup_tests.utils.forms import get_form_data @pytest.mark.django_db def test_simple_children_formset(): FormSet = formset_factory(SimpleVariationChildForm, SimpleVariationChildFormSet, extra=5, can_delete=True) parent = create_product(printable_gibberish()) child = create_product(printable_gibberish()) # No links yet formset = FormSet(parent_product=parent) assert formset.initial_form_count() == 0 # No children yet # Save a link data = dict(get_form_data(formset, True), **{"form-0-child": child.pk}) formset = FormSet(parent_product=parent, data=data) formset.save() assert parent.variation_children.filter(pk=child.pk).exists() # Got link'd! # Remove the link formset = FormSet(parent_product=parent) assert formset.initial_form_count() == 1 # Got the child here data = dict(get_form_data(formset, True), **{"form-0-DELETE": "1"}) formset = FormSet(parent_product=parent, data=data) formset.save() assert not parent.variation_children.exists() # Got unlinked @pytest.mark.django_db def test_impossible_simple_variation(): FormSet = formset_factory(SimpleVariationChildForm, SimpleVariationChildFormSet, extra=5, can_delete=True) parent = create_product(printable_gibberish()) child = create_product(printable_gibberish()) grandchild = create_product(printable_gibberish()) grandchild.link_to_parent(child) assert child.variation_children.exists() formset = FormSet(parent_product=parent) data = dict(get_form_data(formset, True), **{"form-0-child": child.pk}) formset = FormSet(parent_product=parent, data=data) assert formset.is_valid() # It's technically valid, but... with pytest.raises(Problem) as ei: formset.save() if six.PY3: # Can only test inner exceptions on Py3. Ah well. inner_exc = ei.value.__context__ assert isinstance(inner_exc, ImpossibleProductModeException) assert inner_exc.code == "multilevel" @pytest.mark.django_db def test_variable_variation_form(): var1 = printable_gibberish() var2 = printable_gibberish() parent = create_product(printable_gibberish()) for a in range(4): for b in range(3): child = create_product(printable_gibberish()) child.link_to_parent(parent, variables={var1: a, var2: b}) assert parent.variation_children.count() == 4 * 3 form = VariableVariationChildrenForm(parent_product=parent) assert len(form.fields) == 12 # TODO: Improve this test?
Python
0
@@ -312,16 +312,61 @@ _factory +%0Afrom django.core.urlresolvers import reverse %0A%0Afrom s @@ -612,16 +612,34 @@ _product +, get_default_shop %0Afrom sh @@ -1695,32 +1695,32 @@ formset.save()%0A - assert not p @@ -1771,16 +1771,1018 @@ inked%0A%0A%0A +@pytest.mark.django_db%0Adef test_simple_children_redirect(client):%0A %22%22%22%0A view should redirect from child url to parent url%0A with selected variation as param%0A %22%22%22%0A shop = get_default_shop()%0A parent = create_product(printable_gibberish(), shop)%0A child = create_product(printable_gibberish(), shop)%0A%0A children_url = reverse(%0A 'shuup:product',%0A kwargs=dict(pk=child.pk, slug=child.slug)%0A )%0A response = client.get(children_url)%0A assert response.status_code == 200%0A%0A child.link_to_parent(parent)%0A response = client.get(children_url, follow=True)%0A assert response.status_code == 200%0A%0A last_url, status_code = response.redirect_chain%5B-1%5D%0A assert status_code == 302%0A%0A expected_url = '%7B%7D?variation=%7B%7D'.format(reverse(%0A 'shuup:product',%0A kwargs=dict(pk=parent.pk, slug=parent.slug)%0A ), child.sku)%0A if last_url.startswith('http'):%0A assert last_url.endswith(expected_url)%0A else:%0A assert last_url == expected_url%0A%0A%0A @pytest.
61d69f2cc850df49186f88c540d22cf4c0462b41
Add test for full-path import of Django View class
python/ql/test/experimental/library-tests/frameworks/django-v2-v3/routing_test.py
python/ql/test/experimental/library-tests/frameworks/django-v2-v3/routing_test.py
"""testing views for Django 2.x and 3.x""" from django.urls import path, re_path from django.http import HttpResponse, HttpResponseRedirect, JsonResponse, HttpResponseNotFound from django.views import View def url_match_xss(request, foo, bar, no_taint=None): # $requestHandler routedParameter=foo routedParameter=bar return HttpResponse('url_match_xss: {} {}'.format(foo, bar)) # $HttpResponse def get_params_xss(request): # $requestHandler return HttpResponse(request.GET.get("untrusted")) # $HttpResponse def post_params_xss(request): # $requestHandler return HttpResponse(request.POST.get("untrusted")) # $HttpResponse def http_resp_write(request): # $requestHandler rsp = HttpResponse() # $HttpResponse rsp.write(request.GET.get("untrusted")) # $HttpResponse return rsp class Foo(object): # Note: since Foo is used as the super type in a class view, it will be able to handle requests. def post(self, request, untrusted): # $ MISSING: requestHandler routedParameter=untrusted return HttpResponse('Foo post: {}'.format(untrusted)) # $HttpResponse class ClassView(View, Foo): def get(self, request, untrusted): # $ requestHandler routedParameter=untrusted return HttpResponse('ClassView get: {}'.format(untrusted)) # $HttpResponse def show_articles(request, page_number=1): # $requestHandler routedParameter=page_number page_number = int(page_number) return HttpResponse('articles page: {}'.format(page_number)) # $HttpResponse def xxs_positional_arg(request, arg0, arg1, no_taint=None): # $requestHandler routedParameter=arg0 routedParameter=arg1 return HttpResponse('xxs_positional_arg: {} {}'.format(arg0, arg1)) # $HttpResponse urlpatterns = [ re_path(r"^url_match/(?P<foo>[^/]+)/(?P<bar>[^/]+)", url_match_xss), # $routeSetup="^url_match/(?P<foo>[^/]+)/(?P<bar>[^/]+)" re_path(r"^get_params", get_params_xss), # $routeSetup="^get_params" re_path(r"^post_params", post_params_xss), # $routeSetup="^post_params" re_path(r"^http_resp_write", http_resp_write), # $routeSetup="^http_resp_write" re_path(r"^class_view/(?P<untrusted>.+)", ClassView.as_view()), # $routeSetup="^class_view/(?P<untrusted>.+)" # one pattern to support `articles/page-<n>` and ensuring that articles/ goes to page-1 re_path(r"articles/^(?:page-(?P<page_number>\d+)/)?", show_articles), # $routeSetup="articles/^(?:page-(?P<page_number>\d+)/)?" 
# passing as positional argument is not the recommended way of doing things, but it is certainly # possible re_path(r"^([^/]+)/(?:foo|bar)/([^/]+)", xxs_positional_arg, name='xxs_positional_arg'), # $routeSetup="^([^/]+)/(?:foo|bar)/([^/]+)" ] # Show we understand the keyword arguments to django.urls.re_path def re_path_kwargs(request): # $requestHandler return HttpResponse('re_path_kwargs') # $HttpResponse urlpatterns = [ re_path(view=re_path_kwargs, route=r"^specifying-as-kwargs-is-not-a-problem") # $routeSetup="^specifying-as-kwargs-is-not-a-problem" ] ################################################################################ # Using path ################################################################################ # saying page_number is an externally controlled *string* is a bit strange, when we have an int converter :O def page_number(request, page_number=1): # $requestHandler routedParameter=page_number return HttpResponse('page_number: {}'.format(page_number)) # $HttpResponse def foo_bar_baz(request, foo, bar, baz): # $requestHandler routedParameter=foo routedParameter=bar routedParameter=baz return HttpResponse('foo_bar_baz: {} {} {}'.format(foo, bar, baz)) # $HttpResponse def path_kwargs(request, foo, bar): # $requestHandler routedParameter=foo routedParameter=bar return HttpResponse('path_kwargs: {} {} {}'.format(foo, bar)) # $HttpResponse def not_valid_identifier(request): # $requestHandler return HttpResponse('<foo!>') # $HttpResponse urlpatterns = [ path("articles/", page_number), # $routeSetup="articles/" path("articles/page-<int:page_number>", page_number), # $routeSetup="articles/page-<int:page_number>" path("<int:foo>/<str:bar>/<baz>", foo_bar_baz, name='foo-bar-baz'), # $routeSetup="<int:foo>/<str:bar>/<baz>" path(view=path_kwargs, route="<foo>/<bar>"), # $routeSetup="<foo>/<bar>" # We should not report there is a request parameter called `not_valid!` path("not_valid/<not_valid!>", not_valid_identifier), # $routeSetup="not_valid/<not_valid!>" ] # This version 1.x way of defining urls is deprecated in Django 3.1, but still works from django.conf.urls import url def deprecated(request): # $requestHandler return HttpResponse('deprecated') # $HttpResponse urlpatterns = [ url(r"^deprecated/", deprecated), # $routeSetup="^deprecated/" ] class PossiblyNotRouted(View): # Even if our analysis can't find a route-setup for this class, we should still # consider it to be a handle incoming HTTP requests def get(self, request, possibly_not_routed=42): # $ requestHandler routedParameter=possibly_not_routed return HttpResponse('PossiblyNotRouted get: {}'.format(possibly_not_routed)) # $HttpResponse
Python
0
@@ -199,16 +199,49 @@ rt View%0A +import django.views.generic.base%0A %0A%0Adef ur @@ -1337,24 +1337,208 @@ pResponse%0A%0A%0A +# direct import with full path to %60View%60 class (not supported)%0Aclass ClassView2(django.views.generic.base.View):%0A def get(self, request): # $ MISSING: requestHandler%0A pass%0A%0A%0A def show_art
b0e3886ee24689f1eb249e0ed3c66d887b317f60
Delete table test
tst/test.py
tst/test.py
#!/usr/bin/python import grpc import keyvalue_pb2 import os import sys if __name__ == '__main__': conn_str = os.environ['GRPCROCKSDB_PORT'].split("/")[2] print "Connecting on: " + conn_str channel = grpc.insecure_channel(conn_str) stub = keyvalue_pb2.KeyValueStub(channel) create_table_res = stub.CreateTable(keyvalue_pb2.CreateTableReq(tablename='test-table-1')) put_res = stub.Put(keyvalue_pb2.PutReq(tablename='test-table-1',item=keyvalue_pb2.Item(key='myKey', value='12345'))) get_res = stub.Get(keyvalue_pb2.GetReq(tablename='test-table-1',key='myKey')) assert get_res.item.value == "12345" try: put_res = stub.Put(keyvalue_pb2.PutReq(tablename='test-table-1',item=keyvalue_pb2.Item(key='myKey', value='99999'),condition="hello")) print "Condition should not be met!" sys.exit(1) except Exception: pass get_res = stub.Get(keyvalue_pb2.GetReq(tablename='test-table-1',key='myKey')) assert get_res.item.value == "12345" put_res = stub.Put(keyvalue_pb2.PutReq(tablename='test-table-1',item=keyvalue_pb2.Item(key='myKey', value='99999'),condition="12345")) get_res = stub.Get(keyvalue_pb2.GetReq(tablename='test-table-1',key='myKey')) assert get_res.item.value == "99999"
Python
0.000002
@@ -1261,8 +1261,103 @@ %2299999%22%0A + delete_table_res = stub.DeleteTable(keyvalue_pb2.DeleteTableReq(tablename='test-table-1'))%0A
c45017a80c17fcb6f0bb2595392a24b287aba3ca
Update collect_sql to support objects of different types but same name
smartmin/management/commands/collect_sql.py
smartmin/management/commands/collect_sql.py
from __future__ import print_function, unicode_literals import six import sqlparse from collections import OrderedDict from datetime import datetime from django.core.management.base import BaseCommand from django.db.migrations import RunSQL from django.db.migrations.executor import MigrationExecutor from enum import Enum from six.moves import filter from sqlparse import sql from sqlparse import tokens as sql_tokens from textwrap import dedent class InvalidSQLException(Exception): def __init__(self, s): super(InvalidSQLException, self).__init__("Invalid SQL: %s" % s) class SqlType(Enum): """ The different SQL types that we can extract from migrations """ INDEX = 1 FUNCTION = 2 TRIGGER = 3 @six.python_2_unicode_compatible class SqlObjectOperation(object): def __init__(self, statement, sql_type, obj_name, is_create): self.statement = statement self.sql_type = sql_type self.obj_name = obj_name self.is_create = is_create @classmethod def parse(cls, raw): # get non-whitespace non-comment tokens tokens = [t for t in raw.tokens if not t.is_whitespace and not isinstance(t, sql.Comment)] if len(tokens) < 3: return None # check statement is of form "CREATE|DROP TYPE ..." if tokens[0].ttype != sql_tokens.DDL or tokens[1].ttype != sql_tokens.Keyword: return None if tokens[0].value.upper() in ('CREATE', 'CREATE OR REPLACE'): is_create = True elif tokens[0].value.upper() in ('DROP',): is_create = False else: return None try: sql_type = SqlType[tokens[1].value.upper()] except KeyError: return None if sql_type == SqlType.FUNCTION: function = next(filter(lambda t: isinstance(t, sql.Function), tokens), None) if not function: raise InvalidSQLException(raw.value) name = function.get_name() else: identifier = next(filter(lambda t: isinstance(t, sql.Identifier), tokens), None) if not identifier: raise InvalidSQLException(raw.value) name = identifier.value return cls(raw.value.strip(), sql_type, name, is_create) def __str__(self): return self.statement[:79].replace('\n', ' ') class Command(BaseCommand): # pragma: no cover help = """Collects SQL statements from migrations to compile lists of indexes, functions and triggers""" def add_arguments(self, parser): parser.add_argument( '--preserve-order', action='store_true', dest='preserve_order', default=False, help='Whether to preserve order of operations rather than sorting by object name.', ) parser.add_argument( '--output-dir', action='store', dest='output_dir', default='temba/sql', help='The output directory for generated SQL files.', ) def handle(self, *args, **options): preserve_order = options.get('preserve_order') output_dir = options.get('output_dir') self.verbosity = options.get('verbosity') self.stdout.write("Loading migrations...") migrations = self.load_migrations() self.stdout.write("Loaded %s migrations" % self.style.SUCCESS(len(migrations))) self.stdout.write("Extracting SQL operations...") operations = self.extract_operations(migrations) self.stdout.write("Extracted %s SQL operations" % self.style.SUCCESS(len(operations))) self.stdout.write("Normalizing SQL operations...") normalized = self.normalize_operations(operations) self.stdout.write("Removed %s redundant operations" % self.style.SUCCESS(len(operations) - len(normalized))) self.write_type_dumps(normalized, preserve_order, output_dir) def load_migrations(self): """ Loads all migrations in the order they would be applied to a clean database """ executor = MigrationExecutor(connection=None) # create the forwards plan Django would follow on an empty database plan = 
executor.migration_plan(executor.loader.graph.leaf_nodes(), clean_start=True) if self.verbosity >= 2: for migration, _ in plan: self.stdout.write(" > %s" % migration) return [m[0] for m in plan] def extract_operations(self, migrations): """ Extract SQL operations from the given migrations """ operations = [] for migration in migrations: for operation in migration.operations: if isinstance(operation, RunSQL): statements = sqlparse.parse(dedent(operation.sql)) for statement in statements: operation = SqlObjectOperation.parse(statement) if operation: operations.append(operation) if self.verbosity >= 2: self.stdout.write(" > %s (%s)" % (operation, migration)) return operations def normalize_operations(self, operations): """ Removes redundant SQL operations - e.g. a CREATE X followed by a DROP X """ normalized = OrderedDict() for operation in operations: # do we already have an operation for this object? if operation.obj_name in normalized: if self.verbosity >= 2: self.stdout.write(" < %s" % normalized[operation.obj_name]) del normalized[operation.obj_name] # don't add DROP operations for objects not previously created if operation.is_create: normalized[operation.obj_name] = operation elif self.verbosity >= 2: self.stdout.write(" < %s" % operation) return normalized.values() def write_type_dumps(self, operations, preserve_order, output_dir): """ Splits the list of SQL operations by type and dumps these to separate files """ by_type = {SqlType.INDEX: [], SqlType.FUNCTION: [], SqlType.TRIGGER: []} for operation in operations: by_type[operation.sql_type].append(operation) # optionally sort each operation list by the object name if not preserve_order: for obj_type, ops in by_type.items(): by_type[obj_type] = sorted(ops, key=lambda o: o.obj_name) if by_type[SqlType.INDEX]: self.write_dump('indexes', by_type[SqlType.INDEX], output_dir) if by_type[SqlType.FUNCTION]: self.write_dump('functions', by_type[SqlType.FUNCTION], output_dir) if by_type[SqlType.TRIGGER]: self.write_dump('triggers', by_type[SqlType.TRIGGER], output_dir) def write_dump(self, type_label, operations, output_dir): filename = '%s/current_%s.sql' % (output_dir, type_label) with open(filename, 'w') as f: header = '-- Generated by collect_sql on %s UTC\n\n' % datetime.utcnow().strftime("%Y-%m-%d %H:%M") f.write(header) for operation in operations: f.write(operation.statement) f.write('\n\n') self.stdout.write("Saved %s" % filename)
Python
0
@@ -2362,10 +2362,11 @@ nt%5B: -79 +100 %5D.re @@ -5108,16 +5108,21 @@ te(%22 %3E %25 + -100 s (%25s)%22 @@ -5359,24 +5359,93 @@ eredDict()%0A%0A + def op_key(op):%0A return op.sql_type, op.obj_name%0A%0A for @@ -5682,32 +5682,31 @@ lized%5Bop -eration.obj_name +_key(operation) %5D)%0A%0A @@ -5734,32 +5734,31 @@ lized%5Bop -eration.obj_name +_key(operation) %5D%0A%0A @@ -5892,32 +5892,31 @@ lized%5Bop -eration.obj_name +_key(operation) %5D = oper
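Decoded, the URL-encoded diff above makes three changes: it widens the truncation in `__str__` from 79 to 100 characters, left-justifies the operation column in the verbose log line (`" > %-100s (%s)"`), and introduces an `op_key` helper so deduplication in `normalize_operations` is keyed on `(sql_type, obj_name)` rather than the object name alone. A minimal runnable sketch of that keyed deduplication (names are illustrative; for clarity the sketch also uses the key in the membership test, which the diff itself leaves on `obj_name`):

```python
from collections import OrderedDict, namedtuple

Op = namedtuple('Op', 'sql_type obj_name is_create')

def normalize_operations(operations):
    """Drop redundant pairs, e.g. a CREATE X later cancelled by a DROP X."""
    normalized = OrderedDict()

    def op_key(op):
        # key on type *and* name, so an INDEX and a TRIGGER that happen
        # to share a name no longer cancel each other out
        return op.sql_type, op.obj_name

    for operation in operations:
        if op_key(operation) in normalized:
            del normalized[op_key(operation)]
        if operation.is_create:
            normalized[op_key(operation)] = operation
    return list(normalized.values())

ops = [
    Op('TRIGGER', 'contact_changed', True),
    Op('INDEX', 'contact_changed', True),     # same name, different type: kept
    Op('TRIGGER', 'contact_changed', False),  # cancels only the trigger
]
assert normalize_operations(ops) == [Op('INDEX', 'contact_changed', True)]
```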
4437565016d0b1edc3b5a5f96d405cd0c41ca5b9
Use DataHandle timestep helpers in sample_project
smif/sample_project/models/energy_demand.py
smif/sample_project/models/energy_demand.py
"""Energy demand dummy model """ import numpy as np from smif.data_layer.data_handle import RelativeTimestep from smif.model.sector_model import SectorModel class EDMWrapper(SectorModel): """Energy model """ def initialise(self, initial_conditions): pass def simulate(self, data): # Get the current timestep now = data.current_timestep self.logger.info("EDMWrapper received inputs in %s", now) # Demonstrates how to get the value for a model parameter parameter_value = data.get_parameter('smart_meter_savings') self.logger.info('Smart meter savings: %s', parameter_value) # Demonstrates how to get the value for a model input # (defaults to the current time period) current_energy_demand = data.get_data('energy_demand') self.logger.info("Current energy demand in %s is %s", now, current_energy_demand) # Demonstrates how to get the value for a model input from the base # timeperiod base_energy_demand = data.get_data('energy_demand', RelativeTimestep.BASE) base_year = RelativeTimestep.BASE.resolve_relative_to(now, data.timesteps) self.logger.info("Base year energy demand in %s was %s", base_year, base_energy_demand) # Demonstrates how to get the value for a model input from the previous # timeperiod if now > base_year: prev_energy_demand = data.get_data('energy_demand', RelativeTimestep.PREVIOUS) prev_year = RelativeTimestep.PREVIOUS.resolve_relative_to( now, data.timesteps) self.logger.info("Previous energy demand in %s was %s", prev_year, prev_energy_demand) # Pretend to call the 'energy model' # This code prints out debug logging messages for each input # defined in the energy_demand configuration for name in self.inputs.names: time_intervals = self.inputs[name].get_interval_names() regions = self.inputs[name].get_region_names() for i, region in enumerate(regions): for j, interval in enumerate(time_intervals): self.logger.info( "%s %s %s", interval, region, data.get_data(name)[i, j]) # Write pretend results to data handler data.set_results("cost", np.ones((3, 1)) * 3) data.set_results("water_demand", np.ones((3, 1)) * 3) self.logger.info("EDMWrapper produced outputs in %s", now) def extract_obj(self, results): return 0
Python
0
@@ -1206,130 +1206,26 @@ r = -RelativeTimestep.BASE.resolve_relative_to(now,%0A data. +base_ timestep s)%0A @@ -1212,34 +1212,32 @@ ta.base_timestep -s) %0A self.lo @@ -1638,89 +1638,30 @@ r = -RelativeTimestep.PREVIOUS.resolve_relative_to(%0A now, data. +data.previous_ timestep s)%0A @@ -1656,18 +1656,16 @@ timestep -s) %0A
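Decoded, the diff replaces the manual `RelativeTimestep` resolution with the `DataHandle` helpers the commit message refers to: `base_year = data.base_timestep` and `prev_year = data.previous_timestep`. A rough stand-in for the semantics those properties are assumed to have (the property names come from the diff; the implementation here is an illustrative guess, not smif's code):

```python
class DataHandleSketch:
    """Illustrative stand-in for smif's DataHandle timestep helpers."""
    def __init__(self, timesteps, current_timestep):
        self.timesteps = list(timesteps)
        self.current_timestep = current_timestep

    @property
    def base_timestep(self):
        # first timestep of the model run
        return self.timesteps[0]

    @property
    def previous_timestep(self):
        # timestep immediately before the current one
        i = self.timesteps.index(self.current_timestep)
        return self.timesteps[i - 1] if i > 0 else None

data = DataHandleSketch([2010, 2015, 2020], current_timestep=2015)
assert data.base_timestep == 2010      # replaces RelativeTimestep.BASE.resolve_relative_to(now, ...)
assert data.previous_timestep == 2010  # replaces RelativeTimestep.PREVIOUS.resolve_relative_to(now, ...)
```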
d5fe2e21c8ed4e1dc66098e16011cb2f9094e573
Fix ConditionalJump to manually increment the PC
bytecode.py
bytecode.py
class BytecodeBase: autoincrement = True # For jump def __init__(self): # Eventually might want to add subclassed bytecodes here # Though __subclasses__ works quite well pass def execute(self, machine): pass class Push(BytecodeBase): def __init__(self, data): self.data = data def execute(self, machine): machine.push(self.data) class Pop(BytecodeBase): def execute(self, machine): return machine.pop() class ReadMemory(BytecodeBase): def __init__(self, index): self.index = index def execute(self, machine): machine.push(machine.read_memory(self.index)) class WriteMemory(BytecodeBase): def __init__(self, index, value): self.index, self.value = index, value def execute(self, machine): machine.write_memory(self.index, self.value) class Add(BytecodeBase): def execute(self, machine): a = machine.pop() b = machine.pop() machine.push(b+a) class Sub(BytecodeBase): def execute(self, machine): a = machine.pop() b = machine.pop() machine.push(b-a) class Mul(BytecodeBase): def execute(self, machine): a = machine.pop() b = machine.pop() machine.push(b*a) class Div(BytecodeBase): def execute(self, machine): a = machine.pop() b = machine.pop() machine.push(b/a) class Terminate(BytecodeBase): def execute(self, machine): machine.executing = False class Jump(BytecodeBase): def __init__(self, jump_to): self.autoincrement = False self.jump_to = jump_to def execute(self, machine): machine.pc = self.jump_to class ConditionalJump(BytecodeBase): def __init__(self, value, jump_to): self.value = value self.jump_to = jump_to def execute(self, machine): val = machine.pop() machine.push(val) if val == self.value: self.autoincrement = False machine.pc = self.jump_to class Print(BytecodeBase): def execute(self, machine): val = machine.pop() machine.push(val) print(val) class WriteTop(BytecodeBase): def __init__(self, index): # We need this because we can't layer bytecodes # WriteMemory(Pop()) Fails because only WriteMemory gets executed self.index = index def execute(self, machine): machine.write_memory(self.index, machine.pop())
Python
0.000001
@@ -1775,32 +1775,67 @@ alue, jump_to):%0A + self.autoincrement = False%0A self.val @@ -2009,34 +2009,47 @@ -self.autoincrement = Fa +machine.pc = self.jump_to%0A e lse +: %0A @@ -2064,38 +2064,29 @@ machine.pc ++ = -self.jump_to +1%0A %0A%0Aclass Prin
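Decoded, the fix moves `self.autoincrement = False` into `__init__` and has `execute` set the program counter on both branches, jumping on a match and otherwise incrementing the PC by hand (reconstruction from the diff; `BytecodeBase` as defined in the file above):

```python
class ConditionalJump(BytecodeBase):
    def __init__(self, value, jump_to):
        self.autoincrement = False     # this opcode always manages the PC itself
        self.value = value
        self.jump_to = jump_to

    def execute(self, machine):
        val = machine.pop()
        machine.push(val)              # peek: put the value back on the stack
        if val == self.value:
            machine.pc = self.jump_to  # take the jump
        else:
            machine.pc += 1            # fall through to the next instruction
```

In the old version the flag was flipped only when the jump fired and then stayed False, so a later pass through the same instruction that did not match would leave the PC stuck.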
eeea990b6409085e38df4be4c137b9e42e354ec6
remove more target="_blank" for @tofumatt (bug 807049)
mkt/site/context_processors.py
mkt/site/context_processors.py
from django.conf import settings from django.contrib.auth.models import AnonymousUser from tower import ugettext as _ from access import acl import amo from amo.context_processors import get_collect_timings from amo.urlresolvers import reverse import mkt from zadmin.models import get_config def global_settings(request): """Store global Marketplace-wide info. used in the header.""" account_links = [] tools_links = [] context = {} tools_title = _('Tools') if request.user.is_authenticated() and hasattr(request, 'amo_user'): amo_user = request.amo_user account_links = [] context['is_reviewer'] = acl.check_reviewer(request) if getattr(request, 'can_view_consumer', True): account_links = [ # TODO: Coming soon with payments. # {'text': _('Account History'), # 'href': reverse('account.purchases')}, {'text': _('Account Settings'), 'href': reverse('account.settings')}, ] account_links += [ {'text': _('Change Password'), 'href': 'https://login.persona.org/signin'}, {'text': _('Log out'), 'href': reverse('users.logout')}, ] if '/developers/' not in request.path: tools_links.append({'text': _('Developer Hub'), 'href': reverse('ecosystem.landing'), 'target': '_blank'}) if amo_user.is_app_developer: tools_links.append({'text': _('My Submissions'), 'href': reverse('mkt.developers.apps'), 'target': '_blank'}) if '/reviewers/' not in request.path and context['is_reviewer']: tools_links.append({'text': _('Reviewer Tools'), 'href': reverse('reviewers.home')}) if acl.action_allowed(request, 'Localizers', '%'): tools_links.append({'text': _('Localizer Tools'), 'href': '/localizers'}) if acl.action_allowed(request, 'AccountLookup', '%'): tools_links.append({'text': _('Lookup Tool'), 'href': reverse('lookup.home')}) if acl.action_allowed(request, 'Admin', '%'): tools_links.append({'text': _('Admin Tools'), 'href': reverse('zadmin.home')}) context['amo_user'] = amo_user else: context['amo_user'] = AnonymousUser() is_walled = ('amo.middleware.LoginRequiredMiddleware' in settings.MIDDLEWARE_CLASSES) context.update(account_links=account_links, settings=settings, amo=amo, mkt=mkt, APP=amo.FIREFOX, tools_links=tools_links, tools_title=tools_title, ADMIN_MESSAGE=get_config('site_notice'), collect_timings_percent=get_collect_timings(), is_admin=acl.action_allowed(request, 'Addons', 'Edit'), is_walled=is_walled) return context
Python
0.00002
@@ -1421,60 +1421,8 @@ ng') -,%0A 'target': '_blank' %7D)%0A @@ -1605,64 +1605,8 @@ ps') -,%0A 'target': '_blank' %7D)%0A
462813f8f10db550a4897bfcf20aa1d675543edb
Exclude system sources from test coverage
mesonbuild/scripts/coverage.py
mesonbuild/scripts/coverage.py
# Copyright 2017 The Meson development team # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # http://www.apache.org/licenses/LICENSE-2.0 # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from mesonbuild import environment import sys, os, subprocess def remove_dir_from_trace(lcov_command, covfile, dirname): tmpfile = covfile + '.tmp' subprocess.check_call([lcov_command, '--remove', covfile, dirname, '-o', tmpfile]) os.replace(tmpfile, covfile) def coverage(source_root, build_root, log_dir): (gcovr_exe, lcov_exe, genhtml_exe) = environment.find_coverage_tools() if gcovr_exe: subprocess.check_call([gcovr_exe, '-x', '-r', source_root, '-o', os.path.join(log_dir, 'coverage.xml'), ]) subprocess.check_call([gcovr_exe, '-r', source_root, '-o', os.path.join(log_dir, 'coverage.txt'), ]) if lcov_exe and genhtml_exe: htmloutdir = os.path.join(log_dir, 'coveragereport') covinfo = os.path.join(log_dir, 'coverage.info') initial_tracefile = covinfo + '.initial' run_tracefile = covinfo + '.run' subprocess.check_call([lcov_exe, '--directory', build_root, '--capture', '--initial', '--output-file', initial_tracefile]) subprocess.check_call([lcov_exe, '--directory', build_root, '--capture', '--output-file', run_tracefile, '--no-checksum', '--rc', 'lcov_branch_coverage=1', ]) # Join initial and test results. subprocess.check_call([lcov_exe, '-a', initial_tracefile, '-a', run_tracefile, '-o', covinfo]) remove_dir_from_trace(lcov_exe, covinfo, '/usr/include/*') remove_dir_from_trace(lcov_exe, covinfo, '/usr/local/include/*') subprocess.check_call([genhtml_exe, '--prefix', build_root, '--output-directory', htmloutdir, '--title', 'Code coverage', '--legend', '--show-details', '--branch-coverage', covinfo]) return 0 def run(args): if not os.path.isfile('build.ninja'): print('Coverage currently only works with the Ninja backend.') return 1 source_root, build_root, log_dir = args[:] return coverage(source_root, build_root, log_dir) if __name__ == '__main__': sys.exit(run(sys.argv[1:]))
Python
0
@@ -2687,32 +2687,95 @@ cal/include/*')%0A + remove_dir_from_trace(lcov_exe, covinfo, '/usr/src/*')%0A subproce
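Decoded, the change adds one more lcov filter so system sources under `/usr/src` are excluded from coverage alongside the system headers:

```python
        remove_dir_from_trace(lcov_exe, covinfo, '/usr/include/*')
        remove_dir_from_trace(lcov_exe, covinfo, '/usr/local/include/*')
        remove_dir_from_trace(lcov_exe, covinfo, '/usr/src/*')  # added by this commit
```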
95114756c60f40c3da76e3d83af3e10d64453e0f
Simplify classifier logic
classify.py
classify.py
from sklearn.feature_extraction.text import TfidfVectorizer from sklearn.ensemble import RandomForestRegressor from sklearn.pipeline import Pipeline from sklearn import cross_validation import numpy as np import sys import itertools import linecache import json import os import pickle from utils import Utilities class Classifier(object): def __init__(self, group, n_estimators, model_file=""): self.dataset_name = "commit_comments-dump.2015-01-29" self.group = group self.display = (self.group == "id") self.model_file = model_file self.n_estimators = n_estimators self.train_ids = set() def create_model(self, train=True): trained = False if self.model_file != "" and os.path.isfile(self.model_file): with open(self.model_file, 'rb') as f: objects = pickle.load(f) models = objects[0:-1] models[0][1].tokenizer = Utilities.split self.train_ids = objects[-1][1] trained = True else: models = [ ('tfidf', TfidfVectorizer(input='content', tokenizer=Utilities.split)), ('clf', RandomForestRegressor(n_estimators=self.n_estimators, n_jobs=2, min_samples_split=10)) ] self.regressor = Pipeline(models) if not trained and train: self.train() if self.model_file != "": with open(self.model_file, 'wb') as f: models[0][1].tokenizer = None models.append(('train_ids', self.train_ids)) pickle.dump(models, f) def get_train_data(self): # Collect the training data train_data = [] train_labels = [] with open(self.dataset_name + ".labeled.json", 'r') as f: i = 0 for data in Utilities.read_json(f, ['id','label'], self.group): i = i + 1 score = Utilities.label_to_score(data["label"]) if score is None: # unknown continue line = linecache.getline(self.dataset_name + '.json', i) json_object = json.loads(line) if json_object['id'] != data['id']: raise(ValueError('ID in label dataset does not match with dataset on line {}: {} vs {}'.format(i, data['id'], json_object['id']))) message = json_object['body'].replace('\r\n', '\n') self.train_ids.add(data['id']) train_data.append(message) train_labels.append(score) return (train_data, train_labels) def train(self): (train_data, train_labels) = self.get_train_data() # Train the regressor self.regressor.fit(train_data, train_labels) def cross_validate(self, folds=5): (train_data, train_labels) = self.get_train_data() # Crossvalidate the regressor on the labeled data return cross_validation.cross_val_score(self.regressor, train_data, train_labels, cv=folds) def split(self, data): if self.group != "score": self.test_group.append(data['group']) return data['message'] def filter(self, data): return data['id'] not in self.train_ids def predict(self): self.test_group = [] self.test_data = itertools.imap(self.split, itertools.ifilter(self.filter, Utilities.read_json(sys.stdin, 'id', self.group))) if self.display: self.test_data = list(self.test_data) return self.regressor.predict(self.test_data) def output(self, predictions): for i in xrange(len(predictions)): group = self.test_group[i] if self.group != "score" else "" prediction = predictions[i] message = "" if self.display: message = "\t" + Utilities.get_colored_text(prediction, self.test_data[i]).replace('\n', ' ') g = "{}\t".format(group) if group != "" else "" print("{}{:.2f}{}".format(g, prediction, message)) def main(argv): group = argv[0] if len(argv) > 0 else "id" n_estimators = int(argv[1]) if len(argv) > 1 else 100 model_file = argv[2] if len(argv) > 2 else "" cv_folds = 0 if model_file.isdigit(): cv_folds = int(model_file) if model_file != '0' else 5 model_file = "" classifier = Classifier(group, n_estimators, model_file) 
classifier.create_model(train=not cv_folds) if cv_folds > 0: print('Performing cross-validation on {} folds'.format(cv_folds)) results = classifier.cross_validate(cv_folds) print('Folds: {}'.format(results)) print('Average: {}'.format(results.mean())) print('Standard deviation: {}'.format(results.std())) else: classifier.output(classifier.predict()) if __name__ == "__main__": main(sys.argv[1:])
Python
0
@@ -53,16 +53,57 @@ torizer%0A +from sklearn.dummy import DummyRegressor%0A from skl @@ -400,38 +400,24 @@ self, group, - n_estimators, model_file= @@ -595,49 +595,8 @@ ile%0A - self.n_estimators = n_estimators%0A @@ -660,16 +660,58 @@ ain=True +, class_name=DummyRegressor, parameters=%7B%7D ):%0A @@ -1219,92 +1219,31 @@ f', -RandomForestRegressor(n_estimators=self.n_estimators, n_jobs=2, min_samples_split=10 +class_name(**parameters ))%0A @@ -4136,66 +4136,8 @@ id%22%0A - n_estimators = int(argv%5B1%5D) if len(argv) %3E 1 else 100%0A @@ -4154,17 +4154,17 @@ = argv%5B -2 +1 %5D if len @@ -4172,17 +4172,17 @@ argv) %3E -2 +1 else %22%22 @@ -4316,16 +4316,177 @@ e = %22%22%0A%0A + algorithm_class = RandomForestRegressor%0A algorithm_parameters = %7B%0A 'n_estimators': 100,%0A 'n_jobs': 2,%0A 'min_samples_split': 10%0A %7D%0A clas @@ -4515,22 +4515,8 @@ oup, - n_estimators, mod @@ -4566,24 +4566,85 @@ not cv_folds +, class_name=algorithm_class, parameters=algorithm_parameters )%0A if cv_
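Decoded, the refactor drops the hard-wired `RandomForestRegressor` (and its `n_estimators` CLI argument) from `Classifier` and instead injects the estimator class and its keyword arguments through `create_model`, defaulting to scikit-learn's `DummyRegressor`. Reconstructed from the diff, the changed pieces read roughly:

```python
from sklearn.dummy import DummyRegressor
from sklearn.ensemble import RandomForestRegressor

class Classifier(object):
    def __init__(self, group, model_file=""):  # n_estimators argument removed
        pass  # body otherwise unchanged

    # note the mutable default for `parameters`, kept here as the diff writes it
    def create_model(self, train=True, class_name=DummyRegressor, parameters={}):
        models = [
            ('tfidf', TfidfVectorizer(input='content', tokenizer=Utilities.split)),
            ('clf', class_name(**parameters))  # was RandomForestRegressor(...)
        ]

# in main(), the caller now owns the algorithm choice:
algorithm_class = RandomForestRegressor
algorithm_parameters = {
    'n_estimators': 100,
    'n_jobs': 2,
    'min_samples_split': 10
}
classifier = Classifier(group, model_file)
classifier.create_model(train=not cv_folds, class_name=algorithm_class,
                        parameters=algorithm_parameters)
```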
5e2b2342f94933a9c3e853802471776731b232c8
Add boot trigger API route.
app/urls.py
app/urls.py
# This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. """Define URLs and handlers to server them.""" import tornado.web import handlers.batch import handlers.bisect import handlers.boot import handlers.count import handlers.defconf import handlers.job import handlers.lab import handlers.report import handlers.send import handlers.test_case import handlers.test_set import handlers.test_suite import handlers.token import handlers.upload import handlers.version _JOB_URL = tornado.web.url( r"/job[s]?/?(?P<id>.*)", handlers.job.JobHandler, name="job" ) _DEFCONF_URL = tornado.web.url( r"/defconfig[s]?/?(?P<id>.*)", handlers.defconf.DefConfHandler, name="defconf" ) _BOOT_URL = tornado.web.url( r"/boot[s]?/?(?P<id>.*)", handlers.boot.BootHandler, name="boot" ) _COUNT_URL = tornado.web.url( r"/count[s]?/?(?P<id>.*)", handlers.count.CountHandler, name="count" ) _TOKEN_URL = tornado.web.url( r"/token[s]?/?(?P<id>.*)", handlers.token.TokenHandler, name="token" ) _BATCH_URL = tornado.web.url( r"/batch", handlers.batch.BatchHandler, name="batch" ) _BISECT_URL = tornado.web.url( r"/bisect[s]?/?(?P<id>.*)", handlers.bisect.BisectHandler, name="bisect" ) _LAB_URL = tornado.web.url( r"/lab[s]?/?(?P<id>.*)", handlers.lab.LabHandler, name="lab" ) _VERSION_URL = tornado.web.url( r"/version", handlers.version.VersionHandler, name="version" ) _REPORT_URL = tornado.web.url( r"/report[s]?/?(?P<id>.*)", handlers.report.ReportHandler, name="response" ) _UPLOAD_URL = tornado.web.url( r"/upload/?(?P<path>.*)", handlers.upload.UploadHandler, name="upload" ) _SEND_URL = tornado.web.url( r"/send/?", handlers.send.SendHandler, name="send" ) _TEST_SUITE_URL = tornado.web.url( r"/test[s]?/suite[s]?/?(?P<id>.*)", handlers.test_suite.TestSuiteHandler, name="test-suite" ) _TEST_SET_URL = tornado.web.url( r"/test[s]?/set[s]?/?(?P<id>.*)", handlers.test_set.TestSetHandler, name="test-set" ) _TEST_CASE_URL = tornado.web.url( r"/test[s]?/case[s]?/?(?P<id>.*)", handlers.test_case.TestCaseHandler, name="test-case" ) APP_URLS = [ _BATCH_URL, _BISECT_URL, _BOOT_URL, _COUNT_URL, _DEFCONF_URL, _JOB_URL, _LAB_URL, _TOKEN_URL, _VERSION_URL, _REPORT_URL, _UPLOAD_URL, _SEND_URL, _TEST_SUITE_URL, _TEST_SET_URL, _TEST_CASE_URL ]
Python
0
@@ -783,16 +783,45 @@ rs.boot%0A +import handlers.boot_trigger%0A import h @@ -2760,16 +2760,154 @@ case%22%0A)%0A +_BOOT_TRIGGER_URL = tornado.web.url(%0A r%22/trigger/boot%5Bs%5D?/?%22,%0A handlers.boot_trigger.BootTriggerHandler,%0A name=%22boot-trigger%22%0A)%0A%0A %0AAPP_URL @@ -3163,11 +3163,34 @@ CASE_URL +,%0A _BOOT_TRIGGER_URL %0A%5D%0A
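Decoded, the commit imports the new handler module, defines the route, and appends it to `APP_URLS`:

```python
import handlers.boot_trigger

_BOOT_TRIGGER_URL = tornado.web.url(
    r"/trigger/boot[s]?/?",
    handlers.boot_trigger.BootTriggerHandler,
    name="boot-trigger"
)

APP_URLS = [
    # ... the existing routes ...
    _TEST_CASE_URL,
    _BOOT_TRIGGER_URL
]
```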
96340529a8d5702ce8c880aa66966b2971b96449
change method
calc_cov.py
calc_cov.py
import mne import sys from mne import compute_covariance import matplotlib matplotlib.use('Agg') import matplotlib.pyplot as plt from my_settings import * reject = dict(grad=4000e-13, # T / m (gradiometers) mag=4e-12, # T (magnetometers) eeg=180e-6 # ) subject = sys.argv[1] epochs = mne.read_epochs(epochs_folder + "%s_trial_start-epo.fif" % subject) epochs.drop_bad_epochs(reject) fig = epochs.plot_drop_log(subject=subject, show=False) fig.savefig(epochs_folder + "pics/%s_drop_log.png" % subject) # Make noise cov cov = compute_covariance(epochs, tmin=None, tmax=-0.2, method="shrunk") mne.write_cov(mne_folder + "%s-cov.fif" % subject, cov)
Python
0.000005
@@ -623,23 +623,57 @@ 0.2, - method=%22shrunk +%0A method=%22factor_analysis %22)%0Am
4be984747a41e5ab966b12afe9074a0e611faee2
Add license text to resampling.py
resampling.py
resampling.py
""" @author Talha Can Havadar (talhaHavadar) """ import random from collections import Counter class ResamplingWheel(object): """ A Class implementation for resampling wheel Creates an imaginary wheel that consist of weighted portions. According to these weights, you can pick an index value. Index with more weights has more chance to be picked up. """ def __init__(self, initiate_with=None): self.wheel = [] self.max_weight = None self.is_resampled = False self.beta = 0.0 self.last_index = 0 if initiate_with is not None and isinstance(initiate_with, list): self.wheel = initiate_with self.length = len(self.wheel) if self.length > 0: self.max_weight = max(self.wheel) self.last_index = int(random.random() * self.length) def get_pick_index(self): """ Returns an index value according to given data. Given data's length and weights matter """ if not self.is_resampled: self.__resample__() while self.beta > self.wheel[self.last_index]: self.beta -= self.wheel[self.last_index] self.last_index = (self.last_index + 1) % self.length self.is_resampled = False return self.last_index def __resample__(self): self.beta += random.random() * 2.0 * self.max_weight self.is_resampled = True def __len__(self): return len(self.wheel) if __name__ == "__main__": DATA = [10, 11, 12, 13, 14] SAMPLING = ResamplingWheel([5, 2, 1, 1, 1]) SAMPLED = [] print("Length of the sampling wheel:", len(SAMPLING)) for i in range(100): index = SAMPLING.get_pick_index() print(DATA[index]) SAMPLED.append(DATA[index]) print(Counter(SAMPLED))
Python
0
@@ -1,12 +1,1087 @@ %22%22%22%0A +MIT License%0A%0ACopyright (c) 2017 Talha Can Havadar%0A%0APermission is hereby granted, free of charge, to any person obtaining a copy%0Aof this software and associated documentation files (the %22Software%22), to deal%0Ain the Software without restriction, including without limitation the rights%0Ato use, copy, modify, merge, publish, distribute, sublicense, and/or sell%0Acopies of the Software, and to permit persons to whom the Software is%0Afurnished to do so, subject to the following conditions:%0A%0AThe above copyright notice and this permission notice shall be included in all%0Acopies or substantial portions of the Software.%0A%0ATHE SOFTWARE IS PROVIDED %22AS IS%22, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR%0AIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,%0AFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE%0AAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER%0ALIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,%0AOUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE%0ASOFTWARE.%0A%0A @author
5fa9e88e9402a4ca12f2f54298d397bc7b54728b
Revert "deactivated test for non-existent 'references'"
web/tests/test_views.py
web/tests/test_views.py
from django.test import TestCase, Client from django.urls import reverse from web.views import index, about, compare, reference class TestViews(TestCase): def test_index_view_GET(self): url = reverse('index') response = self.client.get(url) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'index.html') self.assertTemplateUsed(response, 'base.html') def test_about_view_GET(self): url = reverse('about') response = self.client.get(url) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'about.html') self.assertTemplateUsed(response, 'base.html') def test_compare_view_GET(self): url = reverse('compare') + '?concept=data_types&lang1=python&lang2=java' response = self.client.get(url) self.assertEquals(response.status_code, 200) self.assertTemplateUsed(response, 'compare.html') self.assertTemplateUsed(response, 'base.html') def test_reference_view_GET(self): pass # Uncomment these tests when 'reference' section is made # url = reverse('reference') + '?concept=data_types&lang=python' # response = self.client.get(url) # self.assertEquals(response.status_code, 200) # self.assertTemplateUsed(response, 'reference.html') # self.assertTemplateUsed(response, 'base.html')
Python
0
@@ -970,75 +970,8 @@ :%0A%09%09 -pass # Uncomment these tests when 'reference' section is made%0A%0A%09%09# url @@ -1031,18 +1031,16 @@ thon'%0A%09%09 -# response @@ -1066,18 +1066,16 @@ url)%0A%0A%09%09 -# self.ass @@ -1109,26 +1109,24 @@ ode, 200)%0A%09%09 -# self.assertT @@ -1171,10 +1171,8 @@ )%0A%09%09 -# self
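Decoded, the revert deletes the `pass` placeholder and strips the comment markers, restoring the test body (tabs in the file, shown as spaces here):

```python
    def test_reference_view_GET(self):
        url = reverse('reference') + '?concept=data_types&lang=python'
        response = self.client.get(url)
        self.assertEquals(response.status_code, 200)
        self.assertTemplateUsed(response, 'reference.html')
        self.assertTemplateUsed(response, 'base.html')
```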
a5fddaefdedef18b0b6b7d3b2ec65f64eaaaad65
fix date time bug
clean_db.py
clean_db.py
import MySQLdb, config, urllib, cgi, datetime from datetime import datetime sql = MySQLdb.connect(host="localhost", user=config.username, passwd=config.passwd, db=config.test_db) sql.query("SELECT `id` FROM `feedurls`") db_feed_query=sql.store_result() rss_urls=db_feed_query.fetch_row(0) table_name = "stories" date_from = datetime.strptime(raw_input("start date inc. in form 'dd-mm-yyyy'"),"%d-%m-%Y") date_to = datetime.strptime(raw_input("end date inc. in form 'dd-mm-yyyy'"),"%d-%m-%Y") for rss_url_data in rss_urls: feed_id=rss_url_data[0] i = start_date while i <= end_date: print end_dates
Python
0.000016
@@ -641,18 +641,17 @@ i = -start_date +date_from %0A @@ -663,24 +663,23 @@ le i %3C= -end_ date +_to :%0A @@ -690,16 +690,29 @@ int -end_dates +i.strftime(%22%25d/%25m/%25Y%22) %0A %0A
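Decoded, the fix points the loop at the variables actually defined above (`date_from`/`date_to` rather than the undefined `start_date`/`end_date`) and prints each date in `dd/mm/yyyy` form. The diff as shown still never advances `i`, so any increment must lie outside the captured hunk:

```python
for rss_url_data in rss_urls:
    feed_id = rss_url_data[0]
    i = date_from
    while i <= date_to:
        print i.strftime("%d/%m/%Y")  # Python 2 print statement, as in the file
```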
37fb65dd7763f7cbd1a53f613bbda16d739f11a3
Make `cctrluser create` work
cctrl/auth.py
cctrl/auth.py
# -*- coding: utf-8 -*- """ Copyright 2010 cloudControl UG (haftungsbeschraenkt) Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ from __builtin__ import open, raw_input, range from exceptions import ImportError, ValueError from getpass import getpass import sys import os from cctrl.oshelpers import recode_input try: import json except ImportError: import simplejson as json from cctrl.error import messages, PasswordsDontMatchException from cctrl.settings import TOKEN_FILE_PATH, HOME_PATH def update_tokenfile(api): """ Because it is a real pain we don't want to ask developers for their username and password every time they call a method. Therefore we authenticate users via token for each request and only require email and password for a new token. A token is valid for a given period of time. Each successful API request resets the expiration time. """ if api.check_token(): write_tokenfile(api) return True return False def read_tokenfile(): """ Read the token from the token_file in TOKEN_FILE_PATH specified in cctrl.settings """ token = None if os.path.exists(TOKEN_FILE_PATH): token_file = open(TOKEN_FILE_PATH, "r") try: token = json.load(token_file) except ValueError: token = None token_file.close() return token def write_tokenfile(api): """ This method checks, if the .cloudControl directory inside the users home exists or is a file. If not, we create it and then write the token file. """ if os.path.isdir(HOME_PATH): pass elif os.path.isfile(HOME_PATH): print 'Error: ' + HOME_PATH + ' is a file, not a directory.' sys.exit(1) else: os.mkdir(HOME_PATH) tokenfile = open(TOKEN_FILE_PATH, "w") json.dump(api.get_token(), tokenfile) tokenfile.close() return True def delete_tokenfile(): """ We delete the tokenfile if we don't have a valid token to save. """ if os.path.exists(TOKEN_FILE_PATH): os.remove(TOKEN_FILE_PATH) return True return False def get_email(settings): sys.stderr.write(settings.login_name) sys.stderr.flush() email = raw_input() return email def get_password(create=False): password = None for i in range(3): password = recode_input(getpass('Password: ')) if create: password2 = recode_input(getpass('Password (again): ')) if password != password2: print messages['PasswordsDontMatch'] if i == 2: raise PasswordsDontMatchException() else: break else: break return password def get_credentials(settings, create=False): """ We use this to ask the user for his credentials in case we have no valid token. If create is true, the user is asked twice for the password, to make sure, that no typing error occurred. This is done three times after that a PasswordsDontMatchException is thrown. """ email = get_email(); password = get_password(create) return email, password
Python
0.000003
@@ -3710,18 +3710,25 @@ t_email( +settings ) -; %0A%0A pa
d64460c8bbbe045dcdf9f737562a31d84044acce
Change package name to 'cirm' to avoid confusion.
rest/setup.py
rest/setup.py
# # Copyright 2012 University of Southern California # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # from distutils.core import setup setup(name="cirm-rest", description="cirm web application", version="0.1", package_dir={"": "src"}, packages=["cirmrest"], requires=["web.py", "psycopg2"], classifiers=[ "Intended Audience :: Developers", "License :: OSI Approved :: Apache Software License", "Programming Language :: Python", "Topic :: Internet :: WWW/HTTP", "Topic :: Software Development :: Libraries :: Python Modules", ], )
Python
0
@@ -767,20 +767,16 @@ s=%5B%22cirm -rest %22%5D,%0A
917d8e26a64a40de0a0b77085f1fa6d054af0ee8
Remove cleanup_testfn, no longer used.
conftest.py
conftest.py
import os import sys import platform import shutil import pytest collect_ignore = [] if platform.system() != 'Windows': collect_ignore.extend( [ 'distutils/msvc9compiler.py', ] ) @pytest.fixture def save_env(): orig = os.environ.copy() try: yield finally: for key in set(os.environ) - set(orig): del os.environ[key] for key, value in orig.items(): if os.environ.get(key) != value: os.environ[key] = value @pytest.fixture def needs_zlib(): pytest.importorskip('zlib') @pytest.fixture def distutils_logging_silencer(request): from distutils import log self = request.instance self.threshold = log.set_threshold(log.FATAL) # catching warnings # when log will be replaced by logging # we won't need such monkey-patch anymore self._old_log = log.Log._log log.Log._log = self._log self.logs = [] try: yield finally: log.set_threshold(self.threshold) log.Log._log = self._old_log @pytest.fixture def distutils_managed_tempdir(request): from distutils.tests import py38compat as os_helper self = request.instance self.old_cwd = os.getcwd() self.tempdirs = [] try: yield finally: # Restore working dir, for Solaris and derivatives, where rmdir() # on the current directory fails. os.chdir(self.old_cwd) while self.tempdirs: tmpdir = self.tempdirs.pop() os_helper.rmtree(tmpdir) @pytest.fixture def save_argv(): orig = sys.argv[:] try: yield finally: sys.argv[:] = orig @pytest.fixture def save_cwd(): orig = os.getcwd() try: yield finally: os.chdir(orig) @pytest.fixture def threshold_warn(): from distutils.log import set_threshold, WARN orig = set_threshold(WARN) yield set_threshold(orig) @pytest.fixture def pypirc(request, save_env, distutils_managed_tempdir): from distutils.core import PyPIRCCommand from distutils.core import Distribution self = request.instance self.tmp_dir = self.mkdtemp() os.environ['HOME'] = self.tmp_dir os.environ['USERPROFILE'] = self.tmp_dir self.rc = os.path.join(self.tmp_dir, '.pypirc') self.dist = Distribution() class command(PyPIRCCommand): def __init__(self, dist): super().__init__(dist) def initialize_options(self): pass finalize_options = initialize_options self._cmd = command @pytest.fixture def cleanup_testfn(): from distutils.tests import py38compat as os_helper yield path = os_helper.TESTFN if os.path.isfile(path): os.remove(path) elif os.path.isdir(path): shutil.rmtree(path) # from pytest-dev/pytest#363 @pytest.fixture(scope="session") def monkeysession(request): from _pytest.monkeypatch import MonkeyPatch mpatch = MonkeyPatch() yield mpatch mpatch.undo() @pytest.fixture(autouse=True, scope="session") def suppress_path_mangle(monkeysession): """ Disable the path mangling in CCompiler. Workaround for #169. """ from distutils import ccompiler monkeysession.setattr( ccompiler.CCompiler, '_make_relative', staticmethod(lambda x: x) ) @pytest.fixture def temp_home(tmp_path, monkeypatch): var = 'USERPROFILE' if platform.system() == 'Windows' else 'HOME' monkeypatch.setenv(var, str(tmp_path)) return tmp_path
Python
0
@@ -33,22 +33,8 @@ form -%0Aimport shutil %0A%0Aim @@ -2561,254 +2561,8 @@ d%0A%0A%0A -@pytest.fixture%0Adef cleanup_testfn():%0A from distutils.tests import py38compat as os_helper%0A%0A yield%0A path = os_helper.TESTFN%0A if os.path.isfile(path):%0A os.remove(path)%0A elif os.path.isdir(path):%0A shutil.rmtree(path)%0A%0A%0A # fr
1c8446fc05b3246de0f7da58ccb2362fe41ae05e
Throw on errors in promises
model/Parse.py
model/Parse.py
import urllib.request import urllib.parse import json import threading app_id = None rest_key = None def init(parse_app_id, parse_rest_key): global app_id, rest_key app_id = parse_app_id rest_key = parse_rest_key class ParsePromise(threading.Thread): def __init__(self, fun, *args, **kwargs): threading.Thread.__init__(self) self.__fun = fun self.__args = args self.__kwargs = kwargs self.__ret = None self.start() def run(self): self.__ret = self.__fun(*self.__args, **self.__kwargs) def prep(self): self.join() return self.__ret def then(self, fun): return ParsePromise(lambda: fun(self.prep())) class ParseBase(): api_url = "https://api.parse.com/1/classes/" @staticmethod def make_request(url, method, data = None): global app_id, rest_key headers = { "X-Parse-Application-Id": app_id, "X-Parse-REST-API-Key": rest_key, } if data is not None: if method != 'GET': data = json.dumps(data).encode("utf-8") headers["Content-Type"] = "application/json" else: url += "?" + urllib.parse.urlencode(data) data = None req = urllib.request.Request( url, headers = headers, data = data, ) req.get_method = lambda: method ret = urllib.request.urlopen(req) data = ret.read().decode("utf-8") ret.close() return data class ParseQuery(ParseBase): def __init__(self, cls): self.__cls = cls self.__where = {} self.__limit = None self.__skip = None self.__order = [] def __make_prop(self, prop, name, value): if prop in self.__where and type(self.__where[prop]) != dict: raise Exception(prop + " already has an equal to constraint") if prop not in self.__where: self.__where[prop] = {} self.__where[prop][name] = value def equal_to(self, prop, value): self.__where[prop] = value return self def not_equal_to(self, prop, value): self.__make_prop(prop, "$ne", value) return self def greater_than(self, prop, value): self.__make_prop(prop, "$gt", value) return self def greater_than_or_equal_to(self, prop, value): self.__make_prop(prop, "$gte", value) return self def less_than(self, prop, value): self.__make_prop(prop, "$lt", value) return self def less_than_or_equal_to(self, prop, value): self.__make_prop(prop, "$lte", value) return self def matches(self, prop, value): self.__make_prop(prop, "$regex", value) return self def limit(self, value): self.__limit = value return self def skip(self, value): self.__skip = value return self def ascending(self, prop): self.__order.append(prop) return self def descending(self, prop): self.__order.append("-" + prop) return self def gen_find(self): return ParsePromise(self.find) def find(self): data = {} if self.__where: data["where"] = json.dumps(self.__where) if self.__limit is not None: data["limit"] = self.__limit if self.__skip is not None: data["skip"] = self.__skip if self.__order: data["order"] = ",".join(self.__order) ret = self.make_request( self.api_url + self.__cls.__name__, "GET", data = data, ) results = json.loads(ret)["results"] return [self.__cls(**data) for data in results] @staticmethod def or_(*queries): cls = queries[0].__cls for query in queries: if query.__cls != cls: raise Exception( "All classes have to be the same, got {} and {}".format( query.__cls.__name__, cls.__name__, ) ) query = ParseQuery(cls) query.__where["$or"] = [q.__where for q in queries] return query class ParseObj(ParseBase): def __init__(self, properties, values): self.__cls = self.__class__ self.__cls_name = self.__cls.__name__ for prop in properties: if properties[prop].get("type") is None: raise AttributeError( "Property type not specified: {}.{}".format( self.__cls_name, prop, ) ) for prop in values: if prop 
in properties and values[prop] is not None: # Parse stores both int and float as number type, converting # here prevents type errors when save() is called. t = properties[prop]["type"] setattr(self, prop, t(values[prop])) else: setattr(self, prop, values[prop]) self.__properties = properties self.__base_url = self.api_url + self.__cls_name @classmethod def query(cls): return ParseQuery(cls) def save(self): self.before_save() data = {} for prop in self.__properties: val = getattr(self, prop, None) if val is None: if not self.__properties[prop].get("nullable", False): raise AttributeError(prop + " should not be empty") elif type(val) != self.__properties[prop]["type"]: raise AttributeError( "{}.{} expected type {} but got {} instead".format( self.__cls_name, prop, self.__properties[prop]["type"].__name__, type(val).__name__, ) ) data[prop] = val url = self.__base_url method = "POST" if hasattr(self, "objectId"): # update request url += "/" + getattr(self, "objectId") method = "PUT" ret = self.make_request(url, method, data = data) if not hasattr(self, "objectId"): # create request self.objectId = json.loads(ret)["objectId"] def destroy(self): self.before_destroy() if not hasattr(self, "objectId"): raise Exception("Can not destroy object that has not been saved") self.make_request( self.__base_url + "/" + getattr(self, "objectId"), "DELETE", ) delattr(self, "objectId") @classmethod def get(cls, objectId): cls_name = cls.__name__ url = "{}{}/{}".format(ParseObj.api_url, cls_name, objectId) ret = ParseObj.make_request(url, "get") data = json.loads(ret) return cls(**data) # override in actual class def before_save(self): pass def before_destroy(self): pass
Python
0.000003
@@ -446,32 +446,64 @@ lf.__ret = None%0A + self.__exception = None%0A self.sta @@ -519,32 +519,49 @@ def run(self):%0A + try:%0A self.__r @@ -611,49 +611,207 @@ gs)%0A -%0A def prep(self):%0A self.join()%0A + except Exception as e:%0A self.__exception = e%0A%0A def prep(self):%0A self.join()%0A if self.__exception is not None:%0A raise self.__exception%0A else:%0A
d1137c56b59ef4fec06726fa0dda4854d0631e6d
delete tempfile after uploading screenshot
restclient.py
restclient.py
import json import requests from bs4 import BeautifulSoup from PyQt5.QtCore import * from PyQt5.QtWidgets import * from PyQt5.QtGui import * from ui.Ui_LoginDialog import Ui_LoginDialog def getLoginToken(address, email, password, timeout=15): """ attempt to get a login token. KeyError means invalid username or password""" client = requests.session() soup = BeautifulSoup(client.get(address, timeout=timeout).text, "html.parser") csrf = soup.find('input', { 'name': "csrf_token" })['value'] login_data = json.dumps({ "email": email, "password": password, "csrf_token": csrf }) r = client.post(address, data=login_data, headers={ "content-type": "application/json" }, timeout=timeout).json() ## if there's a login failure here, the server will report back whether the username or password was wrong. ## https://github.com/mattupstate/flask-security/issues/673 return r['response']['user']['authentication_token'] def uploadFile(address, token, path): """ KeyError means the upload failed """ r = requests.post(address, headers={ "Authentication-Token": token }, files={ "image": open(path, "rb") }) return r.json()['url'] class UploadThread(QThread): resultReady = pyqtSignal(str, object) def __init__(self, addr, token, path, parent=None): super(UploadThread, self).__init__(parent) self.addr = addr self.path = path self.token = token def run(self): url, error = None, None try: url = uploadFile(self.addr, self.token, self.path) except Exception as e: error = e self.resultReady.emit(url, error) class LoginThread(QThread): resultReady = pyqtSignal(str, object) def __init__(self, addr, email, password, parent=None): super(LoginThread, self).__init__(parent) self.addr = addr self.email = email self.password = password def run(self): token, error = None, None try: token = getLoginToken(self.addr, self.email, self.password) except Exception as e: error = e self.resultReady.emit(token, error) class LoginDialog(QDialog, Ui_LoginDialog): def __init__(self, parent): super(LoginDialog, self).__init__(parent) self.setupUi(self) self.loginToken = None self.thread = QThread(self) def accept(self): self.buttonBox.button(QDialogButtonBox.Ok).setEnabled(False) addr = QSettings(QSettings.IniFormat, QSettings.UserScope, "GliTch_ Is Mad Studios", "PostIt").value("internet/address") self.thread = LoginThread(addr + "/login", self.emailAddressLineEdit.text(), self.passwordLineEdit.text(), self) self.thread.resultReady.connect(self.gotToken) self.thread.start() def reject(self): if self.thread.isRunning(): self.thread.terminate() super().reject() def gotToken(self, token, error): self.buttonBox.button(QDialogButtonBox.Ok).setEnabled(True) if token and not error: self.loginToken = token super().accept() else: msg = '' if isinstance(error, KeyError): msg = "Invalid username or password." else: msg = str(error) QMessageBox.critical(self, "Login Failed", msg)
Python
0.000001
@@ -21,16 +21,26 @@ equests%0A +import os%0A from bs4 @@ -747,23 +747,16 @@ timeout) -.json() %0A%0A ## @@ -938,16 +938,23 @@ return r +.json() %5B'respon @@ -1026,16 +1026,29 @@ en, path +, delete=True ):%0A %22 @@ -1198,16 +1198,55 @@ rb%22) %7D)%0A + if delete:%0A os.unlink(path)%0A retu @@ -1705,24 +1705,16 @@ ror = e%0A - %0A
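Decoded, the commit imports `os`, defers the `.json()` call in `getLoginToken` to the return expression, and gives `uploadFile` a `delete` flag (default `True`) that unlinks the temporary screenshot once it has been posted:

```python
import os

def uploadFile(address, token, path, delete=True):
    """ KeyError means the upload failed """
    r = requests.post(address, headers={
        "Authentication-Token": token
    }, files={
        "image": open(path, "rb")
    })
    if delete:
        os.unlink(path)  # drop the temp file once it has been uploaded
    return r.json()['url']
```

One caveat: the handle from `open(path, "rb")` is never explicitly closed before the unlink, which works on POSIX but would likely fail on Windows.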
7484c8d4ab699ee16bc867cdff1e7ec699dbb142
Add profiling support to Melange. By assigning profile_main_as_logs or profile_main_as_html to main variable you can turn on profiling. profile_main_as_logs will log profile data to App Engine console logs, profile_main_as_html will show profile data as html at the bottom of the page. If you want to profile app on deployed app just set the profiling function and deploy it.
app/main.py
app/main.py
#!/usr/bin/python2.5 # # Copyright 2008 the Melange authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. __authors__ = [ # alphabetical order by last name, please '"Augie Fackler" <durin42@gmail.com>', ] import logging import os import sys from google.appengine.ext.webapp import util # Remove the standard version of Django. for k in [k for k in sys.modules if k.startswith('django')]: del sys.modules[k] # Force sys.path to have our own directory first, in case we want to import # from it. This lets us replace the built-in Django sys.path.insert(0, os.path.abspath(os.path.dirname(__file__))) sys.path.insert(0, os.path.abspath('django.zip')) ultimate_sys_path = None # Force Django to reload its settings. from django.conf import settings settings._target = None # Must set this env var before importing any part of Django os.environ['DJANGO_SETTINGS_MODULE'] = 'settings' import django.core.handlers.wsgi import django.core.signals import django.db # Log errors. def log_exception(*args, **kwds): logging.exception('Exception in request:') # Log all exceptions detected by Django. django.core.signals.got_request_exception.connect(log_exception) # Unregister the rollback event handler. django.core.signals.got_request_exception.disconnect( django.db._rollback_on_exception) def main(): global ultimate_sys_path if ultimate_sys_path is None: ultimate_sys_path = list(sys.path) else: sys.path[:] = ultimate_sys_path # Create a Django application for WSGI. application = django.core.handlers.wsgi.WSGIHandler() # Run the WSGI CGI handler with that application. util.run_wsgi_app(application) if __name__ == '__main__': main()
Python
0
@@ -1813,15 +1813,1320 @@ def -main(): +profile_main_as_html():%0A %22%22%22Main program for profiling. Profiling data added as HTML to the page.%0A %22%22%22%0A import cProfile%0A import pstats%0A import StringIO%0A%0A prof = cProfile.Profile()%0A prof = prof.runctx('real_main()', globals(), locals())%0A stream = StringIO.StringIO()%0A stats = pstats.Stats(prof, stream=stream)%0A # stats.strip_dirs() # Don't; too many modules are named __init__.py.%0A %0A # 'time', 'cumulative' or 'calls'%0A stats.sort_stats('time') %0A %0A # Optional arg: how many to print%0A stats.print_stats() %0A # The rest is optional.%0A # stats.print_callees()%0A # stats.print_callers()%0A print '%5Cn%3Chr%3E'%0A print '%3Ch1%3EProfile data%3C/h1%3E'%0A print '%3Cpre%3E'%0A print stream.getvalue()%5B:1000000%5D%0A print '%3C/pre%3E'%0A%0A%0Adef profile_main_as_logs():%0A %22%22%22Main program for profiling. Profiling data logged.%0A %22%22%22%0A import cProfile%0A import pstats%0A import StringIO%0A %0A prof = cProfile.Profile()%0A prof = prof.runctx(%22real_main()%22, globals(), locals())%0A stream = StringIO.StringIO()%0A stats = pstats.Stats(prof, stream=stream)%0A stats.sort_stats('time') # Or cumulative%0A stats.print_stats(80) # 80 = how many to print%0A # The rest is optional.%0A # stats.print_callees()%0A # stats.print_callers()%0A logging.info(%22Profile data:%5Cn%25s%22, stream.getvalue())%0A%0A%0Adef real_main():%0A %22%22%22Main program without profiling.%0A %22%22%22 %0A g @@ -3450,16 +3450,33 @@ ation)%0A%0A +main = real_main%0A %0Aif __na
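The diff here carries whole functions in URL-encoded form; decoded, the commit renames `main` to `real_main`, adds the two profiling wrappers described in the message, and picks the active entry point with a module-level assignment (Python 2, matching the file's print statements):

```python
def profile_main_as_html():
    """Main program for profiling. Profiling data added as HTML to the page."""
    import cProfile
    import pstats
    import StringIO

    prof = cProfile.Profile()
    prof = prof.runctx('real_main()', globals(), locals())
    stream = StringIO.StringIO()
    stats = pstats.Stats(prof, stream=stream)
    # stats.strip_dirs()  # Don't; too many modules are named __init__.py.
    stats.sort_stats('time')  # 'time', 'cumulative' or 'calls'
    stats.print_stats()       # optional arg: how many to print
    print '\n<hr>'
    print '<h1>Profile data</h1>'
    print '<pre>'
    print stream.getvalue()[:1000000]
    print '</pre>'


def profile_main_as_logs():
    """Main program for profiling. Profiling data logged."""
    import cProfile
    import pstats
    import StringIO

    prof = cProfile.Profile()
    prof = prof.runctx("real_main()", globals(), locals())
    stream = StringIO.StringIO()
    stats = pstats.Stats(prof, stream=stream)
    stats.sort_stats('time')  # or 'cumulative'
    stats.print_stats(80)     # 80 = how many to print
    logging.info("Profile data:\n%s", stream.getvalue())


def real_main():
    """Main program without profiling."""
    # ... the original main() body, unchanged ...

main = real_main  # assign profile_main_as_logs or profile_main_as_html to profile
```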
04572dc4b4ad8a98ce293a3357e0a35088325e57
Fix spelling error in documentation.
db/apply.py
db/apply.py
#!/usr/bin/python import pprint #from planetlab.types import * from planetlab import session from slices import * from sites import * import sys def usage(): return """ apply.py takes static configurations stored in sites.py and slices.py and applies them to the PLC database adding or updating objects, tags, and other values when appropriate. TODO: Implement common operations: ./apply.yp --syncsite xyz --getbootimages This would setup the basic networking, and download boot images. Subsequent calls should assume these are done already. ./apply.py --syncsite xyz --syncslices all Basic networking is complete, double check ipv6, apply only to nodes at site xyz. Examples: ./apply.py --dryrun .... Only perform Get* api calls. Absolutely no changes are made to the PLC DB. HIGHLY recommended before changes. ./apply.py --syncsite all Does everything. Verifies existing sites & slices, creates sites that are non-existent. This will take a very long time due to the delays for every RPC call to the PLC api. ./apply.py --syncsite nuq01 Creates site, nodes, pcus, and associates slices with these machines. Pulls definitions from sites.py & slices.py ./apply.py --syncsite nuq01 --on mlab4.nuq01.measurement-lab.org Resync the node configuration for given hostname. ./apply.py --syncslice all Associates *all* slices with all machines and updates any pending slice attributes. Sites and slices should be defined in sites.py & slices.py ./apply.py --syncslice ooni_probe --skipwhitelist Like "--syncslice all" except only applied to the given slicename. ./apply.py --syncslice ooni_probe --on mlab4.nuq01.measurement-lab.org Performs the --syncslice operations, but only on the given target machine. This is useful for applying IPv6 address updates (or other slice attributes) to only a few machines, instead of all of them. Some slice attributes may be applied globally, despite "--on <hostname>". In this example, ooni_probe must be explicitly permitted to receive an ipv6 on mlab4.nuq01 in slices.py. Comments: Since an external sites & slices list was necessary while M-Lab was part of PlanetLab to differentiate mlab from non-mlab, it may be possible to eliminate sites.py now. That really only needs to run once and subsequent slice operations could query the DB for a list of current sites or hosts. More intelligent update functions could to re-assign nodes to nodegroups, assign which hosts are in the ipv6 pool, etc. just a thought. Keeping slices.py as a concise description of what and how slices are deployed to M-Lab is probably still helpful to see everything in one place. """ def main(): from optparse import OptionParser parser = OptionParser(usage=usage()) parser.set_defaults(syncsite=None, syncslice=None, ondest=None, skipwhitelist=False, skipsliceips=False, skipinterfaces=False, url=session.API_URL, debug=False, verbose=False, ) parser.add_option("", "--dryrun", dest="debug", action="store_true", help=("Only issues 'Get*' calls to the API. "+ "Commits nothing to the API")) parser.add_option("", "--verbose", dest="verbose", action="store_true", help="Print all the PLC API calls being made.") parser.add_option("", "--url", dest="url", help="PLC url to contact") parser.add_option("", "--on", metavar="hostname", dest="ondest", help="only act on the given host") parser.add_option("", "--syncsite", metavar="site", dest="syncsite", help="only sync sites, nodes, pcus, if needed. (saves time)") parser.add_option("", "--syncslice", metavar="slice", dest="syncslice", help="only sync slices and attributes of slices. 
(saves time)") parser.add_option("", "--skipwhitelist", dest="skipwhitelist", action="store_true", help=("dont try to white list the given slice. (saves time)")) parser.add_option("", "--skipsliceips", dest="skipsliceips", action="store_true", help="dont try to assign ips to slice. (saves time)") parser.add_option("", "--skipinterfaces", dest="skipinterfaces", action="store_true", help=("dont try to create new Interfaces or update existing "+ "Interfaces. This permits IPv6 maniuplation without "+ "changing legacy IPv4 configuration in DB.") ) (options, args) = parser.parse_args() if len(sys.argv) == 1: usage() parser.print_help() sys.exit(1) print "setup plc session" session.setup_global_session(options.url, options.debug, options.verbose) # always setup the configuration for everything (very fast) print "loading slice & site configuration" for sslice in slice_list: for site in site_list: for host in site['nodes']: h = site['nodes'][host] sslice.add_node_address(h) # begin processing arguments to apply filters, etc if ( options.syncsite is not None or options.syncslice is not None ): if options.syncsite is not None and options.syncslice is None: print "sync site" for site in site_list: # sync everything when syncsite is None, # or only when it matches if (options.syncsite == "all" or options.syncsite == site['name']): print "Syncing: site", site['name'] site.sync(options.ondest, options.skipinterfaces) if options.syncslice and options.syncsite is None: print options.syncslice for sslice in slice_list: if (options.syncslice == "all" or options.syncslice == sslice['name']): print "Syncing: slice", sslice['name'] sslice.sync(options.ondest, options.skipwhitelist, options.skipsliceips) if options.syncslice and options.syncsite: print "sync slices & site" if options.syncslice == "all": site = filter(lambda x: x['name'] == options.syncsite, site_list) #site.sync(options.ondest, options.skipinterfaces) for sslice in slice_list: sslice.sync(options.ondest, options.skipwhitelist, options.skipsliceips) if __name__ == "__main__": try: main() except KeyboardInterrupt: pass
Python
0.000002
@@ -431,18 +431,18 @@ ./apply. -y p +y --syncs
3da4ad7b8c58f2b94e038576bcd79794529b9f4f
fix unit test, order dataframes
_unittests/ut_pandas_helper/test_table_formula.py
_unittests/ut_pandas_helper/test_table_formula.py
""" @brief test log(time=2s) """ import sys import os import unittest import pandas from pandas.testing import assert_frame_equal try: import src except ImportError: path = os.path.normpath( os.path.abspath( os.path.join( os.path.split(__file__)[0], "..", ".."))) if path not in sys.path: sys.path.append(path) import src try: import pyquickhelper as skip_ except ImportError: path = os.path.normpath( os.path.abspath( os.path.join( os.path.split(__file__)[0], "..", "..", "..", "pyquickhelper", "src"))) if path not in sys.path: sys.path.append(path) import pyquickhelper as skip_ from pyquickhelper.loghelper import fLOG from src.pyensae.pandas_helper import TableFormula class TestTableFormula(unittest.TestCase): def test_TableFormula_groupby(self): fLOG(__file__, self._testMethodName, OutputPrint=__name__ == "__main__") table = TableFormula(data=dict(name="kA kA kB".split(), d_a=[1, 2, 3], d_b=[1.1, 2.1, 3.1], d_c=[3, 4, 5])) group = table.fgroupby(lambda v: v["name"], [lambda v: v["d_a"], lambda v: v["d_b"]], ["sum_d_a", "sum_d_b"]) exp = pandas.DataFrame(dict(sum_d_a=[3, 3], sum_d_b=[3.2, 3.1])) assert_frame_equal(group, exp) groupmax = table.fgroupby(lambda v: v["name"], [lambda v: v["d_a"], lambda v: v["d_b"]], ["max_d_a", "max_d_b"], [max, max]) exp = pandas.DataFrame(dict(max_d_a=[2, 3], max_d_b=[2.1, 3.1])) assert_frame_equal(groupmax, exp) group = table.fgroupby(lambda v: v["name"], [lambda v: v["d_a"]], ["sum_d_a"], [lambda vec, w: sum(vec) / w], lambda v: v["d_b"]) exp = pandas.DataFrame(dict(sum_d_a=[0.84127, 1.47619])) assert_frame_equal(group, exp) def test_TableFormula_add(self): fLOG(__file__, self._testMethodName, OutputPrint=__name__ == "__main__") table = TableFormula() table["A"] = [0, 1] table.add_column_index([4, 5]) table.add_column_vector("B", [6, 7]) table.addc("C", lambda row: row["A"] * row["B"]) exp = pandas.DataFrame(dict(sum_d_a=[0.84127, 1.47619])) exp = pandas.DataFrame( dict(A=[0, 1], B=[6, 7], C=[0, 7], __key__=[4, 5])) exp.set_index("__key__", inplace=True) assert_frame_equal(table, exp) def test_TableFormula_sort(self): fLOG(__file__, self._testMethodName, OutputPrint=__name__ == "__main__") table = TableFormula() table["A"] = [0, 1] table.add_column_vector("B", [6, 7]) table.sort(lambda row: -row["B"]) exp = pandas.DataFrame(dict(A=[1, 0], B=[7, 6], C=[1, 0])) exp = exp.set_index("C") exp.index.rename(None, inplace=True) assert_frame_equal(table, exp, check_index_type=False) if __name__ == "__main__": unittest.main()
Python
0.000001
@@ -1483,32 +1483,108 @@ _b=%5B3.2, 3.1%5D))%0A + exp.sort_index(inplace=True)%0A group.sort_index(inplace=True)%0A assert_f @@ -1602,24 +1602,24 @@ group, exp)%0A - %0A gro @@ -1907,32 +1907,111 @@ _b=%5B2.1, 3.1%5D))%0A + exp.sort_index(inplace=True)%0A groupmax.sort_index(inplace=True)%0A assert_f @@ -2295,24 +2295,24 @@ : v%5B%22d_b%22%5D)%0A - exp @@ -2356,32 +2356,108 @@ 127, 1.47619%5D))%0A + exp.sort_index(inplace=True)%0A group.sort_index(inplace=True)%0A assert_f
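Decoded, the fix sorts both frames by index before each `assert_frame_equal`, so the test no longer depends on the row order `fgroupby` happens to produce; the same two lines are inserted at all three comparison sites:

```python
        exp = pandas.DataFrame(dict(sum_d_a=[3, 3], sum_d_b=[3.2, 3.1]))
        exp.sort_index(inplace=True)
        group.sort_index(inplace=True)
        assert_frame_equal(group, exp)
```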
da3d6e89f660e7fd6d9b8d32a9aab50b7234e737
Fix comment
elizabeth/utils.py
elizabeth/utils.py
import functools import json from os.path import ( abspath, dirname, join ) from elizabeth.exceptions import UnsupportedLocale __all__ = ['pull'] PATH = abspath(join(dirname(__file__), 'data')) SUPPORTED_LOCALES = { "da": { "name": "Danish", "name_local": "Dansk" }, "de": { "name": "German", "name_local": "Deutsch" }, "en": { "name": "English", "name_local": "English" }, "en-gb": { "name": "British English", "name_local": "British English" }, "es": { "name": "Spanish", "name_local": "Español" }, "fa": { "name": "Farsi", "name_local": "فارسی" }, "fi": { "name": "Finnish", "name_local": "Suomi" }, "fr": { "name": "French", "name_local": "Français" }, 'hu': { 'name': 'Hungarian', 'name_local': 'Magyar' }, 'is': { 'name': 'Icelandic', 'name_local': 'Íslenska' }, "it": { "name": "Italian", "name_local": "Italiano" }, 'nl': { 'name': 'Dutch', 'name_local': 'Nederlands' }, "no": { "name": "Norwegian", "name_local": "Norsk" }, "pl": { 'name': "Polish", 'name_local': "Polski" }, "pt": { "name": "Portuguese", "name_local": "Português" }, "pt-br": { "name": "Brazilian Portuguese", "name_local": "Português Brasileiro" }, "ru": { "name": "Russian", "name_local": "Русский" }, "sv": { "name": "Swedish", "name_local": "Svenska" } } def luhn_checksum(num): """ Calculate a checksum for num using the Luhn algorithm. See: https://en.wikipedia.org/wiki/Luhn_algorithm :param num: The number to calculate a checksum for as a string :type num: str :returns: checksum for number :rtype: str :Example: >>> from elizabeth.utils import luhn_checksum >>> luhn_checksum("7992739871") 3 """ check = 0 for i, s in enumerate(reversed([x for x in num])): sx = int(s) sx = sx * 2 if i % 2 == 0 else sx sx = sx - 9 if sx > 9 else sx check += sx return str(check * 9 % 10) @functools.lru_cache(maxsize=None) def pull(file, locale='en'): """ Open file and get content from file. Memorize result using lru_cache. pull - is internal function, please do not use this function outside the module 'elizabeth'. 
+------------------------------+--------------+ | Locale Code | Folder | +==============================+==============+ | da - Danish | (data/da) | +------------------------------+--------------+ | de - German | (data/de) | +------------------------------+--------------+ | en - English | (data/en) | +------------------------------+--------------+ | en-gb - British English | (data/en-gb) | +------------------------------+--------------+ | ru - Russian | (data/ru) | +------------------------------+--------------+ | fa - Farsi | (data/fa) | +------------------------------+--------------+ | fi - Finnish | (data/fi) | +------------------------------+--------------+ | fr - French | (data/fr) | +------------------------------+--------------+ | es - Spanish | (data/es) | +------------------------------+--------------+ | hu - Hungarian | (data/hu) | +------------------------------+--------------+ | it - Italian | (data/it) | +------------------------------+--------------+ | is - Icelandic | (data/is) | +------------------------------+--------------+ | pl - Polish | (data/pl) | +------------------------------+--------------+ | pt - Portuguese | (data/pt) | +------------------------------+--------------+ | nl - Dutch | (data/nl) | +------------------------------+--------------+ | no - Norwegian | (data/no) | +------------------------------+--------------+ | pt-br - Brazilian Portuguese | (data/pt-br) | +------------------------------+--------------+ | sv - Swedish | (data/sv) | +------------------------------+--------------+ :param file: The name of file. :param locale: Locale. :returns: The content of the file. """ locale = locale.lower() if locale not in SUPPORTED_LOCALES: raise UnsupportedLocale("Locale %s does not supported" % locale) # Needs explicit encoding for Windows with open(join(PATH + '/' + locale, file), 'r', encoding='utf8') as f: data = json.load(f) return data
Python
0
@@ -1771,16 +1771,17 @@ orithm.%0A +%0A See:
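The `luhn_checksum` helper in this file computes a Luhn check digit: walking the digits right to left, every second digit is doubled, digits above 9 are reduced by 9, and the sum is mapped to a single digit. A self-contained sketch mirroring that loop (a copy for illustration, not imported from the module):

```python
def luhn_check_digit(num):
    # Walk digits right to left, doubling every second digit (i = 0, 2, ...).
    check = 0
    for i, s in enumerate(reversed(num)):
        sx = int(s)
        sx = sx * 2 if i % 2 == 0 else sx
        sx = sx - 9 if sx > 9 else sx   # 16 -> 7, 18 -> 9, ...
        check += sx
    return str(check * 9 % 10)

assert luhn_check_digit("7992739871") == "3"  # the docstring's own example
```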
fc05512b3ad40f6571ee3d942e4829a19e2a465e
Add core.models.Sensor
sensor/core/models.py
sensor/core/models.py
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

from django.db import models


class GenericSensor(models.Model):
    """Represents a sensor abstracting away the specifics of what it measures.

    A sensor measures one kind of thing. A physical device might have
    multiple logical sensors.
    """

    name = models.CharField(max_length=256)
    model = models.CharField(max_length=128)

    class Meta:
        unique_together = [('name', 'model')]
Python
0.000002
@@ -602,8 +602,185 @@ model')%5D +%0A%0A%0Aclass Sensor(models.Model):%0A %22%22%22Base class for specific sensor types.%22%22%22%0A%0A generic_sensor = models.OneToOneField(GenericSensor)%0A%0A class Meta:%0A abstract = True
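The diff introduces an abstract `Sensor` base linked one-to-one to `GenericSensor`; concrete sensor types inherit from it and each gets its own table carrying that link. A hypothetical subclass, for illustration only (`TemperatureSensor` is not part of the commit, and running it requires a configured Django project):

```python
from django.db import models

class TemperatureSensor(Sensor):
    """Hypothetical concrete sensor storing a reading range in Celsius."""
    # Inherits generic_sensor = OneToOneField(GenericSensor) from Sensor.
    min_celsius = models.FloatField()
    max_celsius = models.FloatField()
```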
13054b372375cd1a0abf5cf2be1eff722fdb7213
Remove useless commented code
sentry/conf/server.py
sentry/conf/server.py
""" sentry.conf.server ~~~~~~~~~~~~~~~~~~ These settings act as the default (base) settings for the Sentry-provided web-server :copyright: (c) 2010-2012 by the Sentry Team, see AUTHORS for more details. :license: BSD, see LICENSE for more details. """ from django.conf.global_settings import * import hashlib import os import os.path import socket import sys import urlparse DEBUG = False TEMPLATE_DEBUG = True ADMINS = () INTERNAL_IPS = ('127.0.0.1',) MANAGERS = ADMINS APPEND_SLASH = True PROJECT_ROOT = os.path.join(os.path.dirname(__file__), os.pardir, os.pardir) sys.path.insert(0, os.path.abspath(os.path.join(PROJECT_ROOT, '..'))) CACHE_BACKEND = 'locmem:///' DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': 'sentry.db', 'USER': '', 'PASSWORD': '', 'HOST': '', 'PORT': '', } } if 'DATABASE_URL' in os.environ: url = urlparse.urlparse(os.environ['DATABASE_URL']) # Ensure default database exists. DATABASES['default'] = DATABASES.get('default', {}) # Update with environment configuration. DATABASES['default'].update({ 'NAME': url.path[1:], 'USER': url.username, 'PASSWORD': url.password, 'HOST': url.hostname, 'PORT': url.port, }) if url.scheme == 'postgres': DATABASES['default']['ENGINE'] = 'django.db.backends.postgresql_psycopg2' if url.scheme == 'mysql': DATABASES['default']['ENGINE'] = 'django.db.backends.mysql' # Local time zone for this installation. Choices can be found here: # http://en.wikipedia.org/wiki/List_of_tz_zones_by_name # although not all choices may be available on all operating systems. # On Unix systems, a value of None will cause Django to use the same # timezone as the operating system. # If running in a Windows environment this must be set to the same as your # system time zone. TIME_ZONE = 'UTC' # Language code for this installation. All choices can be found here: # http://www.i18nguy.com/unicode/language-identifiers.html LANGUAGE_CODE = 'en-us' SITE_ID = 1 # If you set this to False, Django will make some optimizations so as not # to load the internationalization machinery. USE_I18N = True # If you set this to False, Django will not format dates, numbers and # calendars according to the current locale USE_L10N = True USE_TZ = False # Make this unique, and don't share it with anybody. SECRET_KEY = hashlib.md5(socket.gethostname() + ')*)&8a36)6%74e@-ne5(-!8a(vv#tkv)(eyg&@0=zd^pl!7=y@').hexdigest() # List of callables that know how to import templates from various sources. TEMPLATE_LOADERS = ( 'django.template.loaders.filesystem.Loader', 'django.template.loaders.app_directories.Loader', # 'django.template.loaders.eggs.Loader', ) MIDDLEWARE_CLASSES = ( 'django.middleware.common.CommonMiddleware', 'django.contrib.sessions.middleware.SessionMiddleware', 'django.middleware.locale.LocaleMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'sentry.middleware.SentryMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', ) ROOT_URLCONF = 'sentry.conf.urls' TEMPLATE_DIRS = ( # Put strings here, like "/home/html/django_templates" or "C:/www/django/templates". # Always use forward slashes, even on Windows. # Don't forget to use absolute paths, not relative paths. 
os.path.join(PROJECT_ROOT, 'templates'), ) INSTALLED_APPS = ( 'django.contrib.auth', 'django.contrib.admin', 'django.contrib.contenttypes', 'django.contrib.sessions', 'django.contrib.sites', 'django.contrib.messages', 'crispy_forms', 'djcelery', 'kombu.transport.django', 'raven.contrib.django', 'sentry', 'sentry.plugins.sentry_mail', 'sentry.plugins.sentry_servers', 'sentry.plugins.sentry_sites', 'sentry.plugins.sentry_urls', 'sentry.plugins.sentry_user_emails', 'sentry.plugins.sentry_useragents', 'south', ) ADMIN_MEDIA_PREFIX = '/_admin_media/' # Queue configuration BROKER_URL = "django://" CELERY_IGNORE_RESULT = True CELERY_SEND_EVENTS = False CELERY_RESULT_BACKEND = None CELERY_TASK_RESULT_EXPIRES = 1 # Sentry and Raven configuration SENTRY_PUBLIC = False SENTRY_PROJECT = 1 SENTRY_CACHE_BACKEND = 'default' EMAIL_SUBJECT_PREFIX = '[Sentry] ' # Configure logging from raven.conf import setup_logging from raven.contrib.django.handlers import SentryHandler import logging # Configure root logger logger = logging.getLogger() logger.setLevel(logging.INFO) handler = logging.StreamHandler() handler.setLevel(logging.INFO) logger.addHandler(handler) # Disable django.request as it's generally useless logger = logging.getLogger('django.request') logger.propagate = False logger.addHandler(handler) # Configure default sentry logging sentry_handler = SentryHandler() sentry_handler.setLevel(logging.ERROR) setup_logging(sentry_handler) # Configure celery import djcelery djcelery.setup_loader()
Python
0
@@ -2747,53 +2747,8 @@ r',%0A -# 'django.template.loaders.eggs.Loader',%0A )%0A%0AM
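Aside from deleting the commented-out template loader, the technically interesting part of this file is its `DATABASE_URL` handling: one URL is split into the individual Django `DATABASES` fields. A standalone sketch of that parsing (Python 2 `urlparse`, matching the file; the credentials are made up):

```python
import urlparse

url = urlparse.urlparse('postgres://sentry:secret@localhost:5432/sentrydb')
database = {
    'NAME': url.path[1:],   # drop the leading '/'
    'USER': url.username,
    'PASSWORD': url.password,
    'HOST': url.hostname,
    'PORT': url.port,
}
if url.scheme == 'postgres':
    database['ENGINE'] = 'django.db.backends.postgresql_psycopg2'

assert database['NAME'] == 'sentrydb'
assert database['PORT'] == 5432
```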
d9d9b993edc8baebf69b446d40f0a05260a041d5
Remove prints
emailauth/tests.py
emailauth/tests.py
from django.test import Client, TestCase

from emailauth import forms

c = Client()


class FormTests(TestCase):
    def test_creation_form(self):
        form_data = {'email': 'test@test.com',
                     'password1': 'test1234',
                     'password2': 'test1234'}
        form = forms.UserCreationForm(form_data)
        # Testing if form is valid, and that the fields are working.
        self.assertTrue(form.is_valid())

    def test_form_save(self):
        form_data = {'email': 'test@test.com',
                     'password1': 'test1234',
                     'password2': 'test1234'}
        form = forms.UserCreationForm(form_data)
        # Testing if form is valid, and that the fields are working.
        self.assertTrue(form.is_valid())
        user = form.save()
        # Testing if save function is returning properly
        self.assertEqual(str(user), 'test@test.com')

    def test_not_identically_passwords(self):
        form_data = {'email': 'test@test.com',
                     'password1': '1234test',
                     'password2': 'test1234'}
        form = forms.UserCreationForm(form_data)
        # Testing if form is invalid when passwords are not matching.
        self.assertFalse(form.is_valid())

    def test_register_by_post(self):
        # Testing register through post-request
        get_response = c.get('/register/')
        print(get_response.status_code)
        post_response_wrong = c.post('/register/', {
            'username': 'testuser@test.com',
            'password1': 'test1234',
            'password2': 'test1234',
        })
        print(post_response_wrong.status_code)
        post_response = c.post('/register/', {
            'email': 'test@test.com',
            'password1': 'testPass1234',
            'password2': 'testPass1234',
        })
        print(post_response.status_code)
        self.assertEqual(get_response.status_code, 200)
        self.assertNotEqual(post_response_wrong.status_code, 302)
        self.assertEqual(post_response.status_code, 302)
Python
0.000002
@@ -1261,48 +1261,8 @@ /')%0A - print(get_response.status_code)%0A @@ -1343,21 +1343,12 @@ test -u +U ser -@test.com ',%0A @@ -1435,55 +1435,8 @@ %7D)%0A - print(post_response_wrong.status_code)%0A @@ -1590,32 +1590,32 @@ 'testPass1234',%0A + %7D)%0A @@ -1613,49 +1613,8 @@ %7D)%0A - print(post_response.status_code)%0A
88ee9a485534f442978f9b29a1dbcf15cd245b25
simplify fragment copy loop
distance/filter/base.py
distance/filter/base.py
"""Base classes for filters.""" from distance.base import Transform, TransformError from distance.level import Level from distance.levelobjects import Group from distance import levelfragments as levelfrags ANIM_FRAG_TYPES = ( levelfrags.AnimatorFragment, levelfrags.EventListenerFragment, levelfrags.TrackAttachmentFragment, ) ANIM_FRAG_SECTIONS = {cls.base_section.to_key(any_version=True) for cls in ANIM_FRAG_TYPES} def create_replacement_group(orig, objs, animated_only=False): copied_frags = [] org_secs = [s.to_key(any_version=True) for s in orig.sections] for sec in ANIM_FRAG_SECTIONS: try: idx = org_secs.index(sec) except ValueError: pass else: copyfrag = orig.fragments[idx] copied_frags.append(copyfrag.clone()) if animated_only and not copied_frags: return objs pos, rot, scale = orig.transform group = Group(children=objs) group.recenter(pos) group.rerotate(rot) group.rescale(scale) group.fragments = list(group.fragments) + copied_frags return group, class ObjectFilter(object): @classmethod def add_args(cls, parser): parser.add_argument(":maxrecurse", type=int, default=-1, help="Set recursion limit, -1 for infinite (the default).") def __init__(self, args): self.maxrecurse = args.maxrecurse def filter_object(self, obj): return obj, def filter_group(self, grp, levels, **kw): orig_empty = not grp.children grp.children = self.filter_objects(grp.children, levels, **kw) if not orig_empty and not grp.children: # remove empty group return () return grp, def filter_any_object(self, obj, levels, **kw): if obj.is_object_group: if levels == 0: return obj, return self.filter_group(obj, levels - 1, **kw) else: return self.filter_object(obj, **kw) def filter_objects(self, objects, levels, **kw): res = [] for obj in objects: res.extend(self.filter_any_object(obj, levels, **kw)) return res def apply_level(self, level, **kw): for layer in level.layers: layer.objects = self.filter_objects(layer.objects, self.maxrecurse, **kw) def apply_group(self, grp, **kw): # not using filter_group, because we never want to remove the root # group object grp.children = self.filter_objects(grp.children, self.maxrecurse, **kw) def apply(self, content, **kw): if isinstance(content, Level): self.apply_level(content, **kw) elif isinstance(content, Group): self.apply_group(content, **kw) else: raise TypeError(f'Unknown object type: {type(content).__name__!r}') def post_filter(self, content): return True def print_summary(self, p): pass class DoNotApply(Exception): def __init__(self, reason=None, *args, **kw): super().__init__(*args, **kw) self.reason = reason class ObjectMapper(object): def __init__(self, pos=(0, 0, 0), rot=(0, 0, 0, 1), scale=(1, 1, 1)): self.transform = Transform.fill(pos=pos, rot=rot, scale=scale) def _apply_transform(self, transform, global_transform=Transform.fill()): try: res = transform.apply(*self.transform) except TransformError: raise DoNotApply('locked_scale') try: # raises TransformError if we are inside groups with # incompatible scale global_transform.apply(*res) except TransformError: raise DoNotApply('locked_scale_group') return res def apply(self, obj, global_transform=Transform.fill(), **kw): transform = self._apply_transform(obj.transform, global_transform=global_transform) return self.create_result(obj, transform, **kw) def create_result(self, old, transform): raise NotImplementedError # vim:set sw=4 ts=8 sts=4 et sr ft=python fdm=marker tw=0:
Python
0.000063
@@ -546,213 +546,109 @@ +f or -g_secs = %5Bs.to_key(any_version=True) for s in orig.sections%5D%0A for sec in ANIM_FRAG_SECTIONS:%0A try:%0A idx = org_secs.index(sec)%0A except ValueError:%0A pass%0A else + i, sec in enumerate(orig.sections):%0A if sec.to_key(any_version=True) in ANIM_FRAG_SECTIONS :%0A @@ -688,10 +688,8 @@ ts%5Bi -dx %5D%0A
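The simplification in this diff replaces a parallel key list plus per-section `.index()` lookups with a single `enumerate()` pass and a set-membership test. The same shape, reduced to plain lists:

```python
wanted = {'b', 'd'}                  # stands in for ANIM_FRAG_SECTIONS
sections = ['a', 'b', 'c', 'd']      # stands in for the section keys
fragments = [10, 20, 30, 40]         # stands in for orig.fragments

# One pass: keep the fragment whenever its section key is wanted.
copied = [fragments[i] for i, sec in enumerate(sections) if sec in wanted]
assert copied == [20, 40]
```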
c060f88310e979cdcb400cead73730cc7e1b6226
Update comments in models/group.py (changed Optional to Required). Add verbose_name to founder property in Group model.
app/soc/models/group.py
app/soc/models/group.py
#!/usr/bin/python2.5 # # Copyright 2008 the Melange authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """This module contains the Group Model.""" __authors__ = [ '"Todd Larsen" <tlarsen@google.com>', '"Pawel Solyga" <pawel.solyga@gmail.com>', ] import polymodel from google.appengine.ext import db from django.utils.translation import ugettext_lazy from soc.models import countries import soc.models.user class Group(polymodel.PolyModel): """Common data fields for all groups. """ #: Required field storing name of the group. name = db.StringProperty(required=True, verbose_name=ugettext_lazy('Name')) name.help_text = ugettext_lazy('Complete, formal name of the group.') #: Required field storing link_name used in URLs to identify group. #: Lower ASCII characters only. link_name = db.StringProperty(required=True, verbose_name=ugettext_lazy('Link name')) link_name.help_text = ugettext_lazy( 'Field used in URLs to identify group. ' 'Lower ASCII characters only.') #: Required field storing short name of the group. #: It can be used for displaying group as sidebar menu item. short_name = db.StringProperty(required=True, verbose_name=ugettext_lazy('Short name')) short_name.help_text = ugettext_lazy('Short name used for sidebar menu') #: Required many:1 relationship indicating the founding User of the #: Group (this relationship is needed to keep track of lifetime group #: creation limits, used to prevent spamming, etc.). founder = db.ReferenceProperty(reference_class=soc.models.user.User, required=True, collection_name="groups") #: Optional field storing a home page URL of the group. home_page = db.LinkProperty(required=True, verbose_name=ugettext_lazy('Home Page URL')) #: Optional email address used as the "public" contact mechanism for #: the Group (as opposed to the founder.id email address which is kept #: secret, revealed only to Developers). email = db.EmailProperty(required=True, verbose_name=ugettext_lazy('Email')) #: Optional field storing description of the group. description = db.TextProperty(required=True, verbose_name=ugettext_lazy('Description')) #: Optional field containing a group street address. #: Group street address can only be lower ASCII, not UTF-8 text, #: because, if supplied, it is used as a shipping address. street = db.StringProperty(required=True, verbose_name=ugettext_lazy('Street address')) street.help_text = ugettext_lazy( 'street number and name, lower ASCII characters only') #: Optional field containing group address city. #: City can only be lower ASCII, not UTF-8 text, because, if #: supplied, it is used as a shipping address. city = db.StringProperty(required=True, verbose_name=ugettext_lazy('City')) city.help_text = ugettext_lazy('lower ASCII characters only') #: Optional field containing group address state or province. #: Group state/province can only be lower ASCII, not UTF-8 #: text, because, if supplied, it is used as a shipping address. 
state = db.StringProperty(required=True, verbose_name=ugettext_lazy('State/Province')) state.help_text = ugettext_lazy( 'optional if country/territory does not have states or provinces, ' 'lower ASCII characters only') #: Optional field containing address country or territory of the group. country = db.StringProperty(required=True, verbose_name=ugettext_lazy('Country/Territory'), choices=countries.COUNTRIES_AND_TERRITORIES) #: Optional field containing address postal code of the group (ZIP code in #: the United States). Postal code can only be lower ASCII, not UTF-8 #: text, because, if supplied, it is used as a shipping address. postalcode = db.StringProperty(required=True, verbose_name=ugettext_lazy('ZIP/Postal Code')) postalcode.help_text=ugettext_lazy('lower ASCII characters only') #: Optional contact phone number that will be, amongst other uses, #: supplied to shippers along with the shipping address; kept private. phone = db.PhoneNumberProperty(required=True, verbose_name=ugettext_lazy('Phone Number')) phone.help_text = ugettext_lazy( 'include complete international calling number with country code')
Python
0
@@ -2163,25 +2163,98 @@ ups%22 -) %0A #: Optional +,%0A verbose_name=ugettext_lazy('Created by'))%0A #: Required fie @@ -2398,24 +2398,24 @@ %0A #: -Optional +Required email a @@ -2671,32 +2671,32 @@ ) %0A %0A #: -Optional +Required field stori @@ -2828,32 +2828,32 @@ %0A #: -Optional +Required field conta @@ -3206,32 +3206,32 @@ nly')%0A%0A #: -Optional +Required field conta @@ -3518,32 +3518,32 @@ nly')%0A%0A #: -Optional +Required field conta @@ -3952,32 +3952,32 @@ nly')%0A%0A #: -Optional +Required field conta @@ -4182,24 +4182,24 @@ )%0A%0A #: -Optional +Required field c @@ -4573,16 +4573,16 @@ #: -Optional +Required con
4c0325f92f542b9af7e504be55b7c7d79d1af3c8
Update some features
compiler.py
compiler.py
# -*- coding: utf-8 -*- # This file is part of the pymfony package. # # (c) Alexandre Quercia <alquerci@email.com> # # For the full copyright and license information, please view the LICENSE # file that was distributed with this source code. """ """ from __future__ import absolute_import; from pymfony.component.system import ( Object, interface, ); from pymfony.component.dependency.exception import InvalidArgumentException; @interface class CompilerPassInterface(Object): """Interface that must be implemented by compilation passes """ def process(self, container): """You can modify the container here before it is dumped to PHP code. @param container: ContainerBuilder """ pass; class PassConfig(Object): """Compiler Pass Configuration This class has a default configuration embedded. """ TYPE_BEFORE_OPTIMIZATION = 'BeforeOptimization'; def __init__(self): self.__mergePass = None; self.__beforeOptimizationPasses = list(); def getPasses(self): """Returns all passes in order to be processed. @return: list An list of all passes to process """ passes = list(); if self.__mergePass: passes.append(self.__mergePass); passes.extend(self.__beforeOptimizationPasses); return passes; def addPass(self, cPass, cType=TYPE_BEFORE_OPTIMIZATION): """Adds a pass. @param cPass: CompilerPassInterface A Compiler pass @param cType: string The pass type @raise InvalidArgumentException: when a pass type doesn't exist """ assert isinstance(cPass, CompilerPassInterface); propertyName = "get{0}Passes".format(cType); if not hasattr(self, propertyName): raise InvalidArgumentException( 'Invalid type "{0}".'.format(cType) ); getattr(self, propertyName)().append(cPass); def getMergePass(self): """Gets the Merge Pass. @return: CompilerPassInterface A merge pass """ return self.__mergePass; def setMergePass(self, mergePass): """Sets the Merge Pass. @param mergePass: CompilerPassInterface A merge pass """ assert isinstance(mergePass, CompilerPassInterface); self.__mergePass = mergePass; def getBeforeOptimizationPasses(self): """ @return: list """ return self.__beforeOptimizationPasses; class Compiler(Object): """This class is used to remove circular dependencies between individual passes. """ def __init__(self): """Constructor. """ self.__passConfig = PassConfig(); def getPassConfig(self): """Returns the PassConfig. @return: PassConfig The PassConfig instance """ return self.__passConfig; def addPass(self, cPass, cType=PassConfig.TYPE_BEFORE_OPTIMIZATION): """Adds a pass to the PassConfig. @param cPass: CompilerPassInterface A compiler pass @param cType: string The type of the pass """ assert isinstance(cPass, CompilerPassInterface); self.__passConfig.addPass(cPass, cType); def compile(self, container): """Run the Compiler and process all Passes. @param container: ContainerBuilder """ for cPass in self.__passConfig.getPasses(): cPass.process(container);
Python
0
@@ -912,16 +912,58 @@ zation'; +%0A TYPE_AFTER_REMOVING = 'AfterRemoving' %0A%0A de @@ -1063,16 +1063,61 @@ list(); +%0A self.__afterRemovingPasses = list(); %0A%0A de @@ -1414,16 +1414,67 @@ Passes); +%0A passes.extend(self.__afterRemovingPasses); %0A%0A @@ -1832,17 +1832,20 @@ -p +getP ropertyN @@ -1875,24 +1875,80 @@ rmat(cType); +%0A setPropertyName = %22set%7B0%7DPasses%22.format(cType); %0A%0A if @@ -1958,33 +1958,36 @@ t hasattr(self, -p +getP ropertyName):%0A @@ -2099,24 +2099,33 @@ );%0A%0A + passes = getattr(sel @@ -2127,17 +2127,20 @@ r(self, -p +getP ropertyN @@ -2149,20 +2149,84 @@ e)() -.append(cPas +;%0A passes.append(cPass);%0A getattr(self, setPropertyName)(passe s);%0A @@ -2765,24 +2765,449 @@ ionPasses;%0A%0A + def setBeforeOptimizationPasses(self, passes):%0A %22%22%22%0A @param passes: list%0A %22%22%22%0A self.__beforeOptimizationPasses = passes;%0A%0A def getAfterRemovingPasses(self):%0A %22%22%22%0A @return: list%0A %22%22%22%0A return self.__afterRemovingPasses;%0A%0A def setAfterRemovingPasses(self, passes):%0A %22%22%22%0A @param passes: list%0A %22%22%22%0A self.__afterRemovingPasses = passes;%0A %0Aclass Compi
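The diff extends `PassConfig` with an `AfterRemoving` pass list and reworks `addPass` to dispatch through `get{Type}Passes`/`set{Type}Passes` accessor names. A self-contained sketch of that dispatch trick (plain strings instead of real compiler passes, so it simplifies the original class):

```python
class Passes(object):
    def __init__(self):
        self._before, self._after = [], []

    def getBeforeOptimizationPasses(self): return self._before
    def setBeforeOptimizationPasses(self, p): self._before = p
    def getAfterRemovingPasses(self): return self._after
    def setAfterRemovingPasses(self, p): self._after = p

    def addPass(self, cpass, ctype='BeforeOptimization'):
        # The pass type names the accessor pair, so a new type only needs
        # a matching get/set method.
        getter, setter = 'get%sPasses' % ctype, 'set%sPasses' % ctype
        if not hasattr(self, getter):
            raise ValueError('Invalid type "%s".' % ctype)
        passes = getattr(self, getter)()
        passes.append(cpass)
        getattr(self, setter)(passes)

p = Passes()
p.addPass('merge-definitions')
p.addPass('remove-unused', 'AfterRemoving')
assert p.getAfterRemovingPasses() == ['remove-unused']
```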
a5b9b12f3e16e3cf2bb1de6f61571f612981c468
Use comma separated list of tags for tags --merge
v6/plugins/tags/tags.py
v6/plugins/tags/tags.py
# -*- coding: utf-8 -*- # Copyright © 2012-2013 Roberto Alsina and others. # Permission is hereby granted, free of charge, to any # person obtaining a copy of this software and associated # documentation files (the "Software"), to deal in the # Software without restriction, including without limitation # the rights to use, copy, modify, merge, publish, # distribute, sublicense, and/or sell copies of the # Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice # shall be included in all copies or substantial portions of # the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY # KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE # WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR # PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS # OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR # OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR # OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE # SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. from __future__ import unicode_literals, print_function import codecs from textwrap import dedent from nikola.nikola import Nikola from nikola.plugin_categories import Command from nikola.utils import LOGGER def format_doc_string(function): text = dedent(' ' * 4 + function.__doc__.strip()) return '\n'.join([line for line in text.splitlines() if line.strip()]) + '\n' def list_tags(site, sorting='alpha'): """ Lists all the tags used in the site. The tags are sorted alphabetically, by default. Sorting can be one of 'alpha' or 'count'. """ tags = site.posts_per_tag if sorting == 'count': tags = sorted(tags, key=lambda tag: len(tags[tag]), reverse=True) else: tags = sorted(site.posts_per_tag.keys()) for tag in tags: if sorting == 'count': show = '{:>4} {}'.format(len(site.posts_per_tag[tag]), tag) else: show = tag print(show) return tags def merge_tags(site, tags, filenames, test_mode=False): """ Merges a list of tags, replacing everything with the last tag. Requires a list of file names to be passed as arguments. $ nikola tags --merge "foo bar baz useless" posts/*.rst The above command will replace foo, bar, and baz with 'useless' in all rst posts. """ if len(tags) < 2: print("ERROR: Need atleast two tags to merge.") else: # fixme: currently doesn't handle two post files. posts = [ post for post in site.timeline if post.source_path in filenames and not post.is_two_file ] FMT = 'Tags for {0}:\n{1:>6} - {2}\n{3:>6} - {4}\n' OLD = 'old' NEW = 'new' for post in posts: new_tags = _clean_tags(post.alltags[:], set(tags[:-1]), tags[-1]) if test_mode: print(FMT.format( post.source_path, OLD, post.alltags, NEW, new_tags) ) else: _replace_tags_line(post, new_tags) return new_tags def _clean_tags(tags, remove, keep): """ In all tags list, replace tags in remove with keep tag. """ original_tags = tags[:] for index, tag in enumerate(original_tags): if tag in remove: tags.remove(tag) if len(original_tags) != len(tags) and keep not in tags: tags.append(keep) return tags def _replace_tags_line(post, tags): with codecs.open(post.source_path) as f: post_text = f.readlines() for index, line in enumerate(post_text[:]): if line.startswith('.. tags:'): post_text[index] = '.. tags: %s\n' % ', '.join(tags) break with codecs.open(post.source_path, 'wb+') as f: post_text = f.writelines(post_text) class CommandTags(Command): """ Manage tags on the site. 
This plugin is inspired by `jtags <https://github.com/ttscoff/jtag>`_. """ name = "tags" doc_usage = "[options]" doc_purpose = "manages the tags of your site" cmd_options = [ { 'name': 'list', 'long': 'list', 'short': 'l', 'default': False, 'type': bool, 'help': format_doc_string(list_tags) }, { 'name': 'list_sorting', 'short': 's', 'type': str, 'default': 'alpha', 'help': 'Changes sorting of list; can be one of alpha or count.\n' }, { 'name': 'merge', 'long': 'merge', 'type': lambda args: args.split(), 'default': '', 'help': format_doc_string(merge_tags) }, { 'name': 'test', 'short': 't', 'type': bool, 'default': False, 'help': 'Run other commands in test mode. Does not edit any files.\n' }, ] def _execute(self, options, args): """Manage the tags on the site.""" try: import conf except ImportError: LOGGER.error("No configuration found, cannot run the console.") else: nikola = Nikola(**conf.__dict__) nikola.scan_posts() if len(options['merge']) > 1 and len(args) > 0: merge_tags(nikola, options['merge'], args, options['test']) elif options['list']: list_tags(nikola, options['list_sorting']) else: print(self.help())
Python
0.000001
@@ -2181,16 +2181,32 @@ list of +comma-separated tags, re @@ -2217,18 +2217,12 @@ ing -everything +them wit @@ -2338,17 +2338,17 @@ %22foo - bar baz +,bar,baz, usel @@ -2461,24 +2461,72 @@ .%0A%0A %22%22%22%0A%0A + tags = _process_comma_separated_tags(tags)%0A%0A if len(t @@ -3586,24 +3586,125 @@ eturn tags%0A%0A +def _process_comma_separated_tags(tags):%0A return %5Btag.strip() for tag in tags.strip().split(',')%5D%0A %0Adef _replac @@ -4859,33 +4859,11 @@ e': -lambda args: args.split() +str ,%0A
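The change swaps `args.split()` (whitespace splitting) for comma-separated parsing, so `--merge "foo,bar,baz,useless"` works even when a tag name contains spaces. The new helper, shown in isolation:

```python
def process_comma_separated_tags(tags):
    # Split on commas and trim stray whitespace around each tag.
    return [tag.strip() for tag in tags.strip().split(',')]

assert process_comma_separated_tags('foo,bar,baz, useless') == \
    ['foo', 'bar', 'baz', 'useless']
```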
265e9added53d1eee1291b9e0b5a10bc7dfe19c8
Make sure we don't have section A before doing the extra round of manipulation
myuw_mobile/test/dao/canvas.py
myuw_mobile/test/dao/canvas.py
from django.test import TestCase
from django.test.client import RequestFactory
from myuw_mobile.dao.canvas import get_indexed_data_for_regid
from myuw_mobile.dao.canvas import get_indexed_by_decrosslisted
from myuw_mobile.dao.schedule import _get_schedule
from myuw_mobile.dao.term import get_current_quarter


class TestCanvas(TestCase):
    def test_crosslinks(self):
        with self.settings(
                RESTCLIENTS_SWS_DAO_CLASS='restclients.dao_implementation.sws.File'):
            data = get_indexed_data_for_regid("12345678901234567890123456789012")

            physics = data['2013,spring,PHYS,121/A']
            self.assertEquals(physics.course_url,
                              'https://canvas.uw.edu/courses/149650')

            train = data['2013,spring,TRAIN,100/B']
            self.assertEquals(train.course_url,
                              'https://canvas.uw.edu/courses/249650')

    def test_crosslinks_lookup(self):
        with self.settings(
                RESTCLIENTS_SWS_DAO_CLASS='restclients.dao_implementation.sws.File'):
            data = get_indexed_data_for_regid("12345678901234567890123456789012")
            now_request = RequestFactory().get("/")
            now_request.session = {}
            term = get_current_quarter(now_request)
            schedule = _get_schedule("12345678901234567890123456789012", term)

            canvas_data_by_course_id = get_indexed_by_decrosslisted(
                data, schedule.sections)

            physics = data['2013,spring,PHYS,121/A']
            self.assertEquals(physics.course_url,
                              'https://canvas.uw.edu/courses/149650')

            train = data['2013,spring,TRAIN,100/A']
            self.assertEquals(train.course_url,
                              'https://canvas.uw.edu/courses/249650')
Python
0.000001
@@ -691,32 +691,139 @@ urses/149650')%0A%0A + has_section_a = '2013,spring,TRAIN,100/A' in data%0A self.assertFalse(has_section_a)%0A%0A trai
ae948a2dfdd62af2ba98a0ee506ddd48504ee64b
bump version to 0.6-dev
validictory/__init__.py
validictory/__init__.py
#!/usr/bin/env python

from validictory.validator import SchemaValidator

__all__ = [ 'validate', 'SchemaValidator' ]
__version__ = '0.5.0'


def validate(data, schema, validator_cls=SchemaValidator):
    '''
    Validates a parsed json document against the provided schema. If an
    error is found a ValueError is raised.

    ``data`` is a python dictionary object of parsed json data.

    ``schema`` is a python dictionary object representing the schema.

    If ``validator_cls`` is provided that class will be used to validate
    the given ``schema`` against the given ``data``. The given class should
    be a subclass of the SchemaValidator class.
    '''
    v = validator_cls()
    return v.validate(data,schema)


if __name__ == '__main__':
    import sys
    import json
    if len(sys.argv) == 2:
        if sys.argv[1] == "--help":
            raise SystemExit("%s SCHEMAFILE [INFILE]" % (sys.argv[0],))
        schemafile = open(sys.argv[1], 'rb')
        infile = sys.stdin
    elif len(sys.argv) == 3:
        schemafile = open(sys.argv[1], 'rb')
        infile = open(sys.argv[2], 'rb')
    else:
        raise SystemExit("%s SCHEMAFILE [INFILE]" % (sys.argv[0],))
    try:
        obj = json.load(infile)
        schema = json.load(schemafile)
        validate(obj, schema)
    except ValueError, e:
        raise SystemExit(e)
Python
0
@@ -132,11 +132,15 @@ '0. -5 +6 .0 +-dev '%0A%0Ad
295a6dd0c2af01161ee5da274719596f043fe21c
Use encode('utf8') instead of str(...).
applyCrf.py
applyCrf.py
#!/usr/bin/env python """This program will read a JSON file (such as adjudicated_modeled_live_eyehair_100_03.json) and process it with CRF++. The labels assigned by CRF++ are printed.""" import argparse import sys import scrapings import crf_features as crff import CRFPP def main(argv=None): '''this is called if run from command line''' parser = argparse.ArgumentParser() parser.add_argument('-d','--debug', help="Optional give debugging feedback.", required=False, action='store_true') parser.add_argument('-f','--featlist', help="Required input file with features to be extracted, one feature entry per line.", required=True) parser.add_argument('-i','--input', help="Required input file with Web scraping sentences in JSON format.", required=True) parser.add_argument('-m','--model', help="Required input model file.", required=True) args=parser.parse_args() # Read the Web scrapings: s = scrapings.Scrapings(args.input) if args.debug: print "sencence count=%d" % s.sentenceCount() # Create a CrfFeatures object. This classs provides a lot of services, but we'll use only a subset. c = crff.CrfFeatures(args.featlist) # Create a CRF++ processor. tagger = CRFPP.Tagger("-m " + args.model) for sidx in range(0, s.sentenceCount()): tokens = s.getAllTokens(sidx) if args.debug: print "len(tokens)=%d" % len(tokens) fc = c.featurizeSentence(tokens) if args.debug: print "len(fc)=%d" % len(fc) tagger.clear() for idx, token in enumerate(tokens): features = fc[idx] if args.debug: print "token#%d (%s) has %d features" % (idx, token, len(features)) tf = token + ' ' + ' '.join(features) tagger.add(str(tf)) tagger.parse() # tagger.size() returns the number of tokens that were added. # tagger.xsize() returns the number of features plus 1 (for the token). if args.debug: print "size=%d" % tagger.size() print "xsize=%d" % tagger.xsize() print "ysize=%d" % tagger.ysize() print "dsize=%d" % tagger.dsize() print "vlevel=%d" % tagger.vlevel() print "nbest=%d" % tagger.nbest() ntokens = tagger.size() if ntokens != len(tokens): print "received %d tokens , expected %d" % (ntokens, len(tokens)) nfeatures = tagger.xsize() for tokenIdx in range(0, tagger.size()): if args.debug: for featureIdx in range (0, nfeatures): print "x(%d, %d)=%s" % (tokenIdx, featureIdx, tagger.x(tokenIdx, featureIdx)) # tagger.x(tokenIdx, 0) is the original token # tagger.yname(tagger.y(tokenIdx)) is the label assigned to that token. print "%s %s" % (tagger.x(tokenIdx, 0), tagger.yname(tagger.y(tokenIdx))) # call main() if this is run as standalone if __name__ == "__main__": sys.exit(main())
Python
0.000001
@@ -1805,14 +1805,24 @@ add( -str(tf +tf.encode('utf8' ))%0A
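The one-line fix matters because this script runs under Python 2 (note the `print` statements): `str()` on a unicode string goes through the implicit ASCII codec and fails on non-ASCII feature text, while `.encode('utf8')` yields a byte string that CRF++'s `add()` can take. Demonstrated in isolation (Python 2 semantics):

```python
tf = u'caf\xe9 FEATURE_A FEATURE_B'   # a token line with a non-ASCII char

try:
    str(tf)                  # implicit ASCII encode, raises on u'\xe9'
except UnicodeEncodeError:
    pass

encoded = tf.encode('utf8')  # explicit UTF-8, a plain byte string
assert isinstance(encoded, bytes)
```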
e7c142f6b4cbb4add64586c30297f0c3f983de88
update field tooltip
addons/website_sale/models/res_config_settings.py
addons/website_sale/models/res_config_settings.py
# -*- coding: utf-8 -*- # Part of Odoo. See LICENSE file for full copyright and licensing details. from ast import literal_eval from odoo import api, models, fields class ResConfigSettings(models.TransientModel): _inherit = 'res.config.settings' def _default_order_mail_template(self): if self.env['ir.module.module'].search([('name', '=', 'website_quote')]).state in ('installed', 'to upgrade'): return self.env.ref('website_quote.confirmation_mail').id else: return self.env.ref('sale.email_template_edi_sale').id def _default_recovery_mail_template(self): try: return self.env.ref('website_sale.mail_template_sale_cart_recovery').id except ValueError: return False salesperson_id = fields.Many2one('res.users', related='website_id.salesperson_id', string='Salesperson') salesteam_id = fields.Many2one('crm.team', related='website_id.salesteam_id', string='Sales Channel', domain=[('team_type', '!=', 'pos')]) module_website_sale_delivery = fields.Boolean("eCommerce Shipping Costs") # field used to have a nice radio in form view, resuming the 2 fields above sale_delivery_settings = fields.Selection([ ('none', 'No shipping management on website'), ('internal', "Delivery methods are only used internally: the customer doesn't pay for shipping costs"), ('website', "Delivery methods are selectable on the website: the customer pays for shipping costs"), ], string="Shipping Management") group_website_multiimage = fields.Boolean(string='Multi-Images', implied_group='website_sale.group_website_multi_image', group='base.group_portal,base.group_user,base.group_public') group_delivery_invoice_address = fields.Boolean(string="Shipping Address", implied_group='sale.group_delivery_invoice_address') module_website_sale_options = fields.Boolean("Optional Products") module_website_sale_digital = fields.Boolean("Digital Content") module_website_sale_wishlist = fields.Boolean("Wishlists") module_website_sale_comparison = fields.Boolean("Product Comparison Tool") module_website_sale_stock = fields.Boolean("Inventory", help='Installs *e-Commerce Inventory*') module_account_invoicing = fields.Boolean("Invoicing") order_mail_template = fields.Many2one('mail.template', string='Order Confirmation Email', default=_default_order_mail_template, domain="[('model', '=', 'sale.order')]", help="Email sent to customer at the end of the checkout process") automatic_invoice = fields.Boolean("Automatic Invoice") module_l10n_eu_service = fields.Boolean(string="EU Digital Goods VAT") cart_recovery_mail_template = fields.Many2one('mail.template', string='Cart Recovery Email', default=_default_recovery_mail_template, config_parameter='website_sale.cart_recovery_mail_template_id', domain="[('model', '=', 'sale.order')]") cart_abandoned_delay = fields.Float("Abandoned Delay", help="number of hours after which the cart is considered abandoned", default=1.0, config_parameter='website_sale.cart_abandoned_delay') @api.model def get_values(self): res = super(ResConfigSettings, self).get_values() params = self.env['ir.config_parameter'].sudo() sale_delivery_settings = 'none' if self.env['ir.module.module'].search([('name', '=', 'delivery')], limit=1).state in ('installed', 'to install', 'to upgrade'): sale_delivery_settings = 'internal' if self.env['ir.module.module'].search([('name', '=', 'website_sale_delivery')], limit=1).state in ('installed', 'to install', 'to upgrade'): sale_delivery_settings = 'website' res.update( automatic_invoice=params.get_param('website_sale.automatic_invoice', default=False), 
sale_delivery_settings=sale_delivery_settings, ) return res def set_values(self): super(ResConfigSettings, self).set_values() value = self.module_account_invoicing and self.default_invoice_policy == 'order' and self.automatic_invoice self.env['ir.config_parameter'].sudo().set_param('website_sale.automatic_invoice', value) @api.onchange('sale_delivery_settings') def _onchange_sale_delivery_settings(self): if self.sale_delivery_settings == 'none': self.update({ 'module_delivery': False, 'module_website_sale_delivery': False, }) elif self.sale_delivery_settings == 'internal': self.update({ 'module_delivery': True, 'module_website_sale_delivery': False, }) else: self.update({ 'module_delivery': True, 'module_website_sale_delivery': True, }) @api.onchange('group_discount_per_so_line') def _onchange_group_discount_per_so_line(self): if self.group_discount_per_so_line: self.update({ 'multi_sales_price': True, })
Python
0
@@ -2213,30 +2213,54 @@ lls -*e-Commerce Inventory* +the %22Website Delivery Information%22 application ')%0A%0A
1c2c9664d82d02ec2eb9929fe5370aa5e4b077b1
simplify logic - is_act_as_username returns False, not just logs
djactasauth/backends.py
djactasauth/backends.py
# -*- coding: utf-8 -*- import logging import django from django.contrib.auth.backends import ModelBackend from django.contrib import auth _authenticate_needs_request_arg = django.VERSION[:2] >= (1, 11) log = logging.getLogger(__name__) class FilteredModelBackend(ModelBackend): def get_user(self, user_id): user = super(FilteredModelBackend, self).get_user(user_id) return self.filter_user(user) if _authenticate_needs_request_arg: def authenticate(self, request, username=None, password=None, **kwargs): return self._authenticate( request=request, username=username, password=password, **kwargs) else: def authenticate(self, username=None, password=None, **kwargs): return self._authenticate( username=username, password=password, **kwargs) def _authenticate(self, **kwargs): user = super(FilteredModelBackend, self).authenticate(**kwargs) return self.filter_user(user) def filter_user(self, user): if not user: return user filters = getattr(self, 'filter_kwargs', None) if filters: qs = type(user)._default_manager.filter( pk=user.pk).filter(**filters) if not qs.exists(): return None return user class ActAsBackend(object): sepchar = '/' too_many_sepchar_msg = 'Username holds more than one separation char "{}"'\ '.'.format(sepchar) @classmethod def is_act_as_username(cls, username): if not username: return False if username.count(ActAsBackend.sepchar) > 1: log.warn(cls.too_many_sepchar_msg) return cls.sepchar in username if _authenticate_needs_request_arg: def authenticate(self, request, username=None, password=None, **kwargs): return self._authenticate( request=request, username=username, password=password, **kwargs) else: def authenticate(self, username=None, password=None, **kwargs): return self._authenticate( username=username, password=password, **kwargs) def _authenticate(self, username=None, password=None, **kwargs): self.fail_unless_one_aaa_backend_is_configured() assert password is not None if not self.is_act_as_username(username): return None try: auth_username, act_as_username = username.split(self.sepchar) except ValueError: return None backends = [b for b in auth.get_backends() if not isinstance(b, ActAsBackend)] for backend in backends: auth_user = backend.authenticate( username=auth_username, password=password, **kwargs) if auth_user: return self.get_act_as_user( auth_user=auth_user, act_as_username=act_as_username) def fail_unless_one_aaa_backend_is_configured(self): aaa_backends = list( type(backend) for backend in auth.get_backends() if isinstance(backend, ActAsBackend)) if len(aaa_backends) != 1: raise ValueError( 'There should be exactly one AAA backend configured, ' 'but there were {}'.format(aaa_backends)) def get_act_as_user(self, auth_user, act_as_username): if auth_user.username != act_as_username: UserModel = auth.get_user_model() try: user = self._get_user_manager().get_by_natural_key( act_as_username) except UserModel.DoesNotExist: return None if not self.can_act_as(auth_user=auth_user, user=user): return None else: user = auth_user return user def _get_user_manager(self): UserModel = auth.get_user_model() return UserModel._default_manager def can_act_as(self, auth_user, user): return False def get_user(self, user_id): return self._get_user_manager().get(pk=user_id) class OnlySuperuserCanActAsBackend(ActAsBackend): def can_act_as(self, auth_user, user): return auth_user.is_superuser and not user.is_superuser
Python
0.998867
@@ -1741,16 +1741,41 @@ ar_msg)%0A + return False%0A @@ -2510,33 +2510,16 @@ rn None%0A - try:%0A @@ -2584,59 +2584,8 @@ ar)%0A - except ValueError:%0A return None%0A
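The backend's convention is `auth_username/act_as_username`: authenticate as the part before the slash, then act as the part after it. After this change a username containing more than one separator is rejected rather than merely logged. The predicate, reduced to a standalone function:

```python
SEPCHAR = '/'

def is_act_as_username(username):
    if not username:
        return False
    if username.count(SEPCHAR) > 1:
        return False          # previously this case only logged a warning
    return SEPCHAR in username

assert is_act_as_username('admin/customer') is True
assert is_act_as_username('a/b/c') is False   # now rejected outright
assert is_act_as_username('plainuser') is False
```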
0cd3651810daceefa492bc303c74568d1a042ca6
Fix get_proxy_ticket method usage
django_cas_ng/models.py
django_cas_ng/models.py
# -*- coding: utf-8 -*-
from django.db import models
from django.conf import settings
from .utils import (get_cas_client, get_service_url, get_user_from_session)
from importlib import import_module
from cas import CASError

SessionStore = import_module(settings.SESSION_ENGINE).SessionStore


class ProxyError(ValueError):
    pass


class ProxyGrantingTicket(models.Model):
    class Meta:
        unique_together = ('session_key', 'user')
    session_key = models.CharField(max_length=255, blank=True, null=True)
    user = models.ForeignKey(
        settings.AUTH_USER_MODEL,
        related_name="+",
        null=True,
        blank=True
    )
    pgtiou = models.CharField(max_length=255, null=True, blank=True)
    pgt = models.CharField(max_length=255, null=True, blank=True)
    date = models.DateTimeField(auto_now_add=True)

    @classmethod
    def clean_deleted_sessions(cls):
        for pgt in cls.objects.all():
            session = SessionStore(session_key=pgt.session_key)
            user = get_user_from_session(session)
            if not user.is_authenticated():
                pgt.delete()

    @classmethod
    def retrieve_pt(cls, request, service):
        """`request` should be the current HttpRequest object

        `service` a string representing the service for which we want to
        retrieve a ticket.

        The function returns a Proxy Ticket or raises `ProxyError`
        """
        try:
            pgt = cls.objects.get(user=request.user,
                                  session_key=request.session.session_key).pgt
        except cls.DoesNotExist:
            raise ProxyError(
                "INVALID_TICKET",
                "No proxy ticket found for this HttpRequest object"
            )
        else:
            service_url = get_service_url(request)
            client = get_cas_client(service_url=service_url)
            try:
                return client.get_proxy_ticket(pgt, service)
            # change CASError to ProxyError nicely
            except CASError as error:
                raise ProxyError(*error.args)
            # just embed other errors
            except Exception as e:
                raise ProxyError(e)


class SessionTicket(models.Model):
    session_key = models.CharField(max_length=255)
    ticket = models.CharField(max_length=255)

    @classmethod
    def clean_deleted_sessions(cls):
        for st in cls.objects.all():
            session = SessionStore(session_key=st.session_key)
            user = get_user_from_session(session)
            if not user.is_authenticated():
                st.delete()
Python
0.000004
@@ -118,25 +118,8 @@ ent, - get_service_url, get @@ -1702,59 +1702,8 @@ se:%0A - service_url = get_service_url(request)%0A @@ -1753,20 +1753,16 @@ =service -_url )%0A @@ -1822,25 +1822,16 @@ cket(pgt -, service )%0A
db033a9560ee97b5281adbf05f3f452943d592d7
Add test_get_on_call and test_weekly
django_on_call/tests.py
django_on_call/tests.py
""" This file demonstrates writing tests using the unittest module. These will pass when you run "manage.py test". Replace this with more appropriate tests for your application. """ from django.test import TestCase class SimpleTest(TestCase): def test_basic_addition(self): """ Tests that 1 + 1 always equals 2. """ self.assertEqual(1 + 1, 2)
Python
0
@@ -1,186 +1,19 @@ -%22%22%22%0AThis file demonstrates writing tests using the unittest module. These will pass%0Awhen you run %22manage.py test%22.%0A%0AReplace this with more appropriate tests for your application.%0A%22%22%22 +import datetime %0A%0Afr @@ -44,16 +44,44 @@ stCase%0A%0A +from .models import OnCall%0A%0A %0Aclass S @@ -118,127 +118,873 @@ est_ -basic_addition(self):%0A %22%22%22%0A Tests that 1 + 1 always equals 2.%0A %22%22%22%0A self.assertEqual(1 + 1, 2 +get_on_call(self):%0A %22%22%22Test the basic OnCall.get_on_call functionality%0A %22%22%22%0A on_call = OnCall(slug='test', rule='on_call = %22Alice%22')%0A self.assertEqual(on_call.get_on_call(), 'Alice')%0A%0A def test_weekly(self):%0A %22%22%22Test a week-on round robin%0A %22%22%22%0A on_call = OnCall(slug='test', rule='%5Cn'.join(%5B%0A 'handlers = %5B%22Alice%22, %22Bob%22, %22Charlie%22%5D',%0A 'week = int(now.strftime(%22%25W%22))',%0A 'on_call = handlers%5Bweek %25 len(handlers)%5D',%0A %5D))%0A for now, expected in %5B%0A (datetime.datetime(2013, 1, 1), 'Alice'),%0A (datetime.datetime(2013, 1, 8), 'Bob'),%0A (datetime.datetime(2013, 1, 15), 'Charlie'),%0A (datetime.datetime(2013, 1, 22), 'Alice'),%0A %5D:%0A self.assertEqual(on_call.get_on_call(now=now), expected )%0A
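The `test_weekly` case added by the diff implements a week-long round robin: `%W` formats the week number of the year (Monday as the first day), and that number indexes the handler list modulo its length. The same computation, standalone:

```python
import datetime

handlers = ["Alice", "Bob", "Charlie"]

def on_call(now):
    week = int(now.strftime("%W"))      # week number, Monday-based
    return handlers[week % len(handlers)]

assert on_call(datetime.datetime(2013, 1, 1)) == "Alice"
assert on_call(datetime.datetime(2013, 1, 8)) == "Bob"
assert on_call(datetime.datetime(2013, 1, 15)) == "Charlie"
assert on_call(datetime.datetime(2013, 1, 22)) == "Alice"   # wraps around
```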
781e20bc3f465bdaac50f0f2a637b037d892c054
Remove premature optimisation
src/registry.py
src/registry.py
from .formatters import *


class FormatRegistry():
    def __init__(self):
        self.__registered_formatters = [
            ClangFormat(),
            ElmFormat(),
            GoFormat(),
            JavaScriptFormat(),
            PythonFormat(),
            RustFormat(),
            TerraformFormat()
        ]
        self.__source_formatter_lookup_table = {}
        for formatter in self.__registered_formatters:
            self.__source_formatter_lookup_table[formatter.source] = formatter

    @property
    def all(self):
        return self.__registered_formatters

    @property
    def enabled(self):
        return [x for x in self.all if x.format_on_save]

    def find(self, predicate, default=None):
        return next((x for x in self.all if predicate(x)), default)

    def by_view(self, view):
        source = view.scope_name(0).split(' ')[0]
        return self.__source_formatter_lookup_table.get(source)

    def by_name(self, name):
        return self.find(lambda x: x.name == name)
Python
0.00005
@@ -80,35 +80,24 @@ self.__ -registered_ formatters = @@ -244,192 +244,8 @@ %5D -%0A self.__source_formatter_lookup_table = %7B%7D%0A for formatter in self.__registered_formatters:%0A self.__source_formatter_lookup_table%5Bformatter.source%5D = formatter %0A%0A @@ -301,19 +301,8 @@ f.__ -registered_ form @@ -621,44 +621,35 @@ elf. -__source_formatter_lookup_table.get( +find(lambda x: x.source == sour
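The removed dictionary was an O(1) index over a list that only ever holds a handful of formatters; the commit trades it for the existing generic `find` predicate. The surviving lookup shape, on plain tuples:

```python
formatters = [('source.go', 'GoFormat'), ('source.rust', 'RustFormat')]

def find(predicate, default=None):
    # First element matching the predicate, or the default.
    return next((x for x in formatters if predicate(x)), default)

assert find(lambda x: x[0] == 'source.rust') == ('source.rust', 'RustFormat')
assert find(lambda x: x[0] == 'source.elm') is None
```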
945e7d1ef165054891a0ac574d52f6a1c3b7a162
Add long help
code_gen.py
code_gen.py
import sys
import getopt

from config import CONFIG
from ida_code_gen import IdaCodeGen
from ida_parser import IdaInfoParser


def print_help():
    print 'Options:'
    print '  -d, --database   Path to database from arguments. Default = ' + CONFIG['database']
    print '  -o, --out_dir    Path to output directory for code generation. Default = ' + CONFIG['out_dir']
    print '  -v, --verbose    Verbose mode program. Default = ' + str(CONFIG['verbose'])
    print 'Example:'
    print '  python code_gen.py -v --database C:/ida_info.sqlite3 --out_dir C:/code_gen/'
    pass


def main(argv):
    try:
        opts, args = getopt.getopt(argv, 'hvdo', ['verbose', 'database=', 'out_dir='])
    except getopt.GetoptError:
        print_help()
        sys.exit(2)

    for opt, arg in opts:
        if opt == '-h':
            print_help()
            sys.exit()
        if opt in ('-v', '--verbose'):
            CONFIG['verbose'] = True
            continue
        if opt in ('-d', '--database'):
            CONFIG['database'] = arg
            continue
        if opt in ('-o', '--out_dir'):
            CONFIG['out_dir'] = arg
            continue

    if CONFIG['verbose']:
        print 'database: ' + CONFIG['database']
        print 'out_dir: ' + CONFIG['out_dir']
        print 'verbose: ' + str(CONFIG['verbose'])

    parser = IdaInfoParser(CONFIG['database'])
    parser.start()

    code_gen = IdaCodeGen(CONFIG['database'], CONFIG['out_dir'])
    code_gen.start()


if __name__ == '__main__':
    main(sys.argv[1:])
Python
0.000121
@@ -647,16 +647,25 @@ vdo', %5B' +help', '' verbose' @@ -813,12 +813,24 @@ == +( '-h' +, '--help') :%0A
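With `getopt`, a long option only matches if it is declared in the long-options list, so adding `--help` requires both registering the name and matching it in the option dispatch. A minimal check of the registration half:

```python
import getopt

# '--help' parses only because 'help' appears in the long-options list.
opts, args = getopt.getopt(['--help'], 'h', ['help'])
assert opts == [('--help', '')]
```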
2ad94140360f893ad46b1b972e753f2a78b5f779
print function
example/example.py
example/example.py
# coding: utf-8
import json
import os

import lastpass

with open(os.path.join(os.path.dirname(__file__), 'credentials.json')) as f:
    credentials = json.load(f)
    username = str(credentials['username'])
    password = str(credentials['password'])

try:
    # First try without a multifactor password
    vault = lastpass.Vault.open_remote(username, password)
except lastpass.LastPassIncorrectGoogleAuthenticatorCodeError as e:
    # Get the code
    multifactor_password = input('Enter Google Authenticator code:')
    # And now retry with the code
    vault = lastpass.Vault.open_remote(username, password, multifactor_password)
except lastpass.LastPassIncorrectYubikeyPasswordError as e:
    # Get the code
    multifactor_password = input('Enter Yubikey password:')
    # And now retry with the code
    vault = lastpass.Vault.open_remote(username, password, multifactor_password)

for index, i in enumerate(vault.accounts):
    print index+1, i.id, i.name, i.username, i.password, i.url, i.group
Python
0.00093
@@ -941,15 +941,47 @@ rint - +(%22%7B%7D %7B%7D %7B%7D %7B%7D %7B%7D %7B%7D %7B%7D%22.format( index -+ + + 1, i @@ -1031,9 +1031,11 @@ i.group +)) %0A
cefa0a94582e40f92c48d6c91cf393c9b0310713
fix geojson in sources dir
validate.py
validate.py
import json
import re

import click
import jsonschema

import utils


@click.command()
@click.argument('schema', type=click.File('r'), required=True)
@click.argument('jsonfiles', type=click.Path(exists=True), required=True)
def validate(schema, jsonfiles):
    """Validate JSON files against a JSON schema.

    \b
    SCHEMA: JSON schema to validate against. Required.
    JSONFILE: JSON files to validate. Required.
    """
    schema = json.loads(schema.read())

    for path in utils.get_files(jsonfiles):
        regex = r'(sources|generated)/[A-Z]{2}/[A-Z]{2}/[a-z-]+.(geo)?json'
        if not re.compile(regex).match(path):
            raise AssertionError('Source path does not match spec for ' + path)

        with open(path) as f:
            jsonfile = json.loads(f.read())
        jsonschema.validate(jsonfile, schema)


if __name__ == '__main__':
    validate()
Python
0.000003
@@ -502,24 +502,67 @@ jsonfiles):%0A + if path.startswith('sources'):%0A rege @@ -571,27 +571,122 @@ = r' -( sources -%7Cgenerated) +/%5BA-Z%5D%7B2%7D/%5BA-Z%5D%7B2%7D/%5Ba-z-%5D+.json'%0A elif path.startswith('generated'):%0A regex = r'generated /%5BA- @@ -712,19 +712,54 @@ -%5D+. -( geo -)? json' +%0A else:%0A regex = r'' %0A%0A @@ -840,16 +840,9 @@ or(' -Source p +P ath
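After the fix, the path check branches on the directory: entries under `sources/` must be plain `.json`, entries under `generated/` must be `.geojson`. The routing on its own (the example path is made up):

```python
import re

def path_regex(path):
    if path.startswith('sources'):
        return r'sources/[A-Z]{2}/[A-Z]{2}/[a-z-]+.json'
    elif path.startswith('generated'):
        return r'generated/[A-Z]{2}/[A-Z]{2}/[a-z-]+.geojson'
    return r''

assert re.match(path_regex('sources/US/CA/alameda.json'),
                'sources/US/CA/alameda.json')
# A .geojson file under sources/ no longer passes validation.
assert not re.match(path_regex('sources/US/CA/alameda.geojson'),
                    'sources/US/CA/alameda.geojson')
```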
6d35c533940db6a6d664546c2b97e5c12c92dcfe
remove yaml parser for bandap GMM
example/src/yml.py
example/src/yml.py
# -*- coding: utf-8 -*- from __future__ import division, print_function, absolute_import import os import yaml class SpeakerYML(object): def __init__(self, ymlf): # open yml file with open(ymlf) as yf: conf = yaml.safe_load(yf) # read parameter from yml file self.wav_fs = conf['wav']['fs'] self.wav_bit = conf['wav']['bit'] self.wav_framems = conf['wav']['framems'] self.wav_shiftms = conf['wav']['shiftms'] self.wav_fftl = conf['wav']['fftl'] self.f0_minf0 = conf['f0']['minf0'] self.f0_maxf0 = conf['f0']['maxf0'] assert self.f0_minf0 < self.f0_maxf0, \ "should be minf0 < maxf0 in yml file" self.mcep_dim = conf['mcep']['dim'] self.mcep_alpha = conf['mcep']['alpha'] self.power_dim = conf['power']['threshold'] self.analyzer = conf['analyzer'] def print_params(self): pass class PairYML(object): def __init__(self, ymlf): # open yml file with open(ymlf) as yf: conf = yaml.safe_load(yf) self.jnt_n_iter = conf['jnt']['n_iter'] self.GMM_mcep_n_mix = conf['GMM']['mcep']['n_mix'] self.GMM_mcep_n_iter = conf['GMM']['mcep']['n_iter'] self.GMM_mcep_covtype = conf['GMM']['mcep']['covtype'] self.GMM_mcep_cvtype = conf['GMM']['mcep']['cvtype'] self.GMM_bandap_n_mix = conf['GMM']['bandap']['n_mix'] self.GMM_bandap_n_iter = conf['GMM']['bandap']['n_iter'] self.GMM_bandap_covtype = conf['GMM']['bandap']['covtype'] self.GMM_bandap_cvtype = conf['GMM']['bandap']['cvtype'] def _read_training_list(self): if not os.path.exists(self.trlist): raise('training file list does not exists.') # read training list self.trfiles = [] with open(self.trlist, 'r') as f: for line in f: self.trfiles.append(line.rstrip().split(" ")) def _read_evaluation_list(self): if not os.path.exists(self.evlist): raise('evaluation file list does not exists.') self.evfiles = [] with open(self.evlist, 'r') as f: for line in f: self.evfiles.append(line.rstrip()) def print_params(self): pass
Python
0.000005
@@ -1389,269 +1389,8 @@ '%5D%0A%0A - self.GMM_bandap_n_mix = conf%5B'GMM'%5D%5B'bandap'%5D%5B'n_mix'%5D%0A self.GMM_bandap_n_iter = conf%5B'GMM'%5D%5B'bandap'%5D%5B'n_iter'%5D%0A self.GMM_bandap_covtype = conf%5B'GMM'%5D%5B'bandap'%5D%5B'covtype'%5D%0A self.GMM_bandap_cvtype = conf%5B'GMM'%5D%5B'bandap'%5D%5B'cvtype'%5D%0A%0A
7e16a9feb88023a03363aee5be552a2f15b825fc
Fix wrong color for the waiting status
utils/templatetags/submission.py
utils/templatetags/submission.py
# coding=utf-8
def translate_result(value):
    results = {
        0: "Accepted",
        1: "Runtime Error",
        2: "Time Limit Exceeded",
        3: "Memory Limit Exceeded",
        4: "Compile Error",
        5: "Format Error",
        6: "Wrong Answer",
        7: "System Error",
        8: "Waiting"
    }
    return results[value]


def translate_id(submission_item):
    return submission_item["_id"]


def translate_language(value):
    return {1: "C", 2: "C++", 3: "Java"}[value]


def translate_result_class(value):
    if value == 0:
        return "success"
    elif value == "8":
        return "info"
    return "danger"


from django import template

register = template.Library()
register.filter("translate_result", translate_result)
register.filter("translate_id", translate_id)
register.filter("translate_language", translate_language)
register.filter("translate_result_class", translate_result_class)
Python
0.000007
@@ -593,11 +593,9 @@ == -%228%22 +8 :%0A
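The flagged bug: QC result codes arrive as integers, so `value == "8"` was never true and submissions in the Waiting state rendered with the "danger" class instead of "info". The corrected branch in isolation:

```python
def translate_result_class(value):
    if value == 0:
        return "success"
    elif value == 8:        # was: value == "8"; an int never equals a str
        return "info"
    return "danger"

assert translate_result_class(8) == "info"
assert translate_result_class(6) == "danger"
```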
d17a88ac9ef8e3806c7ac60d31df62a1041939cb
Add sum_of_spreads
muv/spatial.py
muv/spatial.py
""" Spatial statistics. """ __author__ = "Steven Kearnes" __copyright__ = "Copyright 2014, Stanford University" __license__ = "3-clause BSD" import numpy as np def spread(d, t): """ Calculate the spread between two sets of compounds. Given a matrix containing distances between two sets of compounds, A and B, calculate the fraction of compounds in set A that are closer than t to any compound in set B. Parameters ---------- d : ndarray Distance matrix with compounds from set A on first axis. t : float Distance threshold. """ p = np.mean(np.any(d < t, axis=1)) return p
Python
0.998996
@@ -588,17 +588,17 @@ %22%22%22%0A -p +s = np.me @@ -634,10 +634,906 @@ return -p +s%0A%0A%0Adef sum_of_spreads(d, coeff, min_t=0, max_t=3, step=None):%0A %22%22%22%0A Calculate the sum of spreads across a range of distance thresholds.%0A%0A Parameters%0A ----------%0A d : ndarray%0A Distance matrix with compounds from set A on first axis.%0A coeff : float%0A Coefficient used to rescale distance thresholds.%0A min_t : float, optional (default 0)%0A Minimum distance threshold (before rescaling).%0A max_t : float, optional (default 3)%0A Maximum distance threshold (before rescaling).%0A step : float, optional%0A Step size for determining values to sample between min_t and max_t.%0A If not provided, defaults to max_t / 500.%0A %22%22%22%0A if step is None:%0A step = max_t / 500.%0A n_steps = int((max_t - min_t) / step)%0A thresholds = coeff * np.linspace(min_t, max_t, n_steps)%0A ss = np.sum(%5Bspread(d, t) for t in thresholds%5D)%0A return ss %0A
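`spread(d, t)` is the fraction of rows of the distance matrix with at least one entry under the threshold, and the `sum_of_spreads` added by this diff accumulates that value over `coeff * linspace(min_t, max_t, n_steps)` thresholds. A tiny worked example of the core expression:

```python
import numpy as np

d = np.array([[0.5, 2.0],    # A-compound 0 is within 1.0 of a B compound
              [3.0, 4.0]])   # A-compound 1 is not

s = np.mean(np.any(d < 1.0, axis=1))   # same expression spread(d, 1.0) uses
assert s == 0.5
```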
e05736cd36bc595070dda78e91bcb1b4bcfd983c
Remove deprecated usage of `reflect` constructor param
microcosm_postgres/operations.py
microcosm_postgres/operations.py
""" Common database operations. """ from sqlalchemy import MetaData from sqlalchemy.exc import ProgrammingError from microcosm_postgres.migrate import main from microcosm_postgres.models import Model def stamp_head(graph): """ Stamp the database with the current head revision. """ main(graph, "stamp", "head") def get_current_head(graph): """ Get the current database head revision, if any. """ session = new_session(graph) try: result = session.execute("SELECT version_num FROM alembic_version") except ProgrammingError: return None else: return result.scalar() finally: session.close() def create_all(graph): """ Create all database tables. """ head = get_current_head(graph) if head is None: Model.metadata.create_all(graph.postgres) stamp_head(graph) def drop_all(graph): """ Drop all database tables. """ Model.metadata.drop_all(graph.postgres) drop_alembic_table(graph) def drop_alembic_table(graph): """ Drop the alembic version table. """ try: graph.postgres.execute("DROP TABLE alembic_version;") except ProgrammingError: return False else: return True # Cached database metadata instance _metadata = None def recreate_all(graph): """ Drop and add back all database tables, or reset all data associated with a database. Intended mainly for testing, where a test database may either need to be re-initialized or cleared out between tests """ global _metadata if _metadata is None: # First-run, the test database/metadata needs to be initialized drop_all(graph) create_all(graph) _metadata = MetaData(bind=graph.postgres, reflect=True) return # Otherwise, truncate all existing tables connection = graph.postgres.connect() transaction = connection.begin() for table in reversed(_metadata.sorted_tables): connection.execute(table.delete()) transaction.commit() def new_session(graph, expire_on_commit=False): """ Create a new session. """ return graph.sessionmaker(expire_on_commit=expire_on_commit)
Python
0
@@ -1794,22 +1794,36 @@ gres -, +)%0A _metadata. reflect -=True +( )%0A
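The commit above drops the deprecated reflect=True constructor argument of MetaData and calls metadata.reflect() explicitly instead. A sketch of both styles against an in-memory SQLite engine, assuming a pre-2.0 SQLAlchemy (matching the era of the code above) where MetaData(bind=...) and engine.execute() still exist; the table is made up for the demo:

from sqlalchemy import MetaData, create_engine

engine = create_engine("sqlite://")
engine.execute("CREATE TABLE example (id INTEGER PRIMARY KEY)")

# Old, deprecated style (what the commit removes):
#   metadata = MetaData(bind=engine, reflect=True)
# Current style (what the commit adds):
metadata = MetaData(bind=engine)
metadata.reflect()
print(metadata.sorted_tables)  # [Table('example', ...)]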
92d1b23208912ca4c8a440468caf7bc595704d8c
Add test for the PeriodOverlapFilterBackend
django/website/logframe/tests/test_api.py
django/website/logframe/tests/test_api.py
from datetime import date, timedelta
from inspect import isfunction

from django.contrib.auth.models import Permission
from django.db.models.query_utils import Q
from django.test.client import RequestFactory

from django_dynamic_fixture import G
import mock
import pytest
from rest_framework.request import Request

from contacts.models import User
from contacts.group_permissions import GroupPermissions

from ..api import CanEditOrReadOnly, IDFilterBackend, get_period_filter


@pytest.mark.django_db
def test_default_user_can_read_data():
    gp = GroupPermissions()
    gp.setup_groups_and_permissions()

    u1 = G(User)
    request = mock.Mock(method="GET", user=u1)
    perm_obj = CanEditOrReadOnly()
    assert perm_obj.has_object_permission(request, None, None) is True


@pytest.mark.django_db
def test_default_user_can_not_change_data():
    gp = GroupPermissions()
    gp.setup_groups_and_permissions()

    u1 = G(User)
    request = mock.Mock(method="POST", user=u1)
    perm_obj = CanEditOrReadOnly()
    assert perm_obj.has_object_permission(request, None, None) is False


@pytest.mark.django_db
def test_editor_can_change_data():
    gp = GroupPermissions()
    gp.setup_groups_and_permissions()

    u1 = G(User)
    edit_perm = Permission.objects.get(codename='edit_logframe')
    u1.user_permissions.add(edit_perm)

    request = mock.Mock(method="POST", user=u1)
    perm_obj = CanEditOrReadOnly()
    assert perm_obj.has_object_permission(request, None, None) is True


def test_id_filter_backend_filter_queryset_filters_on_ids():
    request = RequestFactory().get('/?id=1&id=2&id=3')
    request = Request(request)

    id_filter_backend = IDFilterBackend()
    mock_queryset = mock.Mock(filter=mock.Mock())

    id_filter_backend.filter_queryset(request, mock_queryset, None)
    mock_queryset.filter.assert_called_with(id__in=[1, 2, 3])


def test_get_period_filter_returns_function():
    yesterday = date.today() - timedelta(days=1)
    today = date.today()

    ret_val = get_period_filter(yesterday, today, 'start_date', 'end_date')
    assert isfunction(ret_val)


def test_get_period_filter_function_filters_queryset():
    start_date = date.today() - timedelta(days=1)
    end_date = date.today()

    rel_1 = Q(**{'start_date__gte': start_date}) & Q(**{'start_date__lte': end_date})
    rel_2 = Q(**{'start_date__lte': start_date}) & Q(**{'end_date__gte': start_date})
    rel_3 = Q(**{'start_date__lte': end_date}) & Q(**{'end_date': None})
    rel_4 = Q(**{'end_date__gte': start_date}) & Q(**{'start_date': None})
    rel_5 = Q(**{'start_date': None}) & Q(**{'end_date': None})
    expected_query = rel_1 | rel_2 | rel_3 | rel_4 | rel_5

    mock_queryset = mock.Mock(filter=mock.Mock())

    filter_func = get_period_filter(start_date, end_date, 'start_date', 'end_date')
    filter_func(mock_queryset)

    actual_query = mock_queryset.filter.call_args[0][0]
    assert unicode(expected_query) == unicode(actual_query)
Python
0
@@ -416,16 +416,22 @@ import +(%0A CanEditO @@ -440,16 +440,20 @@ eadOnly, +%0A IDFilte @@ -461,16 +461,52 @@ Backend, +%0A PeriodOverlapFilterBackend,%0A get_per @@ -516,16 +516,18 @@ _filter%0A +)%0A %0A%0A@pytes @@ -1955,25 +1955,26 @@ ():%0A -yesterday +start_date = date. @@ -2005,21 +2005,24 @@ =1)%0A -today +end_date = date. @@ -2065,24 +2065,28 @@ ter( -yesterday, today +start_date, end_date , 's @@ -2945,16 +2945,16 @@ %5B0%5D%5B0%5D%0A%0A - asse @@ -3005,8 +3005,754 @@ _query)%0A +%0A%0A@mock.patch('logframe.api.get_period_filter')%0Adef test_period_overlap_filter_backend_filter_queryset_filters_queryset(get_period_filter_func):%0A request = RequestFactory().get('/?start_date=20151105&end_date=20151104')%0A request = Request(request)%0A%0A get_period_filter_func.return_value = mock.Mock()%0A%0A mock_queryset = mock.Mock(filter=mock.Mock())%0A%0A filter_backend = PeriodOverlapFilterBackend()%0A filter_backend.filter_queryset(%0A request,%0A mock_queryset,%0A mock.Mock(lookup_period_start='start_date', lookup_period_end='end_date')%0A )%0A%0A get_period_filter_func.assert_called_with('20151105', '20151104', 'start_date', 'end_date')%0A get_period_filter_func.return_value.assert_called_with(mock_queryset)%0A
78c5ef063a82d707b30eed4a6e02fcbc8976f4df
Move sort code to the end, so the initial result will be sorted too.
django_project/feti/views/landing_page.py
django_project/feti/views/landing_page.py
# coding=utf-8 """FETI landing page view.""" __author__ = 'Christian Christelis <christian@kartoza.com>' __date__ = '04/2015' __license__ = "GPL" __copyright__ = 'kartoza.com' from collections import OrderedDict from haystack.query import SearchQuerySet from django.shortcuts import render from django.http import HttpResponse from django.template import RequestContext from feti.models.campus import Campus from feti.models.course import Course def landing_page(request): """Serves the FETI landing page. :param request: A django request object. :type request: request :returns: Returns the landing page. :rtype: HttpResponse """ # sort the campus alphabetically def campus_key(item): return item[0].long_description search_terms = '' course_dict = OrderedDict() errors = None if request.GET: search_terms = request.GET.get('search_terms') if search_terms: campuses = SearchQuerySet().filter(content=search_terms).models( Campus) courses = SearchQuerySet().filter(content=search_terms).models( Course) for campus in [c.object for c in campuses]: if campus.incomplete: continue course_dict[campus] = campus.courses.all() for course in [c.object for c in courses]: for campus in course.campus_set.all(): if campus in course_dict: if course not in course_dict[campus]: course_dict[campus].append(course) else: course_dict[campus] = [course] course_dict = OrderedDict( sorted(course_dict.items(), key=campus_key)) else: campuses = Campus.objects.filter(_complete=True).order_by( '_long_description') for campus in campuses: course_dict[campus] = campus.courses.all() else: campuses = Campus.objects.filter(_complete=True).order_by( '_long_description') for campus in campuses: course_dict[campus] = campus.courses.all() context = { 'course_dict': course_dict, 'search_terms': search_terms, 'errors': errors } return render( request, 'feti/feti.html', context_instance=RequestContext(request, context))
Python
0
@@ -758,16 +758,32 @@ cription +.strip().lower() %0A%0A se @@ -1697,108 +1697,8 @@ se%5D%0A - course_dict = OrderedDict(%0A sorted(course_dict.items(), key=campus_key))%0A @@ -2104,24 +2104,109 @@ rses.all()%0A%0A + course_dict = OrderedDict(%0A sorted(course_dict.items(), key=campus_key))%0A%0A context
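The commit above moves the OrderedDict(sorted(...)) step to the end of landing_page so both code paths return sorted output, and normalizes the sort key with strip().lower(). A tiny sketch of sorting a dict by a derived key; the Campus objects are faked with a namedtuple for the demo:

from collections import OrderedDict, namedtuple

Campus = namedtuple("Campus", "long_description")

def campus_key(item):
    # Normalize whitespace and case before comparing, as the diff does.
    return item[0].long_description.strip().lower()

course_dict = {Campus("  Zebra  "): ["c1"], Campus("alpha"): ["c2"]}
course_dict = OrderedDict(sorted(course_dict.items(), key=campus_key))
print([c.long_description for c in course_dict])  # alpha before Zebra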
c7f8fd75dd5b41a059b65e9cea54d875d1f57655
Change self to PortStatCollector.
src/collectors/portstat/portstat.py
src/collectors/portstat/portstat.py
""" The PortStatCollector collects metrics about ports listed in config file. ##### Dependencies * psutil """ from collections import Counter import psutil import diamond.collector class PortStatCollector(diamond.collector.Collector): def __init__(self, *args, **kwargs): super(PortStatCollector, self).__init__(*args, **kwargs) self.ports = {} for port_name, cfg in self.config['port'].items(): port_cfg = {} for key in ('number',): port_cfg[key] = cfg.get(key, []) self.ports[port_name] = port_cfg def get_default_config_help(self): config_help = super(PortStatCollector, self).get_default_config_help() config_help.update({ }) return config_help def get_default_config(self): config = super(PortStatCollector, self).get_default_config() config.update({ 'path': 'port', 'port': {}, }) return config @staticmethod def get_port_stats(port): """ Iterate over connections and count states for specified port :param port: port for which stats are collected :return: Counter with port states """ cnts = Counter() for c in psutil.net_connections(): c_port = c.laddr[1] if c_port != port: continue status = c.status.lower() cnts[status] += 1 return cnts def collect(self): """ Overrides the Collector.collect method """ for port_name, port_cfg in self.ports.iteritems(): port = int(port_cfg['number']) stats = self.get_port_stats(port) for stat_name, stat_value in stats.iteritems(): metric_name = '%s.%s' % (port_name, stat_name) self.publish(metric_name, stat_value)
Python
0
@@ -1682,20 +1682,33 @@ stats = -self +PortStatCollector .get_por
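The fix above calls the @staticmethod through the class name instead of self. Both forms work at runtime; the class-name form just makes explicit that no instance state is involved, as this minimal demo shows:

class Collector:
    @staticmethod
    def get_port_stats(port):
        # No self: the method uses only its arguments.
        return {"port": port}

c = Collector()
print(c.get_port_stats(80))          # works, but hides that no state is used
print(Collector.get_port_stats(80))  # the explicit style the commit prefers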
0744dba6a52c42dbe6f9ba360e5311a1f90c3550
Fix python 3 compatibility issue in DNSimple driver.
libcloud/common/dnsimple.py
libcloud/common/dnsimple.py
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements.  See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License.  You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import httplib

from libcloud.common.base import ConnectionUserAndKey
from libcloud.common.base import JsonResponse


class DNSimpleDNSResponse(JsonResponse):

    def success(self):
        """
        Determine if our request was successful.

        The meaning of this can be arbitrary; did we receive OK status? Did
        the node get created? Were we authenticated?

        :rtype: ``bool``
        :return: ``True`` or ``False``
        """
        # response.success() only checks for 200 and 201 codes. Should we
        # add 204?
        return self.status in [httplib.OK, httplib.CREATED,
                               httplib.NO_CONTENT]


class DNSimpleDNSConnection(ConnectionUserAndKey):
    host = 'api.dnsimple.com'
    responseCls = DNSimpleDNSResponse

    def add_default_headers(self, headers):
        """
        Add headers that are necessary for every request

        This method adds ``token`` to the request.
        """
        # TODO: check what info was passed as a parameter and,
        # based on that, set the header
        headers['X-DNSimple-Token'] = '%s:%s' % (self.user_id, self.key)
        headers['Accept'] = 'application/json'
        headers['Content-Type'] = 'application/json'
        return headers
Python
0
@@ -774,16 +774,41 @@ icense.%0A +%0Afrom libcloud.utils.py3 import h
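The fix above swaps the bare import httplib, which only exists on Python 2, for libcloud's own py3 shim. Outside libcloud the same compatibility is usually written as a try/except import, sketched here:

# Generic version of the shim: httplib was renamed to http.client in Python 3.
try:
    import httplib  # Python 2
except ImportError:
    import http.client as httplib  # Python 3

print(httplib.OK, httplib.CREATED, httplib.NO_CONTENT)  # 200 201 204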
725b246a0bbb437a5a0efeb16b58d3942f3b14cc
Update the example client.
examples/client.py
examples/client.py
from twisted.internet import reactor, defer
from txjason.netstring import JSONRPCClientFactory
from txjason.client import JSONRPCClientError


client = JSONRPCClientFactory('127.0.0.1', 7080)


@defer.inlineCallbacks
def stuff():
    try:
        r = yield client.callRemote('bar.foo')
    except JSONRPCClientError as e:
        print e
    r = yield client.callRemote('bar.add', 1, 2)
    print "add result: %s" % str(r)
    r = yield client.callRemote('bar.whoami')
    print "whaomi result: %s" % str(r)


reactor.callWhenRunning(stuff)
reactor.run()
Python
0
@@ -26,22 +26,30 @@ ort -reactor, defer +defer, endpoints, task %0Afro @@ -226,16 +226,144 @@ def -stuff(): +main(reactor, description):%0A endpoint = endpoints.clientFromString(reactor, description)%0A client = JSONRPCClientFactory(endpoint)%0A %0A @@ -645,49 +645,45 @@ )%0A%0A%0A -reactor.callWhenRunning(stuff)%0Areactor.run( +task.react(main, %5B'tcp:127.0.0.1:7080'%5D )%0A
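The updated example drives the client with task.react, which starts and stops the reactor around a main function, and builds the transport from an endpoint description string. A stripped-down sketch of that startup pattern with the JSON-RPC parts stubbed out; the connect step is a placeholder, not the txjason API:

from twisted.internet import defer, endpoints, task

@defer.inlineCallbacks
def main(reactor, description):
    # Parse a description like 'tcp:127.0.0.1:7080' into an endpoint.
    endpoint = endpoints.clientFromString(reactor, description)
    print("would connect via", endpoint)
    # Real code would yield endpoint.connect(some_factory) here.
    yield defer.succeed(None)

task.react(main, ['tcp:127.0.0.1:7080'])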
5dddadb98340fec6afda80fd1a8ee1eda907b60a
print exports to terminal
examples/export.py
examples/export.py
""" Demonstrates export console output """ from rich.console import Console from rich.table import Table console = Console(record=True) def print_table(): table = Table(title="Star Wars Movies") table.add_column("Released", style="cyan", no_wrap=True) table.add_column("Title", style="magenta") table.add_column("Box Office", justify="right", style="green") table.add_row("Dec 20, 2019", "Star Wars: The Rise of Skywalker", "$952,110,690") table.add_row("May 25, 2018", "Solo: A Star Wars Story", "$393,151,347") table.add_row("Dec 15, 2017", "Star Wars Ep. V111: The Last Jedi", "$1,332,539,889") table.add_row("Dec 16, 2016", "Rogue One: A Star Wars Story", "$1,332,439,889") console.print(table, justify="center") # Prints table print_table() # Get console output as text text = console.export_text() with open("plaintext_export.txt", "w") as file: file.write(text) # Calling print_table again because console output buffer # is flushed once export function is called print_table() # Get console output as html # use clear=False so output is not flushed after export html = console.export_html(clear=False) with open("html_export.html", "w") as file: file.write(html) # Export text output to table_export.txt console.save_text("rich_export.txt", clear=False) # Export html output to table_export.html console.save_html("rich_export.html")
Python
0
@@ -737,26 +737,8 @@ able -, justify=%22center%22 )%0A%0A%0A @@ -796,16 +796,53 @@ as text%0A +file1 = %22table_export_plaintext.txt%22%0A text = c @@ -876,30 +876,13 @@ pen( -%22plaintext_export.txt%22 +file1 , %22w @@ -913,16 +913,75 @@ te(text) +%0Aprint(f%22Exported console output as plain text to %7Bfile1%7D%22) %0A%0A# Call @@ -1176,16 +1176,49 @@ export%0A +file2 = %22table_export_html.html%22%0A html = c @@ -1263,26 +1263,13 @@ pen( -%22html_export.html%22 +file2 , %22w @@ -1305,177 +1305,389 @@ ml)%0A -%0A# Export text output to table_export.txt%0Aconsole.save_text(%22rich_export.txt%22, clear=False)%0A%0A# Export html output to table_export.html%0Aconsole.save_html(%22rich_export.html +print(f%22Exported console output as html to %7Bfile2%7D%22)%0A%0A# Export text output to table_export.txt%0Afile3 = %22table_export_plaintext2.txt%22%0Aconsole.save_text(file3, clear=False)%0Aprint(f%22Exported console output as plain text to %7Bfile3%7D%22)%0A%0A# Export html output to table_export.html%0Afile4 = %22table_export_html2.html%22%0Aconsole.save_html(file4)%0Aprint(f%22Exported console output as html to %7Bfile4%7D %22)%0A
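The example above relies on rich's recording console: Console(record=True) buffers everything printed, and the export and save calls flush that buffer unless clear=False is passed. A compact sketch of the same record-then-export flow:

from rich.console import Console

console = Console(record=True)
console.print("[bold]hello[/bold] world")

text = console.export_text(clear=False)  # keep the buffer for a second export
html = console.export_html()             # default clear=True flushes it
print(len(text), len(html))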
1741c7258ebdcef412442cebab33409290496df0
Add network example
IoT/iot_utils.py
IoT/iot_utils.py
from __future__ import print_function

import sys, signal, atexit
import json

__author__ = 'KT Kirk'
__all__ = ['keys', 'atexit', 'signal']


## Exit handlers ##
# This function stops python from printing a stacktrace when you hit control-C
def SIGINTHandler(signum, frame):
    raise SystemExit


# This function lets you run code on exit, including functions from myUVSensor
def exitHandler():
    print("Exiting")
    try:
        sys.exit(0)
    except KeyError:
        pass


# Register exit handlers
atexit.register(exitHandler)
signal.signal(signal.SIGINT, SIGINTHandler)

# Load data.sparkfun.com keys file
with open("keys_n1YRX98dq9C6X0LrZdvD.json") as json_file:
    keys = json.load(json_file)
Python
0.000002
@@ -696,8 +696,11 @@ on_file) +%0A%0A%0A
525f4e7139fa36446ea344417107b86664d33269
Fix #664
yowsup/layers/protocol_media/protocolentities/message_media_downloadable_video.py
yowsup/layers/protocol_media/protocolentities/message_media_downloadable_video.py
from yowsup.structs import ProtocolEntity, ProtocolTreeNode
from .message_media_downloadable import DownloadableMediaMessageProtocolEntity


class VideoDownloadableMediaMessageProtocolEntity(DownloadableMediaMessageProtocolEntity):
    '''
    <message t="{{TIME_STAMP}}" from="{{CONTACT_JID}}"
        offline="{{OFFLINE}}" type="text" id="{{MESSAGE_ID}}" notify="{{NOTIFY_NAME}}">
        <media type="{{DOWNLOADABLE_MEDIA_TYPE: (image | audio | video)}}"
            mimetype="{{MIME_TYPE}}"
            filehash="{{FILE_HASH}}"
            url="{{DOWNLOAD_URL}}"
            ip="{{IP}}"
            size="{{MEDIA SIZE}}"
            file="{{FILENAME}}"
            encoding="{{ENCODING}}"
            height="{{IMAGE_HEIGHT}}"
            width="{{IMAGE_WIDTH}}"
            >
            {{THUMBNAIL_RAWDATA (JPEG?)}}
        </media>
    </message>
    '''

    def __init__(self, mimeType, fileHash, url, ip, size, fileName,
                 abitrate, acodec, asampfmt, asampfreq, duration, encoding,
                 fps, width, height, seconds, vbitrate, vcodec,
                 caption = None,
                 _id = None, _from = None, to = None, notify = None, timestamp = None,
                 participant = None, preview = None, offline = None, retry = None):
        super(VideoDownloadableMediaMessageProtocolEntity, self).__init__(
            "video", mimeType, fileHash, url, ip, size, fileName,
            _id, _from, to, notify, timestamp, participant, preview, offline, retry)
        self.setVideoProps(abitrate, acodec, asampfmt, asampfreq, duration,
                           encoding, fps, width, height, seconds, vbitrate,
                           vcodec, caption)

    def __str__(self):
        out = super(VideoDownloadableMediaMessageProtocolEntity, self).__str__()
        out += "Audio bitrate: %s\n" % self.abitrate
        out += "Audio codec: %s\n" % self.acodec
        out += "Audio sampling fmt.: %s\n" % self.asampfmt
        out += "Audio sampling freq.: %s\n" % self.asampfreq
        out += "Duration: %s\n" % self.duration
        out += "Encoding: %s\n" % self.encoding
        out += "Fps: %s\n" % self.fps
        out += "Width: %s\n" % self.width
        out += "Height: %s\n" % self.height
        out += "Video bitrate: %s\n" % self.vbitrate
        out += "Video codec: %s\n" % self.vcodec
        if self.caption:
            out += "Caption: %s\n" % self.caption
        return out

    def setVideoProps(self, abitrate, acodec, asampfmt, asampfreq, duration,
                      encoding, fps, width, height, seconds, vbitrate,
                      vcodec, caption = None):
        self.abitrate = abitrate
        self.acodec = acodec
        self.asampfmt = asampfmt
        self.asampfreq = asampfreq
        self.duration = duration
        self.encoding = encoding
        self.fps = fps
        self.height = height
        self.seconds = seconds
        self.vbitrate = vbitrate
        self.vcodec = vcodec
        self.width = width
        self.caption = caption

    def toProtocolTreeNode(self):
        node = super(VideoDownloadableMediaMessageProtocolEntity, self).toProtocolTreeNode()
        mediaNode = node.getChild("media")
        mediaNode.setAttribute("abitrate", self.abitrate)
        mediaNode.setAttribute("acodec", self.acodec)
        mediaNode.setAttribute("asampfmt", self.asampfmt)
        mediaNode.setAttribute("asampfreq", self.asampfreq)
        mediaNode.setAttribute("duration", self.duration)
        mediaNode.setAttribute("encoding", self.encoding)
        mediaNode.setAttribute("fps", self.fps)
        mediaNode.setAttribute("height", self.height)
        mediaNode.setAttribute("seconds", self.seconds)
        mediaNode.setAttribute("vbitrate", self.vbitrate)
        mediaNode.setAttribute("vcodec", self.vcodec)
        mediaNode.setAttribute("width", self.width)
        if self.caption:
            mediaNode.setAttribute("caption", self.caption)
        return node

    @staticmethod
    def fromProtocolTreeNode(node):
        entity = DownloadableMediaMessageProtocolEntity.fromProtocolTreeNode(node)
        entity.__class__ = VideoDownloadableMediaMessageProtocolEntity
        mediaNode = node.getChild("media")
        entity.setVideoProps(
            mediaNode.getAttributeValue("abitrate"),
            mediaNode.getAttributeValue("acodec"),
            mediaNode.getAttributeValue("asampfmt"),
            mediaNode.getAttributeValue("asampfreq"),
            mediaNode.getAttributeValue("duration"),
            mediaNode.getAttributeValue("encoding"),
            mediaNode.getAttributeValue("fps"),
            mediaNode.getAttributeValue("width"),
            mediaNode.getAttributeValue("height"),
            mediaNode.getAttributeValue("seconds"),
            mediaNode.getAttributeValue("vbitrate"),
            mediaNode.getAttributeValue("vcodec"),
            mediaNode.getAttributeValue("caption")
        )
        return entity
Python
0
@@ -2286,32 +2286,44 @@ if self.caption + is not None :%0A ou @@ -2962,24 +2962,87 @@ on%0A %0A + def getCaption(self):%0A return self.caption%0A %0A def toPr @@ -3917,16 +3917,28 @@ .caption + is not None :%0A
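The fix above replaces truthiness checks on self.caption with explicit is not None comparisons, so an empty-string caption is still serialized. The difference in two lines:

caption = ""
print(bool(caption))        # False: a truthiness check would drop the caption
print(caption is not None)  # True: the explicit check keeps it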
b07243a6fb11dbbd487ba37620f7c8f4fc89449a
bump version to v1.10.5
ndd/package.py
ndd/package.py
# -*- coding: utf-8 -*-
"""Template package file"""
__title__ = 'ndd'
__version__ = '1.10.4'
__author__ = 'Simone Marsili'
__summary__ = ''
__url__ = 'https://github.com/simomarsili/ndd'
__email__ = 'simo.marsili@gmail.com'
__license__ = 'BSD 3-Clause'
__copyright__ = 'Copyright (c) 2020, Simone Marsili'
__classifiers__ = [
    'Development Status :: 3 - Alpha',
    'License :: OSI Approved :: BSD License',
    'Programming Language :: Python :: 3',
    'Programming Language :: Python :: 3.5',
    'Programming Language :: Python :: 3.6',
]
Python
0
@@ -87,9 +87,9 @@ .10. -4 +5 '%0A__
7abd9b977368a189ca3f298e566dd1dd5b7a66d1
Update constant.py
vnpy/trader/constant.py
vnpy/trader/constant.py
""" General constant string used in VN Trader. """ from enum import Enum class Direction(Enum): """ Direction of order/trade/position. """ LONG = "多" SHORT = "空" NET = "净" class Offset(Enum): """ Offset of order/trade. """ NONE = "" OPEN = "开" CLOSE = "平" CLOSETODAY = "平今" CLOSEYESTERDAY = "平昨" class Status(Enum): """ Order status. """ SUBMITTING = "提交中" NOTTRADED = "未成交" PARTTRADED = "部分成交" ALLTRADED = "全部成交" CANCELLED = "已撤销" REJECTED = "拒单" class Product(Enum): """ Product class. """ EQUITY = "股票" FUTURES = "期货" OPTION = "期权" INDEX = "指数" FOREX = "外汇" SPOT = "现货" ETF = "ETF" BOND = "债券" WARRANT = "权证" SPREAD = "价差" FUND = "基金" class OrderType(Enum): """ Order type. """ LIMIT = "限价" MARKET = "市价" STOP = "STOP" FAK = "FAK" FOK = "FOK" class OptionType(Enum): """ Option type. """ CALL = "看涨期权" PUT = "看跌期权" class Exchange(Enum): """ Exchange. """ # Chinese CFFEX = "CFFEX" # China Financial Futures Exchange SHFE = "SHFE" # Shanghai Futures Exchange CZCE = "CZCE" # Zhengzhou Commodity Exchange DCE = "DCE" # Dalian Commodity Exchange INE = "INE" # Shanghai International Energy Exchange SSE = "SSE" # Shanghai Stock Exchange SZSE = "SZSE" # Shenzhen Stock Exchange SGE = "SGE" # Shanghai Gold Exchange WXE = "WXE" # Wuxi Steel Exchange # Global SMART = "SMART" # Smart Router for US stocks NYMEX = "NYMEX" # New York Mercantile Exchange COMEX = "COMEX" # a division of theNew York Mercantile Exchange GLOBEX = "GLOBEX" # Globex of CME IDEALPRO = "IDEALPRO" # Forex ECN of Interactive Brokers CME = "CME" # Chicago Mercantile Exchange ICE = "ICE" # Intercontinental Exchange SEHK = "SEHK" # Stock Exchange of Hong Kong HKFE = "HKFE" # Hong Kong Futures Exchange SGX = "SGX" # Singapore Global Exchange CBOT = "CBT" # Chicago Board of Trade DME = "DME" # Dubai Mercantile Exchange EUREX = "EUX" # Eurex Exchange APEX = "APEX" # Asia Pacific Exchange LME = "LME" # London Metal Exchange BMD = "BMD" # Bursa Malaysia Derivatives TOCOM = "TOCOM" # Tokyo Commodity Exchange EUNX = "EUNX" # Euronext Exchange KRX = "KRX" # Korean Exchange # CryptoCurrency BITMEX = "BITMEX" OKEX = "OKEX" HUOBI = "HUOBI" BITFINEX = "BITFINEX" class Currency(Enum): """ Currency. """ USD = "USD" HKD = "HKD" CNY = "CNY" class Interval(Enum): """ Interval of bar data. """ MINUTE = "1m" HOUR = "1h" DAILY = "d" WEEKLY = "w"
Python
0.000001
@@ -2776,16 +2776,40 @@ TFINEX%22%0A + BINANCE = %22BINANCE%22%0A %0A%0Aclass
5848a9c64744eacf8d90a86335e948ed17ef8346
Correct path to workflows
src/prepare_asaim/import_workflows.py
src/prepare_asaim/import_workflows.py
#!/usr/bin/env python
import os
from bioblend import galaxy

admin_email = os.environ.get('GALAXY_DEFAULT_ADMIN_USER', 'admin@galaxy.org')
admin_pass = os.environ.get('GALAXY_DEFAULT_ADMIN_PASSWORD', 'admin')

url = "http://localhost:8080"
gi = galaxy.GalaxyInstance(url=url, email=admin_email, password=admin_pass)
wf = galaxy.workflows.WorkflowClient(gi)
wf.import_workflow_from_local_path('/home/galaxy/asaim_main_workflow.ga')
wf.import_workflow_from_local_path('/home/galaxy/asaim_taxonomic_result_comparative_analysis.ga')
wf.import_workflow_from_local_path('/home/galaxy/asaim_functional_result_comparative_analysis.ga')
wf.import_workflow_from_local_path('/home/galaxy/asaim_go_slim_terms_comparative_analysis.ga')
wf.import_workflow_from_local_path('/home/galaxy/asaim_taxonomically_related_functional_result_comparative_analysis.ga')
Python
0.000018
@@ -385,37 +385,24 @@ local_path(' -/home/galaxy/ asaim_main_w @@ -446,37 +446,24 @@ local_path(' -/home/galaxy/ asaim_taxono @@ -531,37 +531,24 @@ local_path(' -/home/galaxy/ asaim_functi @@ -617,37 +617,24 @@ local_path(' -/home/galaxy/ asaim_go_sli @@ -707,21 +707,8 @@ th(' -/home/galaxy/ asai
0d31cbfd3042a1e7255ed833715112504fe608ae
Revert types
dshin/nn/types.py
dshin/nn/types.py
""" TensorFlow type annotation aliases. """ import typing import tensorflow as tf Value = (tf.Variable, tf.Tensor) Values = typing.Sequence[Value] Named = (tf.Variable, tf.Tensor, tf.Operation) NamedSeq = typing.Sequence[Named] Tensors = typing.Sequence[tf.Tensor] Variables = typing.Sequence[tf.Variable] Operations = typing.Sequence[tf.Operation]
Python
0.000001
@@ -85,17 +85,29 @@ Value = -( +typing.Union%5B tf.Varia @@ -120,17 +120,17 @@ f.Tensor -) +%5D %0AValues @@ -166,9 +166,21 @@ d = -( +typing.Union%5B tf.V @@ -215,9 +215,9 @@ tion -) +%5D %0ANam
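The revert above restores typing.Union[...] aliases in place of plain tuples of types. A tuple works for isinstance() checks, but annotation tooling expects a proper typing construct; a minimal sketch:

import typing

# A tuple like (int, float) is fine for isinstance(x, (int, float)),
# but as an annotation alias the typing form is what checkers understand.
Number = typing.Union[int, float]

def double(x: Number) -> Number:
    return x * 2

print(double(2), double(1.5))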
8a3ae1b809d886f647f13574cc9b416b17c27b7c
Remove VERSION variable from api.py
duckduckpy/api.py
duckduckpy/api.py
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from collections import namedtuple
from utils import camel_to_snake_case


SERVER_HOST = 'api.duckduckgo.com'
VERSION = '0.1-alpha'
USER_AGENT = 'duckduckpy {0}'.format(VERSION)

ICON_KEYS = set(['URL', 'Width', 'Height'])
RESULT_KEYS = set(['FirstURL', 'Icon', 'Result', 'Text'])
CONTENT_KEYS = set(['data_type', 'label', 'sort_order', 'value', 'wiki_order'])
META_KEYS = set(['data_type', 'label', 'value'])
INFOBOX_KEYS = set(['content', 'meta'])
RESPONSE_KEYS = set([
    'Redirect', 'Definition', 'ImageWidth', 'Infobox', 'RelatedTopics',
    'ImageHeight', 'Heading', 'Answer', 'AbstractText', 'Type', 'ImageIsLogo',
    'DefinitionSource', 'AbstractURL', 'Abstract', 'DefinitionURL', 'Results',
    'Entity', 'AnswerType', 'AbstractSource', 'Image'])

camel_to_snake_case_set = lambda seq: set(map(camel_to_snake_case, seq))

Icon = namedtuple('Icon', camel_to_snake_case_set(ICON_KEYS))
Result = namedtuple('Result', camel_to_snake_case_set(RESULT_KEYS))
Content = namedtuple('Content', camel_to_snake_case_set(CONTENT_KEYS))
Meta = namedtuple('Meta', camel_to_snake_case_set(META_KEYS))
Infobox = namedtuple('Infobox', camel_to_snake_case_set(INFOBOX_KEYS))
Response = namedtuple('Response', camel_to_snake_case_set(RESPONSE_KEYS))
Python
0.000002
@@ -58,16 +58,49 @@ iterals%0A +from __init__ import __version__%0A from col @@ -131,16 +131,27 @@ le%0Afrom +duckduckpy. utils im @@ -215,30 +215,8 @@ om'%0A -VERSION = '0.1-alpha'%0A USER @@ -252,15 +252,19 @@ mat( -VERSION +__version__ )%0A%0AI
ee5e9d09a02e52714291a44148be4722f8e495ac
Revert "Take Abode camera snapshot before fetching latest image" (#68626)
homeassistant/components/abode/camera.py
homeassistant/components/abode/camera.py
"""Support for Abode Security System cameras.""" from __future__ import annotations from datetime import timedelta from typing import Any, cast from abodepy.devices import CONST, AbodeDevice as AbodeDev from abodepy.devices.camera import AbodeCamera as AbodeCam import abodepy.helpers.timeline as TIMELINE import requests from requests.models import Response from homeassistant.components.camera import Camera from homeassistant.config_entries import ConfigEntry from homeassistant.core import Event, HomeAssistant from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.util import Throttle from . import AbodeDevice, AbodeSystem from .const import DOMAIN, LOGGER MIN_TIME_BETWEEN_UPDATES = timedelta(seconds=90) async def async_setup_entry( hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback ) -> None: """Set up Abode camera devices.""" data: AbodeSystem = hass.data[DOMAIN] entities = [] for device in data.abode.get_devices(generic_type=CONST.TYPE_CAMERA): entities.append(AbodeCamera(data, device, TIMELINE.CAPTURE_IMAGE)) async_add_entities(entities) class AbodeCamera(AbodeDevice, Camera): """Representation of an Abode camera.""" _device: AbodeCam def __init__(self, data: AbodeSystem, device: AbodeDev, event: Event) -> None: """Initialize the Abode device.""" AbodeDevice.__init__(self, data, device) Camera.__init__(self) self._event = event self._response: Response | None = None async def async_added_to_hass(self) -> None: """Subscribe Abode events.""" await super().async_added_to_hass() self.hass.async_add_executor_job( self._data.abode.events.add_timeline_callback, self._event, self._capture_callback, ) signal = f"abode_camera_capture_{self.entity_id}" self.async_on_remove(async_dispatcher_connect(self.hass, signal, self.capture)) def capture(self) -> bool: """Request a new image capture.""" return cast(bool, self._device.capture()) @Throttle(MIN_TIME_BETWEEN_UPDATES) def refresh_image(self) -> None: """Find a new image on the timeline.""" if self._device.refresh_image(): self.get_image() def get_image(self) -> None: """Attempt to download the most recent capture.""" if self._device.image_url: try: self._response = requests.get(self._device.image_url, stream=True) self._response.raise_for_status() except requests.HTTPError as err: LOGGER.warning("Failed to get camera image: %s", err) self._response = None else: self._response = None def camera_image( self, width: int | None = None, height: int | None = None ) -> bytes | None: """Get a camera image.""" if not self.capture(): return None self.refresh_image() if self._response: return self._response.content return None def turn_on(self) -> None: """Turn on camera.""" self._device.privacy_mode(False) def turn_off(self) -> None: """Turn off camera.""" self._device.privacy_mode(True) def _capture_callback(self, capture: Any) -> None: """Update the image with the device then refresh device.""" self._device.update_image_location(capture) self.get_image() self.schedule_update_ha_state() @property def is_on(self) -> bool: """Return true if on.""" return cast(bool, self._device.is_on)
Python
0
@@ -3020,63 +3020,8 @@ %22%22%22%0A - if not self.capture():%0A return None%0A
82d562472fd738890fe72755a25392a12f7e7bc3
Implement getArtist(name) function.
dbengine.py
dbengine.py
#!/usr/bin/env python
# -*- coding: utf-8 -*-

import sqlite3

dbname = "bandevents.db"


class DBEngine(object):
    def __init__(self):
        self.conn = None
        self.cur = None
        self.__firstRun()

    def __firstRun(self):
        if self.conn == None:
            self.conn = sqlite3.connect(dbname)
        if self.cur == None:
            self.cur = self.conn.cursor()

    def close(self):
        if self.cur:
            self.cur.close()
        if self.conn:
            self.conn.close()

    def pluginCreateVenueEntity(self, venuedict):
        """
        Create needed venue entries.

        Parameter venuedict is Dogshome.eventSQLentity(), i.e.
        """
        cols = ", ".join(venuedict.keys())
        placeholders = ":" + ", :".join(venuedict.keys())
        q = u"INSERT OR IGNORE INTO venue (%s) VALUES (%s);" \
            % (cols, placeholders)
        self.cur.execute(q, venuedict)
        self.conn.commit()

    def insertVenueEvents(self, venue):
        """
        Insert parsed events from a venue into the database.
        """
        # Replace venue by venueid
        venue["venueid"] = self.getVenueByName(venue["venue"])[0]
        # TODO Why do we have this keyword in the dict in general...
        venue.pop("venue")  # No such column in SQL db
        cols = ", ".join(venue.keys())
        placeholders = ":" + ", :".join(venue.keys())
        q = u"INSERT OR IGNORE INTO event (%s) VALUES (%s);" \
            % (cols, placeholders)
        self.cur.execute(q, venue)
        self.conn.commit()

    def insertLastFMartists(self, artistdata):
        cols = ", ".join(artistdata.keys())
        placeholders = ":" + ", :".join(artistdata.keys())
        q = u"INSERT OR IGNORE INTO artist (%s) VALUES (%s);" \
            % (cols, placeholders)
        self.cur.execute(q, artistdata)
        self.conn.commit()

    def getVenues(self):
        q = u"SELECT id, name, city, country FROM venue"
        results = self.cur.execute(q)
        return results.fetchall()

    def getVenueByName(self, vname):
        q = u"SELECT id, name, city, country FROM venue " \
            + "WHERE name = ? LIMIT 1;"
        results = self.cur.execute(q, [vname])
        return results.fetchone()

    def getAllGigs(self):
        q = u"SELECT DISTINCT e.date, v.name, e.name " \
            + "FROM event AS e INNER JOIN venue AS v ON e.venueid = v.id " \
            + "GROUP BY e.date, v.name ORDER BY e.date;"
        results = self.cur.execute(q)
        return results.fetchall()

    def getArtists(self):
        q = u"SELECT name, playcount FROM artist;"
        results = self.cur.execute(q)
        for artist, playcount in results.fetchall():
            yield {u"artist": artist,
                   u"playcount": playcount}

    def getArtist(self, aname):
        pass  # TODO

    def purgeOldEvents(self):
        pass  # TODO


if __name__ == '__main__':
    import venues.plugin_dogshome

    db = DBEngine()

    doggari = venues.plugin_dogshome.Dogshome()

    db.pluginCreateVenueEntity(doggari.eventSQLentity())
    assert(db.getVenues() == [(1, u"Dog's home", u'Tampere', u'Finland')])
    assert(db.getVenueByName("Dog's home") ==
           (1, u"Dog's home", u'Tampere', u'Finland'))
    assert(db.getVenueByName("Testijuottola that should fail") == None)

    #db.insertVenueEvents(doggari.parseEvents(""))

    ### Test LastFM retriever
    import lastfmfetch

    lfmr = lastfmfetch.LastFmRetriever(db)
    for artist in lfmr.getAllListenedBands(limit=5):
        db.insertLastFMartists(artist)

    db.close()
Python
0
@@ -2855,27 +2855,288 @@ -pass # TODO +q = u%22SELECT name, playcount FROM artist %22 %5C%0A + %22WHERE name = ? LIMIT 5;%22%0A results = self.cur.execute(q, %5Baname%5D)%0A for artist, playcount in results.fetchall():%0A yield %7B u%22artist%22 : artist, %5C%0A u%22playcount%22 : playcount %7D %0A%0A de @@ -3207,16 +3207,71 @@ ain__':%0A + db = DBEngine()%0A%0A def testDogsHomePlugin():%0A impo @@ -3301,32 +3301,16 @@ me%0A%0A -db = DBEngine()%0A dogg @@ -3346,24 +3346,28 @@ Dogshome()%0A%0A + db.plugi @@ -3411,24 +3411,28 @@ tity())%0A + + assert(db.ge @@ -3490,16 +3490,20 @@ and')%5D)%0A + asse @@ -3550,32 +3550,49 @@ , u%22Dog's home%22, + %5C%0A u'Tampere', u'F @@ -3601,24 +3601,28 @@ land'))%0A + + assert(db.ge @@ -3681,17 +3681,20 @@ ne)%0A -# + db.inser @@ -3736,17 +3736,47 @@ ))%0A%0A -# +def testLastFmFetch():%0A ## Test @@ -3792,16 +3792,20 @@ triever%0A + impo @@ -3824,16 +3824,20 @@ ch%0A%0A + + lfmr = l @@ -3867,16 +3867,20 @@ ver(db)%0A + for @@ -3924,32 +3924,36 @@ mit=5):%0A + + db.insertLastFMa @@ -3968,16 +3968,132 @@ rtist)%0A%0A + for artist in db.getArtist(u%22Om%22):%0A print %22%25s %5Bplaycount %25d%5D%22 %25 (artist%5B%22artist%22%5D, artist%5B%22playcount%22%5D)%0A%0A db.c
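The implemented getArtist above combines a parameterized query (the ? placeholder keeps the name safely out of the SQL string) with a generator that yields one dict per row. A self-contained sketch of that pattern against an in-memory database; the schema here is invented for the demo:

import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE artist (name TEXT, playcount INTEGER)")
conn.execute("INSERT INTO artist VALUES ('Om', 42)")

def get_artist(cur, aname):
    # The ? placeholder lets sqlite3 escape the value, avoiding injection.
    q = "SELECT name, playcount FROM artist WHERE name = ? LIMIT 5;"
    for artist, playcount in cur.execute(q, [aname]).fetchall():
        yield {"artist": artist, "playcount": playcount}

for row in get_artist(conn.cursor(), "Om"):
    print("%s [playcount %d]" % (row["artist"], row["playcount"]))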
86c7f0bbacc59d00219b7f046150bd1bbf8da760
Improve type hints in hive (#77586)
homeassistant/components/hive/climate.py
homeassistant/components/hive/climate.py
"""Support for the Hive climate devices.""" from datetime import timedelta import logging from typing import Any import voluptuous as vol from homeassistant.components.climate import ClimateEntity from homeassistant.components.climate.const import ( PRESET_BOOST, PRESET_NONE, ClimateEntityFeature, HVACAction, HVACMode, ) from homeassistant.config_entries import ConfigEntry from homeassistant.const import ATTR_TEMPERATURE, TEMP_CELSIUS, TEMP_FAHRENHEIT from homeassistant.core import HomeAssistant from homeassistant.helpers import config_validation as cv, entity_platform from homeassistant.helpers.entity_platform import AddEntitiesCallback from . import HiveEntity, refresh_system from .const import ( ATTR_TIME_PERIOD, DOMAIN, SERVICE_BOOST_HEATING_OFF, SERVICE_BOOST_HEATING_ON, ) HIVE_TO_HASS_STATE = { "SCHEDULE": HVACMode.AUTO, "MANUAL": HVACMode.HEAT, "OFF": HVACMode.OFF, } HASS_TO_HIVE_STATE = { HVACMode.AUTO: "SCHEDULE", HVACMode.HEAT: "MANUAL", HVACMode.OFF: "OFF", } HIVE_TO_HASS_HVAC_ACTION = { "UNKNOWN": HVACAction.OFF, False: HVACAction.IDLE, True: HVACAction.HEATING, } TEMP_UNIT = {"C": TEMP_CELSIUS, "F": TEMP_FAHRENHEIT} PARALLEL_UPDATES = 0 SCAN_INTERVAL = timedelta(seconds=15) _LOGGER = logging.getLogger() async def async_setup_entry( hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback ) -> None: """Set up Hive thermostat based on a config entry.""" hive = hass.data[DOMAIN][entry.entry_id] devices = hive.session.deviceList.get("climate") entities = [] if devices: for dev in devices: entities.append(HiveClimateEntity(hive, dev)) async_add_entities(entities, True) platform = entity_platform.async_get_current_platform() platform.async_register_entity_service( "boost_heating", { vol.Required(ATTR_TIME_PERIOD): vol.All( cv.time_period, cv.positive_timedelta, lambda td: td.total_seconds() // 60, ), vol.Optional(ATTR_TEMPERATURE, default="25.0"): vol.Coerce(float), }, "async_heating_boost", ) platform.async_register_entity_service( SERVICE_BOOST_HEATING_ON, { vol.Required(ATTR_TIME_PERIOD): vol.All( cv.time_period, cv.positive_timedelta, lambda td: td.total_seconds() // 60, ), vol.Optional(ATTR_TEMPERATURE, default="25.0"): vol.Coerce(float), }, "async_heating_boost_on", ) platform.async_register_entity_service( SERVICE_BOOST_HEATING_OFF, {}, "async_heating_boost_off", ) class HiveClimateEntity(HiveEntity, ClimateEntity): """Hive Climate Device.""" _attr_hvac_modes = [HVACMode.AUTO, HVACMode.HEAT, HVACMode.OFF] _attr_preset_modes = [PRESET_BOOST, PRESET_NONE] _attr_supported_features = ( ClimateEntityFeature.TARGET_TEMPERATURE | ClimateEntityFeature.PRESET_MODE ) def __init__(self, hive_session, hive_device): """Initialize the Climate device.""" super().__init__(hive_session, hive_device) self.thermostat_node_id = hive_device["device_id"] self._attr_temperature_unit = TEMP_UNIT.get(hive_device["temperatureunit"]) @refresh_system async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None: """Set new target hvac mode.""" new_mode = HASS_TO_HIVE_STATE[hvac_mode] await self.hive.heating.setMode(self.device, new_mode) @refresh_system async def async_set_temperature(self, **kwargs: Any) -> None: """Set new target temperature.""" new_temperature = kwargs.get(ATTR_TEMPERATURE) if new_temperature is not None: await self.hive.heating.setTargetTemperature(self.device, new_temperature) @refresh_system async def async_set_preset_mode(self, preset_mode): """Set new preset mode.""" if preset_mode == PRESET_NONE and self.preset_mode == PRESET_BOOST: await 
self.hive.heating.setBoostOff(self.device) elif preset_mode == PRESET_BOOST: curtemp = round(self.current_temperature * 2) / 2 temperature = curtemp + 0.5 await self.hive.heating.setBoostOn(self.device, 30, temperature) async def async_heating_boost(self, time_period, temperature): """Handle boost heating service call.""" _LOGGER.warning( "Hive Service heating_boost will be removed in 2021.7.0, please update to heating_boost_on" ) await self.async_heating_boost_on(time_period, temperature) @refresh_system async def async_heating_boost_on(self, time_period, temperature): """Handle boost heating service call.""" await self.hive.heating.setBoostOn(self.device, time_period, temperature) @refresh_system async def async_heating_boost_off(self): """Handle boost heating service call.""" await self.hive.heating.setBoostOff(self.device) async def async_update(self) -> None: """Update all Node data from Hive.""" await self.hive.session.updateData(self.device) self.device = await self.hive.heating.getClimate(self.device) self._attr_available = self.device["deviceData"].get("online") if self._attr_available: self._attr_hvac_mode = HIVE_TO_HASS_STATE[self.device["status"]["mode"]] self._attr_hvac_action = HIVE_TO_HASS_HVAC_ACTION[ self.device["status"]["action"] ] self._attr_current_temperature = self.device["status"][ "current_temperature" ] self._attr_target_temperature = self.device["status"]["target_temperature"] self._attr_min_temp = self.device["min_temp"] self._attr_max_temp = self.device["max_temp"] if self.device["status"]["boost"] == "ON": self._attr_preset_mode = PRESET_BOOST else: self._attr_preset_mode = PRESET_NONE
Python
0
@@ -4004,17 +4004,30 @@ set_mode -) +: str) -%3E None :%0A @@ -4262,16 +4262,17 @@ = round( +( self.cur @@ -4287,16 +4287,22 @@ perature + or 0) * 2) /
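Besides the annotations, the diff above guards the boost arithmetic with (self.current_temperature or 0), since the attribute may be None before the first update. The guard, and its usual caveat, in miniature:

current = None
print(round((current or 0) * 2) / 2 + 0.5)  # 0.5 instead of a TypeError

current = 0.0
print(current or 0)  # caveat: a legitimate 0.0 also falls through to 0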
70f588282e1777945e113e73dbca83f77355f0f9
Test git permission
driver/omni_driver.py
driver/omni_driver.py
import driver
import lib.lib as lib
from hardware.dmcc_motor import DMCCMotorSet


class OmniDriver(driver.Driver):
    #Vijay was here
Python
0
@@ -125,8 +125,23 @@ as here%0A +%0A#Chad was here
2b0a0f2a575075fd44a8283d7f14efc9e0199f83
Reformatted for better readability.
dbfUtils.py
dbfUtils.py
import struct, datetime, decimal, itertools


def dbfreader(f):
    """Returns an iterator over records in a Xbase DBF file.

    The first row returned contains the field names.
    The second row contains field specs: (type, size, decimal places).
    Subsequent rows contain the data records.
    If a record is marked as deleted, it is skipped.

    File should be opened for binary reads.
    """
    # See DBF format spec at:
    #     http://www.pgts.com.au/download/public/xbase.htm#DBF_STRUCT

    numrec, lenheader = struct.unpack('<xxxxLH22x', f.read(32))
    numfields = (lenheader - 33) // 32

    fields = []
    for fieldno in xrange(numfields):
        name, typ, size, deci = struct.unpack('<11sc4xBB14x', f.read(32))
        name = name.replace('\0', '')  # eliminate NULs from string
        fields.append((name, typ, size, deci))

    yield [field[0] for field in fields]
    yield [tuple(field[1:]) for field in fields]

    terminator = f.read(1)
    assert terminator == '\r'

    fields.insert(0, ('DeletionFlag', 'C', 1, 0))
    fmt = ''.join(['%ds' % fieldinfo[2] for fieldinfo in fields])
    fmtsiz = struct.calcsize(fmt)

    for i in xrange(numrec):
        record = struct.unpack(fmt, f.read(fmtsiz))
        if record[0] != ' ':
            continue  # deleted record
        result = []
        for (name, typ, size, deci), value in itertools.izip(fields, record):
            if name == 'DeletionFlag':
                continue
            if typ == "N":
                value = value.replace('\0', '').lstrip()
                if value == '':
                    value = 0
                elif deci:
                    value = decimal.Decimal(value)
                else:
                    value = int(value)
            elif typ == 'D':
                y, m, d = int(value[:4]), int(value[4:6]), int(value[6:8])
                value = datetime.date(y, m, d)
            elif typ == 'L':
                value = (value in 'YyTt' and 'T') or (value in 'NnFf' and 'F') or '?'
            result.append(value)

        yield result


def dbfwriter(f, fieldnames, fieldspecs, records):
    """ Return a string suitable for writing directly to a binary dbf file.

    File f should be open for writing in a binary mode.

    Fieldnames should be no longer than ten characters and not include \x00.
    Fieldspecs are in the form (type, size, deci) where
        type is one of:
            C for ascii character data
            M for ascii character memo data (real memo fields not supported)
            D for datetime objects
            N for ints or decimal objects
            L for logical values 'T', 'F', or '?'
        size is the field width
        deci is the number of decimal places in the provided decimal object
    Records can be an iterable over the records (sequences of field values).
    """
    # header info
    ver = 3
    now = datetime.datetime.now()
    yr, mon, day = now.year - 1900, now.month, now.day
    numrec = len(records)
    numfields = len(fieldspecs)
    lenheader = numfields * 32 + 33
    lenrecord = sum(field[1] for field in fieldspecs) + 1
    hdr = struct.pack('<BBBBLHH20x', ver, yr, mon, day, numrec, lenheader, lenrecord)
    f.write(hdr)

    # field specs
    for name, (typ, size, deci) in itertools.izip(fieldnames, fieldspecs):
        name = name.ljust(11, '\x00')
        fld = struct.pack('<11sc4xBB14x', name, typ, size, deci)
        f.write(fld)

    # terminator
    f.write('\r')

    # records
    for record in records:
        f.write(' ')  # deletion flag
        for (typ, size, deci), value in itertools.izip(fieldspecs, record):
            if typ == "N":
                value = str(value).rjust(size, ' ')
            elif typ == 'D':
                value = value.strftime('%Y%m%d')
            elif typ == 'L':
                value = str(value)[0].upper()
            else:
                value = str(value)[:size].ljust(size, ' ')
            assert len(value) == size
            f.write(value)

    # End of file
    f.write('\x1A')
Python
0.999999
@@ -63,16 +63,21 @@ %0A %22%22%22 +%0A Returns @@ -118,24 +118,24 @@ e DBF file.%0A - %0A The fir @@ -391,17 +391,16 @@ reads.%0A -%0A %22%22%22%0A @@ -620,24 +620,25 @@ fields = %5B%5D%0A +%0A for fiel @@ -860,16 +860,17 @@ deci))%0A +%0A yiel @@ -1160,16 +1160,17 @@ ze(fmt)%0A +%0A for @@ -1242,16 +1242,17 @@ mtsiz))%0A +%0A @@ -1333,16 +1333,17 @@ record%0A +%0A @@ -1354,16 +1354,17 @@ lt = %5B%5D%0A +%0A @@ -1497,16 +1497,17 @@ ontinue%0A +%0A @@ -1582,16 +1582,17 @@ strip()%0A +%0A @@ -2075,32 +2075,33 @@ t.append(value)%0A +%0A yield re @@ -2165,16 +2165,20 @@ %0A %22%22%22 +%0A Return @@ -2881,21 +2881,16 @@ alues).%0A - %0A %22%22%22%0A @@ -3639,16 +3639,17 @@ on flag%0A +%0A @@ -3958,32 +3958,32 @@ else:%0A - @@ -4017,32 +4017,33 @@ just(size, ' ')%0A +%0A asse
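dbfreader above decodes fixed-width DBF records by concatenating a '%ds' struct format per field and unpacking one record at a time. A minimal sketch of that trick on hand-built bytes (Python 3 syntax, unlike the Python 2 file above):

import struct

fields = [('DeletionFlag', 'C', 1, 0), ('NAME', 'C', 5, 0), ('AGE', 'N', 3, 0)]
fmt = ''.join('%ds' % f[2] for f in fields)  # '1s5s3s'
record = b' Alice 42'                        # 1 + 5 + 3 bytes
print(struct.unpack(fmt, record))            # (b' ', b'Alice', b' 42')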
a3f12245163a9165f45f4ee97b6e4e67cdd29783
Update decipher.py
decipher.py
decipher.py
#
# decipher.py (c) Luis Hoderlein
#
# BUILT: Apr 21, 2016
#
# This program can brute force Cesarian ciphers
# It gives you all possible outputs, meaning you still have to chose the output you want
#

import string


def pad(num):
    if num < 10:
        return "0" + str(num)
    else:
        return str(num)


raw_txt = raw_input("Enter ciphertext: ")
raw_int = []
txt = ""
spaces = []

raw_txt = raw_txt.lower()

for i in range(0, len(raw_txt)):
    if raw_txt[i] != " ":
        txt = txt + raw_txt[i]
    else:
        spaces.append(i);

for i in range(0, len(txt)):
    raw_int.append(string.lowercase.index(txt[i]))

for i in range(0, 26):
    possible_int = []
    for j in range(0, len(raw_int)):
        possible_int.append(raw_int[j])
    possible_txt = ""
    for j in range(0, len(possible_int)):
        possible_int[j] = possible_int[j] + i
        if possible_int[j] >= 26:
            possible_int[j] = possible_int[j] - 26
        possible_txt = possible_txt + string.lowercase[possible_int[j]]
    del possible_int
    for j in range(0, len(spaces)):
        possible_txt = possible_txt[:spaces[j]] + " " + possible_txt[spaces[j]:]
    print "Solution " + pad(i) + " is " + possible_txt
Python
0
@@ -195,16 +195,26 @@ want%0A#%0A%0A +# imports%0A import s @@ -220,16 +220,53 @@ string%0A%0A +# adds padding to make output inline%0A def pad( @@ -333,16 +333,47 @@ r(num)%0A%0A +# declare vars + ask for input%0A raw_txt @@ -445,34 +445,95 @@ %5B%5D%0A%0A -raw_txt = raw_txt.lower()%0A +# make all lower case (necessary)%0Araw_txt = raw_txt.lower()%0A%0A# log spaces + remove them %0Afor @@ -638,16 +638,40 @@ nd(i);%0A%0A +# turn chars into ints %0A for i in @@ -741,16 +741,226 @@ t%5Bi%5D))%0A%0A +# loop through every possible solution (26 of them), using i has cipher number%0A# and print all possible solution + add the spaces again%0A# to prevent some weird bug, possible int has to be reassigned every time%0A for i in @@ -1449,16 +1449,17 @@ (i)+%22 is +: %22+possi
71e96782caff8543c2e859226bd0b77a79a55040
fix gate
e3nn_jax/_gate.py
e3nn_jax/_gate.py
from functools import partial

import jax
import jax.numpy as jnp

from e3nn_jax import IrrepsData, elementwise_tensor_product, scalar_activation
from e3nn_jax.util.decorators import overload_for_irreps_without_data


@partial(jax.jit, static_argnums=(1, 2, 3, 4))
def _gate(input: IrrepsData, even_act, odd_act, even_gate_act, odd_gate_act) -> IrrepsData:
    # split l=0 vs l>0
    j = 0
    for j, (_, ir) in enumerate(input.irreps):
        if ir.l > 0:
            break
    scalars, gated = input.split([j])
    assert scalars.irreps.lmax == 0

    # apply scalar activation if there is no gate
    if gated.irreps.dim == 0:
        scalars = scalar_activation(scalars, [even_act if ir.p == 1 else odd_act for _, ir in scalars.irreps])
        return scalars

    # extract gates from scalars
    gates = None
    for i in range(j + 1):
        if scalars.irreps[i:].num_irreps == gated.irreps.num_irreps:
            scalars, gates = scalars.split([i])
            break

    if gates is None:
        raise ValueError(f"Gate: did not manage to split the input {input.irreps} into scalars, gates and gated.")

    scalars = scalar_activation(scalars, [even_act if ir.p == 1 else odd_act for _, ir in scalars.irreps])
    gates = scalar_activation(gates, [even_gate_act if ir.p == 1 else odd_gate_act for _, ir in gates.irreps])

    return IrrepsData.cat([scalars, elementwise_tensor_product(gates, gated)])


@overload_for_irreps_without_data((0,))
def gate(input: IrrepsData, even_act=None, odd_act=None, even_gate_act=None, odd_gate_act=None) -> IrrepsData:
    r"""Gate activation function.

    The input is split into scalars that are activated separately, scalars that are used as
    gates, and non-scalars that are multiplied by the gates.

    List of assumptions:

    - The scalars are on the left side of the input.
    - The gate scalars are on the right side of the scalars.

    Args:
        input (IrrepsData): Input data.
        acts: The list of activation functions. Its length must be equal
            to the number of scalar blocks in the input.

    Returns:
        IrrepsData: Output data.

    Examples:
        >>> gate("12x0e + 3x0e + 2x1e + 1x2e")
        12x0e+2x1e+1x2e
    """
    assert isinstance(input, IrrepsData)

    if even_act is None:
        even_act = jax.nn.gelu
    if odd_act is None:
        odd_act = lambda x: (1 - jnp.exp(-x**2)) * x
    if even_gate_act is None:
        even_gate_act = jax.nn.sigmoid
    if odd_gate_act is None:
        odd_gate_act = jax.nn.tanh

    return _gate(input, even_act, odd_act, even_gate_act, odd_gate_act)
Python
0.000001
@@ -358,36 +358,36 @@ -# split l=0 vs l%3E0%0A j = 0 +scalars, gated = input, None %0A @@ -459,26 +459,16 @@ - break%0A scal @@ -497,16 +497,34 @@ it(%5Bj%5D)%0A + break%0A asse @@ -562,51 +562,17 @@ # -apply scalar activation if there is n +N o gate +s: %0A @@ -584,24 +584,16 @@ ated -.irreps.dim == 0 + is None :%0A @@ -594,33 +594,30 @@ ne:%0A -scalars = +return scalar_acti @@ -702,64 +702,36 @@ s%5D)%0A - return scalars%0A%0A # extract gates from scalars +%0A # Get the scalar gates: %0A @@ -1035,17 +1035,53 @@ tes -and gated +(%7Bscalars.irreps%7D) and gated (%7Bgated.irreps%7D) .%22)%0A @@ -2084,24 +2084,70 @@ Examples:%0A + The 3 even scalars are used as gates.%0A %3E%3E%3E @@ -2204,16 +2204,283 @@ x1e+1x2e +%0A%0A Odd scalars used as gates change the parity of the gated quantities:%0A %3E%3E%3E gate(%2212x0e + 3x0o + 2x1e + 1x2e%22)%0A 12x0e+2x1o+1x2o%0A%0A Without anything to gate, all the scalars are activated:%0A %3E%3E%3E gate(%2212x0e + 3x0o%22)%0A 12x0e+3x0o %0A %22%22%22
f421b2997494ca546c6479e4246456e56b816e60
Add Robert EVT ID too
libpebble2/util/hardware.py
libpebble2/util/hardware.py
__author__ = 'katharine'


class PebbleHardware(object):
    UNKNOWN = 0
    TINTIN_EV1 = 1
    TINTIN_EV2 = 2
    TINTIN_EV2_3 = 3
    TINTIN_EV2_4 = 4
    TINTIN_V1_5 = 5
    BIANCA = 6
    SNOWY_EVT2 = 7
    SNOWY_DVT = 8
    SPALDING_EVT = 9
    BOBBY_SMILES = 10
    SPALDING = 11
    SILK_EVT = 12
    SILK = 14

    TINTIN_BB = 0xFF
    TINTIN_BB2 = 0xFE
    SNOWY_BB = 0xFD
    SNOWY_BB2 = 0xFC
    SPALDING_BB2 = 0xFB
    SILK_BB = 0xFA
    ROBERT_BB = 0xF9
    SILK_BB2 = 0xF8

    PLATFORMS = {
        UNKNOWN: 'unknown',
        TINTIN_EV1: 'aplite',
        TINTIN_EV2: 'aplite',
        TINTIN_EV2_3: 'aplite',
        TINTIN_EV2_4: 'aplite',
        TINTIN_V1_5: 'aplite',
        BIANCA: 'aplite',
        SNOWY_EVT2: 'basalt',
        SNOWY_DVT: 'basalt',
        BOBBY_SMILES: 'basalt',
        SPALDING_EVT: 'chalk',
        SPALDING: 'chalk',
        SILK_EVT: 'diorite',
        SILK: 'diorite',
        TINTIN_BB: 'aplite',
        TINTIN_BB2: 'aplite',
        SNOWY_BB: 'basalt',
        SNOWY_BB2: 'basalt',
        SPALDING_BB2: 'chalk',
        SILK_BB: 'diorite',
        ROBERT_BB: 'emery',
        SILK_BB2: 'diorite',
    }

    @classmethod
    def hardware_platform(cls, hardware):
        return cls.PLATFORMS.get(hardware, 'unknown')
Python
0
@@ -297,16 +297,36 @@ VT = 12%0A + ROBERT_EVT = 13%0A SILK
6bce6f9e0a44780503df59054c962ddd82d9b53f
Remove unused variables
decouple.py
decouple.py
# coding: utf-8
import os
import sys


# Useful for very coarse version differentiation.
PY3 = sys.version_info[0] == 3

if PY3:
    from configparser import ConfigParser
    string_type = str
    string_empty = ''
else:
    from ConfigParser import SafeConfigParser as ConfigParser
    string_type = unicode
    string_empty = u''


class UndefinedValueError(Exception):
    pass


class Undefined(object):
    pass


# Reference instance to represent undefined values
undefined = Undefined()


class ConfigBase(object):
    """
    Base class to make the API explicit.
    """
    def __init__(self, config_file):
        raise NotImplemented

    def get(self, option, default=string_empty, cast=string_type):
        """
        Return the value for option or default option is not defined.
        """
        raise NotImplemented

    def __call__(self, *args, **kwargs):
        """
        Convenient shortcut to get.
        """
        return self.get(*args, **kwargs)


class ConfigIni(ConfigBase):
    """
    Wrapper around ConfigParser to deal with Django environment settings.
    """
    SECTION = 'settings'

    def __init__(self, config_file):
        self.config_file = None
        self.parser = None
        self.load(config_file)

    def load(self, config_file):
        """
        Load config data from a file.
        """
        self.config_file = config_file
        self.parser = ConfigParser()
        self.parser.readfp(open(config_file))

    def get(self, option, default=string_empty, cast=string_type):
        """
        Return the value for option or default option is not defined.
        """
        if not self.parser.has_option(self.SECTION, option):
            return cast(default)

        getter = {
            bool: self.parser.getboolean,
            float: self.parser.getfloat,
            int: self.parser.getint,
        }.get(cast, self.parser.get)

        return cast(getter(self.SECTION, option))

    def set(self, option, value):
        """
        Add a config value to configuration instance.
        """
        if not self.parser.has_section(self.SECTION):
            self.parser.add_section(self.SECTION)

        self.parser.set(self.SECTION, option, string_type(value))

    def remove(self, option):
        """
        Remove an option from the config instance.
        """
        return self.parser.remove_option(self.SECTION, option)

    def list(self):
        """
        Return a list of all (option, value) pairs.
        """
        return self.parser.items(self.SECTION)

    def save(self):
        """
        Persist current configuration instance to the original config file.
        """
        with open(self.config_file, 'wb') as f:
            self.parser.write(f)


class ConfigEnv(ConfigBase):
    """
    Handle .env file format used by Foreman.
    """
    _BOOLEANS = {'1': True, 'yes': True, 'true': True, 'on': True,
                 '0': False, 'no': False, 'false': False, 'off': False}

    def __init__(self, config_file):
        self.data = self._read_dotenv(config_file)

    def _read_dotenv(self, config_file):
        """
        Read config data from a file.

        Taken from jacobian's django-dotenv
        """
        data = {}

        for line in open(config_file):
            line = line.strip()
            if not line or line.startswith('#') or '=' not in line:
                continue
            k, v = line.split('=', 1)
            v = v.strip("'").strip('"')
            data[k] = v

        return data

    def _cast_boolean(self, value):
        """
        Helper to convert config values to boolean as ConfigParser do.
        """
        if value.lower() not in self._BOOLEANS:
            raise ValueError('Not a boolean: %s' % value)

        return self._BOOLEANS[value.lower()]

    def get(self, option, default=undefined, cast=undefined):
        """
        Return the value for option or default if defined.
        """
        if option in self.data or option in os.environ:
            value = self.data.get(option) or os.environ[option]
        else:
            value = default

        if isinstance(value, Undefined):
            raise UndefinedValueError('%s option not found and default value was not defined.' % option)

        if isinstance(cast, Undefined):
            cast = lambda v: v  # nop
        elif cast is bool:
            cast = self._cast_boolean

        return cast(value)


class RepositoryBase(object):
    def __init__(self, source):
        raise NotImplemented

    def has_key(self, key):
        raise NotImplemented

    def get(self, key):
        raise NotImplemented


class RepositoryIni(RepositoryBase):
    """
    Retrieves option keys from .ini files.
    """
    SECTION = 'settings'

    def __init__(self, source):
        self.parser = ConfigParser()
        self.parser.readfp(open(source))

    def has_key(self, key):
        return self.parser.has_option(self.SECTION, key)

    def get(self, key):
        return self.parser.get(self.SECTION, key)


class RepositoryEnv(RepositoryBase):
    """
    Retrieves option keys from .env files with fall back to os.env.
    """
    def __init__(self, source):
        self.data = {}

        for line in open(source):
            line = line.strip()
            if not line or line.startswith('#') or '=' not in line:
                continue
            k, v = line.split('=', 1)
            v = v.strip("'").strip('"')
            self.data[k] = v

    def has_key(self, key):
        return key in self.data or key in os.environ

    def get(self, key):
        return self.data[key] or os.environ[key]


class RepositoryShell(RepositoryBase):
    """
    Retrieves option keys from os.env.
    """
    def __init__(self, source=None):
        pass

    def has_key(self, key):
        return key in os.env

    def get(self, key):
        return os.env[key]


class ConfigShell(ConfigEnv):
    """
    Fallback class that only look on os.envirion.
    """
    def __init__(self, config_file=None):
        pass

    def get(self, option, default=string_empty, cast=string_type):
        """
        Return the value for option or default option is not defined.
        """
        if option not in os.environ:
            # If default was not defined return it, else make sure to cast.
            # This is usefull for cases like dj-database-url.parse.
            if default == string_empty:
                return default
            else:
                return cast(default)

        if cast is bool:
            cast = self._cast_boolean

        return cast(os.environ[option])


class AutoConfig(object):
    """
    Autodetects the config file and type.
    """
    SUPPORTED = {
        'settings.ini': ConfigIni,
        '.env': ConfigEnv,
    }

    def __init__(self):
        self.config = None

    def _find_file(self, path):
        # look for all files in the current path
        for filename in self.SUPPORTED:
            file = os.path.join(path, filename)
            if os.path.exists(file):
                return file

        # search the parent
        parent = os.path.dirname(path)
        if parent and parent != os.path.sep:
            return self._find_file(parent)

        # reached root without finding any files.
        return ''

    def _load(self, path):
        try:
            file = self._find_file(path)
        except:
            file = ''
        klass = self.SUPPORTED.get(os.path.basename(file))

        if not klass:
            klass = ConfigShell

        self.config = klass(file)

    def _caller_path(self):
        # MAGIC! Get the caller's module path.
        frame = sys._getframe()
        path = os.path.dirname(frame.f_back.f_back.f_code.co_filename)
        return path

    def __call__(self, *args, **kwargs):
        if not self.config:
            self._load(self._caller_path())

        return self.config(*args, **kwargs)


# A pré-instantiated AutoConfig to improve decouple's usability
# now just import config and start using with no configuration.
config = AutoConfig()
Python
0.000007
@@ -168,52 +168,8 @@ ser%0A - string_type = str%0A string_empty = ''%0A else @@ -236,57 +236,8 @@ ser%0A - string_type = unicode%0A string_empty = u''%0A %0A%0Acl
d9af336506fcca40cbc5ebf337268cfd16459c4f
Use iter_log in example.
examples/ra_log.py
examples/ra_log.py
#!/usr/bin/python
# Demonstrates how to iterate over the log of a Subversion repository.

from subvertpy.ra import RemoteAccess

conn = RemoteAccess("svn://svn.gnome.org/svn/gnome-specimen/trunk")

def cb(changed_paths, rev, revprops, has_children=None):
    print "=" * 79
    print "%d:" % rev
    print "Revision properties:"
    for entry in revprops.items():
        print " %s: %s" % entry
    print ""

    print "Changed paths"
    for path, (action, from_path, from_rev) in changed_paths.iteritems():
        print " %s (%s)" % (path, action)

conn.get_log(callback=cb, paths=None, start=0, end=conn.get_latest_revnum(),
    discover_changed_paths=True)
Python
0
@@ -157,36 +157,27 @@ svn. -gnome +samba .org/s -vn/gnome-specimen +ubvertpy /tru @@ -186,14 +186,12 @@ %22)%0A%0A -def cb +for (cha @@ -229,20 +229,120 @@ children -=Non +) in conn.iter_log(paths=None,%0A start=0, end=conn.get_latest_revnum(), discover_changed_paths=Tru e):%0A @@ -493,20 +493,16 @@ rint %22%22%0A - %0A pri @@ -642,124 +642,4 @@ n)%0A%0A -conn.get_log(callback=cb, paths=None, start=0, end=conn.get_latest_revnum(), %0A discover_changed_paths=True)%0A
1fdcf3b9a590bf52f42ce3ce5cdfbae07dc7100d
remove remaining settings
src/settings.py
src/settings.py
import os
gettext = lambda s: s
"""
Django settings for demo project.

For more information on this file, see
https://docs.djangoproject.com/en/1.6/topics/settings/

For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.6/ref/settings/
"""

# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
DATA_DIR = os.path.join(BASE_DIR, 'data')


# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.6/howto/deployment/checklist/

# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'y&+f+)tw5sqkcy$@vwh8cy%y^9lwytqtn*y=lv7f9t39b(cufx'

# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True

TEMPLATE_DEBUG = True

ALLOWED_HOSTS = []


# Application definition

ROOT_URLCONF = 'urls'

WSGI_APPLICATION = 'wsgi.application'


# Database
# https://docs.djangoproject.com/en/1.6/ref/settings/#databases

# Internationalization
# https://docs.djangoproject.com/en/1.6/topics/i18n/

LANGUAGE_CODE = 'en'

TIME_ZONE = 'Europe/London'

USE_I18N = True

USE_L10N = True

USE_TZ = True

# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.6/howto/static-files/

STATIC_URL = '/static/'
MEDIA_URL = '/media/'
MEDIA_ROOT = os.path.join(DATA_DIR, 'media')
STATIC_ROOT = os.path.join(DATA_DIR, 'static_collected')

STATICFILES_DIRS = (
    os.path.join(BASE_DIR, 'static'),
    # NOTE: we need to expose private in order for django-libsass to work
    # TODO: we need to find a way arount this as we do not want to expose the source files
    os.path.join(BASE_DIR, 'private'),
)

STATICFILES_FINDERS = (
    'django.contrib.staticfiles.finders.FileSystemFinder',
    'django.contrib.staticfiles.finders.AppDirectoriesFinder',
)

SITE_ID = 1

TEMPLATE_LOADERS = (
    'django.template.loaders.filesystem.Loader',
    'django.template.loaders.app_directories.Loader',
    'django.template.loaders.eggs.Loader',
)

MIDDLEWARE_CLASSES = (
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.locale.LocaleMiddleware',
    'django.middleware.doc.XViewMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
    'cms.middleware.user.CurrentUserMiddleware',
    'cms.middleware.page.CurrentPageMiddleware',
    'cms.middleware.toolbar.ToolbarMiddleware',
    'cms.middleware.language.LanguageCookieMiddleware',
)

TEMPLATE_CONTEXT_PROCESSORS = (
    'django.contrib.auth.context_processors.auth',
    'django.contrib.messages.context_processors.messages',
    'django.core.context_processors.i18n',
    'django.core.context_processors.debug',
    'django.core.context_processors.request',
    'django.core.context_processors.media',
    'django.core.context_processors.csrf',
    'django.core.context_processors.tz',
    'sekizai.context_processors.sekizai',
    'django.core.context_processors.static',
    'cms.context_processors.cms_settings',
)

TEMPLATE_DIRS = (
    os.path.join(BASE_DIR, 'templates'),
)

INSTALLED_APPS = (
    'djangocms_admin_style',
    'djangocms_text_ckeditor',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.admin',
    'django.contrib.sites',
    'django.contrib.sitemaps',
    'django.contrib.staticfiles',
    'django.contrib.messages',
    'cms',
    'menus',
    'sekizai',
    'treebeard',
    'djangocms_style',
    'djangocms_inherit',
    'south',
    'reversion',
)

LANGUAGES = (
    ## Customize this
    ('en', gettext('English')),
    ('de', gettext('Deutsch')),
)

CMS_LANGUAGES = {
    ## Customize this
    'default': {
        'public': True,
        'hide_untranslated': False,
        'redirect_on_fallback': True,
    },
    1: [
        {
            'public': True,
            'code': 'en',
            'hide_untranslated': False,
            'name': gettext('en'),
            'redirect_on_fallback': True,
        },
        {
            'public': True,
            'code': 'de',
            'hide_untranslated': False,
            'name': gettext('de'),
            'redirect_on_fallback': True,
        },
    ],
}

CMS_TEMPLATES = (
    ## Customize this
    ('fullwidth.html', 'Fullwidth'),
    ('sidebar_left.html', 'Sidebar Left'),
    ('sidebar_right.html', 'Sidebar Right'),
)

CMS_PERMISSION = True

CMS_PLACEHOLDER_CONF = {}

DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.postgresql_psycopg2',
        'NAME': 'djangocms_demo_local',
        'HOST': 'localhost',
        'USER': 'postgres',
        'PASSWORD': '',
        'PORT': '',
    },
}
Python
0.000001
@@ -1485,361 +1485,8 @@ '),%0A - # NOTE: we need to expose private in order for django-libsass to work%0A # TODO: we need to find a way arount this as we do not want to expose the source files%0A os.path.join(BASE_DIR, 'private'),%0A)%0A%0ASTATICFILES_FINDERS = (%0A 'django.contrib.staticfiles.finders.FileSystemFinder',%0A 'django.contrib.staticfiles.finders.AppDirectoriesFinder',%0A )%0A%0AS
7e7ec43b7d98e5afc42bfd25e3d43ae468771340
fix #369; add script to strip extra space around/within existing sentences
docs/database/scripts/clean_whitespace.py
docs/database/scripts/clean_whitespace.py
#Script for removing unwanted ASCII whitespace from sentences:
# - leading/trailing whitespace
# - internal sequences of more than one whitespace character
# - internal tabs and newlines
# By default, uses the MySQL credentials (username, password, db name) and hostname of the VM.
# To run on the server, you must specify --username, --pwd, --db, --host.
#Note that you must download mysql.connector, since it doesn't come with the default distribution.
#Use: sudo apt-get update && sudo apt-get install python-mysql.connector

import mysql.connector
import codecs
import os
import argparse
import re

class WhitespaceCleaner:
    """Class for removing extra whitespace from text."""

    def __init__(self, parsed):
        self.parsed = parsed
        self.cnx = None

    def connect(self):
        self.cnx = mysql.connector.connect(user=self.parsed.user,
                                           password=self.parsed.pwd,
                                           host=self.parsed.host,
                                           database=self.parsed.db)

    def disconnect(self):
        self.cnx.close()

    def read_csv(self, filename):
        """Read a CSV file (id tab text) produced by an earlier step
        and execute an SQL query to update text."""
        print "\n\nfilename: {0}".format(filename)
        if self.parsed.dry_run:
            print "---NOT executing these lines---"
        in_f = codecs.open(filename, "r", "utf-8")
        cursor = self.cnx.cursor()
        for line in in_f:
            line_en = line.encode('utf-8')
            id, sep, text = line_en.partition('\t')
            query = "UPDATE sentences SET text = '{0}' WHERE id = {1};".format(text.rstrip(), id)
            print query
            if not self.parsed.dry_run:
                cursor.execute(query)
        cursor.close()
        in_f.close()
        print "--------------------------------"

    def write_csv_for_stripping_sents(self, filename):
        """Write a CSV file (id tab text) containing IDs of sentences
        to be stripped of surrounding whitespace plus their new text."""
        cursor = self.cnx.cursor()
        cursor.execute("SELECT id, text FROM sentences WHERE text regexp '^[[:space:]]' OR text regexp '[[:space:]]$';")
        out_f = codecs.open(filename, "w", "utf-8")
        for (id, text) in cursor:
            new_text = text.strip()
            line = "{0}\t{1}\n".format(id, new_text)
            line_de = line.decode('utf-8')
            out_f.write(line_de)
        cursor.close()
        out_f.close()

    def write_csv_from_sents_w_regex(self, filename, mysql_regex, py_regex, substitution_str):
        """Write a CSV file (id tab text) containing IDs of sentences
        to be updated plus their new text."""
        cursor = self.cnx.cursor()
        query = "SELECT id, text FROM sentences WHERE text regexp '{0}';".format(mysql_regex)
        cursor.execute(query)
        regex = re.compile(py_regex)
        out_f = codecs.open(filename, "w", "utf-8")
        for (id, text) in cursor:
            new_text = regex.sub(substitution_str, text)
            new_text = new_text.replace("'", r"\'")
            line = "{0}\t{1}\n".format(id, new_text)
            line_en = line.decode('utf-8')
            out_f.write(line_en)
        cursor.close()
        out_f.close()

if __name__ == "__main__":
    user = 'root'
    parser = argparse.ArgumentParser()
    parser.add_argument('--user', default='root', help='MySQL username')
    parser.add_argument('--pwd', default='tatoeba', help='MySQL password')
    parser.add_argument('--host', default='127.0.0.1', help='host (e.g., 127.0.0.1)')
    parser.add_argument('--db', default='tatoeba', help='MySQL database')
    parser.add_argument('--dry_run', default=False, action='store_true', help='Use this to prevent execution')
    parsed = parser.parse_args()
    # script_dir = os.path.dirname(os.path.realpath(__file__))

    cleaner = WhitespaceCleaner(parsed)
    cleaner.connect()

    filename = "stripped.csv"
    cleaner.write_csv_for_stripping_sents(filename)
    cleaner.read_csv(filename)
    # This block must be run before the blocks that follow it.

    filename = "space_seq.csv"
    cleaner.write_csv_from_sents_w_regex(filename, "[[:space:]]{2,}", r"\s{2,}", " ")
    cleaner.read_csv(filename)

    filename = "tab.csv"
    cleaner.write_csv_from_sents_w_regex(filename, "[[.tab.]]", r"\t", " ")
    cleaner.read_csv(filename)

    filename = "newline.csv"
    cleaner.write_csv_from_sents_w_regex(filename, "[[.newline.]]", r"\n", " ")
    cleaner.read_csv(filename)

    cleaner.disconnect()
Python
0.000001
@@ -2355,16 +2355,68 @@ strip()%0A + new_text = new_text.replace(%22'%22, r%22%5C'%22)%0A
1d0d28ebdda25a7dc579857063d47c5042e6c02b
Enable south for the docs site.
django_docs/settings.py
django_docs/settings.py
# Settings for docs.djangoproject.com

from django_www.common_settings import *


### Django settings

CACHE_MIDDLEWARE_KEY_PREFIX = 'djangodocs'

INSTALLED_APPS = [
    'django.contrib.sitemaps',
    'django.contrib.sites',
    'django.contrib.staticfiles',
    'djangosecure',
    'haystack',

    'docs',
]

MIDDLEWARE_CLASSES = [
    'django.middleware.cache.UpdateCacheMiddleware',
    'djangosecure.middleware.SecurityMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.cache.FetchFromCacheMiddleware'
]

TEMPLATE_CONTEXT_PROCESSORS = [
    'django.contrib.auth.context_processors.auth',
    'django.core.context_processors.debug',
    'django.core.context_processors.i18n',
    'django.core.context_processors.static',
    'django.contrib.messages.context_processors.messages',
    'docs.context_processors.recent_release',
    'django.core.context_processors.request',
]

ROOT_URLCONF = 'django_docs.urls'

SITE_ID = 2


### Docs settings

if PRODUCTION:
    DOCS_BUILD_ROOT = BASE.parent.child('docbuilds')
else:
    DOCS_BUILD_ROOT = '/tmp/djangodocs'


### Haystack settings

HAYSTACK_SITECONF = 'docs.search_sites'
if PRODUCTION:
    HAYSTACK_SEARCH_ENGINE = 'xapian'
    HAYSTACK_XAPIAN_PATH = BASE.parent.child('djangodocs.index')
else:
    HAYSTACK_SEARCH_ENGINE = 'whoosh'
    HAYSTACK_WHOOSH_PATH = '/tmp/djangodocs.index'


### Enable optional components

if DEBUG:
    try:
        import debug_toolbar
    except ImportError:
        pass
    else:
        INSTALLED_APPS.append('debug_toolbar')
        INTERNAL_IPS = ['127.0.0.1']
        MIDDLEWARE_CLASSES.insert(
            MIDDLEWARE_CLASSES.index('django.middleware.common.CommonMiddleware') + 1,
            'debug_toolbar.middleware.DebugToolbarMiddleware')

# Log errors to Sentry instead of email, if available.
if 'sentry_dsn' in SECRETS:
    INSTALLED_APPS.append('raven.contrib.django')
    SENTRY_DSN = SECRETS['sentry_dsn']
    LOGGING["loggers"]["django.request"]["handlers"].remove("mail_admins")
Python
0
@@ -288,16 +288,29 @@ ystack', +%0A 'south', %0A%0A 'd @@ -1379,24 +1379,74 @@ cs.index'%0A%0A%0A +### South settings%0A%0ASOUTH_TESTS_MIGRATE = False%0A%0A%0A ### Enable o
3434c404d8ab3d42bed4756338f1b8dba3a10255
split debug_plot into debug and plot
src/settings.py
src/settings.py
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals

debug = False
debug_plot = False
plot = False

# CE hack is ON
CE = True


def plt_show():
    from matplotlib import pyplot as plt
    if debug_plot:
        plt.show()
    else:
        plt.close()
Python
0.998673
@@ -293,16 +293,36 @@ bug_plot + or (debug and plot) :%0A
2dc0ac43b50c61aa10576779a8228ff578c37068
Use get_user_model
src/auditlog/middleware.py
src/auditlog/middleware.py
from __future__ import unicode_literals

import threading
import time

from django.conf import settings
from django.db.models.signals import pre_save
from django.utils.functional import curry
from django.apps import apps
from auditlog.models import LogEntry
from auditlog.compat import is_authenticated

# Use MiddlewareMixin when present (Django >= 1.10)
try:
    from django.utils.deprecation import MiddlewareMixin
except ImportError:
    MiddlewareMixin = object


threadlocal = threading.local()


class AuditlogMiddleware(MiddlewareMixin):
    """
    Middleware to couple the request's user to log items. This is accomplished by currying the signal receiver with
    the user from the request (or None if the user is not authenticated).
    """

    def process_request(self, request):
        """
        Gets the current user from the request and prepares and connects a signal receiver with the user already
        attached to it.
        """
        # Initialize thread local storage
        threadlocal.auditlog = {
            'signal_duid': (self.__class__, time.time()),
            'remote_addr': request.META.get('REMOTE_ADDR'),
        }

        # In case of proxy, set 'original' address
        if request.META.get('HTTP_X_FORWARDED_FOR'):
            threadlocal.auditlog['remote_addr'] = request.META.get('HTTP_X_FORWARDED_FOR').split(',')[0]

        # Connect signal for automatic logging
        if hasattr(request, 'user') and is_authenticated(request.user):
            set_actor = curry(self.set_actor, user=request.user, signal_duid=threadlocal.auditlog['signal_duid'])
            pre_save.connect(set_actor, sender=LogEntry, dispatch_uid=threadlocal.auditlog['signal_duid'], weak=False)

    def process_response(self, request, response):
        """
        Disconnects the signal receiver to prevent it from staying active.
        """
        if hasattr(threadlocal, 'auditlog'):
            pre_save.disconnect(sender=LogEntry, dispatch_uid=threadlocal.auditlog['signal_duid'])

        return response

    def process_exception(self, request, exception):
        """
        Disconnects the signal receiver to prevent it from staying active in case of an exception.
        """
        if hasattr(threadlocal, 'auditlog'):
            pre_save.disconnect(sender=LogEntry, dispatch_uid=threadlocal.auditlog['signal_duid'])

        return None

    @staticmethod
    def set_actor(user, sender, instance, signal_duid, **kwargs):
        """
        Signal receiver with an extra, required 'user' kwarg. This method becomes a real (valid) signal receiver when
        it is curried with the actor.
        """
        if hasattr(threadlocal, 'auditlog'):
            if signal_duid != threadlocal.auditlog['signal_duid']:
                return
            try:
                app_label, model_name = settings.AUTH_USER_MODEL.split('.')
                auth_user_model = apps.get_model(app_label, model_name)
            except ValueError:
                auth_user_model = apps.get_model('auth', 'user')
            if sender == LogEntry and isinstance(user, auth_user_model) and instance.actor is None:
                instance.actor = user

            instance.remote_addr = threadlocal.auditlog['remote_addr']
Python
0.000004
@@ -83,17 +83,25 @@ .con -f +trib.auth import sett @@ -96,24 +96,30 @@ import -settings +get_user_model %0Afrom dj @@ -203,37 +203,8 @@ rry%0A -from django.apps import apps%0A from @@ -2764,269 +2764,8 @@ urn%0A - try:%0A app_label, model_name = settings.AUTH_USER_MODEL.split('.')%0A auth_user_model = apps.get_model(app_label, model_name)%0A except ValueError:%0A auth_user_model = apps.get_model('auth', 'user')%0A @@ -2815,20 +2815,19 @@ e(user, -auth +get _user_mo @@ -2829,16 +2829,18 @@ er_model +() ) and in
00c14e981807668b09a5d6a2e71fe8872291acad
Add admin support for attachments
django_mailbox/admin.py
django_mailbox/admin.py
from django.conf import settings
from django.contrib import admin
from django_mailbox.models import Message, Mailbox

def get_new_mail(mailbox_admin, request, queryset):
    for mailbox in queryset.all():
        mailbox.get_new_mail()
get_new_mail.short_description = 'Get new mail'

class MailboxAdmin(admin.ModelAdmin):
    list_display = (
        'name',
        'uri',
        'from_email',
        'active',
    )
    actions = [get_new_mail]

class MessageAdmin(admin.ModelAdmin):
    list_display = (
        'subject',
        'processed',
        'mailbox',
        'outgoing',
    )
    ordering = ['-processed']
    list_filter = (
        'mailbox',
        'outgoing',
    )
    raw_id_fields = (
        'in_reply_to',
    )

if getattr(settings, 'DJANGO_MAILBOX_ADMIN_ENABLED', True):
    admin.site.register(Message, MessageAdmin)
    admin.site.register(Mailbox, MailboxAdmin)
Python
0
@@ -93,16 +93,35 @@ s import + MessageAttachment, Message @@ -130,16 +130,16 @@ Mailbox%0A - %0Adef get @@ -504,16 +504,74 @@ _mail%5D%0A%0A +class MessageAttachmentAdmin(admin.ModelAdmin):%0A pass%0A%0A class Me @@ -981,16 +981,16 @@ True):%0A - admi @@ -1028,16 +1028,83 @@ eAdmin)%0A + admin.site.register(MessageAttachmentAdmin, MessageAttachment)%0A admi
472e24a080132a78af0128a4f89e73c86527f761
set 755 permissions on s2protocol.py so that it can be executed
s2protocol.py
s2protocol.py
#!/usr/bin/env python
#
# Copyright (c) 2013 Blizzard Entertainment
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.

import sys
import argparse
import pprint

from mpyq import mpyq

import protocol15405


class EventLogger:
    def __init__(self):
        self._event_stats = {}

    def log(self, output, event):
        # update stats
        if '_event' in event and '_bits' in event:
            stat = self._event_stats.get(event['_event'], [0, 0])
            stat[0] += 1  # count of events
            stat[1] += event['_bits']  # count of bits
            self._event_stats[event['_event']] = stat
        # write structure
        pprint.pprint(event, stream=output)

    def log_stats(self, output):
        for name, stat in sorted(self._event_stats.iteritems(), key=lambda x: x[1][1]):
            print >> output, '"%s", %d, %d,' % (name, stat[0], stat[1] / 8)


if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    parser.add_argument('replay_file', help='.SC2Replay file to load')
    parser.add_argument("--gameevents", help="print game events",
                        action="store_true")
    parser.add_argument("--messageevents", help="print message events",
                        action="store_true")
    parser.add_argument("--trackerevents", help="print tracker events",
                        action="store_true")
    parser.add_argument("--attributeevents", help="print attributes events",
                        action="store_true")
    parser.add_argument("--header", help="print protocol header",
                        action="store_true")
    parser.add_argument("--details", help="print protocol details",
                        action="store_true")
    parser.add_argument("--initdata", help="print protocol initdata",
                        action="store_true")
    parser.add_argument("--stats", help="print stats",
                        action="store_true")
    args = parser.parse_args()

    archive = mpyq.MPQArchive(args.replay_file)

    logger = EventLogger()

    # Read the protocol header, this can be read with any protocol
    contents = archive.header['user_data_header']['content']
    header = protocol15405.decode_replay_header(contents)
    if args.header:
        logger.log(sys.stdout, header)

    # The header's baseBuild determines which protocol to use
    baseBuild = header['m_version']['m_baseBuild']
    try:
        protocol = __import__('protocol%s' % (baseBuild,))
    except:
        print >> sys.stderr, 'Unsupported base build: %d' % baseBuild
        sys.exit(1)

    # Print protocol details
    if args.details:
        contents = archive.read_file('replay.details')
        details = protocol.decode_replay_details(contents)
        logger.log(sys.stdout, details)

    # Print protocol init data
    if args.initdata:
        contents = archive.read_file('replay.initData')
        initdata = protocol.decode_replay_initdata(contents)
        logger.log(sys.stdout, initdata['m_syncLobbyState']['m_gameDescription']['m_cacheHandles'])
        logger.log(sys.stdout, initdata)

    # Print game events and/or game events stats
    if args.gameevents:
        contents = archive.read_file('replay.game.events')
        for event in protocol.decode_replay_game_events(contents):
            logger.log(sys.stdout, event)

    # Print message events
    if args.messageevents:
        contents = archive.read_file('replay.message.events')
        for event in protocol.decode_replay_message_events(contents):
            logger.log(sys.stdout, event)

    # Print tracker events
    if args.trackerevents:
        if hasattr(protocol, 'decode_replay_tracker_events'):
            contents = archive.read_file('replay.tracker.events')
            for event in protocol.decode_replay_tracker_events(contents):
                logger.log(sys.stdout, event)

    # Print attributes events
    if args.attributeevents:
        contents = archive.read_file('replay.attributes.events')
        attributes = protocol.decode_replay_attributes_events(contents)
        logger.log(sys.stdout, attributes)

    # Print stats
    if args.stats:
        logger.log_stats(sys.stderr)
Python
0
2d42d48863bb2fb063ab09c659ceaf4f3f32e2cf
Fix first fetching of the weboob's module;
server/weboob/main.py
server/weboob/main.py
#!/usr/bin/env python

from weboob.core import Weboob
from weboob.core.modules import ModuleLoadError
from weboob.exceptions import BrowserIncorrectPassword, BrowserPasswordExpired
from weboob.tools.backend import Module

import json
import os
import sys

DATETIME_FORMAT = "%Y-%m-%dT%H:%M:%SZ"

# cwd is /build/server
err_path = os.path.join('shared', 'errors.json')
weboob_path = os.path.join('weboob', 'data')

with file(err_path) as f:
    j = json.loads(f.read())
    UNKNOWN_MODULE = j["UNKNOWN_WEBOOB_MODULE"]
    INVALID_PASSWORD = j["INVALID_PASSWORD"]
    EXPIRED_PASSWORD = j["EXPIRED_PASSWORD"]
    GENERIC_EXCEPTION = j["GENERIC_EXCEPTION"]
    INVALID_PARAMETERS = j['INVALID_PARAMETERS']


class Connector(object):
    '''
    Connector is a tool that connects to common websites like bank website,
    phone operator website... and that grabs personal data from there.
    Credentials are required to make this operation.

    Technically, connectors are weboob backend wrappers.
    '''

    @staticmethod
    def weboob():
        if not os.path.isdir(weboob_path):
            os.makedirs(weboob_path)
        return Weboob(workdir=weboob_path, datadir=weboob_path)

    @staticmethod
    def test():
        Connector.weboob()

    @staticmethod
    def update():
        return Connector.weboob().update()

    def __init__(self, modulename, parameters):
        '''
        Create a Weboob handle and try to load the modules.
        '''
        self.weboob = Connector.weboob()
        self.backend = self.weboob.build_backend(modulename, parameters)

    def get_accounts(self):
        results = []
        for account in self.backend.iter_accounts():
            if self.weboob.VERSION == "1.1" and repr(account.iban) != "NotLoaded":
                results.append({
                    "accountNumber": account.id,
                    "label": account.label,
                    "balance": unicode(account.balance),
                    "iban": unicode(account.iban)
                })
            else:
                results.append({
                    "accountNumber": account.id,
                    "label": account.label,
                    "balance": unicode(account.balance)
                })
        return results

    def get_transactions(self):
        results = []
        for account in self.backend.iter_accounts():
            try:
                for history in self.backend.iter_history(account):
                    results.append({
                        "account": account.id,
                        "amount": str(history.amount),
                        "date": history.date.strftime(DATETIME_FORMAT),
                        "rdate": history.rdate.strftime(DATETIME_FORMAT),
                        "label": unicode(history.label),
                        "raw": unicode(history.raw),
                        "type": history.type
                    })
            except NotImplementedError:
                print >> sys.stderr, "The account type has not been implemented by weboob."
        return results

    def fetch(self, which):
        results = {}
        try:
            if which == 'accounts':
                results['values'] = self.get_accounts()
            elif which == 'transactions':
                results['values'] = self.get_transactions()
        except ModuleLoadError:
            results['error_code'] = UNKNOWN_MODULE
        except BrowserIncorrectPassword:
            results['error_code'] = INVALID_PASSWORD
        except BrowserPasswordExpired:
            results['error_code'] = EXPIRED_PASSWORD
        except Module.ConfigError as e:
            results['error_code'] = INVALID_PARAMETERS
            results['error_content'] = unicode(e)
        except Exception as e:
            print >> sys.stderr, "Unknown error of type %s" % str(type(e))
            results['error_code'] = GENERIC_EXCEPTION
            results['error_content'] = unicode(e)
        return results


if __name__ == '__main__':
    """
    Possible arguments:
    - test
    - update
    - accounts bankuuid login password customFields?
    - transactions bankuuid login password customFields?
    """

    command = None
    other_args = []
    for l in sys.stdin:
        if command is None:
            command = l.strip()
            continue
        other_args.append(l.strip())

    if command == 'test':
        try:
            Connector.test()
            sys.exit(0)
        except Exception as e:
            print >> sys.stderr, "Is weboob installed? %s" % unicode(e)
            sys.exit(1)

    if command == 'update':
        try:
            Connector.update()
            sys.exit(0)
        except Exception as e:
            print >> sys.stderr, "Exception when updating weboob: %s" % unicode(e)
            sys.exit(1)

    if command != 'accounts' and command != 'transactions':
        print >> sys.stderr, "Unknown command '%s'." % command
        sys.exit(1)

    if len(other_args) < 3:
        print >> sys.stderr, 'Missing arguments for accounts/transactions'
        sys.exit(1)

    bankuuid, login, password = other_args
    custom_fields = None
    if len(other_args) == 4:
        custom_fields = other_args[3]

    # Format parameters for the Weboob connector.
    params = {
        'login': login,
        'password': password,
    }

    if custom_fields is not None:
        custom_fields = json.loads(self.custom_fields)
        for f in custom_fields:
            params[f["name"]] = f["value"]

    content = Connector(bankuuid, params).fetch(command)
    print json.dumps(content, ensure_ascii=False).encode('utf-8')
Python
0
@@ -1501,16 +1501,487 @@ weboob() +%0A%0A # Careful: this is extracted from weboob's code.%0A # Install the module if necessary and hide the progress.%0A class DummyProgress:%0A def progress(self, a, b):%0A pass%0A repositories = self.weboob.repositories%0A minfo = repositories.get_module_info(modulename)%0A if minfo is not None and not minfo.is_installed():%0A repositories.install(minfo, progress=DummyProgress())%0A%0A # Calls the backend. %0A
48c880a35c899929da33f20e9cd4ee7e4fd8bc7e
Set a custom name template including the replica set
servers/mongo/data.py
servers/mongo/data.py
from .. import Server

import logging


class MongoDataNode(Server):

    log = logging.getLogger('Servers.MongoDataNode')
    log.setLevel(logging.DEBUG)
    ch = logging.StreamHandler()
    ch.setLevel(logging.DEBUG)
    formatter = logging.Formatter(
        '%(asctime)s [%(name)s] %(levelname)s: %(message)s',
        datefmt = '%H:%M:%S')
    ch.setFormatter(formatter)
    log.addHandler(ch)

    def __init__(self, dry = None, verbose = None, size = None, cluster = None,
                 environment = None, ami = None, region = None, role = None,
                 keypair = None, availability_zone = None,
                 security_groups = None, block_devices = None,
                 replica_set = None, replica_set_index = None):

        super(MongoDataNode, self).__init__(dry, verbose, size, cluster,
                                            environment, ami, region, role,
                                            keypair, availability_zone,
                                            security_groups, block_devices)

        self.replica_set = replica_set
        self.replica_set_index = replica_set_index

    def configure(self):

        super(MongoDataNode, self).configure()

        if self.replica_set is None:
            self.log.warn('No replica set provided')
            self.replica_set = 1

        self.log.info('Using replica set {set}'.format(set=self.replica_set))

        if self.replica_set_index is None:
            self.log.warn('No replica set set index provided')
            self.replica_set_index = 1

        self.log.info('Using replica set index {index}'.format(
            index=self.replica_set_index))
Python
0
@@ -1683,12 +1683,512 @@ et_index))%0A%0A + @property%0A def name(self):%0A%0A try:%0A return self.unique_name%0A except Exception:%0A pass%0A%0A template = '%7Benvcl%7D-rs%7Bset%7D-%7Bzone%7D-%7Bindex%7D'%0A name = template.format(envcl=self.envcl, set=self.replica_set,%0A zone=self.availability_zone%5B-1:%5D,%0A index=self.replica_set_index)%0A%0A self.unique_name = name%0A%0A self.log.info('Using node name %7Bname%7D'.format(name=name))%0A%0A return name%0A
48303bced57e8e7c51a309c623b4da60b3920fba
Add /usr/local/namebench as a valid path, since that's where Python on Ubuntu puts us
libnamebench/util.py
libnamebench/util.py
# Copyright 2009 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Little utility functions."""

__author__ = 'tstromberg@google.com (Thomas Stromberg)'

import math
import re
import util
import os.path
import sys
import traceback

# third party lib
import dns.resolver

import nameserver


def CalculateListAverage(values):
  """Computes the arithmetic mean of a list of numbers."""
  if not values:
    return 0
  return sum(values) / float(len(values))


def DrawTextBar(value, max_value, max_width=53):
  """Return a simple ASCII bar graph, making sure it fits within max_width.

  Args:
    value: integer or float representing the value of this bar.
    max_value: integer or float representing the largest bar.
    max_width: How many characters this graph can use (int)

  Returns:
    string
  """
  hash_width = max_value / max_width
  return int(math.ceil(value/hash_width)) * '#'


def SecondsToMilliseconds(seconds):
  return seconds * 1000


def SplitSequence(seq, size):
  """Recipe From http://code.activestate.com/recipes/425397/

  Modified to not return blank values."""
  newseq = []
  splitsize = 1.0/size*len(seq)
  for i in range(size):
    newseq.append(seq[int(round(i*splitsize)):int(round((i+1)*splitsize))])

  return [ x for x in newseq if x ]


def InternalNameServers():
  """Return list of DNS server IP's used by the host."""
  try:
    return dns.resolver.Resolver().nameservers
  except:
    print "Unable to get list of internal DNS servers."
    return []


def ExtractIPsFromString(ip_string):
  """Return a tuple of ip addressed held in a string."""
  ips = []
  # IPV6 If this regexp is too loose, see Regexp-IPv6 in CPAN for inspiration.
  ips.extend(re.findall('[\dabcdef:]+:[\dabcdef]+', ip_string, re.IGNORECASE))
  ips.extend(re.findall('\d+\.\d+\.\d+\.+\d+', ip_string))
  return ips


def ExtractIPTuplesFromString(ip_string):
  ip_tuples = []
  for ip in ExtractIPsFromString(ip_string):
    ip_tuples.append((ip,ip))
  return ip_tuples


def FindDataFile(filename):
  if os.path.exists(filename):
    return filename

  # If it's not a relative path, we can't do anything useful.
  if os.path.isabs(filename):
    return filename

  other_places = [os.getcwd(),
                  os.path.join(os.getcwd(), 'namebench.app', 'Contents', 'Resources'),
                  os.path.join(sys.prefix, 'namebench'),
                  '/usr/local/share/namebench'
                  '/usr/local/etc/namebench',
                  '/etc/namebench',
                  '/usr/share/namebench',
                  '/usr/namebench']
  for dir in reversed(sys.path):
    other_places.append(dir)
    other_places.append(os.path.join(dir, 'namebench'))

  for place in other_places:
    path = os.path.join(place, filename)
    if os.path.exists(path):
      return path

  return filename


def GetLastExceptionString():
  """Get the last exception and return a good looking string for it."""
  (exc, error) = sys.exc_info()[0:2]
  exc_msg = str(exc)
  if '<class' in exc_msg:
    exc_msg = exc_msg.split("'")[1]

  exc_msg = exc_msg.replace('dns.exception.', '')
  return '%s %s' % (exc_msg, error)
Python
0.000001
@@ -2966,32 +2966,74 @@ etc/namebench',%0A + '/usr/local/namebench',%0A
cac7a02edd2f1effdf3da181542721a06e971790
Increase cached_input to 5 minutes
flexget/utils/cached_input.py
flexget/utils/cached_input.py
from __future__ import unicode_literals, division, absolute_import

import copy
import logging
import hashlib
from datetime import datetime, timedelta

from sqlalchemy import Column, Integer, String, DateTime, PickleType, Unicode, ForeignKey
from sqlalchemy.orm import relation

from flexget import db_schema
from flexget.utils.database import safe_pickle_synonym
from flexget.utils.tools import parse_timedelta, TimedDict
from flexget.entry import Entry
from flexget.event import event
from flexget.plugin import PluginError

log = logging.getLogger('input_cache')
Base = db_schema.versioned_base('input_cache', 0)


class InputCache(Base):
    __tablename__ = 'input_cache'

    id = Column(Integer, primary_key=True)
    name = Column(Unicode)
    hash = Column(String)
    added = Column(DateTime, default=datetime.now)

    entries = relation('InputCacheEntry', backref='cache', cascade='all, delete, delete-orphan')


class InputCacheEntry(Base):
    __tablename__ = 'input_cache_entry'

    id = Column(Integer, primary_key=True)
    _entry = Column('entry', PickleType)
    entry = safe_pickle_synonym('_entry')

    cache_id = Column(Integer, ForeignKey('input_cache.id'), nullable=False)


@event('manager.db_cleanup')
def db_cleanup(session):
    """Removes old input caches from plugins that are no longer configured."""
    result = session.query(InputCache).filter(InputCache.added < datetime.now() - timedelta(days=7)).delete()
    if result:
        log.verbose('Removed %s old input caches.' % result)


def config_hash(config):
    """
    :param dict config: Configuration
    :return: MD5 hash for *config*
    """
    if isinstance(config, dict):
        # this does in fact support nested dicts, they're sorted too!
        return hashlib.md5(str(sorted(config.items()))).hexdigest()
    else:
        return hashlib.md5(str(config)).hexdigest()


class cached(object):
    """
    Implements transparent caching decorator @cached for inputs.

    Decorator has two parameters:

    * **name** in which the configuration is present in tasks configuration.
    * **key** in which the configuration has the cached resource identifier (ie. url).
      If the key is not given or present in the configuration :name: is expected to be a cache name (ie. url)

    .. note:: Configuration assumptions may make this unusable in some (future) inputs
    """

    cache = TimedDict(cache_time='1 minute')

    def __init__(self, name, persist=None):
        # Cast name to unicode to prevent sqlalchemy warnings when filtering
        self.name = unicode(name)
        # Parse persist time
        self.persist = persist and parse_timedelta(persist)

    def __call__(self, func):

        def wrapped_func(*args, **kwargs):
            # get task from method parameters
            task = args[1]

            # detect api version
            api_ver = 1
            if len(args) == 3:
                api_ver = 2

            if api_ver == 1:
                # get name for a cache from tasks configuration
                if not self.name in task.config:
                    raise Exception('@cache config name %s is not configured in task %s' % (self.name, task.name))
                hash = config_hash(task.config[self.name])
            else:
                hash = config_hash(args[2])

            log.trace('self.name: %s' % self.name)
            log.trace('hash: %s' % hash)

            cache_name = self.name + '_' + hash
            log.debug('cache name: %s (has: %s)' % (cache_name, ', '.join(self.cache.keys())))

            if cache_name in self.cache:
                # return from the cache
                log.trace('cache hit')
                entries = []
                for entry in self.cache[cache_name]:
                    fresh = copy.deepcopy(entry)
                    entries.append(fresh)
                if entries:
                    log.verbose('Restored %s entries from cache' % len(entries))
                return entries
            else:
                if self.persist and not task.options.nocache:
                    # Check database cache
                    db_cache = task.session.query(InputCache).filter(InputCache.name == self.name).\
                        filter(InputCache.hash == hash).\
                        filter(InputCache.added > datetime.now() - self.persist).\
                        first()
                    if db_cache:
                        entries = [Entry(e.entry) for e in db_cache.entries]
                        log.verbose('Restored %s entries from db cache' % len(entries))
                        # Store to in memory cache
                        self.cache[cache_name] = copy.deepcopy(entries)
                        return entries

                # Nothing was restored from db or memory cache, run the function
                log.trace('cache miss')
                # call input event
                try:
                    response = func(*args, **kwargs)
                except PluginError as e:
                    # If there was an error producing entries, but we have valid entries in the db cache, return those.
                    if self.persist and not task.options.nocache:
                        db_cache = task.session.query(InputCache).filter(InputCache.name == self.name).\
                            filter(InputCache.hash == hash).first()
                        if db_cache and db_cache.entries:
                            log.error('There was an error during %s input (%s), using cache instead.' % (self.name, e))
                            entries = [Entry(e.entry) for e in db_cache.entries]
                            log.verbose('Restored %s entries from db cache' % len(entries))
                            # Store to in memory cache
                            self.cache[cache_name] = copy.deepcopy(entries)
                            return entries
                    # If there was nothing in the db cache, re-raise the error.
                    raise
                if api_ver == 1:
                    response = task.entries
                if not isinstance(response, list):
                    log.warning('Input %s did not return a list, cannot cache.' % self.name)
                    return response

                # store results to cache
                log.debug('storing to cache %s %s entries' % (cache_name, len(response)))
                try:
                    self.cache[cache_name] = copy.deepcopy(response)
                except TypeError:
                    # might be caused because of backlog restoring some idiotic stuff, so not neccessarily a bug
                    log.critical('Unable to save task content into cache, if problem persists longer than a day please report this as a bug')
                if self.persist:
                    # Store to database
                    log.debug('Storing cache %s to database.' % cache_name)
                    db_cache = task.session.query(InputCache).filter(InputCache.name == self.name).\
                        filter(InputCache.hash == hash).first()
                    if not db_cache:
                        db_cache = InputCache(name=self.name, hash=hash)
                    db_cache.entries = [InputCacheEntry(entry=e) for e in response]
                    db_cache.added = datetime.now()
                    task.session.merge(db_cache)

            return response

        return wrapped_func
Python
0.000011
@@ -2402,16 +2402,17 @@ me=' -1 +5 minute +s ')%0A%0A
71289d3a22476001421454ff736ea03742e43158
Add basic parser
vumi_twilio_api/twilml_parser.py
vumi_twilio_api/twilml_parser.py
class Verb(object):
    """Represents a single verb in TwilML.
    """

    def __init__(self, verb, attributes={}, nouns={}):
        self.verb = verb
        self.attributes = attributes
        self.nouns = nouns
Python
0.000334
@@ -1,8 +1,45 @@ +import xml.etree.ElementTree as ET%0A%0A%0A class Ve @@ -242,8 +242,749 @@ = nouns%0A +%0A%0Aclass TwilMLParseError(Exception):%0A %22%22%22Raised when trying to parse invalid TwilML%22%22%22%0A%0A%0Aclass TwilMLParser(object):%0A %22%22%22Parser for TwilML%22%22%22%0A%0A def parse_xml(self, xml):%0A %22%22%22Parses TwilML and returns a list of :class:%60Verb%60 objects%22%22%22%0A verbs = %5B%5D%0A root = ET.fromstring(xml)%0A if root.tag != %22Response%22:%0A raise TwilMLParseError(%0A %22Invalid root %25r. Should be 'Request'.%22 %25 root.tag)%0A for child in root:%0A parser = getattr(%0A self, '_parse_%25s' %25 child.tag, self._parse_default)%0A verbs.append(parser(child))%0A return verbs%0A%0A def _parse_default(self, element):%0A raise TwilMLParseError(%22Unable to parse verb %25r%22 %25 element.tag)%0A
84642bab00aecbb061789fc9e8a5d5103e3e9e42
add getdict
panoramisk/message.py
panoramisk/message.py
from . import utils
from urllib.parse import unquote


class Message(utils.CaseInsensitiveDict):
    """Handle both Responses and Events with the same api:

    ..
        >>> resp = Message({'Response': 'Follows'}, 'Response body')
        >>> event = Message({'Event': 'MeetmeEnd', 'Meetme': '4242'})

    Responses:

    .. code-block:: python

        >>> bool(resp.success)
        True
        >>> resp
        <Message Response='Follows' content='Response body'>
        >>> print(resp.content)
        Response body
        >>> for line in resp.iter_lines():
        ...     print(resp.content)
        Response body

    Events:

    .. code-block:: python

        >>> print(event['meetme'])
        4242
        >>> print(event.meetme)
        4242
        >>> event.unknown_header
        ''
    """

    quoted_keys = ['result']
    success_responses = ['Success', 'Follows', 'Goodbye']

    def __init__(self, headers, content=''):
        super(Message, self).__init__(headers, content=content)
        self.manager = None

    @property
    def id(self):
        if 'commandid' in self:
            return self['commandid']
        elif 'actionid' in self:
            return self['actionid']
        return None

    @property
    def action_id(self):
        if 'actionid' in self:
            return self['actionid']
        return None

    @property
    def success(self):
        """return True if a response status is Success or Follows:

        .. code-block:: python

            >>> resp = Message({'Response': 'Success'})
            >>> print(resp.success)
            True
            >>> resp['Response'] = 'Failed'
            >>> resp.success
            False
        """
        if 'event' in self:
            return True
        if self.response in self.success_responses:
            return True
        return False

    def __repr__(self):
        message = ' '.join(['%s=%r' % i for i in sorted(self.items())])
        return '<Message {0}>'.format(message)

    def iter_lines(self):
        """Iter over response body"""
        for line in self.content.split('\n'):
            yield line

    def parsed_result(self):
        """Get parsed result of AGI command"""
        if 'Result' in self:
            return utils.parse_agi_result(self['Result'])
        else:
            raise ValueError('No result in %r' % self)

    @classmethod
    def from_line(cls, line):
        mlines = line.split(utils.EOL)
        headers = {}
        content = ''
        has_body = ('Response: Follows', 'Response: Fail')
        if mlines[0].startswith(has_body):
            content = mlines.pop()
            while not content and mlines:
                content = mlines.pop()
        for mline in mlines:
            if ': ' in mline:
                k, v = mline.split(': ', 1)
                if k.lower() in cls.quoted_keys:
                    v = unquote(v).strip()
                if k in headers:
                    o = headers.setdefault(k, [])
                    if not isinstance(o, list):
                        o = [o]
                    o.append(v)
                    headers[k] = o
                else:
                    headers[k] = v
        if 'Event' in headers or 'Response' in headers:
            return cls(headers, content)
Python
0.000001
@@ -2361,16 +2361,906 @@ self)%0A%0A + def getdict(self, key):%0A %22%22%22Convert a multi values header to a case-insensitive dict:%0A%0A .. code-block:: python%0A%0A %3E%3E%3E resp = Message(%7B%0A ... 'Response': 'Success',%0A ... 'ChanVariable': %5B%0A ... 'FROM_DID=', 'SIPURI=sip:42@10.10.10.1:4242'%5D,%0A ... %7D)%0A %3E%3E%3E print(resp.chanvariable)%0A %5B'FROM_DID=', 'SIPURI=sip:42@10.10.10.1:4242'%5D%0A %3E%3E%3E value = resp.getdict('chanvariable')%0A %3E%3E%3E print(value%5B'sipuri'%5D)%0A sip:42@10.10.10.1:4242%0A %22%22%22%0A values = self.get(key, None)%0A if not isinstance(values, list):%0A raise TypeError(%22%7B0%7D must be a list. got %7B1%7D%22.format(key, values))%0A result = utils.CaseInsensitiveDict()%0A for item in values:%0A k, v = item.split('=', 1)%0A result%5Bk%5D = v%0A return result%0A%0A @cla
948ce666053eee9fbdfd7f14e9f02e0aa6bdd18d
list[:limit] works fine if limit=None
djangofeeds/feedutil.py
djangofeeds/feedutil.py
from django.utils.text import truncate_html_words
from djangofeeds import conf
from datetime import datetime
from djangofeeds.optimization import BeaconDetector

import time
from datetime import datetime, timedelta

_beacon_detector = BeaconDetector()


def entries_by_date(entries, limit=None):
    """Sort the feed entries by date

    :param entries: Entries given from :mod:`feedparser``.
    :param limit: Limit number of posts.

    """
    now = datetime.now()

    def date_entry_tuple(entry, counter):
        """Find the most current date entry tuple."""
        if "date_parsed" in entry:
            return (entry["date_parsed"].encode("utf-8"), entry)
        if "updated_parsed" in entry:
            return (entry["updated_parsed"].encode("utf-8"), entry)
        if "published_parsed" in entry:
            return (entry["published_parsed"].encode("utf-8"), entry)
        return (now - timedelta(seconds=(counter * 30)), entry)

    sortede_entries = [date_entry_tuple(entry, counter)
                            for counter, entry in enumerate(entries)]
    sorted_entries.sort()
    sorted_entries.reverse()
    return [entry for (date, entry) in sorted_entries[slice(0, limit)]]


def find_post_content(feed_obj, entry):
    """Find the correct content field for a post."""
    try:
        content = entry["content"][0]["value"]
    except (IndexError, KeyError):
        content = entry.get("description") or entry.get("summary", "")

    try:
        #content = _beacon_detector.stripsafe(content)
        content = truncate_html_words(content, conf.DEFAULT_ENTRY_WORD_LIMIT)
    except UnicodeDecodeError:
        content = ""

    return content


def date_to_datetime(field_name):
    """Given a post field, convert its :mod:`feedparser` date tuple to
    :class:`datetime.datetime` objects.

    :param field_name: The post field to use.

    """

    def _parsed_date_to_datetime(feed_obj, entry):
        """generated below"""
        if field_name in entry:
            try:
                time_ = time.mktime(entry[field_name])
                date = datetime.fromtimestamp(time_)
            except TypeError:
                date = datetime.now()
            return date
        return datetime.now()
    _parsed_date_to_datetime.__doc__ = \
            """Convert %s to :class:`datetime.datetime` object""" % field_name
    return _parsed_date_to_datetime
Python
0.999999
@@ -1138,17 +1138,17 @@ try for -( +_ date, en @@ -1150,17 +1150,16 @@ e, entry -) in sort @@ -1173,23 +1173,14 @@ ies%5B -slice(0, +: limit -) %5D%5D%0A%0A
a49095bf078603e046288629aa8497f031ed6bd3
Add transpose_join, joins 2 infinite lists by transposing the next elements
node/divide.py
node/divide.py
#!/usr/bin/env python
from nodes import Node

class Divide(Node):
    """
    Takes two items from the stack and divides them
    """
    char = "/"
    args = 2
    results = 1


    @Node.test_func([4,2], [2])
    @Node.test_func([2,4], [0.5])
    def func(self, a: Node.number, b: Node.number):
        """a/b. floating point division. For integer division, see `f`"""
        return a/b

    @Node.test_func(["test", "t"], [2])
    @Node.test_func([(3,1,2,1,3), 3], [2])
    def count(self, a: Node.indexable, b):
        """a.count(b)"""
        return a.count(b)

    @Node.test_func([[4, 4, 2, 2, 9, 9], [1, 2, 3]], [[[4], [4, 2], [2, 9, 9]]])
    def split_length(self, inp: Node.indexable, lengths: Node.sequence):
        """Split inp into sections length lengths"""
        rtn = [[]]
        cur_length = 0
        for i in inp:
            if cur_length != len(lengths) and len(rtn[-1]) == lengths[cur_length]:
                cur_length += 1
                rtn.append([])
            rtn[-1].append(i)
        return [rtn]

    def time_int_div(self, a: Node.clock, b: Node.number):
        return a.divide_int(b)

    def time_int_div_2(self, a: Node.number, b: Node.clock):
        return b.divide_int(a)

    def time_div(self, a: Node.clock, b: Node.clock):
        return b.divide_time(a)
Python
0.000001
@@ -15,16 +15,17 @@ python%0A +%0A from nod @@ -43,96 +43,75 @@ ode%0A -%0Aclass Divide(Node):%0A %22%22%22%0A Takes two items from the stack and divides them%0A %22%22%22 +from type.type_infinite_list import DummyList%0A%0A%0Aclass Divide(Node): %0A @@ -151,25 +151,16 @@ lts = 1%0A - %0A %0A @No @@ -175,16 +175,17 @@ func(%5B4, + 2%5D, %5B2%5D) @@ -208,16 +208,17 @@ func(%5B2, + 4%5D, %5B0.5 @@ -438,14 +438,18 @@ %5B(3, + 1, + 2, + 1, + 3), @@ -1290,8 +1290,224 @@ _time(a) +%0A%0A def transpose_inf_list(self, a: Node.infinite, b: Node.infinite):%0A def transpose():%0A while 1:%0A yield next(a)%0A yield next(b)%0A return DummyList(transpose())%0A
e869d59dddf6e574155a4c5307b184d46e145d7c
Delete Feeds/Posts and retry query if MultipleObjectsReturned
djangofeeds/managers.py
djangofeeds/managers.py
from django.db import models
from django.db.models.query import QuerySet
from djangofeeds.utils import truncate_field_data

DEFAULT_POST_LIMIT = 5


def update_with_dict(obj, fields):
    set_value = lambda (name, val): setattr(obj, name, val)
    map(set_value, fields.items())
    obj.save()
    return obj


class ExtendedQuerySet(QuerySet):

    def update_or_create(self, **kwargs):
        obj, created = self.get_or_create(**kwargs)

        if not created:
            fields = dict(kwargs.pop("defaults", {}))
            fields.update(kwargs)
            update_with_dict(obj, fields)

        return obj


class ExtendedManager(models.Manager):

    def get_query_set(self):
        return ExtendedQuerySet(self.model)

    def update_or_create(self, **kwargs):
        return self.get_query_set().update_or_create(**kwargs)


FeedManager = ExtendedManager
CategoryManager = ExtendedManager
EnclosureManager = ExtendedManager


class PostManager(ExtendedManager):
    """Manager class for Posts"""

    def all_by_order(self, limit=DEFAULT_POST_LIMIT):
        ordering = self.model._meta.ordering
        return self.all().order_by(*ordering)[:limit]

    def update_post(self, feed_obj, **fields):
        fields = truncate_field_data(self.model, fields)

        if fields.get("guid"):
            # Unique on guid, feed
            post = self.update_or_create(guid=fields["guid"], feed=feed_obj,
                                         defaults=fields)
        else:
            # Unique on title, feed, date_published
            lookup_fields = dict(date_published=fields["date_published"],
                                 title=fields["title"],
                                 feed=feed_obj)
            try:
                return self.update_or_create(defaults=fields, **lookup_fields)
            except self.model.MultipleObjectsReturned:
                dupe = self._find_duplicate_post(lookup_fields, fields)
                if dupe:
                    return update_with_dict(dupe, fields)
                else:
                    return self.create(**fields)

    def _find_duplicate_post(self, lookup_fields, fields):
        # If any of these fields matches, it's a dupe.
        # Compare in order, because you want to compare short fields
        # before having to match the content.
        cmp_fields = ("author", "link", "content")
        range = self.filter(**lookup_fields).iterator()
        for possible in range:
            for field in cmp_fields:
                orig_attr = getattr(possible, field, None)
                this_attr = fields.get(field)
                if orig_attr == this_attr:
                    return possible
Python
0
@@ -115,16 +115,27 @@ eld_data +%0Aimport sys %0A%0ADEFAUL @@ -384,32 +384,299 @@ elf, **kwargs):%0A + try:%0A obj, created = self.get_or_create(**kwargs)%0A except self.model.MultipleObjectsReturned:%0A sys.stderr.write(%22djfeedsMultipleObjectsReturned: %25s%22 %25 (%0A str(kwargs)))%0A self.filter(**kwargs).delete()%0A obj, cre
04933787fc7d8783ce4ac100f946ca32c2599703
Fix unicode issues generating MathJax JS
indico/modules/events/abstracts/views.py
indico/modules/events/abstracts/views.py
# This file is part of Indico.
# Copyright (C) 2002 - 2016 European Organization for Nuclear Research (CERN).
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# Indico is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Indico; if not, see <http://www.gnu.org/licenses/>.

from __future__ import unicode_literals

from flask import render_template, session

from MaKaC.common.TemplateExec import render
from MaKaC.webinterface.pages.base import WPJinjaMixin
from MaKaC.webinterface.pages.conferences import WPConferenceDefaultDisplayBase, WPConferenceModifBase


class WPManageAbstracts(WPJinjaMixin, WPConferenceModifBase):
    template_prefix = 'events/abstracts/'
    sidemenu_option = 'abstracts'

    def getJSFiles(self):
        return (WPConferenceModifBase.getJSFiles(self) +
                self._asset_env['markdown_js'].urls() +
                self._asset_env['selectize_js'].urls() +
                self._asset_env['modules_abstracts_js'].urls())

    def getCSSFiles(self):
        return (WPConferenceModifBase.getCSSFiles(self) +
                self._asset_env['markdown_sass'].urls() +
                self._asset_env['selectize_css'].urls() +
                self._asset_env['abstracts_sass'].urls())

    def _getHeadContent(self):
        return (WPConferenceModifBase._getHeadContent(self) + render('js/mathjax.config.js.tpl') +
                '\n'.join('<script src="{0}" type="text/javascript"></script>'.format(url)
                          for url in self._asset_env['mathjax_js'].urls()))


class WPDisplayAbstractsBase(WPJinjaMixin, WPConferenceDefaultDisplayBase):
    template_prefix = 'events/abstracts/'

    def getJSFiles(self):
        return (WPConferenceDefaultDisplayBase.getJSFiles(self) +
                self._asset_env['markdown_js'].urls() +
                self._asset_env['selectize_js'].urls() +
                self._asset_env['modules_abstracts_js'].urls())

    def getCSSFiles(self):
        return (WPConferenceDefaultDisplayBase.getCSSFiles(self) +
                self._asset_env['markdown_sass'].urls() +
                self._asset_env['selectize_css'].urls() +
                self._asset_env['abstracts_sass'].urls() +
                self._asset_env['event_display_sass'].urls() +
                self._asset_env['contributions_sass'].urls())

    def _getBody(self, params):
        return WPJinjaMixin._getPageContent(self, params).encode('utf-8')

    def _getHeadContent(self):
        return (WPConferenceDefaultDisplayBase._getHeadContent(self) + render('js/mathjax.config.js.tpl') +
                '\n'.join('<script src="{0}" type="text/javascript"></script>'.format(url)
                          for url in self._asset_env['mathjax_js'].urls()))


class WPDisplayAbstracts(WPDisplayAbstractsBase):
    menu_entry_name = 'call_for_abstracts'


class WPMyAbstracts(WPDisplayAbstractsBase):
    menu_entry_name = 'user_abstracts'


class WPSubmitAbstract(WPDisplayAbstractsBase):
    menu_entry_name = 'abstract_submission'


class WPDisplayAbstractsReviewing(WPDisplayAbstracts):
    menu_entry_name = 'user_tracks'

    def getJSFiles(self):
        return (WPDisplayAbstracts.getJSFiles(self) +
                self._asset_env['modules_event_management_js'].urls())

    def getCSSFiles(self):
        return (WPDisplayAbstracts.getCSSFiles(self) +
                self._asset_env['event_display_sass'].urls() +
                self._asset_env['tracks_sass'].urls())


def render_abstract_page(abstract, view_class=None, management=False):
    from indico.modules.events.abstracts.forms import (AbstractCommentForm, AbstractJudgmentForm,
                                                       AbstractReviewedForTracksForm, build_review_form)
    comment_form = AbstractCommentForm(abstract=abstract, user=session.user, formdata=None)
    review_forms = {track.id: build_review_form(abstract, track)
                    for track in abstract.reviewed_for_tracks
                    if track.can_review_abstracts(session.user)}
    judgment_form = AbstractJudgmentForm(abstract=abstract, formdata=None)
    review_track_list_form = AbstractReviewedForTracksForm(event=abstract.event_new, obj=abstract, formdata=None)
    params = {'abstract': abstract,
              'comment_form': comment_form,
              'review_forms': review_forms,
              'review_track_list_form': review_track_list_form,
              'judgment_form': judgment_form,
              'management': management}
    if view_class:
        return view_class.render_template('abstract.html', abstract.event_new.as_legacy, **params)
    else:
        return render_template('events/abstracts/abstract.html', no_javascript=True, **params)
Python
0.001111
@@ -1011,16 +1011,293 @@ fBase%0A%0A%0A +class _MathjaxMixin:%0A def _getHeadContent(self):%0A return (render('js/mathjax.config.js.tpl') +%0A b'%5Cn'.join(b'%3Cscript src=%22%7B0%7D%22 type=%22text/javascript%22%3E%3C/script%3E'.format(url)%0A for url in self._asset_env%5B'mathjax_js'%5D.urls()))%0A%0A%0A class WP @@ -1312,16 +1312,31 @@ stracts( +_MathjaxMixin, WPJinjaM @@ -2001,33 +2001,32 @@ %0A return -( WPConferenceModi @@ -2059,210 +2059,42 @@ ) + -render('js/mathjax.config.js.tpl') +%0A '%5Cn'.join('%3Cscript src=%22%7B0%7D%22 type=%22text/javascript%22%3E%3C/script%3E'.format(url)%0A for url in self._asset_env%5B'mathjax_js'%5D.urls()) +_MathjaxMixin._getHeadContent(self )%0A%0A%0A @@ -2122,16 +2122,31 @@ ctsBase( +_MathjaxMixin, WPJinjaM @@ -3037,33 +3037,32 @@ %0A return -( WPConferenceDefa @@ -3104,210 +3104,42 @@ ) + -render('js/mathjax.config.js.tpl') +%0A '%5Cn'.join('%3Cscript src=%22%7B0%7D%22 type=%22text/javascript%22%3E%3C/script%3E'.format(url)%0A for url in self._asset_env%5B'mathjax_js'%5D.urls()) +_MathjaxMixin._getHeadContent(self )%0A%0A%0A