commit         stringlengths    40 .. 40
subject        stringlengths    4 .. 1.73k
repos          stringlengths    5 .. 127k
old_file       stringlengths    2 .. 751
new_file       stringlengths    2 .. 751
new_contents   stringlengths    1 .. 8.98k
old_contents   stringlengths    0 .. 6.59k
license        stringclasses    13 values
lang           stringclasses    23 values
1058a9cb6e667c850f56b6003038496b77c359c5
Add tool to fix links.
rangadi/beam,robertwb/incubator-beam,lukecwik/incubator-beam,lukecwik/incubator-beam,markflyhigh/incubator-beam,chamikaramj/beam,chamikaramj/beam,RyanSkraba/beam,rangadi/incubator-beam,lukecwik/incubator-beam,robertwb/incubator-beam,charlesccychen/incubator-beam,chamikaramj/beam,rangadi/beam,charlesccychen/beam,markflyhigh/incubator-beam,rangadi/beam,rangadi/beam,lukecwik/incubator-beam,chamikaramj/beam,charlesccychen/beam,apache/beam,lukecwik/incubator-beam,markflyhigh/incubator-beam,apache/beam,RyanSkraba/beam,robertwb/incubator-beam,RyanSkraba/beam,chamikaramj/beam,markflyhigh/incubator-beam,rangadi/incubator-beam,apache/beam,robertwb/incubator-beam,apache/beam,iemejia/incubator-beam,RyanSkraba/beam,chamikaramj/beam,iemejia/incubator-beam,apache/beam,mxm/incubator-beam,chamikaramj/beam,charlesccychen/beam,lukecwik/incubator-beam,robertwb/incubator-beam,robertwb/incubator-beam,apache/beam,lukecwik/incubator-beam,rangadi/beam,charlesccychen/beam,markflyhigh/incubator-beam,charlesccychen/incubator-beam,apache/beam,markflyhigh/incubator-beam,markflyhigh/incubator-beam,robertwb/incubator-beam,apache/beam,charlesccychen/beam,mxm/incubator-beam,charlesccychen/beam,lukecwik/incubator-beam,rangadi/incubator-beam,robertwb/incubator-beam,charlesccychen/incubator-beam,RyanSkraba/beam,charlesccychen/beam,robertwb/incubator-beam,chamikaramj/beam,chamikaramj/beam,chamikaramj/beam,rangadi/beam,robertwb/incubator-beam,apache/beam,apache/beam,lukecwik/incubator-beam,RyanSkraba/beam,rangadi/beam,lukecwik/incubator-beam,apache/beam,RyanSkraba/beam
website/tools/append_index_html_to_internal_links.py
website/tools/append_index_html_to_internal_links.py
"""Script to fix the links in the staged website. Finds all internal links which do not have index.html at the end and appends index.html in the appropriate place (preserving anchors, etc). Usage: From root directory, after running the jekyll build, execute 'python tools/append_index_html_to_internal_links.py'. Dependencies: beautifulsoup4 Installable via pip as 'sudo pip install beautifulsoup4' or apt via 'sudo apt-get install python-beautifulsoup4'. """ import fnmatch import os import re from bs4 import BeautifulSoup # Original link match. Matches any string which starts with '/' and doesn't # have a file extension. linkMatch = r'^\/(.*\.(?!([^\/]+)$))?[^.]*$' # Regex which matches strings of type /internal/link/#anchor. Breaks into two # groups for ease of inserting 'index.html'. anchorMatch1 = r'(.+\/)(#[^\/]+$)' # Regex which matches strings of type /internal/link#anchor. Breaks into two # groups for ease of inserting 'index.html'. anchorMatch2 = r'(.+\/[a-zA-Z0-9]+)(#[^\/]+$)' matches = [] # Recursively walk content directory and find all html files. for root, dirnames, filenames in os.walk('content'): for filename in fnmatch.filter(filenames, '*.html'): # Javadoc does not have the index.html problem, so omit it. if 'javadoc' not in root: matches.append(os.path.join(root, filename)) print 'Matches: ' + str(len(matches)) # Iterates over each matched file looking for link matches. for match in matches: print 'Fixing links in: ' + match mf = open(match) soup = BeautifulSoup(mf, "lxml") # Iterates over every <a> for a in soup.findAll('a'): try: hr = a['href'] if re.match(linkMatch, hr) is not None: if hr.endswith('/'): # /internal/link/ a['href'] = hr + 'index.html' elif re.match(anchorMatch1, hr) is not None: # /internal/link/#anchor mat = re.match(anchorMatch1, hr) a['href'] = mat.group(1) + 'index.html' + mat.group(2) elif re.match(anchorMatch2, hr) is not None: # /internal/link#anchor mat = re.match(anchorMatch2, hr) a['href'] = mat.group(1) + '/index.html' + mat.group(2) else: # /internal/link a['href'] = hr + '/index.html' mf.close() html = soup.prettify("utf-8") # Write back to the file. with open(match, "wb") as f: print 'Replacing ' + hr + ' with: ' + a['href'] f.write(html) except KeyError as e: # Some <a> tags don't have an href. continue
apache-2.0
Python
8bf248f304e7188e279a37ff06c8fc41f54e1df8
Add console log
anthonykoch/sublimegulpserver,anthonykoch/sublimegulpserver
Logging.py
Logging.py
from GulpServer.Settings import Settings

user_settings = None


def plugin_loaded():
    global user_settings
    user_settings = Settings()


class Console(object):

    def log(self, *args):
        if user_settings.get('dev'):
            print(*args)
mit
Python
9451bfccaf9e2782dc0b1e7670f61ce765b8e7c2
Update for Issue #163
dtrip/.ubuntu,RexGene/monsu-server,dtrip/.ubuntu,RexGene/monsu-server
tamper/nonrecursivereplacement.py
tamper/nonrecursivereplacement.py
#!/usr/bin/env python

"""
Copyright (c) 2006-2012 sqlmap developers (http://sqlmap.org/)
See the file 'doc/COPYING' for copying permission
"""

import random
import re

from lib.core.common import singleTimeWarnMessage
from lib.core.enums import PRIORITY

__priority__ = PRIORITY.NORMAL


def tamper(payload, headers):
    """
    Replaces predefined SQL keywords with representations suitable for
    replacement (e.g. .replace("SELECT", "")) filters

    Example:
        * Input: 1 UNION SELECT 2--
        * Output: 1 UNUNIONION SELSELECTECT 2--

    Notes:
        * Useful to bypass very weak custom filters
    """

    keywords = ("UNION", "SELECT", "INSERT", "UPDATE", "FROM", "WHERE")
    retVal = payload

    warnMsg = "currently only couple of keywords are being processed %s. " % str(keywords)
    warnMsg += "You can set it manually according to your needs"
    singleTimeWarnMessage(warnMsg)

    if payload:
        for keyword in keywords:
            _ = random.randint(1, len(keyword) - 1)
            retVal = re.sub(r"(?i)\b%s\b" % keyword, "%s%s%s" % (keyword[:_], keyword, keyword[_:]), retVal)

    return retVal, headers
mit
Python
ec07c74852eaf9bc6ec7d4abb0e5bb3a740501a4
Add BoundingBox tests
larrybradley/photutils,astropy/photutils
photutils/aperture/tests/test_bounding_box.py
photutils/aperture/tests/test_bounding_box.py
# Licensed under a 3-clause BSD style license - see LICENSE.rst
from __future__ import (absolute_import, division, print_function,
                        unicode_literals)
from numpy.testing import assert_allclose
from astropy.tests.helper import pytest
from ..bounding_box import BoundingBox

try:
    import matplotlib
    HAS_MATPLOTLIB = True
except:
    HAS_MATPLOTLIB = False


def test_bounding_box_init():
    bbox = BoundingBox(1, 10, 2, 20)
    assert bbox.ixmin == 1
    assert bbox.ixmax == 10
    assert bbox.iymin == 2
    assert bbox.iymax == 20


def test_bounding_box_init_minmax():
    with pytest.raises(ValueError):
        BoundingBox(100, 1, 1, 100)
    with pytest.raises(ValueError):
        BoundingBox(1, 100, 100, 1)


def test_bounding_box_inputs():
    with pytest.raises(TypeError):
        BoundingBox([1], [10], [2], [9])
    with pytest.raises(TypeError):
        BoundingBox([1, 2], 10, 2, 9)
    with pytest.raises(TypeError):
        BoundingBox(1.0, 10.0, 2.0, 9.0)
    with pytest.raises(TypeError):
        BoundingBox(1.3, 10, 2, 9)
    with pytest.raises(TypeError):
        BoundingBox(1, 10.3, 2, 9)
    with pytest.raises(TypeError):
        BoundingBox(1, 10, 2.3, 9)
    with pytest.raises(TypeError):
        BoundingBox(1, 10, 2, 9.3)


def test_bounding_box_from_float():
    # This is the example from the method docstring
    bbox = BoundingBox._from_float(xmin=1.0, xmax=10.0, ymin=2.0, ymax=20.0)
    assert bbox == BoundingBox(ixmin=1, ixmax=11, iymin=2, iymax=21)

    bbox = BoundingBox._from_float(xmin=1.4, xmax=10.4, ymin=1.6, ymax=10.6)
    assert bbox == BoundingBox(ixmin=1, ixmax=11, iymin=2, iymax=12)


def test_bounding_box_eq():
    bbox = BoundingBox(1, 10, 2, 20)
    assert bbox == bbox
    assert bbox != BoundingBox(9, 10, 2, 20)
    assert bbox != BoundingBox(1, 99, 2, 20)
    assert bbox != BoundingBox(1, 10, 9, 20)
    assert bbox != BoundingBox(1, 10, 2, 99)


def test_bounding_box_repr():
    bbox = BoundingBox(1, 10, 2, 20)
    assert repr(bbox) == 'BoundingBox(ixmin=1, ixmax=10, iymin=2, iymax=20)'
    assert eval(repr(bbox)) == bbox


def test_bounding_box_shape():
    bbox = BoundingBox(1, 10, 2, 20)
    assert bbox.shape == (18, 9)


def test_bounding_box_slices():
    bbox = BoundingBox(1, 10, 2, 20)
    assert bbox.slices == (slice(2, 20), slice(1, 10))


def test_bounding_box_extent():
    bbox = BoundingBox(1, 10, 2, 20)
    assert_allclose(bbox.extent, (0.5, 9.5, 1.5, 19.5))


@pytest.mark.skipif('not HAS_MATPLOTLIB')
def test_bounding_box_as_patch():
    bbox = BoundingBox(1, 10, 2, 20)
    patch = bbox.as_patch()
    assert_allclose(patch.get_xy(), (0.5, 1.5))
    assert_allclose(patch.get_width(), 9)
    assert_allclose(patch.get_height(), 18)
bsd-3-clause
Python
eef0cb0ff41ec35d92e3d76e1e15c1d6edd5b786
Add ICC(2,1) and ICC(3,1) calculation
tassolom/twq-app,tassolom/twq-app,teamworkquality/twq-app,tassolom/twq-app,tassolom/twq-app,teamworkquality/twq-app,teamworkquality/twq-app,teamworkquality/twq-app
analise/icc.py
analise/icc.py
def icc(data, icc_type):
    ''' Calculate intraclass correlation coefficient for data within
        Brain_Data class

    ICC Formulas are based on:
    Shrout, P. E., & Fleiss, J. L. (1979). Intraclass correlations: uses in
    assessing rater reliability. Psychological bulletin, 86(2), 420.

    icc1:  x_ij = mu + beta_j + w_ij
    icc2/3:  x_ij = mu + alpha_i + beta_j + (ab)_ij + epsilon_ij

    Code modifed from nipype algorithms.icc
    https://github.com/nipy/nipype/blob/master/nipype/algorithms/icc.py

    Args:
        icc_type: type of icc to calculate (icc: voxel random effect,
                  icc2: voxel and column random effect,
                  icc3: voxel and column fixed effect)

    Returns:
        ICC: intraclass correlation coefficient

    '''

    # n: number of targets
    # k: number of judges
    Y = data
    [n, k] = Y.shape

    # Degrees of Freedom
    dfb = n - 1
    dfw = n * (k - 1)
    dfj = k - 1
    dfe = (n - 1) * (k - 1)

    # Sum Square Total
    mean_Y = np.mean(Y)
    SST = ((Y - mean_Y) ** 2).sum()

    # create the design matrix for the different levels
    x = np.kron(np.eye(k), np.ones((n, 1)))  # sessions
    x0 = np.tile(np.eye(n), (k, 1))  # subjects
    X = np.hstack([x, x0])

    # Sum Square Error
    predicted_Y = np.dot(np.dot(np.dot(X, np.linalg.pinv(np.dot(X.T, X))), X.T),
                         Y.flatten('F'))
    residuals = Y.flatten('F') - predicted_Y
    SSE = (residuals ** 2).sum()
    EMS = SSE / dfe

    # Sum square column effect - between colums
    SSC = ((np.mean(Y, 0) - mean_Y) ** 2).sum() * n
    JMS = SSC / dfj

    # Sum Square subject effect - between rows/subjects
    SSR = SST - SSC - SSE
    BMS = SSR / dfb

    # SSW = ((Y - np.mean(Y, 0)) ** 2).sum()
    # print((Y - np.mean(Y, 0)) ** 2)
    # WMS = SSW / dfw

    # print("SST = " + str(SST))
    # print("SSE = " + str(SSE))
    # print("SSC = " + str(SSC))
    # print("SSR = " + str(SSR))
    # print("SSW = " + str(SSW))

    if icc_type == 'icc1':
        # ICC = (BMS - WMS) / (BMS + (k-1) * WMS)
        ICC = -1
    elif icc_type == 'icc2':
        # ICC(2,1) = (mean square subject - mean square error) /
        #            (mean square subject + (k-1)*mean square error +
        #            k*(mean square columns - mean square error)/n)
        ICC = (BMS - EMS) / (BMS + (k-1) * EMS + k * (JMS - EMS) / n)
    elif icc_type == 'icc3':
        # ICC(3,1) = (mean square subject - mean square error) /
        #            (mean square subject + (k-1)*mean square error)
        ICC = (BMS - EMS) / (BMS + (k-1) * EMS)

    return ICC


import numpy as np

data = np.array([
    [9,2,5,8],
    [6,1,3,2],
    [8,4,6,8],
    [7,1,2,6],
    [10,5,6,9],
    [6,2,4,7]
])

# print("ICC(1,1): " + str(icc(data,'icc1')))  # aprox. 0.17
print("ICC(2,1): " + str(icc(data,'icc2')))  # aprox. 0.29
print("ICC(3,1): " + str(icc(data,'icc3')))  # aprox. 0.71
mit
Python
43ccd46f3319f6afe154c5ed663143742c229074
add voronoi_follower
irvs/ros_tms,irvs/ros_tms,irvs/ros_tms,irvs/ros_tms,irvs/ros_tms,irvs/ros_tms,irvs/ros_tms,irvs/ros_tms,irvs/ros_tms
tms_rc/tms_rc_double/scripts/voronoi_follower.py
tms_rc/tms_rc_double/scripts/voronoi_follower.py
# -*- coding:utf-8 -*-
import rospy
from geometry_msgs.msg import Pose2D, Twist
from tms_msg_rc_srv import rc_robot_control, rc_robot_controlResponse
from tms_msg_db.srv import TmsdbGetData, TmsdbGetDataRequest
import datetime
import pymongo
from math import sin, cos, atan2, pi, radians, degrees, sqrt

pub = rospy.Publisher("tms_rc_double/cmd_vel_mux/input/keyop")
GOAL = None


def main():
    global GOAL
    print "Double_voronoi_follower"
    rospy.init_node('wheelchair_voronoi_follower')
    rospy.wait_for_service('/tms_db_reader')
    service = rospy.Service("double_goal_pose", rc_robot_control, goalPoseCallBack)
    r = rospy.rate(10)
    while not rospy.is_shutdown():
        if None == GOAL:
            continue
        KPang = 0.2
        KDang = 0
        KPdist = 0.1
        KDdist = 0
        ARV_DIST = 0.25
        pose = getCurrentPose()
        errorX = GOAL.x - pose.x
        errorY = GOAL.y - pose.y
        targetT = atan2(errorY, errorX)
        errorNX = errorX * cos(-pose.theta) - errorY * sin(-pose.theta)
        errorNT = normalizeAng(targetT - pose.theta)
        tmp_spd = limit(KPdist * errorNX, 100, -100)
        tmp_turn = limit(KPang * degrees(errorNT), 30, -30)
        twist = Twist()
        distance = sqrt(errorX ** 2 + errorY ** 2)
        rospy.loginfo("dist:{0}".format(distance))
        rospy.loginfo("psd:{0}" "turn:{1}".format(tmp_spd, tmp_turn))
        if distance <= ARV_DIST:
            twist.angular.z = 0
            twist.linear.x = 0
            GOAL = None
        else:
            twist.angular.z = radians(tmp_turn)
            twist.linear.x = tmp_spd
        pub.publish(twist)
        r.sleep()


def goalPoseCallBack(req):
    global GOAL
    GOAL = Pose2D()
    GOAL.x = req.arg[0]
    GOAL.y = req.arg[1]
    GOAL.theta = radians(req.arg[2])
    return rc_robot_controlResponse()


def getCurrentPose():
    pose = Pose2D()
    db_req = TmsdbGetDataRequest()
    db_req.tmsdb.id = 2012
    db_req.tmsdb.sensor = 3001
    try:
        srv_client = rospy.ServiceProxy("/tms_db_reader", TmsdbGetData)
        res = srv_client(db_req)
        if 0 == len(res.tmsdb):
            return pose
        pose.x = res.tmsdb[0].x
        pose.y = res.tmsdb[0].y
        pose.theta = res.tmsdb.ry
    except rospy.ServiceException as e:
        print "Service call failed: %s" % e
    return pose


def normalizeAng(rad):
    while rad > pi:  # normalize the angle into the range -180..180 degrees (-pi..pi)
        rad = rad - (2 * pi)
    while rad < -pi:
        rad = rad + (2 * pi)
    return rad


def limit(val, maxn, minn):
    return max(min(maxn, val), minn)


if __name__ == '__main__':
    try:
        main()
    except rospy.ROSInterruptException:
        pass
bsd-3-clause
Python
9e090675765a2c0c6412ee51d1e0e007404a30fd
Create k-diff-pairs-in-an-array.py
tudennis/LeetCode---kamyu104-11-24-2015,yiwen-luo/LeetCode,jaredkoontz/leetcode,yiwen-luo/LeetCode,jaredkoontz/leetcode,kamyu104/LeetCode,tudennis/LeetCode---kamyu104-11-24-2015,tudennis/LeetCode---kamyu104-11-24-2015,kamyu104/LeetCode,yiwen-luo/LeetCode,jaredkoontz/leetcode,kamyu104/LeetCode,yiwen-luo/LeetCode,kamyu104/LeetCode,jaredkoontz/leetcode,yiwen-luo/LeetCode,kamyu104/LeetCode,tudennis/LeetCode---kamyu104-11-24-2015,tudennis/LeetCode---kamyu104-11-24-2015,jaredkoontz/leetcode
Python/k-diff-pairs-in-an-array.py
Python/k-diff-pairs-in-an-array.py
# Time: O(n)
# Space: O(n)

# Total Accepted: 5671
# Total Submissions: 20941
# Difficulty: Easy
# Contributors: murali.kf370
#
# Given an array of integers and an integer k,
# you need to find the number of unique k-diff pairs in the array.
# Here a k-diff pair is defined as an integer pair (i, j),
# where i and j are both numbers in the array and their absolute difference is k.
#
# Example 1:
# Input: [3, 1, 4, 1, 5], k = 2
# Output: 2
# Explanation: There are two 2-diff pairs in the array, (1, 3) and (3, 5).
# Although we have two 1s in the input, we should only return the number of unique pairs.
# Example 2:
# Input:[1, 2, 3, 4, 5], k = 1
# Output: 4
# Explanation: There are four 1-diff pairs in the array, (1, 2), (2, 3), (3, 4) and (4, 5).
# Example 3:
# Input: [1, 3, 1, 5, 4], k = 0
# Output: 1
# Explanation: There is one 0-diff pair in the array, (1, 1).
# Note:
# The pairs (i, j) and (j, i) count as the same pair.
# The length of the array won't exceed 10,000.
# All the integers in the given input belong to the range: [-1e7, 1e7].


class Solution(object):
    def findPairs(self, nums, k):
        """
        :type nums: List[int]
        :type k: int
        :rtype: int
        """
        if k < 0:
            return 0
        result, lookup = set(), set()
        for num in nums:
            if num-k in lookup:
                result.add(num-k)
            if num+k in lookup:
                result.add(num)
            lookup.add(num)
        return len(result)
mit
Python
586e7745f8ed76985f28a391dcf451c06af61903
add sphinx helper functions
myint/rstcheck,myint/rstcheck
src/rstcheck/_sphinx.py
src/rstcheck/_sphinx.py
"""Sphinx helper functions.""" import contextlib import pathlib import tempfile import typing from . import _docutils, _extras if _extras.SPHINX_INSTALLED: import sphinx.application import sphinx.domains.c import sphinx.domains.cpp import sphinx.domains.javascript import sphinx.domains.python import sphinx.domains.std @contextlib.contextmanager def load_sphinx_if_available() -> typing.Generator[None, None, None]: """Contextmanager to register Sphinx directives and roles if sphinx is available.""" if _extras.SPHINX_INSTALLED: with tempfile.TemporaryDirectory() as temp_dir: outdir = pathlib.Path(temp_dir) / "_build" sphinx.application.Sphinx( srcdir=temp_dir, confdir=None, outdir=str(outdir), doctreedir=str(outdir), buildername="dummy", status=None, # type: ignore[arg-type] # NOTE: sphinx type hint is incorrect ) yield else: yield def get_sphinx_directives_and_roles() -> typing.Tuple[typing.List[str], typing.List[str]]: """Return Sphinx directives and roles loaded from sphinx. :return: Tuple of directives and roles """ _extras.install_guard("sphinx") sphinx_directives = list(sphinx.domains.std.StandardDomain.directives) sphinx_roles = list(sphinx.domains.std.StandardDomain.roles) for domain in [ sphinx.domains.c.CDomain, sphinx.domains.cpp.CPPDomain, sphinx.domains.javascript.JavaScriptDomain, sphinx.domains.python.PythonDomain, ]: domain_directives = list(domain.directives) domain_roles = list(domain.roles) sphinx_directives += domain_directives + [ f"{domain.name}:{item}" for item in domain_directives ] sphinx_roles += domain_roles + [f"{domain.name}:{item}" for item in domain_roles] sphinx_directives += list( sphinx.application.docutils.directives._directives # pylint: disable=protected-access ) sphinx_roles += list( sphinx.application.docutils.roles._roles # pylint: disable=protected-access ) return (sphinx_directives, sphinx_roles) def filter_whitelisted_directives_and_roles( directives: typing.List[str], roles: typing.List[str] ) -> typing.Tuple[typing.List[str], typing.List[str]]: """Filter whitelisted directives and roles out of input. :param directives: Directives to filter :param roles: Roles to filter :return: Tuple of fitlered directives and roles """ directive_whitelist = ["code", "code-block", "include"] role_whitelist: typing.List[str] = [] directives = list(filter(lambda d: d not in directive_whitelist, directives)) roles = list(filter(lambda r: r not in role_whitelist, roles)) return (directives, roles) def load_sphinx_ignores() -> None: """Register Sphinx directives and roles to ignore.""" _extras.install_guard("sphinx") (directives, roles) = get_sphinx_directives_and_roles() (directives, roles) = filter_whitelisted_directives_and_roles(directives, roles) _docutils.ignore_directives_and_roles(directives, roles)
mit
Python
9f66f31d42a16d8b9536a9cb160e454118ff4369
Add tests for UninstallPathSet
James-Firth/pip,techtonik/pip,zenlambda/pip,dstufft/pip,luzfcb/pip,erikrose/pip,chaoallsome/pip,fiber-space/pip,h4ck3rm1k3/pip,sigmavirus24/pip,yati-sagade/pip,squidsoup/pip,willingc/pip,mindw/pip,haridsv/pip,zorosteven/pip,rbtcollins/pip,zorosteven/pip,sbidoul/pip,sigmavirus24/pip,wkeyword/pip,jasonkying/pip,benesch/pip,zenlambda/pip,rouge8/pip,jythontools/pip,graingert/pip,h4ck3rm1k3/pip,supriyantomaftuh/pip,ChristopherHogan/pip,wkeyword/pip,jasonkying/pip,nthall/pip,prasaianooz/pip,jamezpolley/pip,esc/pip,tdsmith/pip,pypa/pip,mujiansu/pip,atdaemon/pip,mindw/pip,jasonkying/pip,nthall/pip,zorosteven/pip,caosmo/pip,xavfernandez/pip,mindw/pip,rouge8/pip,RonnyPfannschmidt/pip,jythontools/pip,RonnyPfannschmidt/pip,jythontools/pip,caosmo/pip,chaoallsome/pip,dstufft/pip,erikrose/pip,RonnyPfannschmidt/pip,James-Firth/pip,tdsmith/pip,willingc/pip,benesch/pip,techtonik/pip,KarelJakubec/pip,zvezdan/pip,yati-sagade/pip,prasaianooz/pip,esc/pip,supriyantomaftuh/pip,Gabriel439/pip,zvezdan/pip,ChristopherHogan/pip,h4ck3rm1k3/pip,erikrose/pip,yati-sagade/pip,willingc/pip,rbtcollins/pip,wkeyword/pip,luzfcb/pip,squidsoup/pip,haridsv/pip,jmagnusson/pip,graingert/pip,habnabit/pip,alex/pip,KarelJakubec/pip,xavfernandez/pip,pypa/pip,sbidoul/pip,davidovich/pip,ChristopherHogan/pip,prasaianooz/pip,pradyunsg/pip,Gabriel439/pip,benesch/pip,jamezpolley/pip,habnabit/pip,habnabit/pip,atdaemon/pip,rbtcollins/pip,tdsmith/pip,graingert/pip,blarghmatey/pip,qbdsoft/pip,KarelJakubec/pip,ncoghlan/pip,mujiansu/pip,haridsv/pip,atdaemon/pip,ncoghlan/pip,davidovich/pip,harrisonfeng/pip,squidsoup/pip,jamezpolley/pip,rouge8/pip,natefoo/pip,qbdsoft/pip,nthall/pip,techtonik/pip,blarghmatey/pip,qbdsoft/pip,caosmo/pip,Gabriel439/pip,ncoghlan/pip,alex/pip,natefoo/pip,jmagnusson/pip,natefoo/pip,zenlambda/pip,xavfernandez/pip,harrisonfeng/pip,blarghmatey/pip,sigmavirus24/pip,zvezdan/pip,jmagnusson/pip,James-Firth/pip,luzfcb/pip,dstufft/pip,harrisonfeng/pip,pfmoore/pip,fiber-space/pip,pradyunsg/pip,esc/pip,mujiansu/pip,supriyantomaftuh/pip,fiber-space/pip,alex/pip,davidovich/pip,chaoallsome/pip,pfmoore/pip
tests/unit/test_req_uninstall.py
tests/unit/test_req_uninstall.py
import os
import shutil
import sys
import tempfile

import pytest
from mock import Mock

from pip.locations import running_under_virtualenv
from pip.req.req_uninstall import UninstallPathSet


class TestUninstallPathSet(object):

    def setup(self):
        if running_under_virtualenv():
            # Construct tempdir in sys.prefix, otherwise UninstallPathSet
            # will reject paths.
            self.tempdir = tempfile.mkdtemp(prefix=sys.prefix)
        else:
            self.tempdir = tempfile.mkdtemp()

    def teardown(self):
        shutil.rmtree(self.tempdir, ignore_errors=True)

    def test_add(self):
        file_extant = os.path.join(self.tempdir, 'foo')
        file_nonexistant = os.path.join(self.tempdir, 'nonexistant')
        with open(file_extant, 'w'):
            pass

        ups = UninstallPathSet(dist=Mock())
        assert ups.paths == set()
        ups.add(file_extant)
        assert ups.paths == set([file_extant])
        ups.add(file_nonexistant)
        assert ups.paths == set([file_extant])

    @pytest.mark.skipif("sys.platform == 'win32'")
    def test_add_symlink(self):
        f = os.path.join(self.tempdir, 'foo')
        with open(f, 'w'):
            pass
        l = os.path.join(self.tempdir, 'foo_link')
        os.symlink(f, l)

        ups = UninstallPathSet(dist=Mock())
        ups.add(l)
        assert ups.paths == set([l])
mit
Python
8d8f89c82511b86fb87cef5db3bad633283283cc
Add missing migrations in develop branch
openego/oeplatform,openego/oeplatform,openego/oeplatform,openego/oeplatform
modelview/migrations/0044_auto_20191007_1227.py
modelview/migrations/0044_auto_20191007_1227.py
# -*- coding: utf-8 -*-
# Generated by Django 1.11.25 on 2019-10-07 10:27
from __future__ import unicode_literals

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('modelview', '0043_merge_20190425_1036'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='energyscenario',
            name='networks_electricity_gas_electricity',
        ),
        migrations.RemoveField(
            model_name='energyscenario',
            name='networks_electricity_gas_gas',
        ),
        migrations.AlterField(
            model_name='basicfactsheet',
            name='logo',
            field=models.ImageField(help_text='If a logo for the model exists load it up', null=True, upload_to='logos', verbose_name='Logo'),
        ),
        migrations.AlterField(
            model_name='basicfactsheet',
            name='methodical_focus_1',
            field=models.CharField(help_text='1-3 Keyords describing the main methodical focus of the model e.g."open source", "sector coupling"', max_length=50, verbose_name='Methodical Focus'),
        ),
        migrations.AlterField(
            model_name='basicfactsheet',
            name='source_of_funding',
            field=models.CharField(help_text='What is the main source of funding for the development of the model?', max_length=200, null=True, verbose_name='Source of funding'),
        ),
    ]
agpl-3.0
Python
4a25572283448a820cf55008e81405f3eb84a072
Add test for unicode in env (#345)
catkin/catkin_tools,catkin/catkin_tools,rhaschke/catkin_tools,rhaschke/catkin_tools,catkin/catkin_tools,catkin/catkin_tools,rhaschke/catkin_tools
tests/system/verbs/catkin_build/test_unicode_in_env.py
tests/system/verbs/catkin_build/test_unicode_in_env.py
import os

from ....utils import catkin_success

from ...workspace_factory import workspace_factory


def test_catkin_build_with_unicode_in_env():
    with workspace_factory() as wf:
        wf.create_package('foo', depends=['bar'])
        wf.create_package('bar')
        wf.build()

        print('Workspace: {0}'.format(wf.workspace))
        assert os.path.isdir(wf.workspace)

        env = {'NON_ASCII': '\xc3\xb6'}
        cmd = ['build', '--no-status', '--no-notify', '--verbose']
        assert catkin_success(cmd, env)
apache-2.0
Python
53f91164ce93a01c2ad628fd49109a5fa8917ecb
Extend datasource model schema (#2342)
google/timesketch,google/timesketch,google/timesketch,google/timesketch
timesketch/migrations/versions/180a387da650_extend_datasource_model_with_total_file_.py
timesketch/migrations/versions/180a387da650_extend_datasource_model_with_total_file_.py
"""Extend datasource model with total file events field Revision ID: 180a387da650 Revises: 75af34d75b1e Create Date: 2022-09-26 13:04:10.336534 """ # This code is auto generated. Ignore linter errors. # pylint: skip-file # revision identifiers, used by Alembic. revision = '180a387da650' down_revision = '75af34d75b1e' from alembic import op import sqlalchemy as sa def upgrade(): # ### commands auto generated by Alembic - please adjust! ### op.add_column('datasource', sa.Column('total_file_events', sa.BigInteger(), nullable=True)) # ### end Alembic commands ### def downgrade(): # ### commands auto generated by Alembic - please adjust! ### op.drop_column('datasource', 'total_file_events') # ### end Alembic commands ###
apache-2.0
Python
63dc7fb2586824b6a6de52b1ba80e6196d80ff42
Create credentials.py
fogonwater/pystatsnz
credentials.py
credentials.py
# add your primary statsnz key here
# available from https://statisticsnz.portal.azure-api.net/
statsnz_key = "MY_SECRET_KEY"
mit
Python
2aadd55510684c4065c1bed1c1387ee57b18fd77
Add a prototype of Simulated Annealing Algorithm, and a TSP example.
PyOCL/TSP,PyOCL/oclGA,PyOCL/OpenCLGA,PyOCL/oclGA,PyOCL/oclGA,PyOCL/oclGA,PyOCL/TSP,PyOCL/OpenCLGA,PyOCL/OpenCLGA
OpenCLGA/sa.py
OpenCLGA/sa.py
#!/usr/bin/python3
from abc import ABCMeta
from utils import calc_linear_distance, plot_tsp_result
import math
import random


class SAImpl(metaclass = ABCMeta):
    def __init__(self):
        pass

    ## Calculate the cost of the solution
    def cost(self, solution):
        pass

    ## Return a new neighbor solution
    def neighbor(self, solution):
        pass

    ## Return a probability to decide whether accpet or not.
    def acceptance_probability(self, old_cost, new_cost, temperature):
        pass

    ## Start annealing
    def anneal(self):
        pass


class TSPSolution(SAImpl):
    def __init__(self, city_info):
        SAImpl.__init__(self)
        self.city_info = city_info
        self.temperature = 1000.0
        self.alpha = 0.9
        self.terminate_temperature = 0.00001
        self.iterations = 500

    @staticmethod
    def get_init_params():
        num_cities = 20
        random.seed()
        city_ids = list(range(0, num_cities))
        city_info = {city_id: (random.random() * 100, random.random() * 100)
                     for city_id in city_ids}
        return city_info

    ## For TSP, we calculate the total distance between all cities.
    def cost(self, solution):
        total = len(self.city_info.keys())
        cost = 0
        for index, cid in enumerate(solution):
            first_city = cid
            next_city = solution[(index + 1) % total]
            cost += calc_linear_distance(self.city_info[first_city][0],
                                         self.city_info[first_city][1],
                                         self.city_info[next_city][0],
                                         self.city_info[next_city][1])
        return cost

    ## Find a neighbor solution by swapping random two nodes.
    def neighbor(self, solution):
        neighbor = solution[:]
        total = len(self.city_info.keys())
        a = random.randint(0, total-1)
        b = random.randint(0, total-1)
        while a == b:
            b = random.randint(0, total-1)
        neighbor[a] = solution[b]
        neighbor[b] = solution[a]
        return neighbor

    def acceptance_probability(self, old_cost, new_cost, temperature):
        if new_cost < old_cost:
            return 1.0
        else:
            return math.exp(float(old_cost - new_cost) / temperature)

    def anneal(self):
        solution = list(self.city_info.keys())
        random.shuffle(solution)
        old_cost = self.cost(solution)
        # print('1st round : cost = {} '.format(old_cost))
        T = self.temperature
        T_min = self.terminate_temperature
        alpha = self.alpha
        while T > T_min:
            i = 1
            print('T={}'.format(T))
            while i <= self.iterations:
                new_solution = self.neighbor(solution)
                new_cost = self.cost(new_solution)
                ap = self.acceptance_probability(old_cost, new_cost, T)
                if ap > random.random():
                    solution = new_solution
                    old_cost = new_cost
                    # print('i={} round : cost = {} '.format(T, i, old_cost))
                i += 1
            T = T*alpha
        plot_tsp_result(self.city_info, solution)
        return solution


class SimulatedAnnealing(object):
    def __init__(self, cls_solution):
        self.sas = cls_solution(cls_solution.get_init_params())
        pass

    ## To save the annealing state
    def save(self):
        pass

    ## To restore the annealing state
    def restore(self):
        pass

    ## Start annealing
    def anneal(self):
        best_solution = self.sas.anneal()
        pass


sa = SimulatedAnnealing(TSPSolution)
sa.anneal()
mit
Python
41e21884418cdd2b525b4f02d1cfa4ed9ea2c000
Add bug test for 9268 (#65)
iree-org/iree-samples,iree-org/iree-samples,iree-org/iree-samples,iree-org/iree-samples
bugs/issue_9268.py
bugs/issue_9268.py
# RUN: %PYTHON %s
# XFAIL: *

import iree.compiler.tools.tflite as iree_tflite

# https://github.com/iree-org/iree/issues/9268
ir = '''
func.func @main(%a : tensor<f32>, %b : tensor<f32>) -> tensor<*xf32> {
  %val = "tfl.add"(%a, %b) {fused_activation_function = "NONE"} : (tensor<f32>, tensor<f32>) -> tensor<*xf32>
  return %val : tensor<*xf32>
}
'''

print(ir)

ir = iree_tflite.compile_str(ir, target_backends=["cpu"])
apache-2.0
Python
8c401af5bb7c3678de4091b88d81e04ddf248705
Remove unused 'fahrenheit' config option
szibis/Diamond,gg7/diamond,codepython/Diamond,bmhatfield/Diamond,TAKEALOT/Diamond,hamelg/Diamond,signalfx/Diamond,works-mobile/Diamond,h00dy/Diamond,Basis/Diamond,tusharmakkar08/Diamond,EzyInsights/Diamond,TAKEALOT/Diamond,Precis/Diamond,joel-airspring/Diamond,Precis/Diamond,actmd/Diamond,rtoma/Diamond,joel-airspring/Diamond,gg7/diamond,socialwareinc/Diamond,MichaelDoyle/Diamond,dcsquared13/Diamond,tusharmakkar08/Diamond,eMerzh/Diamond-1,actmd/Diamond,acquia/Diamond,Netuitive/Diamond,Netuitive/netuitive-diamond,Basis/Diamond,gg7/diamond,jumping/Diamond,cannium/Diamond,actmd/Diamond,hvnsweeting/Diamond,ramjothikumar/Diamond,hamelg/Diamond,python-diamond/Diamond,codepython/Diamond,works-mobile/Diamond,Basis/Diamond,jaingaurav/Diamond,jaingaurav/Diamond,dcsquared13/Diamond,Nihn/Diamond-1,zoidbergwill/Diamond,h00dy/Diamond,mfriedenhagen/Diamond,acquia/Diamond,Slach/Diamond,dcsquared13/Diamond,russss/Diamond,signalfx/Diamond,Nihn/Diamond-1,codepython/Diamond,timchenxiaoyu/Diamond,mfriedenhagen/Diamond,timchenxiaoyu/Diamond,Precis/Diamond,rtoma/Diamond,skbkontur/Diamond,russss/Diamond,joel-airspring/Diamond,tuenti/Diamond,Ssawa/Diamond,acquia/Diamond,actmd/Diamond,skbkontur/Diamond,ramjothikumar/Diamond,Ensighten/Diamond,MichaelDoyle/Diamond,eMerzh/Diamond-1,jumping/Diamond,MichaelDoyle/Diamond,Netuitive/Diamond,hamelg/Diamond,tusharmakkar08/Diamond,tuenti/Diamond,socialwareinc/Diamond,cannium/Diamond,timchenxiaoyu/Diamond,joel-airspring/Diamond,h00dy/Diamond,TAKEALOT/Diamond,Ormod/Diamond,cannium/Diamond,Ormod/Diamond,Clever/Diamond,Netuitive/netuitive-diamond,hamelg/Diamond,szibis/Diamond,mzupan/Diamond,EzyInsights/Diamond,Netuitive/netuitive-diamond,EzyInsights/Diamond,socialwareinc/Diamond,MichaelDoyle/Diamond,russss/Diamond,skbkontur/Diamond,anandbhoraskar/Diamond,mzupan/Diamond,szibis/Diamond,hvnsweeting/Diamond,python-diamond/Diamond,Netuitive/Diamond,Ormod/Diamond,socialwareinc/Diamond,rtoma/Diamond,dcsquared13/Diamond,mfriedenhagen/Diamond,signalfx/Diamond,Nihn/Diamond-1,Ensighten/Diamond,Ormod/Diamond,russss/Diamond,jriguera/Diamond,hvnsweeting/Diamond,bmhatfield/Diamond,zoidbergwill/Diamond,jumping/Diamond,tuenti/Diamond,works-mobile/Diamond,zoidbergwill/Diamond,szibis/Diamond,anandbhoraskar/Diamond,Ensighten/Diamond,Ensighten/Diamond,Clever/Diamond,stuartbfox/Diamond,cannium/Diamond,TAKEALOT/Diamond,gg7/diamond,Slach/Diamond,codepython/Diamond,Clever/Diamond,eMerzh/Diamond-1,jriguera/Diamond,EzyInsights/Diamond,stuartbfox/Diamond,works-mobile/Diamond,Ssawa/Diamond,skbkontur/Diamond,timchenxiaoyu/Diamond,python-diamond/Diamond,eMerzh/Diamond-1,h00dy/Diamond,mfriedenhagen/Diamond,signalfx/Diamond,stuartbfox/Diamond,Slach/Diamond,ramjothikumar/Diamond,jaingaurav/Diamond,jriguera/Diamond,bmhatfield/Diamond,hvnsweeting/Diamond,anandbhoraskar/Diamond,bmhatfield/Diamond,mzupan/Diamond,acquia/Diamond,tusharmakkar08/Diamond,tuenti/Diamond,Ssawa/Diamond,Precis/Diamond,ramjothikumar/Diamond,Netuitive/netuitive-diamond,Netuitive/Diamond,mzupan/Diamond,Ssawa/Diamond,zoidbergwill/Diamond,jumping/Diamond,Clever/Diamond,anandbhoraskar/Diamond,Nihn/Diamond-1,rtoma/Diamond,stuartbfox/Diamond,jaingaurav/Diamond,jriguera/Diamond,Slach/Diamond,Basis/Diamond
src/collectors/lmsensors/lmsensors.py
src/collectors/lmsensors/lmsensors.py
# coding=utf-8

"""
This class collects data from libsensors. It should work against libsensors
2.x and 3.x, pending support within the PySensors Ctypes binding:
[http://pypi.python.org/pypi/PySensors/](http://pypi.python.org/pypi/PySensors/)

Requires: 'sensors' to be installed, configured, and the relevant kernel
modules to be loaded.
Requires: PySensors requires Python 2.6+

If you're having issues, check your version of 'sensors'. This collector
written against: sensors version 3.1.2 with libsensors version 3.1.2

#### Dependencies

 * [PySensors](http://pypi.python.org/pypi/PySensors/)

"""

import diamond.collector

try:
    import sensors
    sensors  # workaround for pyflakes issue #13
except ImportError:
    sensors = None


class LMSensorsCollector(diamond.collector.Collector):

    def get_default_config_help(self):
        config_help = super(LMSensorsCollector, self).get_default_config_help()
        config_help.update({
            'send_zero': 'Send sensor data even when there is no value'
        })
        return config_help

    def get_default_config(self):
        """
        Returns default collector settings.
        """
        config = super(LMSensorsCollector, self).get_default_config()
        config.update({
            'path': 'sensors',
            'send_zero': 'False'
        })
        return config

    def collect(self):
        if sensors is None:
            self.log.error('Unable to import module sensors')
            return {}

        sensors.init()
        try:
            for chip in sensors.iter_detected_chips():
                for feature in chip:
                    label = feature.label.replace(' ', '-')
                    try:
                        value = feature.get_value()
                    except:
                        if self.config['send_zero']:
                            value = 0
                    if value is not None:
                        self.publish(".".join([str(chip), label]), value)
        finally:
            sensors.cleanup()
# coding=utf-8

"""
This class collects data from libsensors. It should work against libsensors
2.x and 3.x, pending support within the PySensors Ctypes binding:
[http://pypi.python.org/pypi/PySensors/](http://pypi.python.org/pypi/PySensors/)

Requires: 'sensors' to be installed, configured, and the relevant kernel
modules to be loaded.
Requires: PySensors requires Python 2.6+

If you're having issues, check your version of 'sensors'. This collector
written against: sensors version 3.1.2 with libsensors version 3.1.2

#### Dependencies

 * [PySensors](http://pypi.python.org/pypi/PySensors/)

"""

import diamond.collector

try:
    import sensors
    sensors  # workaround for pyflakes issue #13
except ImportError:
    sensors = None


class LMSensorsCollector(diamond.collector.Collector):

    def get_default_config_help(self):
        config_help = super(LMSensorsCollector, self).get_default_config_help()
        config_help.update({
            'fahrenheit': "True/False",
            'send_zero': 'Send sensor data even when there is no value'
        })
        return config_help

    def get_default_config(self):
        """
        Returns default collector settings.
        """
        config = super(LMSensorsCollector, self).get_default_config()
        config.update({
            'path': 'sensors',
            'fahrenheit': 'True',
            'send_zero': 'False'
        })
        return config

    def collect(self):
        if sensors is None:
            self.log.error('Unable to import module sensors')
            return {}

        sensors.init()
        try:
            for chip in sensors.iter_detected_chips():
                for feature in chip:
                    label = feature.label.replace(' ', '-')
                    try:
                        value = feature.get_value()
                    except:
                        if self.config['send_zero']:
                            value = 0
                    if value is not None:
                        self.publish(".".join([str(chip), label]), value)
        finally:
            sensors.cleanup()
mit
Python
f3e1b1404f32cd0195aa8148d1ab4285cf9ad352
Add class BaseSpider
SaltusVita/ReoGrab
Spiders.py
Spiders.py
'''
Created on 2 Sept. 2016

@author: garet
'''


class BaseSpider():

    def __init__(self):
        pass

    def AddUrls(self, urls):
        pass

    def Routing(self, url):
        pass

    def SaveCache(self, url, data=None):
        pass

    def GetCache(self, url):
        pass

    def Run(self):
        pass
bsd-3-clause
Python
e5be29bc3c5a77493fe64bb3fc8b52611cc13469
Add tests for Generic Interface.
rht/zulip,mahim97/zulip,rht/zulip,mahim97/zulip,brainwane/zulip,hackerkid/zulip,zulip/zulip,eeshangarg/zulip,Galexrt/zulip,vabs22/zulip,kou/zulip,rishig/zulip,amanharitsh123/zulip,punchagan/zulip,eeshangarg/zulip,andersk/zulip,shubhamdhama/zulip,showell/zulip,jackrzhang/zulip,eeshangarg/zulip,hackerkid/zulip,vabs22/zulip,kou/zulip,verma-varsha/zulip,amanharitsh123/zulip,dhcrzf/zulip,brainwane/zulip,hackerkid/zulip,shubhamdhama/zulip,kou/zulip,tommyip/zulip,vabs22/zulip,andersk/zulip,dhcrzf/zulip,vaidap/zulip,showell/zulip,eeshangarg/zulip,vabs22/zulip,synicalsyntax/zulip,shubhamdhama/zulip,timabbott/zulip,mahim97/zulip,rht/zulip,brainwane/zulip,shubhamdhama/zulip,brainwane/zulip,vabs22/zulip,rishig/zulip,synicalsyntax/zulip,punchagan/zulip,shubhamdhama/zulip,verma-varsha/zulip,verma-varsha/zulip,Galexrt/zulip,jackrzhang/zulip,tommyip/zulip,rishig/zulip,vaidap/zulip,brockwhittaker/zulip,tommyip/zulip,tommyip/zulip,brockwhittaker/zulip,brockwhittaker/zulip,zulip/zulip,jackrzhang/zulip,showell/zulip,timabbott/zulip,amanharitsh123/zulip,synicalsyntax/zulip,rishig/zulip,punchagan/zulip,Galexrt/zulip,hackerkid/zulip,brainwane/zulip,brockwhittaker/zulip,hackerkid/zulip,rht/zulip,vabs22/zulip,mahim97/zulip,rishig/zulip,rishig/zulip,kou/zulip,eeshangarg/zulip,vaidap/zulip,amanharitsh123/zulip,andersk/zulip,synicalsyntax/zulip,punchagan/zulip,hackerkid/zulip,andersk/zulip,verma-varsha/zulip,timabbott/zulip,vaidap/zulip,timabbott/zulip,verma-varsha/zulip,mahim97/zulip,punchagan/zulip,synicalsyntax/zulip,timabbott/zulip,brainwane/zulip,zulip/zulip,rht/zulip,amanharitsh123/zulip,vaidap/zulip,vaidap/zulip,punchagan/zulip,showell/zulip,zulip/zulip,jackrzhang/zulip,kou/zulip,showell/zulip,synicalsyntax/zulip,jackrzhang/zulip,punchagan/zulip,shubhamdhama/zulip,kou/zulip,amanharitsh123/zulip,Galexrt/zulip,shubhamdhama/zulip,dhcrzf/zulip,brockwhittaker/zulip,kou/zulip,dhcrzf/zulip,jackrzhang/zulip,timabbott/zulip,eeshangarg/zulip,andersk/zulip,zulip/zulip,hackerkid/zulip,zulip/zulip,rishig/zulip,timabbott/zulip,showell/zulip,zulip/zulip,tommyip/zulip,brockwhittaker/zulip,synicalsyntax/zulip,Galexrt/zulip,andersk/zulip,verma-varsha/zulip,brainwane/zulip,jackrzhang/zulip,Galexrt/zulip,rht/zulip,mahim97/zulip,Galexrt/zulip,andersk/zulip,dhcrzf/zulip,dhcrzf/zulip,dhcrzf/zulip,tommyip/zulip,eeshangarg/zulip,tommyip/zulip,rht/zulip,showell/zulip
zerver/tests/test_outgoing_webhook_interfaces.py
zerver/tests/test_outgoing_webhook_interfaces.py
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import print_function

from typing import Any

import mock
import json

from requests.models import Response

from zerver.lib.test_classes import ZulipTestCase
from zerver.outgoing_webhooks.generic import GenericOutgoingWebhookService


class Test_GenericOutgoingWebhookService(ZulipTestCase):

    def setUp(self):
        # type: () -> None
        self.event = {
            u'command': '@**test**',
            u'message': {
                'content': 'test_content',
            }
        }

        self.handler = GenericOutgoingWebhookService(service_name='test-service',
                                                     base_url='http://example.domain.com',
                                                     token='abcdef',
                                                     user_profile=None)

    def test_process_event(self):
        # type: () -> None
        rest_operation, request_data = self.handler.process_event(self.event)
        request_data = json.loads(request_data)
        self.assertEqual(request_data['data'], "@**test**")
        self.assertEqual(request_data['token'], "abcdef")
        self.assertEqual(rest_operation['base_url'], "http://example.domain.com")
        self.assertEqual(rest_operation['method'], "POST")
        self.assertEqual(request_data['message'], self.event['message'])

    def test_process_success(self):
        # type: () -> None
        response = mock.Mock(spec=Response)
        response.text = json.dumps({"response_not_required": True})
        success_response = self.handler.process_success(response, self.event)
        self.assertEqual(success_response, None)

        response.text = json.dumps({"response_string": 'test_content'})
        success_response = self.handler.process_success(response, self.event)
        self.assertEqual(success_response, 'test_content')

        response.text = json.dumps({})
        success_response = self.handler.process_success(response, self.event)
        self.assertEqual(success_response, "")

    def test_process_failure(self):
        # type: () -> None
        response = mock.Mock(spec=Response)
        response.text = 'test_content'
        success_response = self.handler.process_failure(response, self.event)
        self.assertEqual(success_response, 'test_content')
apache-2.0
Python
48d38c28212c0b3ac8bb8ee324221d94b07e84ee
Add initial Domain Tools module
MISP/misp-modules,MISP/misp-modules,VirusTotal/misp-modules,MISP/misp-modules,Rafiot/misp-modules,amuehlem/misp-modules,amuehlem/misp-modules,Rafiot/misp-modules,VirusTotal/misp-modules,VirusTotal/misp-modules,amuehlem/misp-modules,Rafiot/misp-modules
misp_modules/modules/expansion/domaintools.py
misp_modules/modules/expansion/domaintools.py
import json
import logging
import sys

from domaintools import API

log = logging.getLogger('domaintools')
log.setLevel(logging.DEBUG)
ch = logging.StreamHandler(sys.stdout)
ch.setLevel(logging.DEBUG)
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
ch.setFormatter(formatter)
log.addHandler(ch)

misperrors = {'error': 'Error'}
mispattributes = {
    'input': ['domain'],
    'output': ['whois-registrant-email', 'whois-registrant-phone',
               'whois-registrant-name', 'whois-registrar',
               'whois-creation-date', 'freetext']
}

moduleinfo = {
    'version': '0.1',
    'author': 'Raphaël Vinot',
    'description': 'DomainTools MISP expansion module.',
    'module-type': ['expansion', 'hover']
}

moduleconfig = ['username', 'api_key']


class DomainTools(object):

    def __init__(self):
        self.reg_mail = set()
        self.reg_phone = set()
        self.reg_name = set()
        self.registrar = set()
        self.creation_date = set()
        self.freetext = ''

    def dump(self):
        to_return = []
        if self.reg_mail:
            to_return.append({'type': ['whois-registrant-email'], 'values': list(self.reg_mail)})
        if self.reg_phone:
            to_return.append({'type': ['whois-registrant-phone'], 'values': list(self.reg_phone)})
        if self.reg_name:
            to_return.append({'type': ['whois-registrant-name'], 'values': list(self.reg_name)})
        if self.registrar:
            to_return.append({'type': ['whois-registrar'], 'values': list(self.registrar)})
        if self.creation_date:
            to_return.append({'type': ['whois-creation-date'], 'values': list(self.creation_date)})
        if self.freetext:
            to_return.append({'type': ['freetext'], 'values': [self.freetext]})
        return to_return


def handler(q=False):
    if not q:
        return q
    request = json.loads(q)
    to_query = None
    for t in mispattributes['input']:
        to_query = request.get(t)
        if to_query:
            break
    if not to_query:
        misperrors['error'] = "Unsupported attributes type"
        return misperrors

    if request.get('config'):
        if (request['config'].get('username') is None) or (request['config'].get('api_key') is None):
            misperrors['error'] = 'DomainTools authentication is incomplete'
            return misperrors
        else:
            domtools = API(request['config'].get('username'), request['config'].get('api_key'))
    else:
        misperrors['error'] = 'DomainTools authentication is missing'
        return misperrors

    whois_entry = domtools.parsed_whois(to_query)
    values = DomainTools()
    if whois_entry.has_key('error'):
        misperrors['error'] = whois_entry['error']['message']
        return misperrors

    if whois_entry.has_key('registrant'):
        values.reg_name.add(whois_entry['registrant'])

    if whois_entry.has_key('registration'):
        values.creation_date.add(whois_entry['registration']['created'])

    if whois_entry.has_key('whois'):
        values.freetext = whois_entry['whois']['record']

    if whois_entry.emails():
        # NOTE: not sure we want to do that (contains registrar emails)
        values.reg_mail |= whois_entry.emails()

    if whois_entry.has_key('parsed_whois'):
        if whois_entry['parsed_whois']['created_date']:
            values.creation_date.add(whois_entry['parsed_whois']['created_date'])
        if whois_entry['parsed_whois']['registrar']['name']:
            values.registrar.add(whois_entry['parsed_whois']['registrar']['name'])
        for key, entry in whois_entry['parsed_whois']['contacts'].items():
            # TODO: pass key as comment
            if entry['email']:
                values.reg_mail.add(entry['email'])
            if entry['phone']:
                values.reg_phone.add(entry['phone'])
            if entry['name']:
                values.reg_name.add(entry['name'])

    return json.dumps({'results': values.dump()})


def introspection():
    return mispattributes


def version():
    moduleinfo['config'] = moduleconfig
    return moduleinfo
agpl-3.0
Python
1f80f3cc606d9c42e41e30108e97f776b02803c5
Create abcprob.py
beepingmoon/code,beepingmoon/code
abcprob.py
abcprob.py
# by beepingmoon, 2014-07-22
# abc problem, http://rosettacode.org/wiki/ABC_Problem

import time


class Blok:
    def __init__(self, znaki, czyDostepny = True):
        self.znaki = znaki
        self.czyDostepny = czyDostepny

    def sprawdzZnaki(self, znak):
        for z in self.znaki:
            if z == znak:
                return True
        return False


bloki = [Blok('ob'),Blok('xk'),Blok('dq'),Blok('cp'),Blok('na'),
         Blok('gt'),Blok('re'),Blok('tg'),Blok('qd'),Blok('fs'),Blok('jw'),
         Blok('hu'),Blok('vi'),Blok('an'),Blok('ob'),Blok('er'),Blok('fs'),
         Blok('ly'),Blok('pc'),Blok('zm')]


def resetuj():
    for b in bloki:
        b.czyDostepny = True


def funkcjaABC(bloki, slowo, indeks):
    if indeks == len(slowo):
        return True
    for blok in bloki:
        if blok.czyDostepny == False:
            continue
        if blok.sprawdzZnaki(slowo[indeks]) == True:
            blok.czyDostepny = False
            if funkcjaABC(bloki, slowo, indeks+1):
                return True
            blok.czyDostepny = True
    return False


# check long arbitrary string in this file
f = open("slowo.txt",'r')
data = f.read()
f.close()

start = time.time()
print funkcjaABC(bloki, data, 0)
print "Czas szukania: %f sekund " % (time.time() - start)
resetuj()

#print funkcjaABC(bloki, 'a', 0)        # true
#resetuj()
#print funkcjaABC(bloki, 'bark', 0)     # true
#resetuj()
#print funkcjaABC(bloki, 'book', 0)     # false
#resetuj()
#print funkcjaABC(bloki, 'treat', 0)    # true
#resetuj()
#print funkcjaABC(bloki, 'common', 0)   # false
#resetuj()
#print funkcjaABC(bloki, 'squad', 0)    # true
#resetuj()
#print funkcjaABC(bloki, 'confuse', 0)  # true
mit
Python
b7b29a00b1a2e448d78c8f3c4333753668589e16
Create __init__.py
0nse/WikiWho,wikiwho/WikiWho,0nse/WikiWho,maribelacosta/wikiwho
etc/__init__.py
etc/__init__.py
mit
Python
e1ea3859b08a14c80ccd65fc5551336bdc760f96
add biggan projukti blog
banglakit/corpus-builder
corpus_builder/spiders/public_blog/biggan_projukti.py
corpus_builder/spiders/public_blog/biggan_projukti.py
# -*- coding: utf-8 -*-
import scrapy
from scrapy.linkextractors import LinkExtractor
from scrapy.spiders import Rule

from corpus_builder.templates.spider import CommonSpider


class BigganProjuktiSpider(CommonSpider):
    name = 'biggan_projukti'
    allowed_domains = ['www.bigganprojukti.com', 'bigganprojukti.com']
    base_url = 'http://www.bigganprojukti.com/'
    start_request_url = base_url

    content_body = {
        'css': 'div.td-post-content p::text'
    }

    rules = (
        Rule(LinkExtractor(
            restrict_css='div.td-main-content h3.entry-title'
        ), callback='parse_content'),
    )

    allowed_configurations = [
        ['start_page'],
        ['start_page', 'end_page']
    ]

    def request_index(self, response):
        for page in range(self.start_page + 1, self.end_page + 1):
            yield scrapy.Request(self.base_url + 'page/{page}'.format(page=page))
mit
Python
204e6fc49bcc739f1e5c53bfbfc3eb7e86a7640c
Add windows autostart.
franekp/millandict,franekp/ankidict,franekp/ankidict,franekp/ankidict,franekp/ankidict,franekp/millandict
StartAtBoot.py
StartAtBoot.py
import sys

if sys.platform.startswith('win'):
    from PyQt4.QtCore import QSettings

    RUN_PATH = "HKEY_CURRENT_USER\\Software\\Microsoft\\Windows\\CurrentVersion\\Run"
    settings = QSettings(RUN_PATH, QSettings.NativeFormat)
    settings.setValue("Anki", sys.argv[0])

    # to remove that:
    # self.settings.remove("Anki")
unknown
Python
7919fa239e597c0358b518740aa2657b49caddbf
add oop_advance
hewentian/python-learning
src/python27/oop_advance/slots.py
src/python27/oop_advance/slots.py
# -*- coding: utf-8 -*-

class Student(object):
    pass

s = Student()
s.name = 'Tim Ho'
print s.name

def set_age(self, age):
    self.age = age

from types import MethodType
s.set_age = MethodType(set_age, s, Student)
s.set_age(25)
print s.age

s2 = Student()
# s2.set_age(25)

def set_score(self, score):
    self.score = score

Student.set_score = MethodType(set_score, None, Student)
s.set_score(100)
print s.score

s2.set_score(99)
print s2.score

class Student2(object):
    __slots__ = ('name', 'age')

s3 = Student2()
s3.name = 'Tim Ho'
s3.age = 25
#s3.score = 99
print s3.name
print s3.age
apache-2.0
Python
f576b7b151c6c74eea668e66fff54ab2c33f39d6
add 100
zeyuanxy/project-euler,EdisonAlgorithms/ProjectEuler,zeyuanxy/project-euler,EdisonAlgorithms/ProjectEuler,EdisonAlgorithms/ProjectEuler,zeyuanxy/project-euler,zeyuanxy/project-euler,EdisonAlgorithms/ProjectEuler
Volume2/100.py
Volume2/100.py
if __name__ == "__main__": b, n, L = 85, 120, 10 ** 12 while n <= L: b, n = 3 * b + 2 * n - 2, 4 * b + 3 * n - 3 print b, n
mit
Python
68efa8a0fb206da8cd1410d74572520f558ebded
Create apriori.py
Jan-Ko/basketanalysis
apriori.py
apriori.py
def preprocessing(data):
    """ preprocesses data to be applicable to apriori

    Parameters
    ----------
    data : tbd

    Returns
    ---------
    list of sets
    """
    pass


class apriori():
    """ Frequent Itemsets using the apriori algorithm

    Parameters
    ----------
    baskets : list of sets
    max_set_size : int, default None
        determine frequent item sets up to max_set_size items
        if None, determine all frequent item sets
    s : float >0 and <=1
        minimum threshold for item sets to count as frequent
    rules : boolean
        if True return association rules additionally to frequent item sets
    confidence : boolean
        if True compute confidence of association rule. Only viable if rules is True
    interest : boolean
        if True compute interest of association rule. Only viable if rules is True
    """

    def __init__(self, baskets, max_set_size = None, s = 0.1,
                 rules = False, confidence=False, interest=False):
        self.baskets = baskets
        self.max_set_size = max_set_size
        self.s = s
        self.rules = rules
        self.confidence = confidence
        self.interest = interest

    def compute(self):
        """ Applies the apriori algorithm to baskets """
        pass

    def _initialize(self):
        pass

    def _construct(self):
        pass

    def _filter(self):
        pass

    def _construct_and_count(self, j, frequent_tuples):
        if j == 1:
            # count items in baskets and return
            pass
        if j > 1:
            # for every basket, filter tuples subset of basket
            # double loop through filtered tuples
            # if tuple difference is j-2, unite and count unison
            # if count(unison) = j add tuple to output and increase count
            # memoization?
            pass
mit
Python
8adf39f011d8290c07f01e807b65373e40b4c314
Create score.py
lettersonsounds/sing
score.py
score.py
""" Requires sox and text2wave (via festival) """ from pippi import dsp from pippi import tune import subprocess import os def sox(cmd, sound): path = os.getcwd() filename_in = '/proc-in' filename_out = '/proc-out.wav' dsp.write(sound, filename_in) cmd = cmd % (path + filename_in + '.wav', path + filename_out) subprocess.call(cmd, shell=True) sound = dsp.read(path + filename_out).data return sound def text2wave(lyrics): path = os.getcwd() + '/bag.wav' cmd = "echo '%s' | /usr/bin/text2wave -o %s" % (lyrics, path) ret = subprocess.call(cmd, shell=True) words = dsp.read('bag.wav').data return words def singit(lyrics, mult): words = text2wave(lyrics) pitches = [ dsp.randint(1, 10) for i in range(dsp.randint(2, 4)) ] pitches = tune.fromdegrees(pitches, octave=dsp.randint(1, 4), root='a') sings = [ dsp.pine(words, dsp.flen(words) * mult, pitch) for pitch in pitches ] sings = dsp.mix(sings) sings = sox("sox %s %s tempo 5.0", sings) return sings verses = [ 'sing a ling a ling a', 'ding ling a sing ling ding a', 'ee oh ee oh see low', 'me low see low tree low', 'ping a ding a ding a', 'sling ding a bing ling ding a', 'ee oh ee oh see low', 'me low see low tree low', 'sing a ling a ling a', 'ding ling a sing ling ding a', 'ee oh ee oh see low', 'me low see low tree low', ] layers = [] # v1: 1 layers, 50 - 1000 mult # v2: 3 layers, 50 - 1000 mult # v3: 2 layers, 50 - 100 mult for l in range(2): out = ''.join([ singit(lyric, dsp.randint(50, 100)) for lyric in verses ]) layers += [ out ] out = dsp.mix(layers) dsp.write(out, 'sing')
unlicense
Python
0ac53ef31a47c61382557b9fb3ba588fd4e1ae67
Add first working setup.py script
rlc2/pygame_maker,rlc2/pygame_maker
setup.py
setup.py
from setuptools import setup, find_packages

setup(name='pygame_maker',
      version='0.1',
      description='ENIGMA-like pygame-based game engine',
      classifiers=[
          'Development Status :: 2 - Pre-Alpha',
          'License :: OSI Approved :: GNU Lesser General Public License v2 (LGPGv2)',
          'Progamming Language :: Python :: 2.7',
          'Topic :: Software Development :: Libraries :: pygame',
      ],
      keywords='pygame engine',
      url='http://github.com/rlc2/pygame_maker',
      author='Ron Lockwood-Childs',
      author_email='rlc2@dslextreme.com',
      license='LGPL v2.1',
      packages=[
          'pygame_maker',
          'pygame_maker.actions',
          'pygame_maker.actors',
          'pygame_maker.events',
          'pygame_maker.logic',
          'pygame_maker.scenes',
          'pygame_maker.sounds',
          'pygame_maker.support',
      ],
      package_data = {
          '': ['script_data/*.png', 'script_data/*.wav', 'script_data/*.yaml',
               'script_data/*.tmpl', 'tests/unittest_files/*']
      },
      scripts = [
          'scripts/pygame_maker_app.py'
      ],
      install_requires=[
          'numpy>=1.10.1',
          'yaml>=3.11',
          'pyparsing>=2.0.5',
          'pygame>=1.9.0',
      ],
      zip_safe=False)
lgpl-2.1
Python
1c608e69ecf61484ea1210fe0d6dc8d116c583d3
Update homepage in setup.py
Polyconseil/django-pagination,atugushev/django-pagination,zyga/django-pagination,alternativshik/django-pagination,Polyconseil/django-pagination,alternativshik/django-pagination,zyga/django-pagination,atugushev/django-pagination
setup.py
setup.py
#!/usr/bin/env python
from setuptools import setup, find_packages

setup(
    name='linaro-django-pagination',
    version=version,
    description="linaro-django-pagination",
    long_description=open("README").read(),
    classifiers=[
        "Programming Language :: Python",
        "Topic :: Software Development :: Libraries :: Python Modules",
        "Framework :: Django",
        "Environment :: Web Environment",
    ],
    keywords='pagination,django',
    author='Zygmunt Krynicki',
    author_email='zygmunt.krynicki@linaro.org',
    url='https://github.com/zyga/django-pagination',
    license='BSD',
    packages=find_packages(),
    include_package_data=True,
    zip_safe=False,
)
#!/usr/bin/env python
from setuptools import setup, find_packages

setup(
    name='linaro-django-pagination',
    version=version,
    description="linaro-django-pagination",
    long_description=open("README").read(),
    classifiers=[
        "Programming Language :: Python",
        "Topic :: Software Development :: Libraries :: Python Modules",
        "Framework :: Django",
        "Environment :: Web Environment",
    ],
    keywords='pagination,django',
    author='Zygmunt Krynicki',
    author_email='zygmunt.krynicki@linaro.org',
    url='http://launchpad.net/linaro-django-pagination/',
    license='BSD',
    packages=find_packages(),
    include_package_data=True,
    zip_safe=False,
)
bsd-3-clause
Python
3693c5696da5bb96fc242d276f0d1a0a983a9c5d
Add setup.py script
zillolo/vsut-python
setup.py
setup.py
import os
from setuptools import setup


def read(file):
    return open(os.path.join(os.path.dirname(__file__), file)).read()


setup(
    name="vsut",
    version="1.5.2",
    author="Alex Egger",
    author_email="alex.egger96@gmail.com",
    description="A simple unit testing framework for Python 3.",
    license="MIT",
    keywords="unit unittest test testing",
    url="http://github.com/zillolo/vsut-python",
    packages=["vsut"],
    scripts=["runner.py"],
    entry_points = {"console_scripts" : ["vrun = runner:main"]},
    long_description="""**V**\ ery **S**\ imple **U**\ nit **T**\ est
=============================================

**VSUT** is a simple unit test framework for Python.

Usage
-----

A unit can be described , like follows:

.. code:: python

    ...
    class UnitTest(vsut.unit.Unit):

        def testComponentOne(self):
            ...

        def testComponentTwo(self):
            ...

Any methods that start with 'test' will be executed automatically, once the
case is run.

Asserts & Fail Conditions
-------------------------

The following methods can be used in a test-case to check for success or
failure:

- ``assertEqual(expected, actual)`` - Checks for equality of the two arguments.
- ``assertNotEqual(expected, actual)`` - Checks for inequality of the two arguments.
- ``assertTrue(expected)`` - Checks whether the argument is the boolean value True.
- ``assertFalse(expected)`` - Checks whether the argument is the boolean value False.
- ``assertIn(expected, collection)`` - Checks whether the argument is in the collection.
- ``assertNotIn(expected, collection)`` - Checks whether the argument is not in the collection.
- ``assertIs(expected, actual)`` - Checks whether the value is the expected.
- ``assertIsNot(expected, actual)`` - Checks whether the value is not the expected.
- ``assertIsNone(expected)`` - Checks whether the argument is None.
- ``assertIsNotNone(expected)`` - Checks whether the argument is not None.
- ``assertRaises(exception, func, *args)`` - Checks whether the function 'func' raises an exception of the type 'exception'.

For any of these methods a **message** parameter can be specified, that will
be printed instead of the default message.

Example
^^^^^^^

.. code:: python

    ...
    assertEqual(True, False, message="A custom message.")
    ...

Full Example
------------

.. code:: python

    from vsut.unit import Unit
    from vsut.assertion import assertEqual

    class TestCase(Unit):

        def testExample(self):
            a = True
            b = True
            c = False
            assertEqual(a, b)
            assertEqual(b, c)

Running units
-------------

Units can be run with the test runner, as follows:

::

    python runner.py [--format=table] module.TestClass module1.TestClass1 ...

| The ``--format`` argument is optional and specifies the method of formatting the output. Available methods are ``table`` and ``csv``, with ``table`` being the default.
| The separator for the csv-data can be specified with the parameter ``--separator``.

**NOTE**: Some characters require escaping with ``\``, as they are special characters.

Output as Table
^^^^^^^^^^^^^^^

| Output as a table can look like this for example:
| \`\`\`
| [TestCase]
| Id \| Name \| Status \| Time \| Assert \| Message
| 0 \| testAssertEqual \| OK \| 0.000003 \| \|
| 1 \| testAssertEqualFail \| OK \| 0.000008 \| \|
| 2 \| testAssertFalse \| OK \| 0.000001 \| \|
| 3 \| testAssertIn \| OK \| 0.000002 \| \|
| 4 \| testAssertIs \| OK \| 0.000001 \| \|
| 5 \| testAssertIsNone \| OK \| 0.000002 \| \|
| 6 \| testAssertIsNot \| OK \| 0.000001 \| \|
| 7 \| testAssertIsNotNone \| OK \| 0.000001 \| \|
| 8 \| testAssertNotEqual \| OK \| 0.000001 \| \|
| 9 \| testAssertNotIn \| OK \| 0.000002 \| \|
| 10 \| testAssertRaises \| OK \| 0.000005 \| \|
| 11 \| testAssertTrue \| OK \| 0.000002 \| \|
| 12 \| testFailWithCustomMessage \| FAIL \| 0.000003 \| assertEqual \| A custom message.
| 13 \| testWillFail \| FAIL \| 0.000003 \| assertEqual \| 1 != 2
| 14 \| testWillFailToo \| FAIL \| 0.000003 \| assertNotEqual \| 1 == 1

::

    #### Output as CSV
    Output as CSV can look like this for example:

| TestCase
| 0,testAssertEqual,OK,0.000004
| 1,testAssertEqualFail,OK,0.000011
| 2,testAssertFalse,OK,0.000002
| 3,testAssertIn,OK,0.000004
| 4,testAssertIs,OK,0.000004
| 5,testAssertIsNone,OK,0.000002
| 6,testAssertIsNot,OK,0.000004
| 7,testAssertIsNotNone,OK,0.000002
| 8,testAssertNotEqual,OK,0.000003
| 9,testAssertNotIn,OK,0.000002
| 10,testAssertRaises,OK,0.000007
| 11,testAssertTrue,OK,0.000003
| 12,testFailWithCustomMessage,FAIL,0.000006,assertEqual,A custom message.
| 13,testWillFail,FAIL,0.000007,assertEqual,1 != 2
| 14,testWillFailToo,FAIL,0.000006,assertNotEqual,1 == 1
| \`\`\`
""",
    classifiers=[
        "Development Status :: 4 - Beta",
        "Environment :: Console",
        "Intended Audience :: Developers",
        "License :: OSI Approved :: MIT License",
        "Programming Language :: Python :: 3",
        "Topic :: Software Development :: Testing"]
)

#TODO: Find a way so people can execute runner.py when importing with pip.
Python
565ff051cabe9eaec6f24df6e8c31115e0a4eed8
Add setup.py
eternnoir/VSTools
setup.py
setup.py
#!/usr/bin/env python from setuptools import setup setup(name='VSTools', version='0.1', description='Easy use Visual Studio msbuild with python. ', author='eternnoir', author_email='eternnoir@gmail.com', url='https://github.com/eternnoir/VSTools', packages=['VSTools'], )
apache-2.0
Python
b164ec6fae6ea9a6734ac58ddd8c3b89f73713fe
Add setup.py
tysonclugg/django-classy-settings,RonnyPfannschmidt/django-classy-settings,MarkusH/django-classy-settings,rapilabs/django-classy-settings,ar45/django-classy-settings,funkybob/django-classy-settings,pombredanne/django-classy-settings
setup.py
setup.py
from distutils.core import setup setup( name='django-classy-settings', version='0.1', description='Simple class-based settings for Django', author='Curtis Maloney', author_email='curtis@tinbrain.net', packages=['cbs',], )
bsd-2-clause
Python
69f9f090cfa5c9ca5d7dde70cfcdd3327147bdb7
Create setup.py
nurnbeck/project-1-CMPUT-291
setup.py
setup.py
import cx_Oracle ''' - This is to drop all pervious tables and create new tables - Call setup(curs, connection) in main function - setup(curs, connection) returns nothing - Do not call dropTable() and createTable() in main unless you really want to do so ''' def dropTable(curs): droplst = [] droplst.append("drop table owner") droplst.append("drop table auto_sale") droplst.append("drop table restriction") droplst.append("drop table driving_condition") droplst.append("drop table ticket") droplst.append("drop table ticket_type") droplst.append("drop table vehicle") droplst.append("drop table vehicle_type") droplst.append("drop table drive_licence") droplst.append("drop table people") for i in range(len(droplst)): try: curs.execute(droplst[i]) except: pass return def createTable(curs): createPeople = ("create table people " """(sin CHAR(15), name VARCHAR(40), height number(5, 2), weight number(5, 2), eyecolor VARCHAR(10), haircolor VARCHAR(10), addr VARCHAR2(50), gender CHAR, birthday DATE)""") createdrive_licence = ("create table drive_licence " """(licence_no CHAR(15), sin CHAR(15), class VARCHAR(10), photo BLOB, issuing_date DATE, expiring_date DATE)""") createdriving_condition = ("create table driving_condition " """(c_id INTEGER, description VARCHAR(1024))""") createrestriction = ("create table restriction " """(licence_no CHAR(15), r_id INTEGER)""") createvehicle_type = ("create table vehicle_type " """(type_id INTEGER, type CHAR(10))""") createvehicle = ("create table vehicle " """(serial_no CHAR(15), maker VARCHAR(20), model VARCHAR(20), year number(4, 0), color VARCHAR(10), type_id INTEGER)""") createowner = ("create table owner " """(owner_id CHAR(15), vehicle_id CHAR(15), is_primary_owner CHAR(1))""") createauto_sale = ("create table auto_sale " """(transaction_id int, seller_id CHAR(15), buyer_id CHAR(15), vehicle_id CHAR(15), s_date date, price numeric(9, 2))""") createticket_type = ("create table ticket_type " """(vtype CHAR(10), fine number(5, 2))""") createticket = ("create table ticket " """(ticket_no int, violator_no CHAR(15), vehicle_id CHAR(15), office_no CHAR(15), vtype CHAR(10), vdate date, place VARCHAR(20), descriptions VARCHAR(1024))""") curs.execute(createPeople) curs.execute(createdrive_licence) curs.execute(createdriving_condition) curs.execute(createrestriction) curs.execute(createvehicle_type) curs.execute(createvehicle) curs.execute(createowner) curs.execute(createauto_sale) curs.execute(createticket_type) curs.execute(createticket) return def setup(curs, connection): dropTable(curs) createTable(curs) connection.commit() return
mit
Python
fa4ce6dc15e8b47c5978c476db7801473820af0d
add setup.py
yasserglez/pymdptoolbox,silgon/pymdptoolbox,yasserglez/pymdptoolbox,silgon/pymdptoolbox,McCabeJM/pymdptoolbox,McCabeJM/pymdptoolbox,sawcordwell/pymdptoolbox,sawcordwell/pymdptoolbox
setup.py
setup.py
# -*- coding: utf-8 -*-
bsd-3-clause
Python
8e8fbf8b63239915736b788b7f1c8ac21a48c190
Add a basic setup.py script
marineam/coil,kovacsbalu/coil,tectronics/coil,tectronics/coil,marineam/coil,kovacsbalu/coil
setup.py
setup.py
from distutils.core import setup from coil import __version__ as VERSION setup( name = 'coil', version = VERSION, author = 'Michael Marineau', author_email = 'mike@marineau.org', description = 'A powerful configuration language', license = 'MIT', packages = ['coil', 'coil.test'], scripts = ['bin/coildump'], )
mit
Python
d074995f8ce5a62104525b1f3cfed10ace12c3bc
add setup.py
slyrz/feature
setup.py
setup.py
from setuptools import setup setup(name="feature", version="0.1", url="https://github.com/slyrz/feature", description="Easy feature engineering.", long_description=open('README.md').read(), packages=['feature', 'feature.plugin'], license='MIT')
mit
Python
699ac33eec57fa49e2c1917d2bf17950bd6e6474
Create setup script
Fantomas42/mots-vides,Fantomas42/mots-vides
setup.py
setup.py
"""Setup script of mots-vides""" from setuptools import setup from setuptools import find_packages import mots_vides setup( name='mots-vides', version=mots_vides.__version__, description='Python library for managing stop words in many languages.', long_description=open('README.rst').read(), keywords='stop, words, text, parsing', author=mots_vides.__author__, author_email=mots_vides.__email__, url=mots_vides.__url__, license=open('LICENSE').read(), packages=find_packages(), classifiers=[ 'Programming Language :: Python', 'Programming Language :: Python :: 3', 'Intended Audience :: Developers', 'Operating System :: OS Independent', 'License :: OSI Approved :: BSD License', 'Topic :: Software Development :: Libraries :: Python Modules'] )
bsd-3-clause
Python
959580ea313e4445374e8ee9f32e1a8822dd5beb
add setup script for install
ljwolf/spvcm,ljwolf/spvcm
setup.py
setup.py
from setuptools import setup setup(name='hlm_gibbs', version='0.0.1', description='Fit spatial multilevel models and diagnose convergence', url='https://github.com/ljwolf/hlm_gibbs', author='Levi John Wolf', author_email='levi.john.wolf@gmail.com', license='3-Clause BSD', packages=['hlm_gibbs'], install_requires=['numpy','scipy','pysal','pandas','seaborn'], zip_safe=False)
mit
Python
dca7a5f766b7e2fd5cfc346cbc358faafa1ec9f1
add setup.py file
schaul/py-vgdl
setup.py
setup.py
try: from setuptools import setup except ImportError: from distutils.core import setup from distutils.extension import Extension libname="vgdl" setup( name = libname, version="1.0", description='A video game description language (VGDL) built on top of pygame', author='Tom Schaul', url='https://github.com/schaul/py-vgdl', packages= ['vgdl'], install_requires=['pygame'] )
bsd-3-clause
Python
1618d8afeca1b667b4439d62b3727528dcba9159
Add setup.py
lamby/django-filebased-email-backend-ng
setup.py
setup.py
from setuptools import setup setup( name='django-filebased-email-backend-ng', packages=( 'django_filebased_email_backend_ng', ) )
bsd-3-clause
Python
95d1f63ce4d9698f8ab4b64757e3669c75accbbd
throw on some more setup.py pypi classifiers
goldblatt/django-object-actions,goldblatt/django-object-actions,crccheck/django-object-actions,Cuuuurzel/django-object-actions,dukebody/django-object-actions,dukebody/django-object-actions,Cuuuurzel/django-object-actions,crccheck/django-object-actions
setup.py
setup.py
from distutils.core import setup setup( name='django-object-actions', version='0.0.1', author="The Texas Tribune", author_email="cchang@texastribune.org", maintainer="Chris Chang", # url packages=['django_object_actions'], include_package_data=True, # automatically include things from MANIFEST license='Apache License, Version 2.0', description='A Django app for adding object tools to models', long_description=open('README.md').read(), classifiers=[ "Development Status :: 3 - Alpha", "Framework :: Django", "Intended Audience :: Developers", "License :: OSI Approved :: Apache Software License", "Operating System :: OS Independent", "Programming Language :: Python :: 2", ], )
from distutils.core import setup setup( name='django-object-actions', version='0.0.1', author="The Texas Tribune", author_email="cchang@texastribune.org", maintainer="Chris Chang", # url packages=['django_object_actions'], include_package_data=True, # automatically include things from MANIFEST license='Apache License, Version 2.0', description='A Django app for adding object tools to models', long_description=open('README.md').read(), classifiers=[ "Development Status :: 3 - Alpha", "Framework :: Django", ], )
apache-2.0
Python
591b9be8d03cf2ecd12eed1bd36f9d762e91195c
Add setup.py for package installation
josephl/simplio
setup.py
setup.py
from setuptools import setup setup( name='simplio', version='0.1', description='Simplest-case command-line input/output', long_description=( 'Simplio is a Python function decorator that applies an input file ' 'object and an output file object as arguments to the decorated ' 'function. It determines this based on STDIN or the presence of ' 'command-line arguments.'), url='https://github.com/josephl/simplio', author='Joseph Lee', author_email='joe.lee.three.thousand@gmail.com', license='MIT', keywords='input output file io', )
mit
Python
0abe1e173b73770b5f2ee81f57f21c41466e5c61
Add setup script
cadyyan/technic-solder-client,durandj/technic-solder-client
setup.py
setup.py
#!/usr/bin/env python import os.path from setuptools import find_packages, setup setup( name = 'technic-solder-client', version = '1.0', description = 'Python implementation of a Technic Solder client', author = 'Cadyyan', url = 'https://github.com/cadyyan/technic-solder-client', license = 'MIT', packages = find_packages(), install_requires = [ 'tabulate', ], scripts = [ os.path.join('bin', 'solder'), ], )
mit
Python
af49ecf6ce12b2fa909733c17569c7231c343190
add simple sql shell
denisenkom/pytds,m32/pytds,tpow/pytds,denisenkom/pytds,m32/pytds,tpow/pytds
shell.py
shell.py
# simple interactive shell for MSSQL server import pytds import os def main(): conn = pytds.connect(dsn=os.getenv("HOST", "localhost"), user=os.getenv("SQLUSER", "sa"), password=os.getenv("SQLPASSWORD")) while True: try: sql = input("sql> ") except KeyboardInterrupt: return with conn.cursor() as cursor: try: cursor.execute(sql) except pytds.ProgrammingError as e: print("Error: " + str(e)) else: for _, msg in cursor.messages: print(msg.text) if cursor.description: print('\t'.join(col[0] for col in cursor.description)) print('-' * 80) count = 0 for row in cursor: print('\t'.join(str(col) for col in row)) count += 1 print('-' * 80) print("Returned {} rows".format(count)) print() main()
mit
Python
93e2d3d72099b854f854abc44a79b2c4edb74af8
add basic file splitter
amccreight/mochitest-logs
split.py
split.py
#!/usr/bin/python # This Source Code Form is subject to the terms of the Mozilla Public # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. # Echo all output starting with the line after the line that starts with splitStart. import sys splitStart = "QQQQQQQQQ" foundLine = False for l in sys.stdin: if foundLine: print l, continue if l.startswith(splitStart): foundLine = True
mpl-2.0
Python
3d44701308fe1c32d8ae2efab609d5e7bcd563c0
Create ajastin.py
taloprojekti/SP2017,taloprojekti/SP2017
ajastin.py
ajastin.py
def downloader(): #import downloader #downloader.main() return 0 def lampotila(): Tnow = 15 #import lampotila #lampotila.main() return Tnow def main(): import time from datetime import datetime n = 0 ret1 = 0 t0 = time.time() try: while ret1 == 0: time.sleep(10) #the PID function goes here now = datetime.now() print("{:d}:{:d}:{:d}".format(now.hour, now.minute, now.second)) if now.minute == 0 and now.hour == 0: downloader() while now.minute == 0: time.sleep(1) now = datetime.now() if now.minute % 30 == 0: lampotila() while now.minute % 30 == 0: time.sleep(1) now = datetime.now() except KeyboardInterrupt: return main()
mit
Python
c45da8544bd3e4f85073e61cfba417862ce66fc2
add 'Appeaser' strategy
uglyfruitcake/Axelrod,risicle/Axelrod,bootandy/Axelrod,mojones/Axelrod,risicle/Axelrod,kathryncrouch/Axelrod,emmagordon/Axelrod,uglyfruitcake/Axelrod,emmagordon/Axelrod,bootandy/Axelrod,drvinceknight/Axelrod,kathryncrouch/Axelrod,mojones/Axelrod
axelrod/strategies/appeaser.py
axelrod/strategies/appeaser.py
from axelrod import Player class Appeaser(Player): """ A player who tries to guess what the opponent wants, switching his behaviour every time the opponent plays 'D'. """ def strategy(self, opponent): """ Start with 'C', switch between 'C' and 'D' when opponent plays 'D'. """ if len(self.history) == 0: self.str = 'C' elif opponent.history[-1] == 'D': if self.str == 'C': self.str = 'D' else: self.str = 'C' return self.str def __repr__(self): """ The string method for the strategy: """ return 'Appeaser'
mit
Python
d29a94809f6f58e053a646d796fe9e55a51b334e
Initialize Ch. 1 caesarHacker
JoseALermaIII/python-tutorials,JoseALermaIII/python-tutorials
books/CrackingCodesWithPython/Chapter01/caesarHacker.py
books/CrackingCodesWithPython/Chapter01/caesarHacker.py
# Caesar Hacker improved # Rewritten as function for importing # SPOILERS: Chapter 6 (caesarHacker), Chapter 7 (functions) import books.CrackingCodesWithPython.Chapter01.config def hackCaesar(message): # Loop through every possible key: for key in range(len(books.CrackingCodesWithPython.Chapter01.config.SYMBOLS)): # It is important to set translated to the blank string so that the # previous iteration's value for translated is cleared: translated = '' # The rest of the program is almost the same as the Caesar program: # Loop through each symbol in message: for symbol in message: if symbol in books.CrackingCodesWithPython.Chapter01.config.SYMBOLS: symbolIndex = books.CrackingCodesWithPython.Chapter01.config.SYMBOLS.find(symbol) translatedIndex = symbolIndex - key # Handle the wraparound: if translatedIndex < 0: translatedIndex += len(books.CrackingCodesWithPython.Chapter01.config.SYMBOLS) # Append the decrypted symbol: translated += books.CrackingCodesWithPython.Chapter01.config.SYMBOLS[translatedIndex] else: # Append the symbol without encrypting/decrypting: translated += symbol # Display every possible decryption: print('Key #%s: %s' % (key, translated)) return None
mit
Python
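As an aside on the caesarHacker.py record above: the brute-force idea is simply to try every shift of the symbol set and print each candidate decryption. A minimal, self-contained sketch of that idea follows; the SYMBOLS alphabet and the sample ciphertext are assumptions for illustration only, since the real script pulls SYMBOLS from a config module that is not part of this record.

# Self-contained sketch of the brute-force Caesar idea (illustrative only;
# the alphabet and ciphertext below are assumed, not taken from the record).
SYMBOLS = 'ABCDEFGHIJKLMNOPQRSTUVWXYZ'

def hack_caesar(message):
    # Try every possible key and print the candidate decryption for each.
    for key in range(len(SYMBOLS)):
        translated = ''
        for symbol in message:
            if symbol in SYMBOLS:
                translated += SYMBOLS[(SYMBOLS.find(symbol) - key) % len(SYMBOLS)]
            else:
                translated += symbol
        print('Key #%s: %s' % (key, translated))

if __name__ == '__main__':
    hack_caesar('GUVF VF N FNZCYR')  # hypothetical ciphertext; key 13 reads "THIS IS A SAMPLE"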
cfdbfd30f41ea4a0dc5fb693e896c6e24ae78e05
Create pipeline.py
googleforgames/clean-chat,googleforgames/clean-chat
toxicity_ml/toxicBERT/pipeline.py
toxicity_ml/toxicBERT/pipeline.py
# coding=utf-8 # Copyright 2020 Google LLC import tensorflow_model_analysis as tfma from tfx.components import (Evaluator, ExampleValidator, ImportExampleGen, ModelValidator, Pusher, ResolverNode, SchemaGen, StatisticsGen, Trainer, Transform) from tfx.proto import example_gen_pb2 from tfx.dsl.experimental import latest_blessed_model_resolver from tfx.orchestration.kubeflow import kubeflow_dag_runner from tfx.components.example_gen.csv_example_gen.component import CsvExampleGen from tfx.utils.dsl_utils import external_input def create_train_pipeline(pipeline_name: Text, pipeline_root: Text): ''' Args: pipeline_name: name of the TFX pipeline being created. pipeline_root: root directory of the pipeline. Should be a valid GCS path Returns: A TFX pipeline object. ''' ## Parameters TRAINING_STEPS = 10000 EVALUATION_STEPS = 1000 ## GCS Location serving_model_dir = "/directory" ## Bring Data Into Pipeline example_gen = example_gen_pb2.Output( split_config=example_gen_pb2.SplitConfig(splits=[ example_gen_pb2.SplitConfig.Split(name='train', hash_buckets=45), example_gen_pb2.SplitConfig.Split(name='eval', hash_buckets=5) ])) ## Computes Statistics for Validation statistics_gen = StatisticsGen( examples=example_gen.outputs['examples'] ) ## Performs Transforms transform = Transform( examples=example_gen.outputs['examples'], schema=schema_gen.outputs['schema'], module_file=os.path.abspath("transform.py") ) ## Trainer Component trainer = Trainer( module_file=os.path.abspath("trainer.py"), custom_executor_spec=executor_spec.ExecutorClassSpec(GenericExecutor), examples=transform.outputs['transformed_examples'], transform_graph=transform.outputs['transform_graph'], schema=schema_gen.outputs['schema'], train_args=trainer_pb2.TrainArgs(num_steps=TRAINING_STEPS), eval_args=trainer_pb2.EvalArgs(num_steps=EVALUATION_STEPS) ) ## Resolver Component model_resolver = ResolverNode( instance_name='latest_blessed_model_resolver', resolver_class=latest_blessed_model_resolver.LatestBlessedModelResolver, model=Channel(type=Model), model_blessing=Channel(type=ModelBlessing) ) ## Evaluator eval_config = tfma.EvalConfig( model_specs=[ tfma.ModelSpec(label_key='target') ], metrics_specs=[ tfma.MetricsSpec( metrics=[ tfma.MetricConfig(class_name='ExampleCount') ], thresholds = { 'binary_accuracy': tfma.MetricThreshold( value_threshold=tfma.GenericValueThreshold( lower_bound={'value': 0.5}), change_threshold=tfma.GenericChangeThreshold( direction=tfma.MetricDirection.HIGHER_IS_BETTER, absolute={'value': -1e-10})) } ) ], slicing_specs=[tfma.SlicingSpec(),] ) ## Evaluator Componant evaluator = Evaluator( examples=example_gen.outputs['examples'], model=trainer.outputs['model'], baseline_model=model_resolver.outputs['model'], eval_config=eval_config ) ## Pusher - Export for Model Serving pusher = Pusher( model=trainer.outputs['model'], model_blessing=evaluator.outputs['blessing'], push_destination=pusher_pb2.PushDestination( filesystem=pusher_pb2.PushDestination.Filesystem( base_directory=serving_model_dir))) return pipeline.Pipeline( pipeline_name=pipeline_name, pipeline_root=pipeline_root, components=[ example_gen, statistics_gen, schema_gen, example_validator, transform, trainer, model_resolver, evaluator ], ) def main(unused_argv): metadata_config = kubeflow_dag_runner.get_default_kubeflow_metadata_config() tfx_image = os.environ.get('KUBEFLOW_TFX_IMAGE', None) runner_config = kubeflow_dag_runner.KubeflowDagRunnerConfig( kubeflow_metadata_config=metadata_config, # Specify custom docker image to use. 
tfx_image=tfx_image) kubeflow_dag_runner.KubeflowDagRunner(config=runner_config).run( create_pipeline( pipeline_name=_pipeline_name, pipeline_root=_pipeline_root, )) if __name__ == '__main__': app.run(main)
apache-2.0
Python
03fce72b60eb8cad2368447cf23f72f8084f4a4b
Add py solution for 575. Distribute Candies
ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode
py/distribute-candies.py
py/distribute-candies.py
class Solution(object): def distributeCandies(self, candies): """ :type candies: List[int] :rtype: int """ return min(len(candies) / 2, len(set(candies)))
apache-2.0
Python
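The distribute-candies one-liner above is easy to sanity-check by hand: the sister may take at most half of the candies, and at most one of each distinct kind. The Solution class in the record is not importable from this listing, so the function below is an illustrative restatement of the same rule using integer division.

# Illustrative restatement of min(len(candies) / 2, len(set(candies))).
def distribute_candies(candies):
    return min(len(candies) // 2, len(set(candies)))

if __name__ == '__main__':
    assert distribute_candies([1, 1, 2, 2, 3, 3]) == 3  # three kinds, half is 3
    assert distribute_candies([1, 1, 2, 3]) == 2         # capped by half of 4
    print('ok')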
d34dcf1179e6e5c2b864627266ae1788d10142aa
Add Chuanping Yu's solutions to Problem02
GT-IDEaS/SkillsWorkshop2017,GT-IDEaS/SkillsWorkshop2017,GT-IDEaS/SkillsWorkshop2017
Week01/Problem02/cyu_02.py
Week01/Problem02/cyu_02.py
#!/usr/bin/env python3 """This script is written by Chuanping Yu, on Jul 24, 2017, for the Assignment#1 in IDEaS workshop""" #Problem 2 FIB = [] F = 1 S = 0 FIB.append(F) FIB.append(F) while F <= 4000000: F = FIB[-1] + FIB[-2] FIB.append(F) if F%2 == 0 and F <= 4000000: S = S + F print(S)
bsd-3-clause
Python
5eb9a910096f3e0000499390541a83bc50fb73ce
add binheap
ndraper2/data-structures,SakiFu/data-structures
binheap.py
binheap.py
# -*- coding: utf-8 -*- from __future__ import unicode_literals class BinHeap(object): def __init__(self, iterable=None): self.list = [] if iterable: for item in iterable: self.push(item) def push(self, value): self.list.append(value) self._bubble_up(len(self.list) - 1) def _bubble_up(self, index): if self.list[index] < self.list[(index - 1) // 2]: self.list[index], self.list[(index - 1) // 2] = self.list[(index - 1) // 2], self.list[index] self._bubble_up((index - 1) // 2) def pop(self): return_val = self.list[0] self.list[0] = self.list.pop() self._bubble_down(0) return return_val def _bubble_down(self, index): child = None if self.list[2 * index + 1] > self.list[2 * index + 2]: child = 2 * index + 2 else: child = 2 * index + 1 if self.list[index] < self.list[child]: self.list[index], self.list[child] = self.list[child], self.list[index] self._bubble_down(child)
mit
Python
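For orientation next to the binheap.py record above: the push/_bubble_up logic is aiming for ordinary min-heap ordering, i.e. the smallest element always sits at index 0. The cross-check below uses only the standard library's heapq to illustrate that expected ordering; it is an independent sketch, not a test of the BinHeap class itself.

import heapq

# Reference illustration of min-heap behaviour with the standard library.
heap = []
for value in [5, 3, 8, 1, 4]:
    heapq.heappush(heap, value)  # analogous to BinHeap.push + _bubble_up

assert heap[0] == 1  # smallest element at the root, as _bubble_up intends
print([heapq.heappop(heap) for _ in range(len(heap))])  # prints [1, 3, 4, 5, 8]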
4f0e9a14286f21d835e36e549ebee80419e46cec
test game
cameronbriar/curses
blocker.py
blocker.py
#!/usr/bin/env python class Blocker: def __init__(self): print 'Blocker v1.0' return def run(self): return game = Blocker() game.run()
bsd-2-clause
Python
769019be1331fa58e363fba37957ec90ab6f8163
add code for more precise arbtirage math (WiP)
victorshch/pytrader
arbmath.py
arbmath.py
import decimal from decimal import Decimal class ExchangeModel(object); def __init__(self, depths, tradeApi): self.depths = depths; self.tradeApi = tradeApi self.symbols = [key[:3] for key, value in depths] + [key[3:] for key, value in depths] self.symbols = list(set(self.symbols)) # returns (balance, remaining order) def ModelL1Trade(balance, pair, type, price, amount): depth = self.depths[pair] remainingOrder = { 'pair': pair, 'type': type, 'price': price, 'amount': amount } remainder = remainingOrder['amount'] traded = False if type == 'buy': if(not depth['ask']): return (balance, remainingOrder, traded) ask = depth['ask'][0] if ask['price'] > price: return (balance, remainingOrder, traded) tradedAmount = min(amount, ask['amount']) remainder = max(amount - ask['amount'], 0) ask['amount'] -= tradedAmount balance[pair[:3]] += tradedAmount * k balance[pair[3:]] -= tradedAmount * ask['price'] traded = True if ask['amount'] == Decimal('0'): self.depths[pair]['ask'] = self.depths[pair]['ask'][1:] elif type == 'sell': if not depth['bid']: return (balance, remainingOrder, traded) bid = depth['bid'][0] if bid['price'] < price: return (balance, remainingOrder, traded) tradedAmount = min(amount, bid['amount']) remainder = max(amount - bid['amount'], 0) bid['amount'] -= tradedAmount balance[pair[:3]] -= tradedAmount balance[pair[3:]] += tradedAmount * bid['price'] * k traded = True if bid['amount'] == Decimal('0'): self.depths[pair]['bid'] = self.depths[pair]['bid'][1:] remainingOrder['amount'] = remainder return (balance, remainingOrder, traded) def ModelTrade(balance, pair, type, price, amount): if not (pair in depths): return None depth = depths[pair] if type == 'buy': ask = depth['ask'] def CalculateArb(direction, price1, price2, price3, k): def CalculateElemArb(direction, books, pair1, pair2, pair3, tradeApi, balance): # returns (list of orders that produces immediate profit, balance) def CalculateArb(books, pair1, pair2, pair3, maxArbDepth, tradeApi, balance): k =
mit
Python
22ee1754a1409fb40bf2bb31cb565bfe914c9c38
Create comparison charts from two summary.csv files
chrishantha/performance-apim,chrishantha/performance-apim,chrishantha/performance-apim
distribution/scripts/jmeter/create-comparison-charts.py
distribution/scripts/jmeter/create-comparison-charts.py
#!/usr/bin/env python3.6 # Copyright 2017 WSO2 Inc. (http://wso2.org) # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # ---------------------------------------------------------------------------- # Create comparison charts from two summary.csv files # ---------------------------------------------------------------------------- import pandas as pd import numpy as np import seaborn as sns import matplotlib.pyplot as plt import matplotlib.ticker as tkr import getopt, sys def usage(): print(sys.argv[0] + " --summary1 <summary1.csv> --name1 <name1> --summary2 <summary2.csv> --name2 <name2>") def main(): global summary1_file global summary2_file global name1 global name2 try: opts, args = getopt.getopt(sys.argv[1:], "h", ["help", "summary1=", "name1=", "summary2=", "name2="]) except getopt.GetoptError as err: # print help information and exit: print(err) # will print something like "option -a not recognized" usage() sys.exit(2) for o, a in opts: if o == "--summary1": summary1_file = a elif o == "--name1": name1 = a elif o == "--summary2": summary2_file = a elif o == "--name2": name2 = a elif o in ("-h", "--help"): usage() sys.exit() else: assert False, "unhandled option" if __name__ == "__main__": main() if summary1_file == '' or summary2_file == '' or name1 == '' or name2 == '': print("Please provide arguments") usage() sys.exit(1) def add_suffix(string, suffix): return string + " - " + suffix print("Comparing " + name1 + " and " + name2) df1 = pd.read_csv(summary1_file) df2 = pd.read_csv(summary2_file) keys=['Message Size (Bytes)', 'Sleep Time (ms)', 'Concurrent Users'] df = df1.merge(df2, on=keys, how='inner', suffixes=[add_suffix('', name1), add_suffix('', name2)]) sns.set_style("darkgrid") unique_sleep_times=df['Sleep Time (ms)'].unique() def save_multi_columns_categorical_charts(chart, sleep_time, columns, y, hue, title, kind='point'): print("Creating " + chart + " charts for " + str(sleep_time) + "ms backend delay") fig, ax = plt.subplots() df_results = df.loc[df['Sleep Time (ms)'] == sleep_time] all_columns=['Message Size (Bytes)','Concurrent Users'] for column in columns: all_columns.append(add_suffix(column, name1)) all_columns.append(add_suffix(column, name2)) df_results=df_results[all_columns] df_results = df_results.set_index(['Message Size (Bytes)', 'Concurrent Users']).stack().reset_index().rename(columns={'level_2': hue, 0: y}) g = sns.factorplot(x="Concurrent Users", y=y, hue=hue, col="Message Size (Bytes)", data=df_results, kind=kind, size=5, aspect=1, col_wrap=2 ,legend=False); for ax in g.axes.flatten(): ax.yaxis.set_major_formatter( tkr.FuncFormatter(lambda y, p: "{:,}".format(y))) plt.subplots_adjust(top=0.9, left=0.1) g.fig.suptitle(title) plt.legend(frameon=True) plt.savefig("comparison_" + chart + "_" + str(sleep_time) + "ms.png") plt.clf() plt.close(fig) for sleep_time in unique_sleep_times: save_multi_columns_categorical_charts("thrpt", sleep_time, ['Throughput'], "Throughput", "API Manager", "Throughput (Requests/sec) vs Concurrent Users for " + str(sleep_time) + "ms backend 
delay"); save_multi_columns_categorical_charts("avgt", sleep_time, ['Average (ms)'], "Average Response Time", "API Manager", "Average Response Time (ms) vs Concurrent Users for " + str(sleep_time) + "ms backend delay"); save_multi_columns_categorical_charts("response_time_summary", sleep_time, ['Min (ms)','90th Percentile (ms)','95th Percentile (ms)','99th Percentile (ms)','Max (ms)'], "Response Time", "API Manager", "Response Time Summary for " + str(sleep_time) + "ms backend delay", kind='bar'); save_multi_columns_categorical_charts("loadavg", sleep_time, ['API Manager Load Average - Last 1 minute','API Manager Load Average - Last 5 minutes','API Manager Load Average - Last 15 minutes'], "Load Average", "API Manager", "Load Average with " + str(sleep_time) + "ms backend delay"); save_multi_columns_categorical_charts("network", sleep_time, ['Received (KB/sec)', 'Sent (KB/sec)'], "Network Throughput (KB/sec)", "Network", "Network Throughput with " + str(sleep_time) + "ms backend delay"); save_multi_columns_categorical_charts("gc", sleep_time, ['API Manager GC Throughput (%)'], "GC Throughput", "API Manager", "GC Throughput with " + str(sleep_time) + "ms backend delay") print("Done")
apache-2.0
Python
cd5e6a14bb0a67d6558b691f6b55f7918c4d4970
Create new package (#6384)
matthiasdiener/spack,mfherbst/spack,EmreAtes/spack,mfherbst/spack,tmerrick1/spack,LLNL/spack,EmreAtes/spack,mfherbst/spack,mfherbst/spack,krafczyk/spack,krafczyk/spack,iulian787/spack,mfherbst/spack,matthiasdiener/spack,tmerrick1/spack,EmreAtes/spack,iulian787/spack,matthiasdiener/spack,iulian787/spack,tmerrick1/spack,krafczyk/spack,iulian787/spack,matthiasdiener/spack,EmreAtes/spack,LLNL/spack,EmreAtes/spack,LLNL/spack,matthiasdiener/spack,krafczyk/spack,krafczyk/spack,LLNL/spack,tmerrick1/spack,LLNL/spack,tmerrick1/spack,iulian787/spack
var/spack/repos/builtin/packages/r-fnn/package.py
var/spack/repos/builtin/packages/r-fnn/package.py
############################################################################## # Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC. # Produced at the Lawrence Livermore National Laboratory. # # This file is part of Spack. # Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. # LLNL-CODE-647188 # # For details, see https://github.com/spack/spack # Please also see the NOTICE and LICENSE files for our notice and the LGPL. # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU Lesser General Public License (as # published by the Free Software Foundation) version 2.1, February 1999. # # This program is distributed in the hope that it will be useful, but # WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and # conditions of the GNU Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with this program; if not, write to the Free Software # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## from spack import * class RFnn(RPackage): """Cover-tree and kd-tree fast k-nearest neighbor search algorithms and related applications including KNN classification, regression and information measures are implemented.""" homepage = "https://cran.r-project.org/web/packages/FNN/index.html" url = "https://cran.r-project.org/src/contrib/FNN_1.1.tar.gz" list_url = "https://cran.rstudio.com/src/contrib/Archive/FNN" version('1.1', '8ba8f5b8be271785593e13eae7b8c393') version('1.0', 'e9a47dc69d1ba55165be0877b8443fe0') version('0.6-4', '1c105df9763ceb7b13989cdbcb542fcc') version('0.6-3', 'f0f0184e50f9f30a36ed5cff24d6cff2') version('0.6-2', '20648ba934ea32b1b00dafb75e1a830c') depends_on('r@3.4.0:3.4.9') depends_on('r-mvtnorm', type=('build', 'run')) depends_on('r-chemometrics', type=('build', 'run'))
lgpl-2.1
Python
f68175870692d128fb2a01795d20605bb2e17aa9
Add initial functional tests
randomic/aniauth-tdd,randomic/aniauth-tdd
functional_tests/test_evexml.py
functional_tests/test_evexml.py
"""Functional tests for the xml api part of aniauth project. This is a temporary app as EVE Online's xml api is deprecated and will be disabled March 2018. """ from django.contrib.staticfiles.testing import StaticLiveServerTestCase from django.test import tag from django.shortcuts import reverse from selenium import webdriver from selenium.webdriver.common.keys import Keys MAX_WAIT = 10 @tag('functional') class SubmissionTest(StaticLiveServerTestCase): """Tests for users who are submitting xml api key. """ @classmethod def setUpClass(cls): super(SubmissionTest, cls).setUpClass() cls.browser = webdriver.Chrome() cls.browser.maximize_window() cls.browser.implicitly_wait(MAX_WAIT) super(SubmissionTest, cls).setUpClass() @classmethod def tearDownClass(cls): cls.browser.refresh() cls.browser.quit() super(SubmissionTest, cls).tearDownClass() def tearDown(self): self.browser.refresh() def test_user_can_see_apikey_form(self): """A user should be able to see the form for submitting api keys. """ # They browse to the eve api keys page. url = self.live_server_url + reverse('eveapi') self.browser.get(self.live_server_url) # They see input boxes for keyID and vCode. keyid_input = self.browser.find_element_by_name('keyID') vcode_input = self.browser.find_element_by_name('vCode')
mit
Python
6a7b32e271a264aad763fbd28749ac1258cf041f
Add dialplan filestring module
IndiciumSRL/wirecurly
wirecurly/dialplan/filestring.py
wirecurly/dialplan/filestring.py
import logging from wirecurly.exc import * from wirecurly.dialplan.expression import * import os log = logging.getLogger(__name__) __all__ = ['FileString'] class FileString(object): ''' Filestring oject to use with playback app in dialplan. ''' def __init__(self,*argv): super(FileString, self).__init__() self.audios = [] self.path = 'usr/share/freeswitch/sounds/en/us/callie/' for i in argv: self.addAudio(i) def addAudio(self,audio): ''' Add an audio file to FileString object ''' self.audios.append(audio) def setPath(self,path): ''' Set Path for audios ''' self.path = path def toString(self): ''' Return a string to use with playback app ''' return 'file_string://%s' % '!'.join(['%s%s' % (self.path,a) for a in self.audios])
mpl-2.0
Python
6ce84d454ef18f7b7dfc988195bfacb4e69e8c3f
add CRUD test cases for Snippet
castaway2000/OpenStay,castaway2000/OpenStay,castaway2000/OpenStay
hackathon_starter/hackathon/unittests/testsnippets.py
hackathon_starter/hackathon/unittests/testsnippets.py
from hackathon.models import Snippet from rest_framework import status from rest_framework.test import APITestCase class SnippetViewTestCase(APITestCase): def setUp(self): self.s1 = Snippet.objects.create(title='t1', code="""print("Hello, World.")""") self.s2 = Snippet.objects.create(title='t2', code="""print("Goodbye, World.")""") super(SnippetViewTestCase, self).setUp() def test_list(self): response = self.client.get('/hackathon/snippets/') self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(len(response.data), 2) def test_detail(self): response = self.client.get('/hackathon/snippets/{}/'.format(self.s1.id)) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(response.data['id'], self.s1.id) def test_create(self): payload = {'title': 't3', 'code': """print("Create, World.")"""} response = self.client.post('/hackathon/snippets/', payload) self.assertEqual(response.status_code, status.HTTP_201_CREATED) self.assertEqual(response.data['title'], 't3') self.assertEqual(response.data['code'], """print("Create, World.")""") def test_update(self): payload = {'title': 't666', 'code': '2 + 2'} response = self.client.put('/hackathon/snippets/{}/'.format(self.s1.id), payload) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(response.data['title'], 't666') self.assertEqual(response.data['code'], '2 + 2') def test_partial_update(self): payload = {'title': 't666'} response = self.client.patch('/hackathon/snippets/{}/'.format(self.s1.id), payload) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(response.data['title'], 't666') def test_delete(self): response = self.client.delete('/hackathon/snippets/{}/'.format(self.s1.id)) self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT) self.assertEqual(Snippet.objects.count(), 1)
mpl-2.0
Python
f59749db263291f481c4bdc9f6ede2f6de6cb6d4
Create foundation for input file generation (csv for connectivity table, etc.)
ndebuhr/openfea,ndebuhr/openfea
create_input_files.py
create_input_files.py
import csv import argparse import itertools from thermo_utils import csv_row_writer, read_csv_rows # Read input/output arguments parser = argparse.ArgumentParser() parser.add_argument('-o','--output',required=True) parser.add_argument('-d','--dof',required=True) # parser.add_argument('-v','--version',required=False) args = parser.parse_args() # Write all rows to equations CSV file csv_row_writer(args.output,outRows) print('Output file: %s' % args.output)
mit
Python
9f6df0b93a7a6911d9e7eee0e4fe87e34ea52832
Create main entrypoint of cli
victormartinez/shub_cli
shub_cli/cli.py
shub_cli/cli.py
""" Scrapinghub CLI Usage: shub-cli jobs shub-cli jobs [-t TAG1,TAG2] [-l LACK1,LACK2] [-s SPIDER] [-e STATE] [-c COUNT] shub-cli job -id <id> Options: -t TAG1,TAG2 Description. -l LACK1,LACK2 Description. -s SPIDER Description. -e STATE Description. -c COUNT Description. Examples: shub-cli jobs shub-cli jobs -c 100 shub-cli jobs -t fast,production -l consumed,dev -s spider1 state finished shub-cli jobs tags consumed lacks teste spider my-spider state state count 1000 shub-cli job -id '10/10/1000' Help: For help using this tool, please open an issue on the Github repository: https://github.com/victormartinez/shub_cli """ from docopt import docopt from shub_cli import __version__ as VERSION from shub.config import load_shub_config from shub_cli.commands.job import Job from shub_cli.commands.jobs import Jobs from shub_cli.util.display import display, display_jobs config = load_shub_config() api_keys = config.apikeys projects = config.projects # 70953/91/7817 def main(): """Main CLI entrypoint.""" default_api_key = api_keys['default'] default_project = projects['default'] options = dict(docopt(__doc__, version=VERSION).items()) print('Connection: {}'.format(default_api_key)) print('Project: {}'.format(default_project)) if 'job' in options.keys() and options['job'] == True: if '-id' in options.keys(): job = Job(options, api_key=default_api_key, project=default_project) display(job.run()) else: print('') print('Wrong command.') if 'jobs' in options.keys() and options['jobs'] == True: jobs = Jobs(options, api_key=default_api_key, project=default_project) display_jobs(jobs.run())
mit
Python
53c7233d0ecf7e3f807da9112d1c5eecb75c9ae2
Add a new moderation-style cog
Thessia/Liara
cogs/moderation.py
cogs/moderation.py
from discord.ext import commands import discord import datetime class Moderation: def __init__(self, liara): self.liara = liara @commands.command(pass_context=True, no_pm=True) async def userinfo(self, ctx, user: discord.Member=None): if user is None: user = ctx.message.author # user-friendly status if user.status == discord.Status.online: status = '<:online:212789758110334977>' elif user.status == discord.Status.idle: status = '<:away:212789859071426561>' elif user.status == discord.Status.do_not_disturb: status = '<:do_not_disturb:236744731088912384>' else: status = '<:offline:212790005943369728>' embed = discord.Embed() embed.title = '{} {}'.format(status, user) embed.description = '**Display name**: {0.display_name}\n**ID**: {0.id}\n[Avatar]({0.avatar_url})'.format(user) join_delta = datetime.datetime.now() - user.joined_at created_delta = datetime.datetime.now() - user.created_at embed.add_field(name='Join Dates', value='**This server**: {} ago ({})\n**Discord**: {} ago ({})' .format(join_delta, user.joined_at, created_delta, user.created_at)) roles = [x.mention for x in user.roles if not x.is_everyone] if roles: # only show roles if the member has any if len(str(roles)) < 1025: # deal with limits embed.add_field(name='Roles', value=', '.join(roles)) embed.set_thumbnail(url=user.avatar_url) try: await self.liara.say(embed=embed) except discord.HTTPException: await self.liara.say('Unable to post userinfo, please allow the Embed Links permission') @commands.command(pass_context=True) async def serverinfo(self, ctx): server = ctx.message.server if server.large: await self.liara.request_offline_members(server) embed = discord.Embed() embed.title = str(server) if server.icon_url is not None: embed.description = '**ID**: {0.id}\n[Icon URL]({0.icon_url})'.format(server) embed.set_thumbnail(url=server.icon_url) else: embed.description = '**ID**: {0.id}'.format(server) embed.add_field(name='Members', value=str(len(server.members))) roles = [x.mention for x in server.role_hierarchy if not x.is_everyone] if roles: # only show roles if the server has any if len(str(roles)) < 1025: # deal with limits embed.add_field(name='Roles', value=', '.join(roles)) channels = [x.mention for x in server.channels if x.type == discord.ChannelType.text] if len(str(channels)) < 1025: embed.add_field(name='Text channels', value=', '.join(channels)) if server.verification_level == discord.VerificationLevel.none: level = 'Off' elif server.verification_level == discord.VerificationLevel.low: level = 'Low' elif server.verification_level == discord.VerificationLevel.medium: level = 'Medium' else: level = '(╯°□°)╯︵ ┻━┻' embed.add_field(name='Other miscellaneous info', value='**AFK Channel**: {0.afk_channel}\n' '**AFK Timeout**: {0.afk_timeout} seconds\n' '**Owner**: {0.owner.mention}\n' '**Verification level**: {1}'.format(server, level)) embed.timestamp = server.created_at embed.set_footer(text='Created at') try: await self.liara.say(embed=embed) except discord.HTTPException: await self.liara.say('Unable to post serverinfo, please allow the Embed Links permission') def setup(liara): liara.add_cog(Moderation(liara))
mit
Python
087829b024ea9c5b2028c3f13786578be6dfd702
fix the bug of loading all cifar data
ouwenjie03/GAN
load_data.py
load_data.py
# encoding: utf-8 """ @author: ouwj @position: ouwj-win10 @file: load_data.py @time: 2017/4/26 14:33 """ from tensorflow.examples.tutorials.mnist import input_data import numpy as np def unpickle(file): import pickle with open(file, 'rb') as fo: dict = pickle.load(fo, encoding='bytes') return dict def load_data(dataset='MNIST'): if dataset == 'MNIST': return input_data.read_data_sets('MNIST/') elif dataset == 'CIFAR': dirname = 'CIFAR/cifar-10-batches-py/' # print(unpickle(dirname+'test_batch')) data = unpickle(dirname+'test_batch')[b'data'] / 255.0 for i in range(1, 6): data = np.vstack((data, unpickle(dirname+'data_batch_'+str(i))[b'data'] / 255.0)) return data if __name__ == '__main__': data = load_data('CIFAR') print(data[0:5, :])
# encoding: utf-8 """ @author: ouwj @position: ouwj-win10 @file: load_data.py @time: 2017/4/26 14:33 """ from tensorflow.examples.tutorials.mnist import input_data import numpy as np def unpickle(file): import pickle with open(file, 'rb') as fo: dict = pickle.load(fo, encoding='bytes') return dict def load_data(dataset='MNIST'): if dataset == 'MNIST': return input_data.read_data_sets('MNIST/') elif dataset == 'CIFAR': dirname = 'CIFAR/cifar-10-batches-py/' # print(unpickle(dirname+'test_batch')) data = unpickle(dirname+'test_batch')[b'data'] / 255.0 # for i in range(1, 6): # data = np.vstack((data, unpickle(dirname+'data_batch_'+str(i))[b'data'] / 255.0)) return data if __name__ == '__main__': data = load_data('CIFAR') print(data[0:5, :])
mit
Python
54404541913185a54fea75353d9fffc72ddc2ff6
Create discovery_diag.py
infobloxopen/netmri-toolkit,infobloxopen/netmri-toolkit,infobloxopen/netmri-toolkit
python/discovery_diag.py
python/discovery_diag.py
import requests import json requests.packages.urllib3.disable_warnings() s = requests.Session() def netmriLogin( temp, querystring ): username = "admin" password = "infioblox" url = "https://demo-netmri.infoblox.com/api/3.3" + temp response = s.request("GET", url, params=querystring, verify=False, auth=(username, password)) t = response.text return(t); t = netmriLogin(temp="/device_group_members/index", querystring={"GroupID":"20","select":"DeviceID"}) z = json.loads(t) for entry in z['device_group_members']: print(entry['DeviceID']) filename = str(entry['DeviceID']) + ".txt" device = {"DeviceID": entry['DeviceID']} with open(filename, "w") as f: p = netmriLogin(temp="/devices/diagnostic", querystring=device) i = json.loads(p) print(type(i)) print(i) f.write(i['text'])
mit
Python
35f4f5bbea5b291b8204a2ca30acddebfad86d3e
Create 2004-4.py
Chuck8521/LunchtimeBoredom,Chuck8521/LunchtimeBoredom,Chuck8521/LunchtimeBoredom
2004-4.py
2004-4.py
times = input() i = 0 while i < times: length = input() ascents = 0 descents = 0 plateaus = 0 maxA = 0 maxD = 0 maxP = 0 sequence = [] j = 0 while j < length: currentNum = input() sequence.append(currentNum) if j != 0: if currentNum < sequence[j-1]: #descent else: #first time you can do nothing except reset max length to 1 below maxA += 1 maxD += 1 maxP += 1 j += 1 i += 1
mit
Python
f228b0d76a5c619e45d40d4d0da12059cb2668e9
Create warlock.py
anuragpapineni/Hearthbreaker-evolved-agent,noa/hearthbreaker,pieiscool/edited-hearthbreaker,slaymaker1907/hearthbreaker,danielyule/hearthbreaker,Ragowit/hearthbreaker,kingoflolz/hearthbreaker,jirenz/CS229_Project,slaymaker1907/hearthbreaker,anuragpapineni/Hearthbreaker-evolved-agent,anuragpapineni/Hearthbreaker-evolved-agent,pieiscool/edited-hearthbreaker
hsgame/cards/minions/warlock.py
hsgame/cards/minions/warlock.py
import hsgame.targeting from hsgame.constants import CHARACTER_CLASS, CARD_RARITY, MINION_TYPE from hsgame.game_objects import MinionCard, Minion, Card #from hsgame.cards.battlecries import __author__ = 'randomflyingtaco' #let the train wreck begin
mit
Python
97fcef753647bfbdab0381b30d1533bdce36aeb9
fix admin
chuck211991/django-pyodbc,schmidsi/django-pyodbc,chuck211991/django-pyodbc
django-pyodbc/contrib/admin/models/models.py
django-pyodbc/contrib/admin/models/models.py
from django.db import models from django.contrib.contenttypes.models import ContentType from django.contrib.auth.models import User from django.utils.translation import ugettext_lazy as _ from django.utils.encoding import smart_unicode from django.utils.safestring import mark_safe ADDITION = 1 CHANGE = 2 DELETION = 3 class LogEntryManager(models.Manager): def log_action(self, user_id, content_type_id, object_id, object_repr, action_flag, change_message=''): e = self.model(None, None, user_id, content_type_id, smart_unicode(object_id), object_repr[:200], action_flag, change_message) e.save() class LogEntry(models.Model): action_time = models.DateTimeField(_('action time'), auto_now=True) user = models.ForeignKey(User) content_type = models.ForeignKey(ContentType, blank=True, null=True) object_id = models.TextField(_('object id'), blank=True, null=True) object_repr = models.CharField(_('object repr'), max_length=200) action_flag = models.PositiveSmallIntegerField(_('action flag')) change_message = models.TextField(_('change message'), blank=True) objects = LogEntryManager() class Meta: verbose_name = _('log entry') verbose_name_plural = _('log entries') db_table = 'django_admin_log' ordering = ('-action_time',) def __repr__(self): return smart_unicode(self.action_time) def is_addition(self): return self.action_flag == ADDITION def is_change(self): return self.action_flag == CHANGE def is_deletion(self): return self.action_flag == DELETION def get_edited_object(self): "Returns the edited object represented by this log entry" return self.content_type.get_object_for_this_type(pk=self.object_id) def get_admin_url(self): """ Returns the admin URL to edit the object represented by this log entry. This is relative to the Django admin index page. """ return mark_safe(u"%s/%s/%s/" % (self.content_type.app_label, self.content_type.model, self.object_id))
bsd-3-clause
Python
0ace48790374ea75ba2c6cbc51678e3240c22a88
Create Differ.py
thezakman/CTF-Scripts,thezakman/CTF-Scripts
Differ.py
Differ.py
file1 = raw_input('[file1:] ') modified = open(file1,"r").readlines()[0] file2 = raw_input('[file2:] ') pi = open(file2, "r").readlines()[0] # [:len(modified)] resultado = "".join( x for x,y in zip(modified, pi) if x != y) resultado2 = "".join( x for x,y in zip(pi, modified) if x != y) print "[Differ:]" print '\n-------------------------------------' print "[file1] -> [file2]", resultado print '-------------------------------------' print "[file2] -> [file1]", resultado2
artistic-2.0
Python
60de63d2fc53c020649bc21576765366f310cf56
fix by adding migration
lafranceinsoumise/api-django,lafranceinsoumise/api-django,lafranceinsoumise/api-django,lafranceinsoumise/api-django
src/polls/migrations/0006_auto_20171114_1128.py
src/polls/migrations/0006_auto_20171114_1128.py
# -*- coding: utf-8 -*- # Generated by Django 1.11.5 on 2017-11-14 10:28 from __future__ import unicode_literals import django.contrib.postgres.fields.jsonb import django.core.serializers.json from django.db import migrations class Migration(migrations.Migration): dependencies = [ ('polls', '0005_poll_tags'), ] operations = [ migrations.AlterField( model_name='poll', name='rules', field=django.contrib.postgres.fields.jsonb.JSONField(default=dict, encoder=django.core.serializers.json.DjangoJSONEncoder, help_text='Un object JSON décrivant les règles. Actuellement, sont reconnues `options`,`min_options` et `max_options', verbose_name='Les règles du vote'), ), ]
agpl-3.0
Python
a5d5dde8c523aa28452d790e7f0291c1cf52aacb
Make sure setUpModule is called by the test framework. We brought in pytest-2.4.0.dev8 for that specific functionality. However, one time we regressed, and our tests started misbehaving. So, this test is here to keep us honest.
breznak/nupic,breznak/nupic,breznak/nupic
tests/external/py2/testfixture_test.py
tests/external/py2/testfixture_test.py
#!/usr/bin/env python # ---------------------------------------------------------------------- # Copyright (C) 2013 Numenta Inc. All rights reserved. # # The information and source code contained herein is the # exclusive property of Numenta Inc. No part of this software # may be used, reproduced, stored or distributed in any form, # without explicit written authorization from Numenta Inc. # ---------------------------------------------------------------------- """ Unit tests for our dependencies in the pytest package; at the time of this writing, we were using an unreleased version of pytest that added support for the unittest setUpModule fixture and friends. Some of our tests rely on setUpModule. Once, there was a conflict with pytest installation in our build system, and an older version of pytest was installed that didn't support setUpModule, which resulted in suble side-effects in some of these tests. """ import unittest2 as unittest g_setUpModuleCalled = False def setUpModule(): global g_setUpModuleCalled g_setUpModuleCalled = True class TestPytest(unittest.TestCase): def testSetUpModuleCalled(self): self.assertTrue(g_setUpModuleCalled) if __name__ == '__main__': unittest.main()
agpl-3.0
Python
b0c74bcf7dd4120684a944a7cd8cc005bee039f5
Create BogoBogo.py
MaximeKjaer/dailyprogrammer-challenges
Challenge-175/01-Easy/BogoBogo.py
Challenge-175/01-Easy/BogoBogo.py
import random def bogosort(n, m): i = 0 while n != m: n = ''.join(random.sample(n,len(n))) i += 1 print(i, 'iterations') return i def bogobogosort(n, m): i = 0 #number of iterations j = 2 #number of elements while n[:j] != m: n = ''.join(random.sample(n,len(n))) while n[:j] != m[:j]: n = ''.join(random.sample(n,len(n))) i += 1 if n[:j] != m[:j]: j = 2 #Start over j += 1 print(i, 'iterations') return i print("BOGO SORT\n==============================") for i in range(10): bogosort("lolhe","hello") print("\nBOGOBOGO SORT\n==============================") for i in range(10): bogobogosort("lolhe","hello")
mit
Python
84b932df5520901645c6d999abddea1191654a34
create skeleton of a proper in place quicksort
BradleyMoore/Algorithms
algorithms/sorting/quicksort_ip.py
algorithms/sorting/quicksort_ip.py
from random import randint def partition(unsorted, start, end, pivot): pass def choose_pivot(start, end): pass def quicksort(unsorted, start=0, end=None): pass if __name__ == '__main__': unsorted = [3,345,456,7,879,970,7,4,23,123,45,467,578,78,6,4,324,145,345,3456,567,5768,6589,69,69] sorted = quicksort(unsorted) print '%r <-- unsorted' % unsorted print '%r <-- sorted' % sorted
mit
Python
60002062970a2f83725355911dde73673c5875a5
Add a snippet.
jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets
python/pyqt/pyqt5/button_clic_event_as_class.py
python/pyqt/pyqt5/button_clic_event_as_class.py
#!/usr/bin/env python3 # -*- coding: utf-8 -*- # Copyright (c) 2015 Jérémie DECOCK (http://www.jdhp.org) # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. import sys from PyQt5.QtWidgets import QApplication, QMainWindow, QPushButton class Window(QMainWindow): def __init__(self): super().__init__() self.resize(250, 150) self.setWindowTitle('Hello') button = QPushButton('Hello', self) button.clicked.connect(self.on_clic) self.show() def on_clic(self): print("Hello!") app = QApplication(sys.argv) window = Window() exit_code = app.exec_() sys.exit(exit_code)
mit
Python
6f5843fb04cfa2ed2082b340f282223ec374f9f6
copy group descriptions to text table
mgax/mptracker,mgax/mptracker,mgax/mptracker,mgax/mptracker
alembic/versions/49ed2a435cf_group_description.py
alembic/versions/49ed2a435cf_group_description.py
revision = '49ed2a435cf' down_revision = '5927719682b' import uuid from datetime import datetime from alembic import op import sqlalchemy as sa from sqlalchemy import sql import jinja2 def random_uuid(): return str(uuid.uuid4()) def upgrade(): text = sql.table('text', sql.column('id'), sql.column('ns'), sql.column('name'), ) text_version = sql.table( 'text_version', sql.column('id'), sql.column('text_id'), sql.column('time'), sql.column('content'), sql.column('more_content'), ) time = datetime(2014, 9, 22, 11, 50, 0) conn = op.get_bind() query = ( "SELECT short_name, description FROM mp_group " "WHERE year=2012 " "AND description IS NOT NULL" ) data = list(conn.execute(query)) for name, description in data: text_id = random_uuid() op.execute(text.insert().values({ 'id': text_id, 'ns': 'party', 'name': name, })) op.execute(text_version.insert().values({ 'id': random_uuid(), 'text_id': text_id, 'time': time, 'content': '<p>' + jinja2.escape(description) + '</p>', 'more_content': '', })) def downgrade(): op.execute( "DELETE FROM text_version " "WHERE text_id IN (SELECT id FROM text WHERE ns = 'party')" ) op.execute("DELETE FROM text WHERE ns = 'party'")
mit
Python
f18fd5c4ad61adb56ac7524a006ce9977aa06a31
Add worker to send queue mails
Aladom/django-mailing,Aladom/django-mailing
mailing/management/commands/send_queued_mails_worker.py
mailing/management/commands/send_queued_mails_worker.py
# -*- coding: utf-8 -*- # Copyright (c) 2016 Aladom SAS & Hosting Dvpt SAS from django.core.management.base import BaseCommand from ...utils import send_queued_mails import time class Command(BaseCommand): help = """Send mails with `status` Mail.STATUS_PENDING and having `scheduled_on` set on a past date. In daemon mode.""" def handle(self, *args, **options): while True: send_queued_mails() time.sleep(15)
mit
Python
5a7081c5c46a050566477adda19d30844192ceb2
Add migration to add authtokens for existing users
WikiWatershed/model-my-watershed,WikiWatershed/model-my-watershed,WikiWatershed/model-my-watershed,WikiWatershed/model-my-watershed,WikiWatershed/model-my-watershed
src/mmw/apps/user/migrations/0002_auth_tokens.py
src/mmw/apps/user/migrations/0002_auth_tokens.py
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import models, migrations from django.conf import settings from django.contrib.auth.models import User from rest_framework.authtoken.models import Token def add_auth_tokens_to_users(apps, schema_editor): for user in User.objects.all(): Token.objects.create(user=user) class Migration(migrations.Migration): dependencies = [ ('authtoken', '0001_initial'), ('user', '0001_initial') ] operations = [ migrations.RunPython(add_auth_tokens_to_users) ]
apache-2.0
Python
31b309c1f5981a10207e85950ef8139018afd37c
add roles urls
avlach/univbris-ocf,avlach/univbris-ocf,avlach/univbris-ocf,avlach/univbris-ocf
src/python/expedient/clearinghouse/roles/urls.py
src/python/expedient/clearinghouse/roles/urls.py
''' Created on Jul 29, 2010 @author: jnaous ''' from django.conf.urls.defaults import patterns, url urlpatterns = patterns("expedient.clearinghouse.roles.views", url(r"^confirm/(?P<proj_id>\d+)/(?P<req_id>\d+)/(?P<allow>\d)/(?P<delegate>\d)/$", "confirm_request", name="roles_confirm_request"), )
bsd-3-clause
Python
abb72a3a248efd1b244798f91cbca09af01ebb3e
Fix CloneManga modules.
webcomics/dosage,peterjanes/dosage,peterjanes/dosage,webcomics/dosage
dosagelib/plugins/clonemanga.py
dosagelib/plugins/clonemanga.py
# -*- coding: utf-8 -*- # Copyright (C) 2004-2008 Tristan Seligmann and Jonathan Jacobs # Copyright (C) 2012-2014 Bastian Kleineidam # Copyright (C) 2015-2017 Tobias Gruetzmacher from __future__ import absolute_import, division, print_function from ..helpers import indirectStarter, xpath_class from ..scraper import _ParserScraper from ..util import getQueryParams class CloneManga(_ParserScraper): baseUrl = 'http://manga.clone-army.org' imageSearch = '//div[%s]//img' % xpath_class('subsectionContainer') prevSearch = '//a[span[text()="<<"]]' latestSearch = '//a[span[text()=">|"]]' starter = indirectStarter help = 'Index format: n' def __init__(self, name, shortName, endOfLife=False): super(CloneManga, self).__init__('CloneManga/' + name) self.stripUrl = '%s/viewer.php?page=%%s&lang=&series=%s&HUDoff=' % ( self.baseUrl, shortName) self.url = self.stripUrl % '1' self.endOfLife = endOfLife def namer(self, image_url, page_url): return '%03d' % int(getQueryParams(page_url)['page'][0]) @classmethod def getmodules(cls): return ( cls('ACaptainsWorries', 'captains_worries'), cls('AHimehornsDailyLife', 'himehorn'), cls('AprilAndMay', 'anm', endOfLife=True), cls('DollAndMaker', 'maria_doll', endOfLife=True), cls('Kanami', 'kanami', endOfLife=True), cls('MomokaCorner', 'momoka', endOfLife=True), cls('MyShutInVampirePrincess', 'snax'), cls('NanasEverydayLife', 'nana', endOfLife=True), cls('NNN', 'nnn', endOfLife=True), cls('PaperEleven', 'pxi', endOfLife=True), cls('PennyTribute', 'penny', endOfLife=True), cls('Tomoyo42sRoom', 't42r'), )
# -*- coding: utf-8 -*- # Copyright (C) 2004-2008 Tristan Seligmann and Jonathan Jacobs # Copyright (C) 2012-2014 Bastian Kleineidam # Copyright (C) 2015-2016 Tobias Gruetzmacher from __future__ import absolute_import, division, print_function from re import compile from ..scraper import _BasicScraper from ..util import tagre, getQueryParams class CloneManga(_BasicScraper): _linkTag = tagre("a", "href", r'([^"]+)') prevSearch = compile(_linkTag + tagre("img", "src", r"previous\.gif")) nextSearch = compile(_linkTag + tagre("img", "src", r"next\.gif")) latestSearch = compile(_linkTag + tagre("img", "src", r"last\.gif")) help = 'Index format: n' def __init__(self, name, shortName, imageFolder=None, lastStrip=None): super(CloneManga, self).__init__('CloneManga/' + name) _url = 'http://manga.clone-army.org' self.url = '%s/%s.php' % (_url, shortName) if imageFolder is None: imageFolder = shortName self.stripUrl = self.url + '?page=%s' self.imageSearch = compile(tagre("img", "src", r'((?:%s/)?%s/[^"]+)' % (_url, imageFolder), after="center")) if lastStrip is None: self.starter = self._starter else: self.url = self.stripUrl % lastStrip def namer(self, image_url, page_url): return '%03d' % int(getQueryParams(page_url)['page'][0]) def _starter(self): # first, try hopping to previous and next comic data = self.getPage(self.url) try: url = self.fetchUrl(self.url, data, self.prevSearch) except ValueError: # no previous link found, try hopping to last comic return self.fetchUrl(self.url, data, self.latestSearch) else: data = self.getPage(url) return self.fetchUrl(url, data, self.nextSearch) @classmethod def getmodules(cls): return [ cls('AprilAndMay', 'anm', imageFolder='AAM'), cls('Kanami', 'kanami'), cls('MomokaCorner', 'momoka'), cls('NanasEverydayLife', 'nana', lastStrip='78'), cls('PaperEleven', 'pxi', imageFolder='papereleven', lastStrip='311'), cls('Tomoyo42sRoom', 't42r'), cls('PennyTribute', 'penny'), ]
mit
Python
14e55d45428c617507c5c161f4d33154849f63a5
Create Endings.py
JLJTECH/TutorialTesting
Edabit/Endings.py
Edabit/Endings.py
#!/usr/bin/env python3 ''' Create a function that adds a string ending to each member in a list. ''' def add_ending(lst, ending): return [i + ending for i in lst]
mit
Python
ab53993b708b3f9cf3b5762664fef58bae99ea20
Add some code to auto-remove Ltac
JasonGross/coq-tools,JasonGross/coq-tools
recursive_remove_ltac.py
recursive_remove_ltac.py
import re __all__ = ["recursively_remove_ltac"] LTAC_REG = re.compile(r'^\s*(?:Local\s+|Global\s+)?Ltac\s+([^\s]+)', re.MULTILINE) def recursively_remove_ltac(statements, exclude_n=3): """Removes any Ltac statement which is not used later in statements. Does not remove any code in the last exclude_n statements.""" reversed_statements = list(reversed(statements)) rtn = reversed_statements[:exclude_n] for statement in reversed_statements[exclude_n:]: match = LTAC_REG.search(statement) if match: ltac_name = match.groups()[0] # search for the name of the tactic, by itself reg = re.compile(r'\b%s\b' % ltac_name, re.MULTILINE) if any(reg.search(other_statement) for other_statement in rtn): rtn.append(statement) else: rtn.append(statement) return list(reversed(rtn))
mit
Python
cd6eebfecab9b93863e7e20acec1ba0481f6b95f
Fix benchmark naming in reporting
freedomtan/tensorflow,gautam1858/tensorflow,gautam1858/tensorflow,frreiss/tensorflow-fred,frreiss/tensorflow-fred,frreiss/tensorflow-fred,karllessard/tensorflow,gautam1858/tensorflow,davidzchen/tensorflow,karllessard/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-pywrap_saved_model,aam-at/tensorflow,Intel-tensorflow/tensorflow,davidzchen/tensorflow,annarev/tensorflow,aam-at/tensorflow,sarvex/tensorflow,aam-at/tensorflow,Intel-Corporation/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-pywrap_tf_optimizer,paolodedios/tensorflow,aldian/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow,cxxgtxy/tensorflow,paolodedios/tensorflow,davidzchen/tensorflow,annarev/tensorflow,gautam1858/tensorflow,annarev/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-pywrap_tf_optimizer,yongtang/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-experimental_link_static_libraries_once,davidzchen/tensorflow,gautam1858/tensorflow,davidzchen/tensorflow,aldian/tensorflow,cxxgtxy/tensorflow,freedomtan/tensorflow,Intel-tensorflow/tensorflow,yongtang/tensorflow,tensorflow/tensorflow,petewarden/tensorflow,petewarden/tensorflow,Intel-Corporation/tensorflow,yongtang/tensorflow,petewarden/tensorflow,yongtang/tensorflow,frreiss/tensorflow-fred,tensorflow/tensorflow-experimental_link_static_libraries_once,paolodedios/tensorflow,karllessard/tensorflow,annarev/tensorflow,petewarden/tensorflow,karllessard/tensorflow,aldian/tensorflow,tensorflow/tensorflow-pywrap_saved_model,paolodedios/tensorflow,Intel-tensorflow/tensorflow,karllessard/tensorflow,gautam1858/tensorflow,davidzchen/tensorflow,freedomtan/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,cxxgtxy/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-pywrap_saved_model,aam-at/tensorflow,davidzchen/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,freedomtan/tensorflow,freedomtan/tensorflow,aldian/tensorflow,Intel-Corporation/tensorflow,freedomtan/tensorflow,paolodedios/tensorflow,Intel-tensorflow/tensorflow,gautam1858/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,annarev/tensorflow,yongtang/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,Intel-tensorflow/tensorflow,cxxgtxy/tensorflow,gautam1858/tensorflow,Intel-Corporation/tensorflow,gautam1858/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,Intel-Corporation/tensorflow,petewarden/tensorflow,yongtang/tensorflow,cxxgtxy/tensorflow,davidzchen/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow,tensorflow/tensorflow,cxxgtxy/tensorflow,aldian/tensorflow,Intel-Corporation/tensorflow,aam-at/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,aldian/tensorflow,sarvex/tensorflow,karllessard/tensorflow,freedomtan/tensorflow,petewarden/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,Intel-tensorflow/tensorflow,sarvex/tensorflow,aam-at/tensorflow,freedomtan/tensorflow,aldian/tensorflow,tensorflow/tensorflow,sarvex/tensorflow,aam-at/tensorflow,karllessard/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,aam-at/tensorflow,davidzchen/tensorflow,karllessard/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow,aam-at/tensorflow,tens
orflow/tensorflow-pywrap_saved_model,sarvex/tensorflow,yongtang/tensorflow,tensorflow/tensorflow,frreiss/tensorflow-fred,tensorflow/tensorflow-pywrap_tf_optimizer,aam-at/tensorflow,Intel-tensorflow/tensorflow,paolodedios/tensorflow,Intel-Corporation/tensorflow,petewarden/tensorflow,annarev/tensorflow,freedomtan/tensorflow,petewarden/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-pywrap_saved_model,frreiss/tensorflow-fred,tensorflow/tensorflow-pywrap_tf_optimizer,gautam1858/tensorflow,annarev/tensorflow,petewarden/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-pywrap_tf_optimizer,annarev/tensorflow,yongtang/tensorflow,Intel-Corporation/tensorflow,frreiss/tensorflow-fred,annarev/tensorflow,sarvex/tensorflow,paolodedios/tensorflow,frreiss/tensorflow-fred,karllessard/tensorflow,petewarden/tensorflow,yongtang/tensorflow,frreiss/tensorflow-fred,tensorflow/tensorflow-experimental_link_static_libraries_once,freedomtan/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,aam-at/tensorflow,yongtang/tensorflow,aam-at/tensorflow,petewarden/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,gautam1858/tensorflow,frreiss/tensorflow-fred,sarvex/tensorflow,cxxgtxy/tensorflow,tensorflow/tensorflow-pywrap_saved_model,sarvex/tensorflow,karllessard/tensorflow,petewarden/tensorflow,davidzchen/tensorflow,paolodedios/tensorflow,freedomtan/tensorflow,Intel-tensorflow/tensorflow,cxxgtxy/tensorflow,paolodedios/tensorflow,aldian/tensorflow,davidzchen/tensorflow,tensorflow/tensorflow,frreiss/tensorflow-fred,karllessard/tensorflow,freedomtan/tensorflow,gautam1858/tensorflow,davidzchen/tensorflow,frreiss/tensorflow-fred,yongtang/tensorflow,annarev/tensorflow,annarev/tensorflow
tensorflow/python/eager/benchmarks_test_base.py
tensorflow/python/eager/benchmarks_test_base.py
# Copyright 2017 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== r"""Benchmark base to run and report benchmark results.""" from __future__ import absolute_import as _absolute_import from __future__ import division as _division from __future__ import print_function as _print_function from tensorflow.python.eager import test class MicroBenchmarksBase(test.Benchmark): """Run and report benchmark results.""" def run_report(self, run_benchmark, func, num_iters, execution_mode=None): """Run and report benchmark results.""" total_time = run_benchmark(func, num_iters, execution_mode) mean_us = total_time * 1e6 / num_iters extras = { "examples_per_sec": float("{0:.3f}".format(num_iters / total_time)), "us_per_example": float("{0:.3f}".format(total_time * 1e6 / num_iters)) } benchmark_name = self._get_benchmark_name() self.report_benchmark( iters=num_iters, wall_time=mean_us, extras=extras, name=benchmark_name)
# Copyright 2017 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== r"""Benchmark base to run and report benchmark results.""" from __future__ import absolute_import as _absolute_import from __future__ import division as _division from __future__ import print_function as _print_function from tensorflow.python.eager import test class MicroBenchmarksBase(test.Benchmark): """Run and report benchmark results.""" def run_report(self, run_benchmark, func, num_iters, execution_mode=None): """Run and report benchmark results.""" total_time = run_benchmark(func, num_iters, execution_mode) mean_us = total_time * 1e6 / num_iters extras = { "examples_per_sec": float("{0:.3f}".format(num_iters / total_time)), "us_per_example": float("{0:.3f}".format(total_time * 1e6 / num_iters)) } self.report_benchmark(iters=num_iters, wall_time=mean_us, extras=extras)
apache-2.0
Python
dccb0f292c86da942c5e4493a5e117e5f3047a05
add aiohttp exercise
ianzhengnan/learnpy,ianzhengnan/learnpy
aiohttp_ext.py
aiohttp_ext.py
import asyncio from aiohttp import web async def index(request): await asyncio.sleep(0.5) return web.Response(body=b'<h1>Index</h1>',content_type='text/html') async def hello(request): await asyncio.sleep(0.5) text = '<h1>hello, %s</h1>' % request.match_info['name'] return web.Response(body=text.encode('utf-8'), content_type='text/html') async def init(loop): app = web.Application(loop=loop) app.router.add_route('GET', '/', index) app.router.add_route('GET', '/hello/{name}', hello) srv = await loop.create_server(app.make_handler(), '127.0.0.1', 8000) print('Server started at http://127.0.0.1:8000') return srv loop = asyncio.get_event_loop() loop.run_until_complete(init(loop)) loop.run_forever()
apache-2.0
Python
5788864141c2b635a3c0b8358d868fa7e2b5e789
Create Pedido_Cadastrar.py
AEDA-Solutions/matweb,AEDA-Solutions/matweb,AEDA-Solutions/matweb,AEDA-Solutions/matweb,AEDA-Solutions/matweb
backend/Models/Turma/Pedido_Cadastrar.py
backend/Models/Turma/Pedido_Cadastrar.py
from Framework.Pedido import Pedido from Framework.ErroNoHTTP import ErroNoHTTP class PedidoCadastrar(Pedido): def __init__(self,variaveis_do_ambiente): super(PedidoCadastrar, self).__init__(variaveis_do_ambiente) try: self.letra = self.corpo['letra'] self.id_disciplina = self.corpo['id_disciplina'] except: raise ErroNoHTTP(400) def getLetra(self): return self.letra def getId_disciplina(self): return self.id_disciplina
mit
Python
44e3876d76c7d7b3571c82030ff78260e4ec7e65
Add PCA.py template
christopherjenness/ML-lib
ML/PCA.py
ML/PCA.py
""" Exact principal component analysis (PCA) """ class PCA(object): """ Exact principal component analysis (PCA) """ def __init__(self): return def fit(self, X): return
mit
Python
cd2c959674043fcc3b6261129f57f266539a8658
Add a Python snippet.
niucheng/Snippets,niucheng/Snippets,niucheng/Snippets
Python.py
Python.py
#!/usr/bin/env python # coding: utf-8 """Python snippet """ import os import sys if __name__ == '__main__': if len (sys.argv) == 1: print ("Hi there!") else: print ("Hello, %s!" % sys.argv[1])
mit
Python
65449c60f357eeab5ddc9eb91a468ab1e3719de7
Add dismiss_recommendation example (#35)
googleads/google-ads-python
examples/v0/recommendations/dismiss_recommendation.py
examples/v0/recommendations/dismiss_recommendation.py
# Copyright 2019 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """This example dismisses a given recommendation. To retrieve recommendations for text ads, run get_text_ad_recommendations.py. """ from __future__ import absolute_import import argparse import six import sys import google.ads.google_ads.client def main(client, customer_id, recommendation_id): recommendation_service = client.get_service('RecommendationService') dismiss_recommendation_request = client.get_type( 'DismissRecommendationRequest') dismiss_recommendation_operation = (dismiss_recommendation_request. DismissRecommendationOperation()) dismiss_recommendation_operation.resource_name = ( recommendation_service.recommendation_path( customer_id, recommendation_id)) try: dismissal_response = recommendation_service.dismiss_recommendation( customer_id, [dismiss_recommendation_operation]) except google.ads.google_ads.errors.GoogleAdsException as ex: print('Request with ID "%s" failed with status "%s" and includes the ' 'following errors:' % (ex.request_id, ex.error.code().name)) for error in ex.failure.errors: print('\tError with message "%s".' % error.message) if error.location: for field_path_element in error.location.field_path_elements: print('\t\tOn field: %s' % field_path_element.field_name) sys.exit(1) print('Dismissed recommendation with resource name: "%s".' % dismissal_response.results[0].resource_name) if __name__ == '__main__': # GoogleAdsClient will read the google-ads.yaml configuration file in the # home directory if none is specified. google_ads_client = (google.ads.google_ads.client.GoogleAdsClient .load_from_storage()) parser = argparse.ArgumentParser( description=('Dismisses a recommendation with the given ID.')) # The following argument(s) should be provided to run the example. parser.add_argument('-c', '--customer_id', type=six.text_type, required=True, help='The Google Ads customer ID.') parser.add_argument('-r', '--recommendation_id', type=six.text_type, required=True, help='The recommendation ID.') args = parser.parse_args() main(google_ads_client, args.customer_id, args.recommendation_id)
apache-2.0
Python
8d6ca433d33551cc1fe5c08edcf68ec65e5447b0
Add solution to exercise 3.3.
HenrikSamuelsson/python-crash-course
exercises/chapter_03/exercise_03_03/exercies_03_03.py
exercises/chapter_03/exercise_03_03/exercies_03_03.py
# 3-3 Your Own List transportation = ["mountainbike", "teleportation", "Citroën DS3"] print("A " + transportation[0] + " is good when exercising in the woods.\n") print("The ultimate form of transportation must be " + transportation[1] + ".\n") print("Should I buy a " + transportation[2] + "?\n")
mit
Python
d82ecab372ed22da0b00512294ee6cd3f5fcb012
Add script to reindex datasets.
etalab/ckan-of-worms,etalab/ckan-of-worms
ckanofworms/scripts/reindex.py
ckanofworms/scripts/reindex.py
#! /usr/bin/env python # -*- coding: utf-8 -*- # CKAN-of-Worms -- A logger for errors found in CKAN datasets # By: Emmanuel Raviart <emmanuel@raviart.com> # # Copyright (C) 2013 Etalab # http://github.com/etalab/ckan-of-worms # # This file is part of CKAN-of-Worms. # # CKAN-of-Worms is free software; you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # CKAN-of-Worms is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. """Reindex objects.""" import argparse import logging import os import sys import paste.deploy from ckanofworms import contexts, environment, model app_name = os.path.splitext(os.path.basename(__file__))[0] log = logging.getLogger(app_name) def main(): parser = argparse.ArgumentParser(description = __doc__) parser.add_argument('config', help = "CKAN-of-Worms configuration file") parser.add_argument('-a', '--all', action = 'store_true', default = False, help = "publish everything") parser.add_argument('-d', '--dataset', action = 'store_true', default = False, help = "publish datasets") parser.add_argument('-g', '--group', action = 'store_true', default = False, help = "publish groups") parser.add_argument('-o', '--organization', action = 'store_true', default = False, help = "publish organizations") parser.add_argument('-s', '--section', default = 'main', help = "Name of configuration section in configuration file") parser.add_argument('-u', '--user', action = 'store_true', default = False, help = "publish accounts") parser.add_argument('-v', '--verbose', action = 'store_true', default = False, help = "increase output verbosity") args = parser.parse_args() logging.basicConfig(level = logging.DEBUG if args.verbose else logging.WARNING, stream = sys.stdout) site_conf = paste.deploy.appconfig('config:{0}#{1}'.format(os.path.abspath(args.config), args.section)) environment.load_environment(site_conf.global_conf, site_conf.local_conf) ctx = contexts.null_ctx if args.all or args.dataset: for dataset in model.Dataset.find(): dataset.compute_weight() dataset.compute_timestamp() if dataset.save(ctx, safe = False): log.info(u'Updated dataset: {}'.format(dataset.name)) return 0 if __name__ == "__main__": sys.exit(main())
agpl-3.0
Python
3a5df951bb9d12843d46107d0fbca9bd3d9105b3
Change the name of GUI.py to main_window.py, and change the __init__ function, so that board object and the size of the window can be passed when a frame object is created.
serenafr/My2048
src/main_window.py
src/main_window.py
import wx import wx.lib.stattext as ST import board class My2048_wx(wx.Frame): def __init__(self, parent, id, title, size, board_object): super(My2048_wx, self).__init__(parent, title = title, size = size) self.board_object = board_object self.Construct() def Construct(self): SIZE = 4; '''panel_box is the container that contains all the widgets''' panel_box = wx.BoxSizer(wx.VERTICAL) '''header is the top parts which holds the name of the game, current score, and the best score''' header = wx.BoxSizer(wx.VERTICAL) '''upper_header contains three parts: game_name(2048), a boxsizer contains the current score information and another boxsizer contains the best score information All three parts are lined HORIZONTAL''' upper_header = wx.BoxSizer(wx.HORIZONTAL) game_name = ST.GenStaticText(self, -1, label = '2048', size = (100, 30), style = wx.ALIGN_CENTRE) upper_header.Add(game_name, flag = wx.EXPAND|wx.RIGHT, border = 60) upper_header_score = wx.BoxSizer(wx.VERTICAL) score_str = ST.GenStaticText(self, -1, label = 'SCORE', size = (50, 20), style = wx.ALIGN_CENTRE) score_str.SetBackgroundColour((187, 173, 160)) score = ST.GenStaticText(self, -1, label = '0', size = (50, 20), style = wx.ALIGN_CENTRE) score.SetForegroundColour('white') score.SetBackgroundColour((187, 173, 160)) upper_header_score.AddMany([score_str, score]) upper_header.Add(upper_header_score, flag = wx.EXPAND|wx.LEFT|wx.RIGHT, border = 10) upper_header_best = wx.GridSizer(2, 1) best_str = ST.GenStaticText(self, -1, label = 'BEST', size = (50, 20), style = wx.ALIGN_CENTRE) best_str.SetBackgroundColour((187, 173, 160)) best = ST.GenStaticText(self, -1, label = '0', size = (50, 20), style = wx.ALIGN_CENTRE) best.SetForegroundColour('white') best.SetBackgroundColour((187, 173, 160)) upper_header_best.AddMany([best_str, best]) upper_header.Add(upper_header_best) header.Add(upper_header) '''lower_header contains a sub_title and a button that allows users to start a new game''' lower_header = wx.BoxSizer(wx.HORIZONTAL) sub_title = ST.GenStaticText(self, -1, label = 'Join the numbers and get to the 2048 tile!') lower_header.Add(sub_title, flag = wx.EXPAND|wx.RIGHT, border = 5) new_game_button = wx.Button(self, -1, label = 'NEW GAME') lower_header.Add(new_game_button) header.Add(lower_header) panel_box.Add(header, flag = wx.EXPAND|wx.LEFT|wx.RIGHT|wx.TOP|wx.BOTTOM, border = 10) '''play_board is a container where all the tiles are put ''' play_board = wx.GridSizer(SIZE, SIZE) '''Set a list to store the numbers appear in different labels''' tile_list = [] '''Get tiles information from board''' tile_list = self.board_object.get_tiles() text_list = [] for i in range(0, SIZE): for j in range(0, SIZE): if tile_list[i][j] == None: text_list.append('_') else: text_list.append(str(tile_list[i][j])) '''This list is used to store the wx labels with information ST.GenStaticText(self, -1, label = text_list[i]) And put all the numbers from the board into the GUI''' label_list = [] for i in range(0, SIZE * SIZE): label_list.append(ST.GenStaticText(self, -1, label = text_list[i], size = (60, 30), style = wx.ALIGN_CENTRE)) label_list[i].SetBackgroundColour((238, 228, 218)) play_board.Add(label_list[i], flag = wx.EXPAND|wx.RIGHT|wx.TOP, border = 10) panel_box.Add(play_board, flag = wx.EXPAND|wx.TOP|wx.LEFT, border = 10) '''User can use these keys to control the move and merge of the tile numbers''' ctrl_keys = wx.BoxSizer(wx.VERTICAL) up_box = wx.BoxSizer() up_button = wx.Button(self, -1, label = 'UP', size = (60, 30)) up_box.Add(up_button, flag = wx.EXPAND|wx.LEFT, border = 110) ctrl_keys.Add(up_box) left_right_box = wx.GridSizer(1, 2) left_button = wx.Button(self, -1, label = 'LEFT', size = (60, 30)) right_button = wx.Button(self, -1, label = 'RIGHT', size = (60, 30)) left_right_box.Add(left_button, flag = wx.LEFT, border = 80) left_right_box.Add(right_button, flag = wx.RIGHT) ctrl_keys.Add(left_right_box) down_box = wx.BoxSizer() down_button = wx.Button(self, -1, label = 'DOWN', size = (60, 30)) down_box.Add(down_button, flag = wx.EXPAND|wx.LEFT, border = 110) ctrl_keys.Add(down_box) panel_box.Add(ctrl_keys, flag = wx.EXPAND|wx.ALIGN_CENTRE|wx.TOP, border = 10) self.SetSizer(panel_box) self.Show(True) if __name__ == "__main__": app = wx.App() board_object = board.Board(2) frame = My2048_wx(None, -1, '2048', (380, 420), board_object) app.MainLoop()
mit
Python
ab99892d974503f2e0573a8937dc8f1b085b0014
Add stringbuilder module
nerevu/riko,nerevu/riko
modules/pipestrconcat.py
modules/pipestrconcat.py
# pipestrconcat.py #aka stringbuilder # from pipe2py import util def pipe_strconcat(context, _INPUT, conf, **kwargs): """This source builds a string and yields it forever. Keyword arguments: context -- pipeline context _INPUT -- not used conf: part -- parts Yields (_OUTPUT): string """ s = "" for part in conf['part']: if "subkey" in part: pass #todo get from _INPUT e.g {u'type': u'text', u'subkey': u'severity'} else: s += util.get_value(part, kwargs) while True: yield s
mit
Python
e7053da76c14f12bfc02992ab745aac193e7c869
Create compareLists.py
Programing4Mathematics/Beginning
compareLists.py
compareLists.py
def unique(a): """ return the list with duplicate elements removed """ return list(set(a)) def intersect(a, b): """ return the intersection of two lists """ return list(set(a) & set(b)) def union(a, b): """ return the union of two lists """ return list(set(a) | set(b)) if __name__ == "__main__": a = [0,1,2,0,1,2,3,4,5,6,7,8,9] b = [5,6,7,8,9,10,11,12,13,14] print unique(a) print intersect(a, b) print union(a, b)
apache-2.0
Python
f347e84d4488d635d6b4a1eaf93855631f42c410
Add simple ant system based solver
Cosiek/KombiVojager
solvers/AntSystem.py
solvers/AntSystem.py
#!/usr/bin/env python # encoding: utf-8 from random import shuffle, random from itertools import permutations from base_solver import BaseSolver INF = float('inf') class Ant(object): route = [] score = INF def __init__(self, route): self.route = route def evaluate(self, task): start = task.start.name finish = task.finish.name route = [start, ] + self.route + [finish, ] self.score = task.get_path_distance(route) def update_trail(self, total_distance, arcs, start, finish): power = self.score / total_distance # update arcs on route for i in range(1, len(self.route)): arc = (self.route[i-1], self.route[i]) arcs[arc] += power # remember to update begining and end arcs arcs[(start, self.route[0])] += power arcs[(self.route[0], finish)] += power def run(self, arcs, start): route = [start,] unused_nodes = set(self.route) # use shuffled arcs list to prevent privleged arcs shuffled_arcs = arcs.keys() shuffle(shuffled_arcs) while unused_nodes: power_from_origin = 0.0 tmp_arcs = {} for arc, power in arcs.iteritems(): if arc[0] == route[-1] and arc[1] in unused_nodes: tmp_arcs[arc] = power power_from_origin += power n = random() for arc, power in tmp_arcs.items(): if power_from_origin == 0: break elif power / power_from_origin > n: break route.append(arc[1]) unused_nodes.remove(arc[1]) self.route = route[1:] class AntSystemSolver(BaseSolver): deterministic = False # genetic alghoritm settings ants_count = 50 vaporize_factor = 0.5 # helpers best_route = [] best_score = INF def run_search(self): # TODO - adjust settings acording to preblems complexity # genetate some random solutions self.ants = self.generate_initial_ants(self.task) # prepare data for pheromone trails self.prepare_arcs() # check stop condition (run loop) self.cycles = 0 while self.continue_(): # evaluate each ants solution self.evaluate_ants() # get all the best self.update_best_solutions() # update pheromone trail self.update_pheromone_trails() self.vaporize() # release the ants self.run_ants() self.cycles += 1 route = ([self.task.start.name] + self.best_route + [self.task.finish.name]) return route, self.best_score, self.cycles def generate_initial_ants(self, task): nodes = [node.name for node in task.mid_nodes] ants = [] for i in range(self.ants_count): route = nodes[:] shuffle(route) ants.append(Ant(route)) return ants def prepare_arcs(self): nodes = self.task.all_nodes.keys() self.arcs = {x: 0 for x in permutations(nodes, 2)} def continue_(self): return self.cycles <= 100 def evaluate_ants(self): for ant in self.ants: ant.evaluate(self.task) def update_pheromone_trails(self): total_distance = 0 for ant in self.ants: total_distance += ant.score start = self.task.start.name finish = self.task.finish.name for ant in self.ants: ant.update_trail(total_distance, self.arcs, start, finish) def vaporize(self): for arc, power in self.arcs.iteritems(): if power: self.arcs[arc] = self.get_vaporized_power(power) def get_vaporized_power(self, power): return max(0, power * self.vaporize_factor) def run_ants(self): start = self.task.start.name for ant in self.ants: ant.run(self.arcs, start) def update_best_solutions(self): for ant in self.ants: if ant.score < self.best_score: self.best_score = ant.score self.best_route = ant.route
mit
Python
0ca69bd8c29d123702e1934863d5d8a8c0d1703b
Create parse.py
V1Soft/Essential
parse.py
parse.py
# Parse the Essential Script def parse(source): parsedScript = [[]] word = '' prevChar = '' inArgs = False inList = False inString = False inQuote = False for char in source: if char == '(' and not inString and not inQuote: parsedScript.append([]) parsedScript[-1].append('args') if word: parsedScript[-1].append(word) word = '' elif char in (';', '\n') and not inString and not inQuote: if word: parsedScript[-1].append(word) word = '' parsedScript.append([]) elif char == '[': parsedScript.append([]) parsedScript[-1].append('list') if word: parsedScript[-1].append(word) word = '' elif char in (')', ']') and not inString and not inQuote: if word: parsedScript[-1].append(word) word = '' temp = parsedScript.pop() parsedScript[-1].append(temp) elif char in (' ', '\t') and not inString and not inQuote: if word: parsedScript[-1].append(word) word = '' elif char == '\"' and not prevChar == '\\': inString = not inString elif char == '\'' and not prevChar == '\\': inQuote = not inQuote elif char in ('+', '-', '*', '/'): if word: parsedScript[-1].append(word) word = '' parsedScript[-1].append(char) else: word += char prevChar = char if word: parsedScript[-1].append(word) word = '' reparsedScript = [[]] # Parse multi-line code until 'end' for word in parsedScript: if word: if word[0] in ('subroutine', 'if', 'for', 'while'): reparsedScript.append([]) reparsedScript[-1].append(word) elif word[0] == 'end': temp = reparsedScript.pop() reparsedScript[-1].append(temp) else: reparsedScript[-1].append(word) return reparsedScript[0]
bsd-3-clause
Python
c4b7bd5b74aaba210a05f946d59c98894b60b21f
Add test for pixel CLI
ceholden/yatsm,c11/yatsm,ceholden/yatsm,valpasq/yatsm,c11/yatsm,valpasq/yatsm
tests/cli/test_pixel.py
tests/cli/test_pixel.py
""" Test ``yatsm line`` """ import os from click.testing import CliRunner import pytest from yatsm.cli.main import cli @pytest.mark.skipif("DISPLAY" not in os.environ, reason="requires display") def test_cli_pixel_pass_1(example_timeseries): """ Correctly run for one pixel """ runner = CliRunner() result = runner.invoke( cli, ['-v', 'pixel', '--band', '5', '--plot', 'TS', '--style', 'ggplot', example_timeseries['config'], '1', '1' ]) assert result.exit_code == 0
mit
Python
26dd65a282ada1e79309c4ff35cee4e49b086b66
Create part3.py
PythonProgramming/pygame-tutorial-series
part3.py
part3.py
import pygame pygame.init() display_width = 800 display_height = 600 black = (0,0,0) white = (255,255,255) red = (255,0,0) gameDisplay = pygame.display.set_mode((display_width,display_height)) pygame.display.set_caption('A bit Racey') clock = pygame.time.Clock() carImg = pygame.image.load('racecar.png') def car(x,y): gameDisplay.blit(carImg,(x,y)) x = (display_width * 0.45) y = (display_height * 0.8) x_change = 0 crashed = False while not crashed: for event in pygame.event.get(): if event.type == pygame.QUIT: crashed = True if event.type == pygame.KEYDOWN: if event.key == pygame.K_LEFT: x_change = -5 if event.key == pygame.K_RIGHT: x_change = 5 if event.type == pygame.KEYUP: if event.key == pygame.K_LEFT or event.key == pygame.K_RIGHT: x_change = 0 x += x_change gameDisplay.fill(white) car(x,y) pygame.display.update() clock.tick(60) pygame.quit() quit()
mit
Python
98663d644b90e0e4c6188555501bcbc2b42d391a
Create part4.py
PythonProgramming/pygame-tutorial-series
part4.py
part4.py
import pygame pygame.init() display_width = 800 display_height = 600 black = (0,0,0) white = (255,255,255) red = (255,0,0) car_width = 73 gameDisplay = pygame.display.set_mode((display_width,display_height)) pygame.display.set_caption('A bit Racey') clock = pygame.time.Clock() carImg = pygame.image.load('racecar.png') def car(x,y): gameDisplay.blit(carImg,(x,y)) def game_loop(): x = (display_width * 0.45) y = (display_height * 0.8) x_change = 0 gameExit = False while not gameExit: for event in pygame.event.get(): if event.type == pygame.QUIT: gameExit = True if event.type == pygame.KEYDOWN: if event.key == pygame.K_LEFT: x_change = -5 if event.key == pygame.K_RIGHT: x_change = 5 if event.type == pygame.KEYUP: if event.key == pygame.K_LEFT or event.key == pygame.K_RIGHT: x_change = 0 x += x_change gameDisplay.fill(white) car(x,y) if x > display_width - car_width or x < 0: gameExit = True pygame.display.update() clock.tick(60) game_loop() pygame.quit() quit()
mit
Python
4727d86e5207dac3f53018b4ff2d1d0ade97d4e6
Add http_json external pillar (#32741)
saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt
salt/pillar/http_json.py
salt/pillar/http_json.py
# -*- coding: utf-8 -*- """ A module that adds data to the Pillar structure retrieved by an http request Configuring the HTTP_JSON ext_pillar ==================================== Set the following Salt config to setup Foreman as external pillar source: .. code-block:: json ext_pillar: - http_json: url: http://example.com/api/minion_id ::TODO:: username: username password: password Module Documentation ==================== """ from __future__ import absolute_import # Import python libs import logging def ext_pillar(minion_id, pillar, # pylint: disable=W0613 url=None): """ Read pillar data from HTTP response. :param url String to make request :returns dict with pillar data to add :returns empty if error """ # Set up logging log = logging.getLogger(__name__) data = __salt__['http.query'](url=url, decode=True, decode_type='json') if 'dict' in data: return data['dict'] log.error('Error caught on query to' + url + '\nMore Info:\n') for k, v in data.iteritems(): log.error(k + ' : ' + v) return {}
apache-2.0
Python
d2e5c2d20cf7e07f2dc8288d303e8f4088d5877a
Update module!
HubbeKing/Hubbot_Twisted
Modules/Update.py
Modules/Update.py
from ModuleInterface import ModuleInterface from IRCResponse import IRCResponse, ResponseType import GlobalVars import re import subprocess class Module(ModuleInterface): triggers = ["update"] help = "update - pulls the latest code from GitHub" def onTrigger(self, Hubbot, message): if message.User.Name not in GlobalVars.admins: return IRCResponse(ResponseType.Say, "Only my admins can update me!", message.ReplyTo) subprocess.call(["git", "fetch"]) output = subprocess.check_output(["git", "whatchanged", "..origin/master"]) changes = re.findall('\n\n\s{4}(.+?)\n\n', output) if len(changes) == 0: return IRCResponse(ResponseType.Say, "The bot is already up to date.", message.ReplyTo) changes = list(reversed(changes)) response = "New Commits: {}".format(" | ".join(changes)) subprocess.call(["git", "pull"]) return IRCResponse(ResponseType.Say, response, message.ReplyTo)
mit
Python
555cfbb827532c54598cecde01ef4e6e5e07714d
Create a test for re-evaluating external tasks while a workflow is running.
dlstadther/luigi,ViaSat/luigi,adaitche/luigi,humanlongevity/luigi,PeteW/luigi,Houzz/luigi,ZhenxingWu/luigi,springcoil/luigi,ContextLogic/luigi,ivannotes/luigi,percyfal/luigi,belevtsoff/luigi,Yoone/luigi,meyerson/luigi,tuulos/luigi,dkroy/luigi,Houzz/luigi,Dawny33/luigi,theoryno3/luigi,JackDanger/luigi,Magnetic/luigi,bowlofstew/luigi,Tarrasch/luigi,h3biomed/luigi,stephenpascoe/luigi,gpoulin/luigi,dylanjbarth/luigi,slvnperron/luigi,SeedScientific/luigi,edx/luigi,graingert/luigi,jamesmcm/luigi,JackDanger/luigi,casey-green/luigi,laserson/luigi,dlstadther/luigi,mfcabrera/luigi,ZhenxingWu/luigi,wakamori/luigi,bowlofstew/luigi,glenndmello/luigi,samuell/luigi,casey-green/luigi,huiyi1990/luigi,h3biomed/luigi,sahitya-pavurala/luigi,torypages/luigi,samuell/luigi,pkexcellent/luigi,realgo/luigi,rizzatti/luigi,percyfal/luigi,17zuoye/luigi,dylanjbarth/luigi,h3biomed/luigi,ehdr/luigi,rizzatti/luigi,walkers-mv/luigi,pkexcellent/luigi,leafjungle/luigi,torypages/luigi,drincruz/luigi,penelopy/luigi,moandcompany/luigi,harveyxia/luigi,dhruvg/luigi,theoryno3/luigi,republic-analytics/luigi,fw1121/luigi,penelopy/luigi,hellais/luigi,foursquare/luigi,hadesbox/luigi,joeshaw/luigi,kalaidin/luigi,pkexcellent/luigi,javrasya/luigi,linsomniac/luigi,DomainGroupOSS/luigi,samuell/luigi,jw0201/luigi,fabriziodemaria/luigi,pkexcellent/luigi,edx/luigi,jamesmcm/luigi,dlstadther/luigi,sahitya-pavurala/luigi,ContextLogic/luigi,JackDanger/luigi,aeron15/luigi,JackDanger/luigi,altaf-ali/luigi,mbruggmann/luigi,fabriziodemaria/luigi,mbruggmann/luigi,adaitche/luigi,ehdr/luigi,Wattpad/luigi,hellais/luigi,wakamori/luigi,kalaidin/luigi,dstandish/luigi,realgo/luigi,Magnetic/luigi,stroykova/luigi,joeshaw/luigi,republic-analytics/luigi,vine/luigi,soxofaan/luigi,walkers-mv/luigi,kalaidin/luigi,tuulos/luigi,stroykova/luigi,dhruvg/luigi,lichia/luigi,gpoulin/luigi,neilisaac/luigi,laserson/luigi,Yoone/luigi,17zuoye/luigi,mfcabrera/luigi,drincruz/luigi,anyman/luigi,realgo/luigi,ivannotes/luigi,ivannotes/luigi,hellais/luigi,alkemics/luigi,linsomniac/luigi,LamCiuLoeng/luigi,ChrisBeaumont/luigi,oldpa/luigi,ThQ/luigi,alkemics/luigi,javrasya/luigi,fabriziodemaria/luigi,thejens/luigi,linearregression/luigi,dstandish/luigi,bmaggard/luigi,stroykova/luigi,stroykova/luigi,harveyxia/luigi,ThQ/luigi,rayrrr/luigi,moritzschaefer/luigi,SeedScientific/luigi,stephenpascoe/luigi,dkroy/luigi,graingert/luigi,hadesbox/luigi,qpxu007/luigi,aeron15/luigi,Dawny33/luigi,SkyTruth/luigi,foursquare/luigi,dkroy/luigi,stephenpascoe/luigi,moandcompany/luigi,vine/luigi,rizzatti/luigi,walkers-mv/luigi,Magnetic/luigi,ThQ/luigi,spotify/luigi,samepage-labs/luigi,LamCiuLoeng/luigi,dhruvg/luigi,PeteW/luigi,oldpa/luigi,soxofaan/luigi,bowlofstew/luigi,ChrisBeaumont/luigi,jamesmcm/luigi,samuell/luigi,altaf-ali/luigi,wakamori/luigi,sahitya-pavurala/luigi,qpxu007/luigi,h3biomed/luigi,lungetech/luigi,DomainGroupOSS/luigi,fw1121/luigi,lungetech/luigi,moritzschaefer/luigi,17zuoye/luigi,linsomniac/luigi,belevtsoff/luigi,dylanjbarth/luigi,dstandish/luigi,Magnetic/luigi,qpxu007/luigi,drincruz/luigi,joeshaw/luigi,PeteW/luigi,lichia/luigi,ViaSat/luigi,SkyTruth/luigi,casey-green/luigi,samepage-labs/luigi,casey-green/luigi,adaitche/luigi,hadesbox/luigi,moandcompany/luigi,Dawny33/luigi,Yoone/luigi,mbruggmann/luigi,linsomniac/luigi,ViaSat/luigi,lungetech/luigi,riga/luigi,springcoil/luigi,upworthy/luigi,penelopy/luigi,hadesbox/luigi,vine/luigi,PeteW/luigi,kevhill/luigi,riga/luigi,samepage-labs/luigi,moritzschaefer/luigi,dylanjbarth/luigi,republic-analytics/luigi,anyman/luigi,edx/luigi,Tarrasch/luigi,glenndm
ello/luigi,ehdr/luigi,sahitya-pavurala/luigi,aeron15/luigi,linearregression/luigi,SeedScientific/luigi,dkroy/luigi,stephenpascoe/luigi,fw1121/luigi,fw1121/luigi,ivannotes/luigi,graingert/luigi,mbruggmann/luigi,SkyTruth/luigi,alkemics/luigi,soxofaan/luigi,linearregression/luigi,neilisaac/luigi,gpoulin/luigi,ChrisBeaumont/luigi,walkers-mv/luigi,aeron15/luigi,alkemics/luigi,springcoil/luigi,theoryno3/luigi,oldpa/luigi,ThQ/luigi,riga/luigi,harveyxia/luigi,realgo/luigi,tuulos/luigi,meyerson/luigi,dstandish/luigi,wakamori/luigi,qpxu007/luigi,Tarrasch/luigi,slvnperron/luigi,bmaggard/luigi,bmaggard/luigi,springcoil/luigi,harveyxia/luigi,dhruvg/luigi,soxofaan/luigi,ZhenxingWu/luigi,spotify/luigi,kevhill/luigi,vine/luigi,rayrrr/luigi,foursquare/luigi,slvnperron/luigi,LamCiuLoeng/luigi,samepage-labs/luigi,altaf-ali/luigi,torypages/luigi,Yoone/luigi,linearregression/luigi,rayrrr/luigi,foursquare/luigi,ContextLogic/luigi,Houzz/luigi,upworthy/luigi,lungetech/luigi,bowlofstew/luigi,drincruz/luigi,humanlongevity/luigi,SkyTruth/luigi,rizzatti/luigi,altaf-ali/luigi,glenndmello/luigi,lichia/luigi,Wattpad/luigi,SeedScientific/luigi,jw0201/luigi,penelopy/luigi,javrasya/luigi,upworthy/luigi,laserson/luigi,DomainGroupOSS/luigi,DomainGroupOSS/luigi,mfcabrera/luigi,anyman/luigi,thejens/luigi,belevtsoff/luigi,huiyi1990/luigi,humanlongevity/luigi,neilisaac/luigi,Dawny33/luigi,riga/luigi,jw0201/luigi,anyman/luigi,jamesmcm/luigi,Houzz/luigi,fabriziodemaria/luigi,torypages/luigi,percyfal/luigi,oldpa/luigi,humanlongevity/luigi,republic-analytics/luigi,ContextLogic/luigi,kevhill/luigi,neilisaac/luigi,thejens/luigi,tuulos/luigi,joeshaw/luigi,meyerson/luigi,ViaSat/luigi,glenndmello/luigi,ehdr/luigi,upworthy/luigi,graingert/luigi,ZhenxingWu/luigi,kevhill/luigi,huiyi1990/luigi,17zuoye/luigi,edx/luigi,bmaggard/luigi,moandcompany/luigi,leafjungle/luigi,javrasya/luigi,gpoulin/luigi,laserson/luigi,Wattpad/luigi,thejens/luigi,huiyi1990/luigi,Tarrasch/luigi,slvnperron/luigi,moritzschaefer/luigi,theoryno3/luigi,meyerson/luigi,percyfal/luigi,jw0201/luigi,lichia/luigi,belevtsoff/luigi,hellais/luigi,LamCiuLoeng/luigi,spotify/luigi,dlstadther/luigi,adaitche/luigi,kalaidin/luigi,leafjungle/luigi,mfcabrera/luigi,rayrrr/luigi,ChrisBeaumont/luigi,spotify/luigi,leafjungle/luigi
test/worker_external_task_test.py
test/worker_external_task_test.py
# Copyright (c) 2015 # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. import luigi import unittest from mock import Mock, patch from helpers import with_config mock_external_task = Mock(spec=luigi.ExternalTask) mock_external_task.complete.side_effect = [False, False, True] class TestTask(luigi.Task): """ Requires a single file dependency """ def __init__(self): super(TestTask, self).__init__() self.has_run = False def requires(self): return mock_external_task def output(self): mock_target = Mock(spec=luigi.Target) # the return is False so that this task will be scheduled mock_target.exists.return_value = False return mock_target def run(self): self.has_run = True class WorkerExternalTaskTest(unittest.TestCase): @with_config({'core': {'retry-external-tasks': 'true'}}) def test_external_dependency_satisified_later(self): """ Test that an external dependency that is not `complete` when luigi is invoked, but \ becomes `complete` while the workflow is executing is re-evaluated. """ assert luigi.configuration.get_config().getboolean('core', 'retry-external-tasks', False) == True test_task = TestTask() luigi.build([test_task], local_scheduler=True) assert test_task.has_run == True assert mock_external_task.complete.call_count == 3 if __name__ == '__main__': unittest.main()
apache-2.0
Python
6232295511ee780a6438c9cdfdcf576cb4f3d8e8
Add python script to convert .OBJ data for import
huttarl/slitherlink3D,huttarl/slitherlink3D,huttarl/slitherlink3D
obj2json.py
obj2json.py
# obj2json: Convert OBJ export from polyHedronisme to Slitherlink3D JSON data import sys, json faces = [] vertices = [] name = "unknown" num_edges = 0 class ParseError(SyntaxError): """Raised when there's trouble parsing the input.""" pass def process(line): # print("Processing", line) if line.startswith("#") or len(line) == 0: # ignore comments and blank lines pass elif line.startswith("g") or line.startswith("o"): # "o" for object # "g" for polygon group input_group(line) # Distinguish "v" from "vt", "vn", "vp" elif line.startswith("v "): input_vertex(line) elif line.startswith("f"): input_face(line) else: # We could raise warnings here. But it's probably not worth it. pass def input_group(line): global name s = line.split() if len(s) > 1: name = s[1] def input_vertex(line): global vertices # Not strictly necessary, as currently implemented. s = line.split() if len(s) < 4: raise ParseError("Malformed vertex line: '%s'" % line) else: vertex = [float(coord) for coord in s[1:]] # print("Appending vertex ", vertex) vertices.append(vertex) def input_face(line): global faces, num_edges # 1. Split into vertex "clusters" delimited by whitespace # 2. Split clusters delimited by "/" and take only the first. # 3. Convert to integer and subtract 1, because indices are 1-based. vx_indices = [int(index_group.split('/')[0]) - 1 for index_group in line.split()[1:]] if len(vx_indices) < 3: raise ParseError("Invalid face line (not enough vertices): " + line) # print("Appending face ", vx_indices) faces.append(vx_indices) num_edges += len(vx_indices) / 2.0 # Because each edge belongs to 2 faces. # TODO maybe: Catch cases where a vertex index is out of bounds. def output(): # Could use indent=2 here but it's not what I want. print(json.dumps({ "id": name, "name": name, "nCells": len(faces), # "cell" == "face" "nEdges": int(num_edges), "nVertices": len(vertices), # TODO: filter vertices and faces "vertices": vertices, "faces": faces, "puzzles": [] })) def main(): try: with open(sys.argv[1], "r") as f: for line in f: process(line.rstrip()) if num_edges + 2 != len(faces) + len(vertices): raise ParseError("F + V != E + 2: %d + %d != %0.1f + 2" % (len(faces), len(vertices), num_edges)) output() except ParseError as e: print("Parse error: %s" % e.args) sys.exit(1) except IOError as e: print("Couldn't read file: %s" % e) sys.exit(1) if __name__ == "__main__": main()
mit
Python