text
stringlengths
0
1.05M
meta
dict
7#!/usr/bin/env python3 # -*- coding: utf-8 -*- """ Created on Thu Feb 15 13:38:48 2018 @author: BallBlueMeercat """ import numpy as np from scipy.integrate import odeint import firstderivs #import lnprior # Standard cosmological parameters. H0 = 1 # Hubble parameter at t=now tH = 1.0/H0 # Hubble time c_over_H0 = 4167 * 10**6 # c/H0 in parsecs def odesolve(gamma,m,de,zpicks): """ Takes in: gamma = interaction constant; m = e_m(t)/ec(t0) at t=t0; de = e_de(t)/ec(t0) at t=t0. Returns: z = numpoints number of redshifts zmin<z<zmax; dlpc = luminosity distance in pc. """ # print('@@ odesolve has been called') # Time (in 1/H0) to integrate until. If this time isn't long # enough for 'a' to decrease to a_d then stoptime will be extended # by time until a_d is reached. # 0.665 matter only, 0.96 standard m+de time = 0.8 # Initial conditions at time = t0. a0 = 1.0 # scale factor a_dot0 = 1.0 # speed of expansion e_dash0m = m # e_m(t)/ec(t0) e_dash0de = de # e_de(t)/ec(t0) z0 = 0 dl0 = 0 # ODE solver parameters: abserr = 1.0e-8 relerr = 1.0e-6 numpoints = 100 stoptime = 0 # Integrating back in time as time now is t0. z = np.array([0]) stoptime -= time # theta = gamma, m, de # lp = lnprior.lnprior(theta) # if not np.isfinite(lp): # time += 500 if time > 0.9: print('time in odesolve is: %s, gamma = %s, m = %s, de = %s' %(time, gamma, m, de)) # Create time samples for the ODE solver. t = [stoptime * tH * float(i) / (numpoints - 1) for i in range(numpoints)] # Pack up the initial conditions and eq of state parameters. v0 = [a0, a_dot0, e_dash0m, e_dash0de, z0, dl0] # Call the ODE solver. maxstep=5000000 added later to try and avoid vsol = odeint(firstderivs.firstderivs, v0, t, args=(gamma,), atol=abserr, rtol=relerr, mxstep=5000000) # Remove unwanted results which are too close to big bang from the plot. 
# Separate results into their own arrays: a = vsol[:,0] a_dot = vsol[:,1] e_dashm = vsol[:,2] e_dashde = vsol[:,3] z = vsol[:,4] dl = vsol[:,5] * (1+z) # in units of dl*(H0/c) dlpc = dl * c_over_H0 # dl in parsecs (= vsol[dl] * c/H0) # Remove values after the index of first instance of z > 2. t_cut = np.asarray(t) a_cut = a a_dotcut = a_dot # # Age of the universe. # age = t_cut[np.argmin(t_cut)] # age = -round(age, 2) return z, dlpc, dl, gamma, e_dash0m, e_dash0de, t, a, a_dot, t_cut, a_cut, a_dotcut, e_dashm, e_dashde
{ "repo_name": "lefthandedroo/Cosmo-models", "path": "zprev versions/odesolve.py", "copies": "1", "size": "2767", "license": "mit", "hash": -2100113527404365000, "line_mean": 26.67, "line_max": 106, "alpha_frac": 0.5663173112, "autogenerated": false, "ratio": 2.8034447821681865, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.8674474948835655, "avg_score": 0.03905742890650617, "num_lines": 100 }
7#!/usr/bin/env python3 # -*- coding: utf-8 -*- """ Created on Thu Feb 15 13:38:48 2018 @author: BallBlueMeercat """ import numpy as np from scipy.integrate import odeint import firstderivs import lnprior # Standard cosmological parameters. H0 = 1 # Hubble parameter at t=now tH = 1.0/H0 # Hubble time c_over_H0 = 4167 * 10**6 # c/H0 in parsecs def odesolve(gamma,m,de,zpicks): """ Takes in: gamma = interaction constant; m = e_m(t)/ec(t0) at t=t0; de = e_de(t)/ec(t0) at t=t0. Returns: z = numpoints number of redshifts zmin<z<zmax; dlpc = luminosity distance in pc. """ # print('@@ odesolve has been called') # Time (in 1/H0) to integrate until. If this time isn't long # enough for 'a' to decrease to a_d then stoptime will be extended # by time until a_d is reached. # 0.665 matter only, 0.96 standard m+de time = 0.9 # Initial conditions at time = t0. a0 = 1.0 # scale factor a_dot0 = 1.0 # speed of expansion e_dash0m = m # e_m(t)/ec(t0) e_dash0de = de # e_de(t)/ec(t0) z0 = 0 dl0 = 0 # ODE solver parameters: abserr = 1.0e-8 relerr = 1.0e-6 numpoints = 125 stoptime = 0 # Integrating back in time as time now is t0. z = np.array([0]) # while z[np.argmax(z)] < 2.1: while len(z) < 100: stoptime -= time theta = gamma, m, de lp = lnprior.lnprior(theta) if not np.isfinite(lp): time += 500 if time > 0.9: print('time in odesolve is: %s, gamma = %s, m = %s, de = %s' %(time, gamma, m, de)) # Create time samples for the ODE solver. t = [stoptime * tH * float(i) / (numpoints - 1) for i in range(numpoints)] # Pack up the initial conditions and eq of state parameters. v0 = [a0, a_dot0, e_dash0m, e_dash0de, z0, dl0] # Call the ODE solver. maxstep=5000000 added later to try and avoid vsol = odeint(firstderivs.firstderivs, v0, t, args=(gamma,), atol=abserr, rtol=relerr, mxstep=5000000) # Remove unwanted results which are too close to big bang from the plot. 
# Separate results into their own arrays: a = vsol[:,0] a_dot = vsol[:,1] e_dashm = vsol[:,2] e_dashde = vsol[:,3] z = vsol[:,4] print('z = vsol[:,4] is: ', z) dl = vsol[:,5] * (1+z) # in units of dl*(H0/c) dlpc = dl * c_over_H0 # dl in parsecs (= vsol[dl] * c/H0) # Find where results start to get strange (smaller than a_d): blowups = np.where(z > 3) # Tuple with indecies of z > 2. blowups = np.asarray(blowups) # Converting to np array. if blowups.any(): # Check if instances of a < a_d exist. blowup = blowups[0,0] # Remove values after the index of first instance of z > 2. t_cut = np.asarray(t) t_cut = t_cut[:blowup] a_cut = a[:blowup] a_dotcut = a_dot[:blowup] e_dashm = e_dashm[:blowup] e_dashde = e_dashde[:blowup] z = z[:blowup] dl = dl[:blowup] dlpc = dlpc[:blowup] # Age of the universe. age = t_cut[np.argmin(t_cut)] age = -round(age, 2) return z, dlpc, dl, gamma, e_dash0m, e_dash0de, t, a, a_dot, t_cut, a_cut, a_dotcut, e_dashm, e_dashde
{ "repo_name": "lefthandedroo/Cosmo-models", "path": "zprev versions/odesolve copy.py", "copies": "1", "size": "3488", "license": "mit", "hash": -1939725985427761000, "line_mean": 28.811965812, "line_max": 106, "alpha_frac": 0.5401376147, "autogenerated": false, "ratio": 2.943459915611814, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.8811810629458624, "avg_score": 0.034357380170638004, "num_lines": 117 }
7#!/usr/bin/env python import inspect import json from dataclasses import dataclass from typing import List, Optional, Iterable, Union, Dict, Any from dbt.dataclass_schema import dbtClassMixin from dbt.context.base import BaseContext from dbt.context.target import TargetContext from dbt.context.providers import ModelContext, MacroContext CONTEXTS_MAP = { 'base': BaseContext, 'target': TargetContext, 'model': ModelContext, 'macro': MacroContext, } @dataclass class ContextValue(dbtClassMixin): name: str value: str # a type description doc: Optional[str] @dataclass class MethodArgument(dbtClassMixin): name: str value: str # a type description @dataclass class ContextMethod(dbtClassMixin): name: str args: List[MethodArgument] result: str # a type description doc: Optional[str] @dataclass class Unknown(dbtClassMixin): name: str value: str doc: Optional[str] ContextMember = Union[ContextValue, ContextMethod, Unknown] def _get_args(func: inspect.Signature) -> Iterable[MethodArgument]: found_first = False for argname, arg in func.parameters.items(): if found_first is False and argname in {'self', 'cls'}: continue if found_first is False: found_first = True yield MethodArgument( name=argname, value=inspect.formatannotation(arg.annotation), ) def collect(cls): values = [] for name, v in cls._context_members_.items(): attrname = cls._context_attrs_[name] attrdef = getattr(cls, attrname) doc = getattr(attrdef, '__doc__') if inspect.isfunction(attrdef): sig = inspect.signature(attrdef) result = inspect.formatannotation(sig.return_annotation) sig_good_part = ContextMethod( name=name, args=list(_get_args(sig)), result=result, doc=doc, ) elif isinstance(attrdef, property): sig = inspect.signature(attrdef.fget) sig_txt = inspect.formatannotation(sig.return_annotation) sig_good_part = ContextValue( name=name, value=sig_txt, doc=doc ) else: sig_good_part = Unknown( name=name, value=repr(attrdef), doc=doc ) values.append(sig_good_part) return values @dataclass class 
ContextCatalog(dbtClassMixin): base: List[ContextMember] target: List[ContextMember] model: List[ContextMember] macro: List[ContextMember] schema: Dict[str, Any] def main(): catalog = ContextCatalog( base=collect(BaseContext), target=collect(TargetContext), model=collect(ModelContext), macro=collect(MacroContext), schema=ContextCatalog.json_schema(), ) print(json.dumps(catalog.to_dict())) if __name__ == '__main__': main()
{ "repo_name": "analyst-collective/dbt", "path": "scripts/collect-dbt-contexts.py", "copies": "1", "size": "2950", "license": "apache-2.0", "hash": 988235076767605200, "line_mean": 23.5833333333, "line_max": 69, "alpha_frac": 0.6322033898, "autogenerated": false, "ratio": 3.938584779706275, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.5070788169506275, "avg_score": null, "num_lines": null }
7#!/usr/bin/env python import inspect import json from dataclasses import dataclass from typing import List, Optional, Iterable, Union, Dict, Any from hologram import JsonSchemaMixin from dbt.context.base import BaseContext from dbt.context.target import TargetContext from dbt.context.providers import ModelContext, MacroContext CONTEXTS_MAP = { 'base': BaseContext, 'target': TargetContext, 'model': ModelContext, 'macro': MacroContext, } @dataclass class ContextValue(JsonSchemaMixin): name: str value: str # a type description doc: Optional[str] @dataclass class MethodArgument(JsonSchemaMixin): name: str value: str # a type description @dataclass class ContextMethod(JsonSchemaMixin): name: str args: List[MethodArgument] result: str # a type description doc: Optional[str] @dataclass class Unknown(JsonSchemaMixin): name: str value: str doc: Optional[str] ContextMember = Union[ContextValue, ContextMethod, Unknown] def _get_args(func: inspect.Signature) -> Iterable[MethodArgument]: found_first = False for argname, arg in func.parameters.items(): if found_first is False and argname in {'self', 'cls'}: continue if found_first is False: found_first = True yield MethodArgument( name=argname, value=inspect.formatannotation(arg.annotation), ) def collect(cls): values = [] for name, v in cls._context_members_.items(): attrname = cls._context_attrs_[name] attrdef = getattr(cls, attrname) doc = getattr(attrdef, '__doc__') if inspect.isfunction(attrdef): sig = inspect.signature(attrdef) result = inspect.formatannotation(sig.return_annotation) sig_good_part = ContextMethod( name=name, args=list(_get_args(sig)), result=result, doc=doc, ) elif isinstance(attrdef, property): sig = inspect.signature(attrdef.fget) sig_txt = inspect.formatannotation(sig.return_annotation) sig_good_part = ContextValue( name=name, value=sig_txt, doc=doc ) else: sig_good_part = Unknown( name=name, value=repr(attrdef), doc=doc ) values.append(sig_good_part) return values @dataclass class 
ContextCatalog(JsonSchemaMixin): base: List[ContextMember] target: List[ContextMember] model: List[ContextMember] macro: List[ContextMember] schema: Dict[str, Any] def main(): catalog = ContextCatalog( base=collect(BaseContext), target=collect(TargetContext), model=collect(ModelContext), macro=collect(MacroContext), schema=ContextCatalog.json_schema(), ) print(json.dumps(catalog.to_dict())) if __name__ == '__main__': main()
{ "repo_name": "fishtown-analytics/dbt", "path": "scripts/collect-dbt-contexts.py", "copies": "1", "size": "2950", "license": "apache-2.0", "hash": 2804997544126462500, "line_mean": 23.5833333333, "line_max": 69, "alpha_frac": 0.6328813559, "autogenerated": false, "ratio": 3.9918809201623815, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 1, "avg_score": 0.011437752120647637, "num_lines": 120 }
7# vim: tabstop=4 shiftwidth=4 softtabstop=4 # Copyright 2014 IBM Corporation # Copyright 2015-2019 Lenovo # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # Ultimately, the design is to handle all the complicated stuff at set # rather than get tiime. When something is set on a group, then all # members of that group are examined and 'inheritedfrom' attributes # are pushed. as expression definned values are iinserted, their # dependdentt attributes are added to a private dict to aid in auto # calculation. When a name is changed, all attributes are re-evaluated # on get, should be simple read value *except* for encrypted values, # which are only decrypted when explicitly requested # encrypted fields do not support expressions, either as a source or # destination # Note on the cryptography. Default behavior is mostly just to pave the # way to meaningful security. Root all potentially sensitive data in # one key. That key is in plain sight, so not meaningfully protected # However, the key can be protected in the following ways: # - Passphrase protected (requiring human interaction every restart) # - TPM sealing (which would forgo the interactive assuming risk of # physical attack on TPM is not a concern) # This module provides cryptographic convenience functions, largely to be # used by config.py to protect/unlock configuration as appropriopriate. # The default behavior provides no meaningful protection, all encrypted # values are linked to a master key that is stored in the clear. 
# meanigful protection comes when the user elects to protect the key # by passphrase and optionally TPM import Cryptodome.Protocol.KDF as KDF from Cryptodome.Cipher import AES from Cryptodome.Hash import HMAC from Cryptodome.Hash import SHA256 import anydbm as dbm import ast import base64 import confluent.config.attributes as allattributes import confluent.config.conf as conf import confluent.log import confluent.noderange as noderange import confluent.util import confluent.netutil as netutil import confluent.exceptions as exc import copy import cPickle import errno import eventlet import eventlet.event as event import eventlet.green.select as select import eventlet.green.threading as gthread import fnmatch import json import operator import os import random import re import string import struct import sys import threading import traceback _masterkey = None _masterintegritykey = None _dirtylock = threading.RLock() _leaderlock = gthread.RLock() _synclock = threading.RLock() _rpclock = gthread.RLock() _initlock = gthread.RLock() _followerlocks = {} _config_areas = ('nodegroups', 'nodes', 'usergroups', 'users') tracelog = None statelessmode = False _cfgstore = None _pendingchangesets = {} _txcount = 0 _hasquorum = True _attraliases = { 'bmc': 'hardwaremanagement.manager', 'bmcuser': 'secret.hardwaremanagementuser', 'switchuser': 'secret.hardwaremanagementuser', 'bmcpass': 'secret.hardwaremanagementpassword', 'switchpass': 'secret.hardwaremanagementpassword', } def _mkpath(pathname): try: os.makedirs(pathname) except OSError as e: if e.errno == errno.EEXIST and os.path.isdir(pathname): pass else: raise def _derive_keys(password, salt): #implement our specific combination of pbkdf2 transforms to get at #key. 
We bump the iterations up because we can afford to #TODO: WORKERPOOL PBKDF2 is expensive tmpkey = KDF.PBKDF2(password, salt, 32, 50000, lambda p, s: HMAC.new(p, s, SHA256).digest()) finalkey = KDF.PBKDF2(tmpkey, salt, 32, 50000, lambda p, s: HMAC.new(p, s, SHA256).digest()) return finalkey[:16], finalkey[16:] def _get_protected_key(keydict, password, paramname): if password and 'unencryptedvalue' in keydict: set_global(paramname, _format_key( keydict['unencryptedvalue'], password=password)) if 'unencryptedvalue' in keydict: return keydict['unencryptedvalue'] # TODO(jbjohnso): check for TPM sealing if 'passphraseprotected' in keydict: if password is None: raise exc.LockedCredentials("Passphrase protected secret requires password") pp = keydict['passphraseprotected'] salt = pp[0] privkey, integkey = _derive_keys(password, salt) return decrypt_value(pp[1:], key=privkey, integritykey=integkey) else: raise exc.LockedCredentials("No available decryption key") def _parse_key(keydata, password=None): if keydata.startswith('*unencrypted:'): return base64.b64decode(keydata[13:]) elif password: salt, iv, crypt, hmac = [base64.b64decode(x) for x in keydata.split('!')] privkey, integkey = _derive_keys(password, salt) return decrypt_value([iv, crypt, hmac], privkey, integkey) raise(exc.LockedCredentials( "Passphrase protected secret requires password")) def _format_key(key, password=None): if password is not None: salt = os.urandom(32) privkey, integkey = _derive_keys(password, salt) cval = crypt_value(key, key=privkey, integritykey=integkey) return {"passphraseprotected": (salt,) + cval} else: return {"unencryptedvalue": key} def _do_notifier(cfg, watcher, callback): try: callback(nodeattribs=watcher['nodeattrs'], configmanager=cfg) except Exception: logException() def _rpc_master_set_user(tenant, name, attributemap): ConfigManager(tenant).set_user(name, attributemap) def _rpc_set_user(tenant, name, attributemap): ConfigManager(tenant)._true_set_user(name, attributemap) def 
_rpc_master_set_node_attributes(tenant, attribmap, autocreate): ConfigManager(tenant).set_node_attributes(attribmap, autocreate) def _rpc_master_clear_node_attributes(tenant, nodes, attributes): ConfigManager(tenant).clear_node_attributes(nodes, attributes) def _rpc_clear_node_attributes(tenant, nodes, attributes): ConfigManager(tenant)._true_clear_node_attributes(nodes, attributes) def _rpc_master_set_group_attributes(tenant, attribmap, autocreate): ConfigManager(tenant).set_group_attributes(attribmap, autocreate) def _rpc_master_clear_group_attributes(tenant, groups, attributes): ConfigManager(tenant).clear_group_attributes(groups, attributes) def _rpc_clear_group_attributes(tenant, groups, attributes): ConfigManager(tenant)._true_clear_group_attributes(groups, attributes) def _rpc_master_del_user(tenant, name): ConfigManager(tenant).del_user(name) def _rpc_del_user(tenant, name): ConfigManager(tenant)._true_del_user(name) def _rpc_master_create_user(tenant, *args): ConfigManager(tenant).create_user(*args) def _rpc_create_user(tenant, *args): ConfigManager(tenant)._true_create_user(*args) def _rpc_master_del_groups(tenant, groups): ConfigManager(tenant).del_groups(groups) def _rpc_del_groups(tenant, groups): ConfigManager(tenant)._true_del_groups(groups) def _rpc_master_del_nodes(tenant, nodes): ConfigManager(tenant).del_nodes(nodes) def _rpc_del_nodes(tenant, nodes): ConfigManager(tenant)._true_del_nodes(nodes) def _rpc_set_node_attributes(tenant, attribmap, autocreate): ConfigManager(tenant)._true_set_node_attributes(attribmap, autocreate) def _rpc_set_group_attributes(tenant, attribmap, autocreate): ConfigManager(tenant)._true_set_group_attributes(attribmap, autocreate) def check_quorum(): if isinstance(cfgleader, bool): raise exc.DegradedCollective() if cfgstreams and len(cfgstreams) < (len(_cfgstore['collective']) // 2): # the leader counts in addition to registered streams raise exc.DegradedCollective() if cfgleader and not _hasquorum: raise 
exc.DegradedCollective() def exec_on_leader(function, *args): if isinstance(cfgleader, bool): raise exc.DegradedCollective() xid = os.urandom(8) while xid in _pendingchangesets: xid = os.urandom(8) _pendingchangesets[xid] = event.Event() rpcpayload = cPickle.dumps({'function': function, 'args': args, 'xid': xid}) rpclen = len(rpcpayload) cfgleader.sendall(struct.pack('!Q', rpclen)) cfgleader.sendall(rpcpayload) _pendingchangesets[xid].wait() del _pendingchangesets[xid] return def exec_on_followers(fnname, *args): global _txcount if len(cfgstreams) < (len(_cfgstore['collective']) // 2): # the leader counts in addition to registered streams raise exc.DegradedCollective() pushes = eventlet.GreenPool() _txcount += 1 payload = cPickle.dumps({'function': fnname, 'args': args, 'txcount': _txcount}) for res in pushes.starmap( _push_rpc, [(cfgstreams[s], payload) for s in cfgstreams]): pass def logException(): global tracelog if tracelog is None: tracelog = confluent.log.Logger('trace') tracelog.log(traceback.format_exc(), ltype=confluent.log.DataTypes.event, event=confluent.log.Events.stacktrace) def _do_add_watcher(watcher, added, configmanager): try: watcher(added=added, deleting=[], configmanager=configmanager) except Exception: logException() def init_masterkey(password=None, autogen=True): global _masterkey global _masterintegritykey cfgn = get_global('master_privacy_key') if cfgn: _masterkey = _get_protected_key(cfgn, password, 'master_privacy_key') elif autogen: _masterkey = os.urandom(32) set_global('master_privacy_key', _format_key( _masterkey, password=password)) cfgn = get_global('master_integrity_key') if cfgn: _masterintegritykey = _get_protected_key(cfgn, password, 'master_integrity_key') elif autogen: _masterintegritykey = os.urandom(64) set_global('master_integrity_key', _format_key( _masterintegritykey, password=password)) def _push_rpc(stream, payload): with _rpclock: stream.sendall(struct.pack('!Q', len(payload))) if len(payload): stream.sendall(payload) 
def decrypt_value(cryptvalue, key=None, integritykey=None): iv, cipherdata, hmac = cryptvalue if key is None and integritykey is None: if _masterkey is None or _masterintegritykey is None: init_masterkey(autogen=False) key = _masterkey integritykey = _masterintegritykey check_hmac = HMAC.new(integritykey, cipherdata, SHA256).digest() if hmac != check_hmac: raise Exception("bad HMAC value on crypted value") decrypter = AES.new(key, AES.MODE_CBC, iv) value = decrypter.decrypt(cipherdata) padsize = ord(value[-1]) pad = value[-padsize:] # Note that I cannot grasp what could be done with a subliminal # channel in padding in this case, but check the padding anyway for padbyte in pad: if ord(padbyte) != padsize: raise Exception("bad padding in encrypted value") return value[0:-padsize] def fixup_attribute(attrname, attrval): # Normalize some data, for example strings and numbers to bool attrname = _get_valid_attrname(attrname) if attrname not in allattributes.node: # no fixup possible return attrval if 'type' in allattributes.node[attrname] and not isinstance(attrval, allattributes.node[attrname]['type']): if (allattributes.node[attrname]['type'] == bool and (isinstance(attrval, str) or isinstance(attrval, unicode))): return attrval.lower() in ('true', '1', 'y', 'yes', 'enable', 'enabled') return attrval def attribute_is_invalid(attrname, attrval): if attrname.startswith('custom.'): # No type checking or name checking is provided for custom, # it's not possible return False attrname = _get_valid_attrname(attrname) if attrname not in allattributes.node: # Otherwise, it must be in the allattributes key list return True if 'type' in allattributes.node[attrname]: if not isinstance(attrval, allattributes.node[attrname]['type']): # it is valid if it is {'value': actualvalue} if (isinstance(attrval, dict) and 'value' in attrval and isinstance(attrval['value'], allattributes.node[attrname]['type'])): return False # provide type checking for attributes with a specific type return 
True return False def _get_valid_attrname(attrname): if attrname.startswith('net.'): # For net.* attribtues, split on the dots and put back together # longer term we might want a generic approach, but # right now it's just net. attributes netattrparts = attrname.split('.') attrname = netattrparts[0] + '.' + netattrparts[-1] return attrname def crypt_value(value, key=None, integritykey=None): # encrypt given value # PKCS7 is the padding scheme to employ, if no padded needed, pad with 16 # check HMAC prior to attempting decrypt if key is None or integritykey is None: if _masterkey is None or _masterintegritykey is None: init_masterkey() key = _masterkey integritykey = _masterintegritykey iv = os.urandom(16) crypter = AES.new(key, AES.MODE_CBC, iv) neededpad = 16 - (len(value) % 16) pad = chr(neededpad) * neededpad value += pad try: cryptval = crypter.encrypt(value) except TypeError: cryptval = crypter.encrypt(value.encode('utf-8')) hmac = HMAC.new(integritykey, cryptval, SHA256).digest() return iv, cryptval, hmac def _load_dict_from_dbm(dpath, tdb): try: dbe = dbm.open(tdb, 'r') currdict = _cfgstore for elem in dpath: if elem not in currdict: currdict[elem] = {} currdict = currdict[elem] try: for tk in dbe: currdict[tk] = cPickle.loads(dbe[tk]) except AttributeError: tk = dbe.firstkey() while tk != None: currdict[tk] = cPickle.loads(dbe[tk]) tk = dbe.nextkey(tk) except dbm.error: return def is_tenant(tenant): try: return tenant in _cfgstore['tenant'] except KeyError: return False def get_global(globalname): """Get a global variable :param globalname: The global parameter name to read """ if _cfgstore is None: init() try: return _cfgstore['globals'][globalname] except KeyError: return None def set_global(globalname, value, sync=True): """Set a global variable. Globals should be rarely ever used. Almost everything should be under a tenant scope. Some things like master key and socket numbers/paths can be reasonably considered global in nature. 
:param globalname: The global parameter name to store :param value: The value to set the global parameter to. """ if _cfgstore is None: init(not sync) try: globalname = globalname.encode('utf-8') except AttributeError: # We have to remove the unicode-ness of the string, # but if it is already bytes in python 3, then we will # get an attributeerror, so pass pass with _dirtylock: if 'dirtyglobals' not in _cfgstore: _cfgstore['dirtyglobals'] = set() _cfgstore['dirtyglobals'].add(globalname) if 'globals' not in _cfgstore: _cfgstore['globals'] = {globalname: value} else: _cfgstore['globals'][globalname] = value if sync: ConfigManager._bg_sync_to_file() cfgstreams = {} def relay_slaved_requests(name, listener): global cfgleader global _hasquorum pushes = eventlet.GreenPool() if name not in _followerlocks: _followerlocks[name] = gthread.RLock() with _followerlocks[name]: try: stop_following() if name in cfgstreams: try: cfgstreams[name].close() except Exception: pass del cfgstreams[name] cfgstreams[name] = listener lh = StreamHandler(listener) _hasquorum = len(cfgstreams) >= ( len(_cfgstore['collective']) // 2) payload = cPickle.dumps({'quorum': _hasquorum}) for _ in pushes.starmap( _push_rpc, [(cfgstreams[s], payload) for s in cfgstreams]): pass if _hasquorum and _pending_collective_updates: apply_pending_collective_updates() msg = lh.get_next_msg() while msg: if name not in cfgstreams: raise Exception("Unexpected loss of node in followers: " + name) sz = struct.unpack('!Q', msg)[0] if sz != 0: rpc = '' while len(rpc) < sz: nrpc = listener.recv(sz - len(rpc)) if not nrpc: raise Exception('Truncated client error') rpc += nrpc rpc = cPickle.loads(rpc) exc = None try: globals()[rpc['function']](*rpc['args']) except Exception as e: exc = e if 'xid' in rpc: _push_rpc(listener, cPickle.dumps({'xid': rpc['xid'], 'exc': exc})) try: msg = lh.get_next_msg() except Exception: msg = None finally: try: listener.close() except Exception: pass try: del cfgstreams[name] except KeyError: 
pass # May have already been closed/deleted... if cfgstreams: _hasquorum = len(cfgstreams) >= ( len(_cfgstore['collective']) // 2) payload = cPickle.dumps({'quorum': _hasquorum}) for _ in pushes.starmap( _push_rpc, [(cfgstreams[s], payload) for s in cfgstreams]): pass if not cfgstreams and not cfgleader: # last one out, set cfgleader to boolean to mark dead collective stop_following(True) return False return True class StreamHandler(object): def __init__(self, sock): self.sock = sock self.keepalive = confluent.util.monotonic_time() + 20 self.expiry = self.keepalive + 40 def get_next_msg(self): r = (False,) try: while not r[0]: r = select.select( (self.sock,), (), (), self.keepalive - confluent.util.monotonic_time()) if confluent.util.monotonic_time() > self.expiry: return None if confluent.util.monotonic_time() > self.keepalive: _push_rpc(self.sock, b'') # nulls are a keepalive self.keepalive = confluent.util.monotonic_time() + 20 self.expiry = confluent.util.monotonic_time() + 60 msg = self.sock.recv(8) except Exception: msg = None return msg def close(self): self.sock = None def stop_following(replacement=None): with _leaderlock: global cfgleader if cfgleader and not isinstance(cfgleader, bool): try: cfgleader.close() except Exception: pass cfgleader = replacement def stop_leading(): for stream in list(cfgstreams): try: cfgstreams[stream].close() except Exception: pass try: del cfgstreams[stream] except KeyError: pass # may have already been deleted.. 
_oldcfgstore = None _oldtxcount = 0 def rollback_clear(): global _cfgstore global _txcount global _oldcfgstore global _oldtxcount _txcount = _oldtxcount _cfgstore = _oldcfgstore _oldtxcount = 0 _oldcfgstore = None ConfigManager.wait_for_sync(True) def clear_configuration(): global _cfgstore global _txcount global _oldcfgstore global _oldtxcount stop_leading() stop_following() _oldcfgstore = _cfgstore _oldtxcount = _txcount _cfgstore = {} _txcount = 0 def commit_clear(): global _oldtxcount global _oldcfgstore _oldcfgstore = None _oldtxcount = 0 with _synclock: todelete = ('transactioncount', 'globals', 'collective') + _config_areas for cfg in todelete: try: os.remove(os.path.join(ConfigManager._cfgdir, cfg)) except OSError as oe: pass ConfigManager.wait_for_sync(True) ConfigManager._bg_sync_to_file() cfgleader = None def follow_channel(channel): global _txcount global _hasquorum try: stop_leading() stop_following(channel) lh = StreamHandler(channel) msg = lh.get_next_msg() while msg: sz = struct.unpack('!Q', msg)[0] if sz != 0: rpc = '' while len(rpc) < sz: nrpc = channel.recv(sz - len(rpc)) if not nrpc: raise Exception('Truncated message error') rpc += nrpc rpc = cPickle.loads(rpc) if 'txcount' in rpc: _txcount = rpc['txcount'] if 'function' in rpc: try: globals()[rpc['function']](*rpc['args']) except Exception as e: print(repr(e)) if 'xid' in rpc and rpc['xid']: if rpc.get('exc', None): _pendingchangesets[rpc['xid']].send_exception(rpc['exc']) else: _pendingchangesets[rpc['xid']].send() if 'quorum' in rpc: _hasquorum = rpc['quorum'] _push_rpc(channel, b'') # use null as ACK msg = lh.get_next_msg() finally: # mark the connection as broken if cfgstreams: stop_following(None) else: stop_following(True) def add_collective_member(name, address, fingerprint): if cfgleader: return exec_on_leader('add_collective_member', name, address, fingerprint) if cfgstreams: exec_on_followers('_true_add_collective_member', name, address, fingerprint) _true_add_collective_member(name, 
address, fingerprint) _pending_collective_updates = {} def update_collective_address(name ,address): fprint = _cfgstore['collective'][name]['fingerprint'] oldaddress = _cfgstore['collective'][name]['address'] if oldaddress == address: return try: check_quorum() add_collective_member(name, address, fprint) except exc.DegradedCollective: _pending_collective_updates[name] = address def apply_pending_collective_updates(): for name in list(_pending_collective_updates): fprint = _cfgstore['collective'][name]['fingerprint'] address = _pending_collective_updates[name] add_collective_member(name, address, fprint) del _pending_collective_updates[name] def _true_add_collective_member(name, address, fingerprint, sync=True): try: name = name.encode('utf-8') except AttributeError: pass if _cfgstore is None: init(not sync) # use not sync to avoid read from disk if 'collective' not in _cfgstore: _cfgstore['collective'] = {} _cfgstore['collective'][name] = {'name': name, 'address': address, 'fingerprint': fingerprint} with _dirtylock: if 'collectivedirty' not in _cfgstore: _cfgstore['collectivedirty'] = set([]) _cfgstore['collectivedirty'].add(name) if sync: ConfigManager._bg_sync_to_file() def list_collective(): if _cfgstore is None: init() return iter(_cfgstore.get('collective', ())) def get_collective_member(name): if _cfgstore is None: init() return _cfgstore.get('collective', {}).get(name, None) def get_collective_member_by_address(address): if _cfgstore is None: init() for name in _cfgstore.get('collective', {}): currdrone = _cfgstore['collective'][name] if netutil.addresses_match(address, currdrone['address']): return currdrone def _mark_dirtykey(category, key, tenant=None): if type(key) in (str, unicode): key = key.encode('utf-8') with _dirtylock: if 'dirtykeys' not in _cfgstore: _cfgstore['dirtykeys'] = {} if tenant not in _cfgstore['dirtykeys']: _cfgstore['dirtykeys'][tenant] = {} if category not in _cfgstore['dirtykeys'][tenant]: _cfgstore['dirtykeys'][tenant][category] 
= set() _cfgstore['dirtykeys'][tenant][category].add(key) def _generate_new_id(): # generate a random id outside the usual ranges used for normal users in # /etc/passwd. Leave an equivalent amount of space near the end disused, # just in case uid = str(confluent.util.securerandomnumber(65537, 4294901759)) if 'idmap' not in _cfgstore['main']: return uid while uid in _cfgstore['main']['idmap']: uid = str(confluent.util.securerandomnumber(65537, 4294901759)) return uid class _ExpressionFormat(string.Formatter): # This class is used to extract the literal value from an expression # in the db # This is made easier by subclassing one of the 'fprintf' mechanisms # baked into python posmatch = re.compile('^n([0-9]*)$') nummatch = re.compile('[0-9]+') _supported_ops = { ast.Mult: operator.mul, ast.Div: operator.floordiv, ast.Mod: operator.mod, ast.Add: operator.add, ast.Sub: operator.sub, ast.LShift: operator.lshift, ast.RShift: operator.rshift, ast.BitAnd: operator.and_, ast.BitXor: operator.xor, ast.BitOr: operator.or_, } def __init__(self, nodeobj, nodename): self._nodeobj = nodeobj self._nodename = nodename self._numbers = None def get_field(self, field_name, args, kwargs): parsed = ast.parse(field_name) return self._handle_ast_node(parsed.body[0].value), field_name def _handle_ast_node(self, node): if isinstance(node, ast.Num): return node.n elif isinstance(node, ast.Attribute): #ok, we have something with a dot left = node key = '' while isinstance(left, ast.Attribute): # Loop through, to handle multi dot expressions # such as 'net.pxe.hwaddr' key = '.' 
+ left.attr + key left = left.value key = left.id + key if (not key.startswith('custom.') and _get_valid_attrname(key) not in allattributes.node): raise ValueError( '{0} is not a valid attribute name'.format(key)) val = self._expand_attribute(key) return val['value'] if val and 'value' in val else "" elif isinstance(node, ast.Name): var = node.id if var in ('node', 'nodename'): return self._nodename if var in _attraliases: val = self._expand_attribute(_attraliases[var]) return val['value'] if 'value' in val else "" mg = re.match(self.posmatch, var) if mg: idx = int(mg.group(1)) if self._numbers is None: self._numbers = re.findall(self.nummatch, self._nodename) return int(self._numbers[idx - 1]) else: if var in self._nodeobj: val = self._expand_attribute(var) return val['value'] if val and 'value' in val else "" elif (not var.startswith('custom.') and _get_valid_attrname(var) not in allattributes.node): raise ValueError( '{0} is not a valid attribute name'.format(var)) elif isinstance(node, ast.BinOp): optype = type(node.op) if optype not in self._supported_ops: raise Exception("Unsupported operation") op = self._supported_ops[optype] return op(int(self._handle_ast_node(node.left)), int(self._handle_ast_node(node.right))) def _expand_attribute(self, key): if '_expressionkeys' not in self._nodeobj: self._nodeobj['_expressionkeys'] = set([key]) else: self._nodeobj['_expressionkeys'].add(key) val = _decode_attribute(key, self._nodeobj, formatter=self) return val def _decode_attribute(attribute, nodeobj, formatter=None, decrypt=False): if attribute not in nodeobj: return None # if we have an expression and a formatter, that overrides 'value' # which may be out of date # get methods will skip the formatter allowing value to come on through # set methods induce recalculation as appropriate to get a cached value if 'expression' in nodeobj[attribute] and formatter is not None: retdict = copy.deepcopy(nodeobj[attribute]) if 'value' in retdict: del retdict['value'] try: 
retdict['value'] = formatter.format(retdict['expression']) except Exception as e: retdict['broken'] = str(e) return retdict elif 'value' in nodeobj[attribute]: return nodeobj[attribute] elif 'cryptvalue' in nodeobj[attribute] and decrypt: retdict = copy.deepcopy(nodeobj[attribute]) retdict['value'] = decrypt_value(nodeobj[attribute]['cryptvalue']) return retdict return nodeobj[attribute] # my thinking at this point is that noderange and configdata objects # will be constructed and passed as part of a context object to plugins # reasoning being that the main program will handle establishing the # tenant context and then modules need not consider the current tenant # most of the time as things are automatic def _addchange(changeset, node, attrname): if node not in changeset: changeset[node] = {attrname: 1} else: changeset[node][attrname] = 1 def hook_new_configmanagers(callback): """Register callback for new tenants From the point when this function is called until the end, callback may be invoked to indicate a new tenant and callback is notified to perform whatever tasks appropriate for a new tenant :param callback: Function to call for each possible config manager :returns: identifier that can be used to cancel this registration """ #TODO(jbjohnso): actually live up to the promise of ongoing callbacks callback(ConfigManager(None)) try: for tenant in _cfgstore['tenant']: callback(ConfigManager(tenant)) except KeyError: pass class ConfigManager(object): if os.name == 'nt': _cfgdir = os.path.join( os.getenv('SystemDrive'), '\\ProgramData', 'confluent', 'cfg') else: _cfgdir = "/etc/confluent/cfg" _cfgwriter = None _writepending = False _syncrunning = False _syncstate = threading.RLock() _attribwatchers = {} _nodecollwatchers = {} _notifierids = {} @property def _cfgstore(self): if self.tenant is None: return _cfgstore['main'] return _cfgstore['tenant'][self.tenant] def __init__(self, tenant, decrypt=False, username=None): global _cfgstore with _initlock: if _cfgstore 
is None: init() self.decrypt = decrypt self.current_user = username if tenant is None: self.tenant = None if 'main' not in _cfgstore: _cfgstore['main'] = {} self._bg_sync_to_file() if 'nodegroups' not in self._cfgstore: # This can happen during a clear... it seams... and if so it messes up... self._cfgstore['nodegroups'] = {'everything': {'nodes': set()}} _mark_dirtykey('nodegroups', 'everything', self.tenant) self._bg_sync_to_file() if 'nodes' not in self._cfgstore: self._cfgstore['nodes'] = {} self._bg_sync_to_file() return elif 'tenant' not in _cfgstore: _cfgstore['tenant'] = {tenant: {}} self._bg_sync_to_file() elif tenant not in _cfgstore['tenant']: _cfgstore['tenant'][tenant] = {} self._bg_sync_to_file() self.tenant = tenant if 'nodegroups' not in self._cfgstore: self._cfgstore['nodegroups'] = {'everything': {}} _mark_dirtykey('nodegroups', 'everything', self.tenant) if 'nodes' not in self._cfgstore: self._cfgstore['nodes'] = {} self._bg_sync_to_file() self.wait_for_sync() def get_collective_member(self, name): return get_collective_member(name) @classmethod def check_quorum(cls): return check_quorum() def filter_node_attributes(self, expression, nodes=None): """Filtered nodelist according to expression expression may be: attribute.name=value attribute.name==value attribute.name=~value attribute.name!=value attribute.name!~value == and != do strict equality. The ~ operators do a regular expression. ! 
negates the sense of the match :param expression: The expression containing the criteria to match :param nodes: Optional iterable set of nodes to limit the check """ exmatch = None yieldmatches = True if nodes is None: nodes = self._cfgstore['nodes'] if '==' in expression: attribute, match = expression.split('==') elif '!=' in expression: attribute, match = expression.split('!=') yieldmatches = False elif '=~' in expression: attribute, match = expression.split('=~') exmatch = re.compile(match) elif '!~' in expression: attribute, match = expression.split('!~') exmatch = re.compile(match) yieldmatches = False elif '=' in expression: attribute, match = expression.split('=') else: raise Exception('Invalid Expression') for node in nodes: try: currval = self._cfgstore['nodes'][node][attribute]['value'] except KeyError: # Let's treat 'not set' as being an empty string for this path currval = '' if exmatch: if yieldmatches: if exmatch.search(currval): yield node else: if not exmatch.search(currval): yield node else: if yieldmatches: if match == currval: yield node else: if match != currval: yield node def filter_nodenames(self, expression, nodes=None): """Filter nodenames by regular expression :param expression: Regular expression for matching nodenames :param nodes: Optional iterable of candidates """ if nodes is None: nodes = self._cfgstore['nodes'] expression = re.compile(expression) for node in nodes: if expression.search(node): yield node def watch_attributes(self, nodes, attributes, callback): """ Watch a list of attributes for changes on a list of nodes. 
The attributes may be literal, or a filename style wildcard like 'net*.switch' :param nodes: An iterable of node names to be watching :param attributes: An iterable of attribute names to be notified about :param callback: A callback to process a notification Returns an identifier that can be used to unsubscribe from these notifications using remove_watcher """ notifierid = random.randint(0, sys.maxint) while notifierid in self._notifierids: notifierid = random.randint(0, sys.maxint) self._notifierids[notifierid] = {'attriblist': []} if self.tenant not in self._attribwatchers: self._attribwatchers[self.tenant] = {} attribwatchers = self._attribwatchers[self.tenant] for node in nodes: if node not in attribwatchers: attribwatchers[node] = {} for attribute in attributes: self._notifierids[notifierid]['attriblist'].append( (node, attribute)) if attribute not in attribwatchers[node]: attribwatchers[node][attribute] = { notifierid: callback } else: attribwatchers[node][attribute][notifierid] = callback if '*' in attribute: currglobs = attribwatchers[node].get('_attrglobs', set([])) currglobs.add(attribute) attribwatchers[node]['_attrglobs'] = currglobs return notifierid def watch_nodecollection(self, callback): """ Watch the nodecollection for addition or removal of nodes. A watcher is notified prior after node has been added and before node is actually removed. :param callback: Function to call when a node is added or removed Returns an identifier that can be used to unsubscribe from these notifications using remove_watcher """ # first provide an identifier for the calling code to # use in case of cancellation. # I anticipate no more than a handful of watchers of this sort, so # this loop should not have to iterate too many times notifierid = random.randint(0, sys.maxint) while notifierid in self._notifierids: notifierid = random.randint(0, sys.maxint) # going to track that this is a nodecollection type watcher, # but there is no additional data associated. 
self._notifierids[notifierid] = set(['nodecollection']) if self.tenant not in self._nodecollwatchers: self._nodecollwatchers[self.tenant] = {} self._nodecollwatchers[self.tenant][notifierid] = callback return notifierid def remove_watcher(self, watcher): # identifier of int would be a collection watcher if watcher not in self._notifierids: raise Exception("Invalid") # return if 'attriblist' in self._notifierids[watcher]: attribwatchers = self._attribwatchers[self.tenant] for nodeattrib in self._notifierids[watcher]['attriblist']: node, attrib = nodeattrib del attribwatchers[node][attrib][watcher] elif 'nodecollection' in self._notifierids[watcher]: del self._nodecollwatchers[self.tenant][watcher] else: raise Exception("Completely not a valid place to be") del self._notifierids[watcher] def list_users(self): try: return list(self._cfgstore['users']) except KeyError: return [] def get_user(self, name): """Get user information from DB :param name: Name of the user Returns a dictionary describing parameters of a user. These parameters may include numeric id (id), certificate thumbprint (certthumb), password hash (passhash, which currently is going to be PBKDF2 derived) full name (displayname), ... """ try: return copy.deepcopy(self._cfgstore['users'][name]) except KeyError: return None def get_usergroup(self, groupname): """Get user group information from DB :param groupname: Name of the group Returns a dictionary describing parameters of a user group. This may include the role for users in the group to receive if no more specific information is found. 
""" try: return copy.deepcopy(self._cfgstore['usergroups'][groupname]) except KeyError: return None def set_usergroup(self, groupname, attributemap): """Set usergroup attribute(s) :param groupname: the name of teh group to modify :param attributemap: The mapping of keys to values to set """ for attribute in attributemap: self._cfgstore['usergroups'][attribute] = attributemap[attribute] _mark_dirtykey('usergroups', groupname, self.tenant) def create_usergroup(self, groupname, role="Administrator"): if 'usergroups' not in self._cfgstore: self._cfgstore['usergroups'] = {} groupname = groupname.encode('utf-8') if groupname in self._cfgstore['usergroups']: raise Exception("Duplicate groupname requested") self._cfgstore['usergroups'][groupname] = {'role': role} _mark_dirtykey('usergroups', groupname, self.tenant) def set_user(self, name, attributemap): """Set user attribute(s) :param name: The login name of the user :param attributemap: A dict of key values to set """ if cfgleader: return exec_on_leader('_rpc_master_set_user', self.tenant, name, attributemap) if cfgstreams: exec_on_followers('_rpc_set_user', self.tenant, name) self._true_set_user(name, attributemap) def _true_set_user(self, name, attributemap): user = self._cfgstore['users'][name] for attribute in attributemap: if attribute == 'password': salt = os.urandom(8) #TODO: WORKERPOOL, offload password set to a worker crypted = KDF.PBKDF2( attributemap[attribute], salt, 32, 10000, lambda p, s: HMAC.new(p, s, SHA256).digest() ) user['cryptpass'] = (salt, crypted) else: user[attribute] = attributemap[attribute] _mark_dirtykey('users', name, self.tenant) self._bg_sync_to_file() def del_user(self, name): if cfgleader: return exec_on_leader('_rpc_master_del_user', self.tenant, name) if cfgstreams: exec_on_followers('_rpc_del_user', self.tenant, name) self._true_del_user(name) def _true_del_user(self, name): if name in self._cfgstore['users']: del self._cfgstore['users'][name] _mark_dirtykey('users', name, 
self.tenant) self._bg_sync_to_file() def create_user(self, name, role="Administrator", uid=None, displayname=None, attributemap=None): """Create a new user :param name: The login name of the user :param role: The role the user should be considered. Can be "Administrator" or "Technician", defaults to "Administrator" :param uid: Custom identifier number if desired. Defaults to random. :param displayname: Optional long format name for UI consumption """ if cfgleader: return exec_on_leader('_rpc_master_create_user', self.tenant, name, role, uid, displayname, attributemap) if cfgstreams: exec_on_followers('_rpc_create_user', self.tenant, name, role, uid, displayname, attributemap) self._true_create_user(name, role, uid, displayname, attributemap) def _true_create_user(self, name, role="Administrator", uid=None, displayname=None, attributemap=None): if 'idmap' not in _cfgstore['main']: _cfgstore['main']['idmap'] = {} if uid is None: uid = _generate_new_id() else: if uid in _cfgstore['main']['idmap']: raise Exception("Duplicate id requested") if 'users' not in self._cfgstore: self._cfgstore['users'] = {} name = name.encode('utf-8') if name in self._cfgstore['users']: raise Exception("Duplicate username requested") self._cfgstore['users'][name] = {'id': uid} if displayname is not None: self._cfgstore['users'][name]['displayname'] = displayname _cfgstore['main']['idmap'][uid] = { 'tenant': self.tenant, 'username': name } if attributemap is not None: self._true_set_user(name, attributemap) _mark_dirtykey('users', name, self.tenant) _mark_dirtykey('idmap', uid) self._bg_sync_to_file() def is_node(self, node): return node in self._cfgstore['nodes'] def is_nodegroup(self, nodegroup): return nodegroup in self._cfgstore['nodegroups'] def get_groups(self, sizesort=False): if sizesort: return reversed( sorted(self._cfgstore['nodegroups'], key=lambda x: len( self._cfgstore['nodegroups'][x]['nodes']))) return iter(self._cfgstore['nodegroups']) def list_nodes(self): try: return 
iter(self._cfgstore['nodes']) except KeyError: return [] def get_nodegroup_attributes(self, nodegroup, attributes=()): cfgnodeobj = self._cfgstore['nodegroups'][nodegroup] if not attributes: attributes = cfgnodeobj nodeobj = {} for attribute in attributes: if attribute.startswith('_'): continue if attribute not in cfgnodeobj: continue nodeobj[attribute] = _decode_attribute(attribute, cfgnodeobj, decrypt=self.decrypt) return nodeobj def expand_attrib_expression(self, nodelist, expression): if type(nodelist) in (unicode, str): nodelist = (nodelist,) for node in nodelist: cfgobj = self._cfgstore['nodes'][node] fmt = _ExpressionFormat(cfgobj, node) yield (node, fmt.format(expression)) def get_node_attributes(self, nodelist, attributes=(), decrypt=None): if decrypt is None: decrypt = self.decrypt retdict = {} if isinstance(nodelist, str) or isinstance(nodelist, unicode): nodelist = [nodelist] if isinstance(attributes, str) or isinstance(attributes, unicode): attributes = [attributes] relattribs = attributes for node in nodelist: if node not in self._cfgstore['nodes']: continue cfgnodeobj = self._cfgstore['nodes'][node] nodeobj = {} if len(attributes) == 0: relattribs = cfgnodeobj for attribute in relattribs: if attribute.startswith('_'): # skip private things continue if '*' in attribute: for attr in fnmatch.filter(list(cfgnodeobj), attribute): nodeobj[attr] = _decode_attribute(attr, cfgnodeobj, decrypt=decrypt) if attribute not in cfgnodeobj: continue # since the formatter is not passed in, the calculator is # skipped. 
The decryption, however, we want to do only on # demand nodeobj[attribute] = _decode_attribute(attribute, cfgnodeobj, decrypt=decrypt) retdict[node] = nodeobj return retdict def _node_added_to_group(self, node, group, changeset): try: nodecfg = self._cfgstore['nodes'][node] groupcfg = self._cfgstore['nodegroups'][group] except KeyError: # something did not exist, nothing to do return for attrib in groupcfg: self._do_inheritance(nodecfg, attrib, node, changeset) _addchange(changeset, node, attrib) def _node_removed_from_group(self, node, group, changeset): try: nodecfg = self._cfgstore['nodes'][node] except KeyError: # node did not exist, nothing to do return for attrib in nodecfg.keys(): if attrib.startswith("_"): continue if attrib == 'groups': continue try: if nodecfg[attrib]['inheritedfrom'] == group: del nodecfg[attrib] # remove invalid inherited data self._do_inheritance(nodecfg, attrib, node, changeset) _addchange(changeset, node, attrib) _mark_dirtykey('nodes', node, self.tenant) except KeyError: # inheritedfrom not set, move on pass def _do_inheritance(self, nodecfg, attrib, nodename, changeset, srcgroup=None): # for now, just do single inheritance # TODO: concatenating inheritance if requested if attrib in ('nodes', 'groups'): #not attributes that should be considered here return if attrib in nodecfg and 'inheritedfrom' not in nodecfg[attrib]: return # already has a non-inherited value set, nothing to do # if the attribute is not set, this will search for a candidate # if it is set, but inheritedfrom, search for a replacement, just # in case if not 'groups' in nodecfg: return for group in nodecfg['groups']: if attrib in self._cfgstore['nodegroups'][group]: if srcgroup is not None and group != srcgroup: # skip needless deepcopy return nodecfg[attrib] = \ copy.deepcopy(self._cfgstore['nodegroups'][group][attrib]) nodecfg[attrib]['inheritedfrom'] = group self._refresh_nodecfg(nodecfg, attrib, nodename, changeset=changeset) _mark_dirtykey('nodes', nodename, 
self.tenant) return if srcgroup is not None and group == srcgroup: # break out return def _sync_groups_to_node(self, groups, node, changeset): for group in self._cfgstore['nodegroups']: if group not in groups: if node in self._cfgstore['nodegroups'][group]['nodes']: self._cfgstore['nodegroups'][group]['nodes'].discard(node) self._node_removed_from_group(node, group, changeset) _mark_dirtykey('nodegroups', group, self.tenant) for group in groups: if group not in self._cfgstore['nodegroups']: self._cfgstore['nodegroups'][group] = {'nodes': set([node])} _mark_dirtykey('nodegroups', group, self.tenant) elif node not in self._cfgstore['nodegroups'][group]['nodes']: self._cfgstore['nodegroups'][group]['nodes'].add(node) _mark_dirtykey('nodegroups', group, self.tenant) # node was not already in given group, perform inheritence fixup self._node_added_to_group(node, group, changeset) def _sync_nodes_to_group(self, nodes, group, changeset): for node in self._cfgstore['nodes']: if node not in nodes and 'groups' in self._cfgstore['nodes'][node]: if group in self._cfgstore['nodes'][node]['groups']: self._cfgstore['nodes'][node]['groups'].remove(group) self._node_removed_from_group(node, group, changeset) for node in nodes: if node not in self._cfgstore['nodes']: self._cfgstore['nodes'][node] = {'groups': [group]} _mark_dirtykey('nodes', node, self.tenant) elif group not in self._cfgstore['nodes'][node]['groups']: self._cfgstore['nodes'][node]['groups'].insert(0, group) _mark_dirtykey('nodes', node, self.tenant) else: continue # next node, this node already in self._node_added_to_group(node, group, changeset) def add_group_attributes(self, attribmap): self.set_group_attributes(attribmap, autocreate=True) def set_group_attributes(self, attribmap, autocreate=False): if cfgleader: # currently config slave to another return exec_on_leader('_rpc_master_set_group_attributes', self.tenant, attribmap, autocreate) if cfgstreams: exec_on_followers('_rpc_set_group_attributes', self.tenant, 
attribmap, autocreate) self._true_set_group_attributes(attribmap, autocreate) def _true_set_group_attributes(self, attribmap, autocreate=False): changeset = {} for group in attribmap: if group == '': raise ValueError('"{0}" is not a valid group name'.format( group)) if autocreate: try: noderange._parser.parseString( '({0})'.format(group)).asList() except noderange.pp.ParseException as pe: raise ValueError('"{0}" is not a valid group name'.format( group)) if not autocreate and group not in self._cfgstore['nodegroups']: raise ValueError("{0} group does not exist".format(group)) for attr in attribmap[group]: # first do a pass to normalize out any aliased attribute names if attr in _attraliases: newattr = _attraliases[attr] attribmap[group][newattr] = attribmap[group][attr] del attribmap[group][attr] if 'noderange' in attribmap[group]: if len(attribmap[group]) > 1: raise ValueErorr('noderange attribute must be set by itself') for attr in attribmap[group]: if attr in _attraliases: newattr = _attraliases[attr] attribmap[group][newattr] = attribmap[group][attr] del attribmap[group][attr] if attr not in ('nodes', 'noderange'): attrval = fixup_attribute(attr, attribmap[group][attr]) if attribute_is_invalid(attr, attrval): errstr = "{0} attribute is invalid".format(attr) raise ValueError(errstr) attribmap[group][attr] = attrval if attr == 'nodes': if isinstance(attribmap[group][attr], dict): currnodes = list(self.get_nodegroup_attributes( group, ['nodes']).get('nodes', [])) if attribmap[group][attr].get('prepend', False): newnodes = noderange.NodeRange(attribmap[group][attr][ 'prepend'], config=self).nodes attribmap[group][attr] = list( newnodes) + currnodes elif attribmap[group][attr].get('remove', False): delnodes = noderange.NodeRange( attribmap[group][attr]['remove'], config=self).nodes attribmap[group][attr] = [ x for x in currnodes if x not in delnodes] if not isinstance(attribmap[group][attr], list): if type(attribmap[group][attr]) is unicode or 
type(attribmap[group][attr]) is str: attribmap[group][attr] = noderange.NodeRange( attribmap[group][attr], config=self).nodes else: raise ValueError("nodes attribute on group must be list") for node in attribmap[group]['nodes']: if node not in self._cfgstore['nodes']: raise ValueError( "{0} node does not exist to add to {1}".format( node, group)) for group in attribmap: group = group.encode('utf-8') if group not in self._cfgstore['nodegroups']: self._cfgstore['nodegroups'][group] = {'nodes': set()} cfgobj = self._cfgstore['nodegroups'][group] if 'noderange' in attribmap[group] and attribmap[group]['noderange']: if cfgobj['nodes']: raise ValueError('Cannot set both nodes and noderange on group') if set(cfgobj) - set(['noderange', 'nodes']): raise ValueError('Cannot set noderange on a group with attributes') elif 'noderange' in cfgobj and cfgobj['noderange']: raise ValueError('Attributes cannot be set on a group with a noderange') for attr in attribmap[group]: if attr == 'nodes': newdict = set(attribmap[group][attr]) elif (isinstance(attribmap[group][attr], str) or isinstance(attribmap[group][attr], unicode) or isinstance(attribmap[group][attr], bool)): newdict = {'value': attribmap[group][attr]} else: newdict = attribmap[group][attr] if 'value' in newdict and attr.startswith("secret."): newdict['cryptvalue'] = crypt_value(newdict['value']) del newdict['value'] cfgobj[attr] = newdict if attr == 'nodes': self._sync_nodes_to_group(group=group, nodes=attribmap[group]['nodes'], changeset=changeset) elif attr != 'noderange': # update inheritence for node in cfgobj['nodes']: nodecfg = self._cfgstore['nodes'][node] self._do_inheritance(nodecfg, attr, node, changeset, srcgroup=group) _addchange(changeset, node, attr) _mark_dirtykey('nodegroups', group, self.tenant) self._notif_attribwatchers(changeset) self._bg_sync_to_file() def clear_group_attributes(self, groups, attributes): if cfgleader: return exec_on_leader('_rpc_master_clear_group_attributes', self.tenant, groups, 
attributes) if cfgstreams: exec_on_followers('_rpc_clear_group_attributes', self.tenant, groups, attributes) self._true_clear_group_attributes(groups, attributes) def _true_clear_group_attributes(self, groups, attributes): changeset = {} realattributes = [] for attrname in list(attributes): if attrname in _attraliases: realattributes.append(_attraliases[attrname]) else: realattributes.append(attrname) attributes = realattributes if type(groups) in (str, unicode): groups = (groups,) for group in groups: group = group.encode('utf-8') try: groupentry = self._cfgstore['nodegroups'][group] except KeyError: continue for attrib in attributes: if attrib == 'nodes': groupentry['nodes'] = set() self._sync_nodes_to_group( group=group, nodes=(), changeset=changeset) else: try: del groupentry[attrib] except KeyError: pass for node in groupentry['nodes']: nodecfg = self._cfgstore['nodes'][node] try: delnodeattrib = ( nodecfg[attrib]['inheritedfrom'] == group) except KeyError: delnodeattrib = False if delnodeattrib: del nodecfg[attrib] self._do_inheritance(nodecfg, attrib, node, changeset) _addchange(changeset, node, attrib) _mark_dirtykey('nodes', node, self.tenant) _mark_dirtykey('nodegroups', group, self.tenant) self._notif_attribwatchers(changeset) self._bg_sync_to_file() def _refresh_nodecfg(self, cfgobj, attrname, node, changeset): exprmgr = None if 'expression' in cfgobj[attrname]: # evaluate now if exprmgr is None: exprmgr = _ExpressionFormat(cfgobj, node) cfgobj[attrname] = _decode_attribute(attrname, cfgobj, formatter=exprmgr) if ('_expressionkeys' in cfgobj and attrname in cfgobj['_expressionkeys']): if exprmgr is None: exprmgr = _ExpressionFormat(cfgobj, node) self._recalculate_expressions(cfgobj, formatter=exprmgr, node=node, changeset=changeset) def _notif_attribwatchers(self, nodeattrs): if self.tenant not in self._attribwatchers: return notifdata = {} attribwatchers = self._attribwatchers[self.tenant] for node in nodeattrs: if node not in attribwatchers: continue 
attribwatcher = attribwatchers[node] # usually, we will only look at the specific attribute keys that # have had change flagged, so set up to iterate through only those checkattrs = nodeattrs[node] if '_nodedeleted' in nodeattrs[node]: # in the case of a deleted node, we want to iterate through # *all* attributes that the node might have had set prior # to deletion, to make all watchers aware of the removed # node and take appropriate action checkattrs = attribwatcher globattrs = {} for attrglob in attribwatcher.get('_attrglobs', []): for matched in fnmatch.filter(list(checkattrs), attrglob): globattrs[matched] = attrglob for attrname in checkattrs: if attrname == '_attrglobs': continue watchkey = attrname # the attrib watcher could still have a glob if attrname not in attribwatcher: if attrname in globattrs: watchkey = globattrs[attrname] else: continue for notifierid in attribwatcher[watchkey]: if notifierid in notifdata: if node in notifdata[notifierid]['nodeattrs']: notifdata[notifierid]['nodeattrs'][node].append( attrname) else: notifdata[notifierid]['nodeattrs'][node] = [ attrname] else: notifdata[notifierid] = { 'nodeattrs': {node: [attrname]}, 'callback': attribwatcher[watchkey][notifierid] } for watcher in notifdata.itervalues(): callback = watcher['callback'] eventlet.spawn_n(_do_notifier, self, watcher, callback) def del_nodes(self, nodes): if cfgleader: # slaved to a collective return exec_on_leader('_rpc_master_del_nodes', self.tenant, nodes) if cfgstreams: exec_on_followers('_rpc_del_nodes', self.tenant, nodes) self._true_del_nodes(nodes) def _true_del_nodes(self, nodes): if self.tenant in self._nodecollwatchers: for watcher in self._nodecollwatchers[self.tenant].itervalues(): watcher(added=[], deleting=nodes, configmanager=self) changeset = {} for node in nodes: # set a reserved attribute for the sake of the change notification # framework to trigger on changeset[node] = {'_nodedeleted': 1} node = node.encode('utf-8') if node in 
self._cfgstore['nodes']: self._sync_groups_to_node(node=node, groups=[], changeset=changeset) del self._cfgstore['nodes'][node] _mark_dirtykey('nodes', node, self.tenant) self._notif_attribwatchers(changeset) self._bg_sync_to_file() def del_groups(self, groups): if cfgleader: return exec_on_leader('_rpc_master_del_groups', self.tenant, groups) if cfgstreams: exec_on_followers('_rpc_del_groups', self.tenant, groups) self._true_del_groups(groups) def _true_del_groups(self, groups): changeset = {} for group in groups: if group in self._cfgstore['nodegroups']: self._sync_nodes_to_group(group=group, nodes=[], changeset=changeset) del self._cfgstore['nodegroups'][group] _mark_dirtykey('nodegroups', group, self.tenant) self._notif_attribwatchers(changeset) self._bg_sync_to_file() def clear_node_attributes(self, nodes, attributes): if cfgleader: return exec_on_leader('_rpc_master_clear_node_attributes', self.tenant, nodes, attributes) if cfgstreams: exec_on_followers('_rpc_clear_node_attributes', self.tenant, nodes, attributes) self._true_clear_node_attributes(nodes, attributes) def _true_clear_node_attributes(self, nodes, attributes): # accumulate all changes into a changeset and push in one go changeset = {} realattributes = [] for attrname in list(attributes): if attrname in _attraliases: realattributes.append(_attraliases[attrname]) else: realattributes.append(attrname) attributes = realattributes for node in nodes: node = node.encode('utf-8') try: nodek = self._cfgstore['nodes'][node] except KeyError: continue recalcexpressions = False for attrib in attributes: if attrib in nodek and 'inheritedfrom' not in nodek[attrib]: # if the attribute is set and not inherited, # delete it and check for inheritence to backfil data del nodek[attrib] self._do_inheritance(nodek, attrib, node, changeset) _addchange(changeset, node, attrib) _mark_dirtykey('nodes', node, self.tenant) if ('_expressionkeys' in nodek and attrib in nodek['_expressionkeys']): recalcexpressions = True if 
recalcexpressions: exprmgr = _ExpressionFormat(nodek, node) self._recalculate_expressions(nodek, formatter=exprmgr, node=node, changeset=changeset) self._notif_attribwatchers(changeset) self._bg_sync_to_file() def add_node_attributes(self, attribmap): for node in attribmap: if 'groups' not in attribmap[node]: attribmap[node]['groups'] = [] self.set_node_attributes(attribmap, autocreate=True) def set_node_attributes(self, attribmap, autocreate=False): if cfgleader: # currently config slave to another return exec_on_leader('_rpc_master_set_node_attributes', self.tenant, attribmap, autocreate) if cfgstreams: exec_on_followers('_rpc_set_node_attributes', self.tenant, attribmap, autocreate) self._true_set_node_attributes(attribmap, autocreate) def _true_set_node_attributes(self, attribmap, autocreate): # TODO(jbjohnso): multi mgr support, here if we have peers, # pickle the arguments and fire them off in eventlet # flows to peers, all should have the same result newnodes = [] changeset = {} # first do a sanity check of the input upfront # this mitigates risk of arguments being partially applied for node in attribmap: node = node.encode('utf-8') if node == '': raise ValueError('"{0}" is not a valid node name'.format(node)) if autocreate: try: noderange._parser.parseString( '({0})'.format(node)).asList() except noderange.pp.ParseException as pe: raise ValueError( '"{0}" is not a valid node name'.format(node)) if autocreate is False and node not in self._cfgstore['nodes']: raise ValueError("node {0} does not exist".format(node)) for attrname in list(attribmap[node]): if attrname in _attraliases: truename = _attraliases[attrname] attribmap[node][truename] = attribmap[node][attrname] del attribmap[node][attrname] for attrname in attribmap[node]: attrval = attribmap[node][attrname] try: if (allattributes.node[attrname]['type'] == 'list' and type(attrval) in (str, unicode)): attrval = attrval.split(",") except KeyError: pass if attrname == 'groups': if 
isinstance(attribmap[node]['groups'], dict): currgroups = self.get_node_attributes( node, 'groups').get(node, {}).get('groups', []) if attribmap[node]['groups'].get('prepend', False): newgroups = attribmap[node]['groups'][ 'prepend'].split(',') attribmap[node]['groups'] = newgroups + currgroups elif attribmap[node]['groups'].get('remove', False): delgroups = attribmap[node]['groups'][ 'remove'].split(',') newgroups = [ x for x in currgroups if x not in delgroups] attribmap[node]['groups'] = newgroups elif type(attribmap[node]['groups']) != list: attribmap[node]['groups']=attribmap[node]['groups'].split(",") for group in attribmap[node]['groups']: if group not in self._cfgstore['nodegroups']: raise ValueError( "group {0} does not exist".format(group)) if ('everything' in self._cfgstore['nodegroups'] and 'everything' not in attribmap[node]['groups']): attribmap[node]['groups'].append('everything') else: attrval = fixup_attribute(attrname, attrval) if attribute_is_invalid(attrname, attrval): errstr = "{0} attribute on node {1} is invalid".format( attrname, node) raise ValueError(errstr) attribmap[node][attrname] = attrval for node in attribmap: node = node.encode('utf-8') exprmgr = None if node not in self._cfgstore['nodes']: newnodes.append(node) self._cfgstore['nodes'][node] = {} cfgobj = self._cfgstore['nodes'][node] recalcexpressions = False for attrname in attribmap[node]: if (isinstance(attribmap[node][attrname], str) or isinstance(attribmap[node][attrname], unicode) or isinstance(attribmap[node][attrname], bool)): newdict = {'value': attribmap[node][attrname]} else: newdict = attribmap[node][attrname] if 'value' in newdict and attrname.startswith("secret."): newdict['cryptvalue'] = crypt_value(newdict['value']) del newdict['value'] cfgobj[attrname] = newdict if attrname == 'groups': self._sync_groups_to_node(node=node, groups=attribmap[node]['groups'], changeset=changeset) if ('_expressionkeys' in cfgobj and attrname in cfgobj['_expressionkeys']): 
recalcexpressions = True if 'expression' in cfgobj[attrname]: # evaluate now if exprmgr is None: exprmgr = _ExpressionFormat(cfgobj, node) cfgobj[attrname] = _decode_attribute(attrname, cfgobj, formatter=exprmgr) # if any code is watching these attributes, notify # them of the change _addchange(changeset, node, attrname) _mark_dirtykey('nodes', node, self.tenant) if recalcexpressions: if exprmgr is None: exprmgr = _ExpressionFormat(cfgobj, node) self._recalculate_expressions(cfgobj, formatter=exprmgr, node=node, changeset=changeset) self._notif_attribwatchers(changeset) if newnodes: if self.tenant in self._nodecollwatchers: nodecollwatchers = self._nodecollwatchers[self.tenant] for watcher in nodecollwatchers.itervalues(): eventlet.spawn_n(_do_add_watcher, watcher, newnodes, self) self._bg_sync_to_file() #TODO: wait for synchronization to suceed/fail??) def _load_from_json(self, jsondata, sync=True): """Load fresh configuration data from jsondata :param jsondata: String of jsondata :return: """ dumpdata = json.loads(jsondata) tmpconfig = {} for confarea in _config_areas: if confarea not in dumpdata: continue tmpconfig[confarea] = {} for element in dumpdata[confarea]: newelement = copy.deepcopy(dumpdata[confarea][element]) try: noderange._parser.parseString( '({0})'.format(element)).asList() except noderange.pp.ParseException as pe: raise ValueError( '"{0}" is not a supported name, it must be renamed or ' 'removed from backup to restore'.format(element)) for attribute in dumpdata[confarea][element]: if newelement[attribute] == '*REDACTED*': raise Exception( "Unable to restore from redacted backup") elif attribute == 'cryptpass': passparts = newelement[attribute].split('!') newelement[attribute] = tuple([base64.b64decode(x) for x in passparts]) elif 'cryptvalue' in newelement[attribute]: bincrypt = newelement[attribute]['cryptvalue'] bincrypt = tuple([base64.b64decode(x) for x in bincrypt.split('!')]) newelement[attribute]['cryptvalue'] = bincrypt elif attribute in 
('nodes', '_expressionkeys'): # A group with nodes # delete it and defer until nodes are being added # which will implicitly fill this up # Or _expressionkeys attribute, which will similarly # be rebuilt del newelement[attribute] tmpconfig[confarea][element] = newelement # We made it through above section without an exception, go ahead and # replace # Start by erasing the dbm files if present for confarea in _config_areas: try: os.unlink(os.path.join(self._cfgdir, confarea)) except OSError as e: if e.errno == 2: pass # Now we have to iterate through each fixed up element, using the # set attribute to flesh out inheritence and expressions _cfgstore['main']['idmap'] = {} for confarea in _config_areas: self._cfgstore[confarea] = {} if confarea not in tmpconfig: continue if confarea == 'nodes': self.set_node_attributes(tmpconfig[confarea], True) elif confarea == 'nodegroups': self.set_group_attributes(tmpconfig[confarea], True) elif confarea == 'users': for user in tmpconfig[confarea]: uid = tmpconfig[confarea].get('id', None) displayname = tmpconfig[confarea].get('displayname', None) self.create_user(user, uid=uid, displayname=displayname) if 'cryptpass' in tmpconfig[confarea][user]: self._cfgstore['users'][user]['cryptpass'] = \ tmpconfig[confarea][user]['cryptpass'] _mark_dirtykey('users', user, self.tenant) if sync: self._bg_sync_to_file() def _dump_to_json(self, redact=None): """Dump the configuration in json form to output password is used to protect the 'secret' attributes in liue of the actual in-configuration master key (which will have no clear form in the dump :param redact: If True, then sensitive password data will be redacted. Other values may be used one day to redact in more complex and interesting ways for non-secret data. 
""" dumpdata = {} for confarea in _config_areas: if confarea not in self._cfgstore: continue dumpdata[confarea] = {} for element in self._cfgstore[confarea]: dumpdata[confarea][element] = \ copy.deepcopy(self._cfgstore[confarea][element]) for attribute in self._cfgstore[confarea][element]: if 'inheritedfrom' in dumpdata[confarea][element][attribute]: del dumpdata[confarea][element][attribute] elif (attribute == 'cryptpass' or 'cryptvalue' in dumpdata[confarea][element][attribute]): if redact is not None: dumpdata[confarea][element][attribute] = '*REDACTED*' else: if attribute == 'cryptpass': target = dumpdata[confarea][element][attribute] else: target = dumpdata[confarea][element][attribute]['cryptvalue'] cryptval = [] for value in target: cryptval.append(base64.b64encode(value)) if attribute == 'cryptpass': dumpdata[confarea][element][attribute] = '!'.join(cryptval) else: dumpdata[confarea][element][attribute]['cryptvalue'] = '!'.join(cryptval) elif isinstance(dumpdata[confarea][element][attribute], set): dumpdata[confarea][element][attribute] = \ list(dumpdata[confarea][element][attribute]) return json.dumps( dumpdata, sort_keys=True, indent=4, separators=(',', ': ')) @classmethod def _read_from_path(cls): global _cfgstore global _txcount _cfgstore = {} rootpath = cls._cfgdir try: with open(os.path.join(rootpath, 'transactioncount'), 'r') as f: txbytes = f.read() if len(txbytes) == 8: _txcount = struct.unpack('!Q', txbytes)[0] except IOError: pass _load_dict_from_dbm(['collective'], os.path.join(rootpath, "collective")) _load_dict_from_dbm(['globals'], os.path.join(rootpath, "globals")) for confarea in _config_areas: _load_dict_from_dbm(['main', confarea], os.path.join(rootpath, confarea)) try: for tenant in os.listdir(os.path.join(rootpath, 'tenants')): for confarea in _config_areas: _load_dict_from_dbm( ['main', tenant, confarea], os.path.join(rootpath, tenant, confarea)) except OSError: pass @classmethod def wait_for_sync(cls, fullsync=False): if 
cls._cfgwriter is not None: cls._cfgwriter.join() cls._bg_sync_to_file(fullsync) if cls._cfgwriter is not None: cls._cfgwriter.join() @classmethod def shutdown(cls): cls.wait_for_sync() sys.exit(0) @classmethod def _bg_sync_to_file(cls, fullsync=False): if statelessmode: return with cls._syncstate: if (cls._syncrunning and cls._cfgwriter is not None and cls._cfgwriter.isAlive()): cls._writepending = True return if cls._syncrunning: # This suggests an unclean write attempt, # do a fullsync as a recovery fullsync = True cls._syncrunning = True # if the thread is exiting, join it to let it close, just in case if cls._cfgwriter is not None: cls._cfgwriter.join() cls._cfgwriter = threading.Thread(target=cls._sync_to_file, args=(fullsync,)) cls._cfgwriter.start() @classmethod def _sync_to_file(cls, fullsync=False): with _synclock: if statelessmode: return _mkpath(cls._cfgdir) with open(os.path.join(cls._cfgdir, 'transactioncount'), 'w') as f: f.write(struct.pack('!Q', _txcount)) if (fullsync or 'dirtyglobals' in _cfgstore and 'globals' in _cfgstore): if fullsync: # globals is not a given to be set.. 
dirtyglobals = _cfgstore['globals'] else: with _dirtylock: dirtyglobals = copy.deepcopy(_cfgstore['dirtyglobals']) del _cfgstore['dirtyglobals'] globalf = dbm.open(os.path.join(cls._cfgdir, "globals"), 'c', 384) # 0600 try: for globalkey in dirtyglobals: if globalkey in _cfgstore['globals']: globalf[globalkey] = \ cPickle.dumps(_cfgstore['globals'][globalkey]) else: if globalkey in globalf: del globalf[globalkey] finally: globalf.close() if fullsync or 'collectivedirty' in _cfgstore: collectivef = dbm.open(os.path.join(cls._cfgdir, "collective"), 'c', 384) try: if fullsync: colls = _cfgstore['collective'] else: with _dirtylock: colls = copy.deepcopy(_cfgstore['collectivedirty']) del _cfgstore['collectivedirty'] for coll in colls: if coll in _cfgstore['collective']: collectivef[coll] = cPickle.dumps( _cfgstore['collective'][coll]) else: if coll in collectivef: del globalf[coll] finally: collectivef.close() if fullsync: pathname = cls._cfgdir currdict = _cfgstore['main'] for category in currdict: _mkpath(pathname) dbf = dbm.open(os.path.join(pathname, category), 'c', 384) # 0600 try: for ck in currdict[category]: dbf[ck] = cPickle.dumps(currdict[category][ck]) finally: dbf.close() elif 'dirtykeys' in _cfgstore: with _dirtylock: currdirt = copy.deepcopy(_cfgstore['dirtykeys']) del _cfgstore['dirtykeys'] for tenant in currdirt: dkdict = currdirt[tenant] if tenant is None: pathname = cls._cfgdir currdict = _cfgstore['main'] else: pathname = os.path.join(cls._cfgdir, 'tenants', tenant) currdict = _cfgstore['tenant'][tenant] for category in dkdict: _mkpath(pathname) dbf = dbm.open(os.path.join(pathname, category), 'c', 384) # 0600 try: for ck in dkdict[category]: if ck not in currdict[category]: if ck in dbf: del dbf[ck] else: dbf[ck] = cPickle.dumps(currdict[category][ck]) finally: dbf.close() willrun = False with cls._syncstate: if cls._writepending: cls._writepending = False willrun = True else: cls._syncrunning = False if willrun: return cls._sync_to_file() def 
_recalculate_expressions(self, cfgobj, formatter, node, changeset): for key in cfgobj: if not isinstance(cfgobj[key], dict): continue if 'expression' in cfgobj[key]: cfgobj[key] = _decode_attribute(key, cfgobj, formatter=formatter) _addchange(changeset, node, key) elif ('cryptvalue' not in cfgobj[key] and 'value' not in cfgobj[key]): # recurse for nested structures, with some hint that # it might indeed be a nested structure self._recalculate_expressions(cfgobj[key], formatter, node, changeset) def _restore_keys(jsond, password, newpassword=None, sync=True): # the jsond from the restored file, password (if any) used to protect # the file, and newpassword to use, (also check the service.cfg file) global _masterkey global _masterintegritykey if isinstance(jsond, dict): keydata = jsond else: keydata = json.loads(jsond) cryptkey = _parse_key(keydata['cryptkey'], password) integritykey = _parse_key(keydata['integritykey'], password) conf.init_config() cfg = conf.get_config() if cfg.has_option('security', 'externalcfgkey'): keyfilename = cfg.get('security', 'externalcfgkey') with open(keyfilename, 'r') as keyfile: newpassword = keyfile.read() set_global('master_privacy_key', _format_key(cryptkey, password=newpassword), sync) set_global('master_integrity_key', _format_key(integritykey, password=newpassword), sync) _masterkey = cryptkey _masterintegritykey = integritykey if sync: ConfigManager.wait_for_sync() def _dump_keys(password, dojson=True): if _masterkey is None or _masterintegritykey is None: init_masterkey() cryptkey = _format_key(_masterkey, password=password) if 'passphraseprotected' in cryptkey: cryptkey = '!'.join(map(base64.b64encode, cryptkey['passphraseprotected'])) else: cryptkey = '*unencrypted:{0}'.format(base64.b64encode( cryptkey['unencryptedvalue'])) integritykey = _format_key(_masterintegritykey, password=password) if 'passphraseprotected' in integritykey: integritykey = '!'.join(map(base64.b64encode, integritykey['passphraseprotected'])) else: 
integritykey = '*unencrypted:{0}'.format(base64.b64encode( integritykey['unencryptedvalue'])) keydata = {'cryptkey': cryptkey, 'integritykey': integritykey} if dojson: return json.dumps(keydata, sort_keys=True, indent=4, separators=(',', ': ')) return keydata def restore_db_from_directory(location, password): try: with open(os.path.join(location, 'keys.json'), 'r') as cfgfile: keydata = cfgfile.read() json.loads(keydata) _restore_keys(keydata, password) except IOError as e: if e.errno == 2: raise Exception("Cannot restore without keys, this may be a " "redacted dump") try: moreglobals = json.load(open(os.path.join(location, 'globals.json'))) for globvar in moreglobals: set_global(globvar, moreglobals[globvar]) except IOError as e: if e.errno != 2: raise try: collective = json.load(open(os.path.join(location, 'collective.json'))) _cfgstore['collective'] = {} for coll in collective: add_collective_member(coll, collective[coll]['address'], collective[coll]['fingerprint']) except IOError as e: if e.errno != 2: raise with open(os.path.join(location, 'main.json'), 'r') as cfgfile: cfgdata = cfgfile.read() ConfigManager(tenant=None)._load_from_json(cfgdata) def dump_db_to_directory(location, password, redact=None, skipkeys=False): if not redact and not skipkeys: with open(os.path.join(location, 'keys.json'), 'w') as cfgfile: cfgfile.write(_dump_keys(password)) cfgfile.write('\n') with open(os.path.join(location, 'main.json'), 'w') as cfgfile: cfgfile.write(ConfigManager(tenant=None)._dump_to_json(redact=redact)) cfgfile.write('\n') if 'collective' in _cfgstore: with open(os.path.join(location, 'collective.json'), 'w') as cfgfile: cfgfile.write(json.dumps(_cfgstore['collective'])) cfgfile.write('\n') bkupglobals = get_globals() if bkupglobals: json.dump(bkupglobals, open(os.path.join(location, 'globals.json'), 'w')) try: for tenant in os.listdir( os.path.join(ConfigManager._cfgdir, '/tenants/')): with open(os.path.join(location, 'tenants', tenant, 'main.json'), 'w') as 
cfgfile: cfgfile.write(ConfigManager(tenant=tenant)._dump_to_json( redact=redact)) cfgfile.write('\n') except OSError: pass def get_globals(): bkupglobals = {} for globvar in _cfgstore['globals']: if globvar.endswith('_key'): continue bkupglobals[globvar] = _cfgstore['globals'][globvar] return bkupglobals def init(stateless=False): global _cfgstore if stateless: _cfgstore = {} return try: ConfigManager._read_from_path() except IOError: _cfgstore = {} # some unit tests worth implementing: # set group attribute on lower priority group, result is that node should not # change # after that point, then unset on the higher priority group, lower priority # group should get it then # rinse and repeat for set on node versus set on group # clear group attribute and assure than it becomes unset on all nodes # set various expressions
{ "repo_name": "whowutwut/confluent", "path": "confluent_server/confluent/config/configmanager.py", "copies": "1", "size": "95783", "license": "apache-2.0", "hash": 1321130713339152000, "line_mean": 39.9504061565, "line_max": 129, "alpha_frac": 0.5543676853, "autogenerated": false, "ratio": 4.428657296097652, "config_test": true, "has_no_keywords": false, "few_assignments": false, "quality_score": 1, "avg_score": 0.0013055173152822, "num_lines": 2339 }
"""802.1x implementation for FAUCET.""" # Copyright (C) 2013 Nippon Telegraph and Telephone Corporation. # Copyright (C) 2015 Brad Cowie, Christopher Lorier and Joe Stringer. # Copyright (C) 2015 Research and Education Advanced Network New Zealand Ltd. # Copyright (C) 2015--2017 The Contributors # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import eventlet eventlet.monkey_patch() # pylint: disable=wrong-import-position from ryu.lib import hub # noqa from chewie import chewie # noqa from faucet.valve_util import kill_on_exception # noqa def get_mac_str(valve_index, port_num): """Gets the mac address string for the valve/port combo Args: valve_index (int): The internally used id of the valve. 
port_num (int): port number Returns: str """ two_byte_port_num = ("%04x" % port_num) two_byte_port_num_formatted = two_byte_port_num[:2] + ':' + two_byte_port_num[2:] return '00:00:00:%02x:%s' % (valve_index, two_byte_port_num_formatted) class FaucetDot1x: # pylint: disable=too-many-instance-attributes """Wrapper for experimental Chewie 802.1x authenticator.""" exc_logname = None def __init__(self, logger, exc_logname, metrics, send_flow_msgs): self.logger = logger self.metrics = metrics self.exc_logname = exc_logname self.mac_to_port = {} # {"00:00:00:00:00:02" : (valve_0, port_1)} self.dp_id_to_valve_index = {} self.thread = None self._send_flow_msgs = send_flow_msgs self._valves = None self._dot1x_speaker = None self._auth_acl_name = None self._noauth_acl_name = None def _create_dot1x_speaker(self, dot1x_intf, chewie_id, radius_ip, radius_port, radius_secret): """ Args: dot1x_intf (str): chewie_id (str): radius_ip (str): radius_port (int): radius_secret (str): Returns: Chewie """ _chewie = chewie.Chewie( # pylint: disable=too-many-function-args dot1x_intf, self.logger, self.auth_handler, self.failure_handler, self.logoff_handler, radius_ip, radius_port, radius_secret, chewie_id) self.thread = hub.spawn(_chewie.run) self.thread.name = 'chewie' return _chewie def _get_valve_and_port(self, port_id): """Finds the valve and port that this address corresponds to Args: port_id: is a macaddress string""" valve, port = self.mac_to_port[port_id] return (valve, port) def _get_acls(self, datapath): """Returns tuple of acl values""" auth_acl = datapath.acls.get(self._auth_acl_name) noauth_acl = datapath.acls.get(self._noauth_acl_name) return (auth_acl, noauth_acl) # Loggin Methods def log_auth_event(self, valve, port_num, mac_str, status): """Log an authentication attempt event""" self.metrics.inc_var('dp_dot1x_{}'.format(status), valve.dp.base_prom_labels()) self.metrics.inc_var('port_dot1x_{}'.format(status), valve.dp.port_labels(port_num)) self.logger.info( '{} from MAC 
{} on {}'.format(status.capitalize(), mac_str, port_num)) valve.dot1x_event({'AUTHENTICATION': {'dp_id': valve.dp.dp_id, 'port': port_num, 'eth_src': mac_str, 'status': status}}) def log_port_event(self, event_type, port_type, valve, port_num): # pylint: disable=no-self-use """Log a dot1x port event""" valve.dot1x_event({event_type: {'dp_id': valve.dp.dp_id, 'port': port_num, 'port_type': port_type}}) @kill_on_exception(exc_logname) def auth_handler(self, address, port_id, *args, **kwargs): # pylint: disable=unused-argument """Callback for when a successful auth happens.""" address_str = str(address) valve, dot1x_port = self._get_valve_and_port(port_id) port_num = dot1x_port.number self.log_auth_event(valve, port_num, address_str, 'success') flowmods = self._get_login_flowmod(dot1x_port, valve, address_str, kwargs.get('vlan_name', None), kwargs.get('filter_id', None)) if flowmods: self._send_flow_msgs(valve, flowmods) @kill_on_exception(exc_logname) def logoff_handler(self, address, port_id): """Callback for when an EAP logoff happens.""" address_str = str(address) valve, dot1x_port = self._get_valve_and_port(port_id) port_num = dot1x_port.number self.log_auth_event(valve, port_num, address_str, 'logoff') flowmods = self._get_logoff_flowmod(dot1x_port, valve, address_str) if flowmods: self._send_flow_msgs(valve, flowmods) @kill_on_exception(exc_logname) def failure_handler(self, address, port_id): """Callback for when a EAP failure happens.""" address_str = str(address) valve, dot1x_port = self._get_valve_and_port(port_id) port_num = dot1x_port.number self.log_auth_event(valve, port_num, address_str, 'failure') flowmods = self._get_logoff_flowmod(dot1x_port, valve, address_str) if flowmods: self._send_flow_msgs(valve, flowmods) def set_mac_str(self, valve, valve_index, port_num): """ Args: valve (Valve): valve_index (int): port_num (int): Returns: str """ mac_str = get_mac_str(valve_index, port_num) port = valve.dp.ports[port_num] self.mac_to_port[mac_str] = 
(valve, port) return mac_str def nfv_sw_port_up(self, dp_id, dot1x_ports, nfv_sw_port): """Setup the dot1x forward port acls when the nfv_sw_port comes up. Args: dp_id (int): dot1x_ports (Iterable of Port objects): nfv_sw_port (Port): Returns: list of flowmods """ # TODO Come back to. Should this be down? self._dot1x_speaker.port_down( get_mac_str(self.dp_id_to_valve_index[dp_id], nfv_sw_port.number)) valve = self._valves[dp_id] self.log_port_event("PORT_UP", 'nfv', valve, nfv_sw_port.number) ret = [] for port in dot1x_ports: ret.extend(self.create_flow_pair( dp_id, port, nfv_sw_port, valve)) return ret def port_up(self, dp_id, dot1x_port, nfv_sw_port): """Setup the dot1x forward port acls. Args: dp_id (int): dot1x_port (Port): nfv_sw_port (Port): Returns: list of flowmods """ port_num = dot1x_port.number mac_str = get_mac_str(self.dp_id_to_valve_index[dp_id], port_num) self._dot1x_speaker.port_up(mac_str) valve = self._valves[dp_id] self.log_port_event("PORT_UP", 'supplicant', valve, port_num) # Dealing with ACLs flowmods = [] flowmods.extend(self.create_flow_pair( dp_id, dot1x_port, nfv_sw_port, valve)) flowmods.extend(self._add_unauthenticated_flowmod(dot1x_port, valve)) if dot1x_port.dot1x_mab: self.logger.info("Port % is using Mac Auth Bypass", dot1x_port.number) flowmods.append(self.create_mab_flow(dp_id, dot1x_port, nfv_sw_port, valve)) return flowmods def create_mab_flow(self, dp_id, dot1x_port, nfv_sw_port, valve): """Creates a flow that mirrors UDP packets from port 68 (DHCP) from the supplicant to the nfv port Args: dp_id (int): dot1x_port (Port): nfv_sw_port (Port): valve (Valve): Returns: list """ acl_manager = valve.acl_manager if dot1x_port.running(): valve_index = self.dp_id_to_valve_index[dp_id] mac = get_mac_str(valve_index, dot1x_port.number) return acl_manager.create_mab_flow(dot1x_port.number, nfv_sw_port.number, mac) return [] def create_flow_pair(self, dp_id, dot1x_port, nfv_sw_port, valve): """Creates the pair of flows that redirects the 
eapol packets to/from the supplicant and nfv port Args: dp_id (int): dot1x_port (Port): nfv_sw_port (Port): valve (Valve): Returns: list """ acl_manager = valve.acl_manager if dot1x_port.running(): valve_index = self.dp_id_to_valve_index[dp_id] mac = get_mac_str(valve_index, dot1x_port.number) return acl_manager.create_dot1x_flow_pair( dot1x_port.number, nfv_sw_port.number, mac) return [] def port_down(self, dp_id, dot1x_port, nfv_sw_port): """ Remove the acls added by FaucetDot1x.get_port_acls Args: dp_id (int): dot1x_port (Port): nfv_sw_port (Port): Returns: list of flowmods """ valve_index = self.dp_id_to_valve_index[dp_id] port_num = dot1x_port.number mac = get_mac_str(valve_index, port_num) self._dot1x_speaker.port_down(mac) valve = self._valves[dp_id] acl_manager = valve.acl_manager self.log_port_event("PORT_DOWN", 'supplicant', valve, port_num) flowmods = [] flowmods.extend(self._del_authenticated_flowmod(dot1x_port, valve, mac)) flowmods.extend(self._del_unauthenticated_flowmod(dot1x_port, valve)) # NOTE: The flow_pair are not included in unauthed flowmod flowmods.extend(acl_manager.del_mab_flow(dot1x_port.number, nfv_sw_port.number, mac)) flowmods.extend(acl_manager.del_dot1x_flow_pair(dot1x_port.number, nfv_sw_port.number, mac)) return flowmods def reset(self, valves): """Set up a dot1x speaker.""" self._valves = valves dot1x_valves = [ valve for valve in valves.values() if valve.dp.dot1x and valve.dp.dot1x_ports()] assert len(dot1x_valves) < 255, 'dot1x not supported for > 255 DPs' if not dot1x_valves: return first_valve = dot1x_valves[0] dot1x_intf = first_valve.dp.dot1x['nfv_intf'] radius_ip = first_valve.dp.dot1x['radius_ip'] radius_port = first_valve.dp.dot1x['radius_port'] radius_secret = first_valve.dp.dot1x['radius_secret'] self._auth_acl_name = first_valve.dp.dot1x.get('auth_acl') self._noauth_acl_name = first_valve.dp.dot1x.get('noauth_acl') self._dot1x_speaker = self._create_dot1x_speaker( dot1x_intf, first_valve.dp.faucet_dp_mac, radius_ip, 
radius_port, radius_secret) for valve_index, valve in enumerate(dot1x_valves, start=0): self.dp_id_to_valve_index[valve.dp.dp_id] = valve_index for dot1x_port in valve.dp.dot1x_ports(): self.set_mac_str(valve, valve_index, dot1x_port.number) self.logger.info( 'dot1x enabled on %s (%s) port %s, NFV interface %s' % ( valve.dp, valve_index, dot1x_port, dot1x_intf)) valve.dot1x_event({'ENABLED': {'dp_id': valve.dp.dp_id}}) def _get_logoff_flowmod(self, dot1x_port, valve, mac_str): """Return flowmods required to logoff port""" flowmods = [] flowmods.extend( self._del_authenticated_flowmod(dot1x_port, valve, mac_str)) flowmods.extend( self._add_unauthenticated_flowmod(dot1x_port, valve)) return flowmods def _get_login_flowmod(self, dot1x_port, valve, # pylint: disable=too-many-arguments mac_str, vlan_name, acl_name): """Return flowmods required to login port""" flowmods = [] flowmods.extend( self._del_unauthenticated_flowmod(dot1x_port, valve)) flowmods.extend( self._add_authenticated_flowmod(dot1x_port, valve, mac_str, vlan_name, acl_name)) return flowmods def _add_authenticated_flowmod(self, dot1x_port, valve, # pylint: disable=too-many-arguments mac_str, vlan_name, acl_name): """Return flowmods for successful authentication on port""" port_num = dot1x_port.number flowmods = [] acl_manager = valve.acl_manager acl = valve.dp.acls.get(acl_name, None) if dot1x_port.dot1x_dyn_acl and acl: self.logger.info("DOT1X_DYN_ACL: Adding ACL '{0}' for port '{1}'".format( acl_name, port_num)) self.logger.debug("DOT1X_DYN_ACL: ACL contents: '{0}'".format(str(acl.__dict__))) flowmods.extend(acl_manager.add_port_acl(acl, port_num, mac_str)) elif dot1x_port.dot1x_acl: auth_acl, _ = self._get_acls(valve.dp) self.logger.info("DOT1X_PRE_ACL: Adding ACL '{0}' for port '{1}'".format( acl_name, port_num)) self.logger.debug("DOT1X_PRE_ACL: ACL contents: '{0}'".format(str(auth_acl.__dict__))) flowmods.extend(acl_manager.add_port_acl(auth_acl, port_num, mac_str)) else: 
flowmods.extend(acl_manager.add_authed_mac(port_num, mac_str)) if vlan_name: flowmods.extend(valve.add_dot1x_native_vlan(port_num, vlan_name)) return flowmods def _del_authenticated_flowmod(self, dot1x_port, valve, mac_str): """Return flowmods for deleting authentication flows from a port""" flowmods = [] port_num = dot1x_port.number acl_manager = valve.acl_manager if dot1x_port.dot1x_acl: auth_acl, _ = self._get_acls(valve.dp) flowmods.extend(acl_manager.del_port_acl(auth_acl, port_num, mac_str)) elif dot1x_port.dot1x_dyn_acl: flowmods.extend(acl_manager.del_authed_mac(port_num, mac_str, strict=False)) else: flowmods.extend(acl_manager.del_authed_mac(port_num, mac_str)) flowmods.extend(valve.del_dot1x_native_vlan(port_num)) return flowmods def _add_unauthenticated_flowmod(self, dot1x_port, valve, mac_str=None): """Return flowmods default on a port""" flowmods = [] acl_manager = valve.acl_manager if dot1x_port.dot1x_acl: _, noauth_acl = self._get_acls(valve.dp) flowmods.extend(acl_manager.add_port_acl(noauth_acl, dot1x_port.number, mac_str)) return flowmods def _del_unauthenticated_flowmod(self, dot1x_port, valve, mac_str=None): """Return flowmods for deleting default / unauthenticated flows from a port""" flowmods = [] acl_manager = valve.acl_manager if dot1x_port.dot1x_acl: _, noauth_acl = self._get_acls(valve.dp) flowmods.extend(acl_manager.del_port_acl(noauth_acl, dot1x_port.number, mac_str)) return flowmods
{ "repo_name": "gizmoguy/faucet", "path": "faucet/faucet_dot1x.py", "copies": "1", "size": "15708", "license": "apache-2.0", "hash": -202668157598071100, "line_mean": 37.312195122, "line_max": 100, "alpha_frac": 0.5881079705, "autogenerated": false, "ratio": 3.2779632721202003, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.43660712426202, "avg_score": null, "num_lines": null }
# 8031_v1.py # Shuai Jack Zhao # hdfs file system # in_port=int(inport), eth_dst=dst_mac, eth_src=src_mac, eth_type=0x0800, ipv4_src=src_ip, # ipv4_dst=dst_ip, ip_proto=6, tcp_dst=dst_port, tcp_src=src_port # # tcp_src=54310 # updated install_flow_between_switches functions # update UDP traffic function from ryu.base import app_manager from ryu.controller import ofp_event from ryu.controller.handler import CONFIG_DISPATCHER, MAIN_DISPATCHER from ryu.controller.handler import set_ev_cls from ryu.ofproto import ofproto_v1_3 from ryu.lib.packet import packet from ryu.lib.packet import ethernet, ipv4, arp, tcp, udp from ryu.controller import dpset from ryu.lib.packet.lldp import LLDP_MAC_NEAREST_BRIDGE # from ryu.lib.packet.ether_types import ETH_TYPE_LLDP import os import time from utilityLib_v1 import Utilites # import myswitch_v13 # from ryu.app.wsgi import ControllerBase, WSGIApplication, route # output ovs switch hostname and DPID pairs # updated by OFP_SWITCHES_LIST_SCRIPT OFP_SWITCHES_LIST = \ './network-data2/ofp_switches_list.db' OFP_SWITCHES_LIST_PREVIOUS = \ './network-data2/ofp_switches_list_prev.db' OFP_SWITCHES_LIST_SCRIPT = \ './scripts/remote_ovs_operation_topo_2/get_switch_ofpbr_datapath_id.sh' OFP_MAC_TO_PORT = './network-data2/ofp_mac_to_port.db' OFP_LINK_PORT = './network-data2/ofp_link_port.db' OFP_HOST_SWITCHES_LIST = './network-data2/ofp_host_switches_list.db' # upadate by host_tracker.py OFP_HOST_SWITCHES_LIST_BACK = \ './network-data2/ofp_host_switches_list_backup.db' OFP_SINGLE_SHOREST_PATH = './network-data2/ofp_single_shortest_path.db' OFP_ALL_PAIRS_SHOREST_PATH = './network-data2/ofp_all_pairs_shortest_path.db' OFP_ALL_SIMPLE_PATH = './network-data2/ofp_all_simple_path.db' OFP_ALL_PATHS_SHOREST_PATH = './network-data2/ofp_all_paths_shortest_path.db' OFP_IPERF_LOG = \ './network-data2/ofp_iperf_log.db' OFP_SSH_LOG = \ './network-data2/ofp_ssh_log.db' ICMP_PRIORITY = 3 IPERF_PRIORITY = 4 SSH_PRIORITY = 5 HDFS_PRIORITY = 6 
RESOURCE_TRACKER_PRIORITY = 7
RESOURCE_TRACKER_IDLE_TIMER = 100
RESOURCE_TRACKER_HARD_TIMER = 0
IDLE_TIMER = 120
HARD_TIMER = 0
SSH_KEY_LEARNING_TIMER = 1
SSH_TRACK_LIMIT = 10
SSH_IDLE_TIMER = 10
SSH_HARD_TIMER = 0
HDFS_IDLE_TIMER = 0
HDFS_HARD_TIMER = 0


class HDFSController(app_manager.RyuApp):
    """Ryu controller that installs flow entries for HDFS traffic.

    Watches Packet-In events for TCP traffic to port 54310 (the HDFS
    namenode port used in this testbed) and installs flows along the
    shortest path between the two end hosts, using the helpers from
    utilityLib_v1.Utilites.
    """

    OFP_VERSIONS = [ofproto_v1_3.OFP_VERSION]

    _CONTEXTS = {
        'dpset': dpset.DPSet,
    }

    def __init__(self, *args, **kwargs):
        super(HDFSController, self).__init__(*args, **kwargs)
        self.mac_to_port = {}
        self.dpset = kwargs['dpset']
        self.datapaths = {}
        self.hostname_list = {}
        # dpid (int) -> datapath object, filled in switch_features_handler.
        self.dpid_datapathObj = {}
        # (src_ip, dst_ip, src_mac, dst_mac, dst_port) -> last-seen time;
        # used to rate-limit repeated Packet-Ins for the same flow.
        self.ssh_learning = {}
        self.ssh_track_list = {}
        self.util = Utilites()

    ###################################################################
    # ofp_event.EventOFPSwitchFeatures
    ####################################################################
    @set_ev_cls(ofp_event.EventOFPSwitchFeatures, CONFIG_DISPATCHER)
    def switch_features_handler(self, ev):
        """Remember each switch's datapath object keyed by integer dpid."""
        self.logger.debug("switch_features_handler: ")
        datapath = ev.msg.datapath
        dpid = datapath.id
        # here dpid is an integer, not a hex string
        self.dpid_datapathObj[dpid] = ev.msg.datapath

    ###################################################################
    # EventOFPPacketIn handler
    ####################################################################
    @set_ev_cls(ofp_event.EventOFPPacketIn, MAIN_DISPATCHER)
    def _packet_in_handler(self, ev):
        """Install HDFS (TCP dst port 54310) flows on first Packet-In.

        Non-ethernet, LLDP, ARP, non-TCP and non-HDFS packets are ignored.
        Repeated Packet-Ins for the same flow key within
        SSH_KEY_LEARNING_TIMER seconds are suppressed.
        """
        # If you hit this you might want to increase
        # the "miss_send_length" of your switch
        if ev.msg.msg_len < ev.msg.total_len:
            self.logger.debug("packet truncated: only %s of %s bytes",
                              ev.msg.msg_len, ev.msg.total_len)
        msg = ev.msg
        datapath = msg.datapath
        pkt = packet.Packet(data=msg.data)
        pkt_ethernet = pkt.get_protocol(ethernet.ethernet)
        eth = pkt.get_protocols(ethernet.ethernet)[0]
        dst_mac = eth.dst
        if dst_mac == LLDP_MAC_NEAREST_BRIDGE:
            # topology-discovery traffic, not ours
            return
        if not pkt_ethernet:
            return
        self.logger.debug("HDFSController: Packet-In:")
        pkt_arp = pkt.get_protocol(arp.arp)
        if pkt_arp:
            return
        pkt_tcp = pkt.get_protocol(tcp.tcp)
        # BUGFIX: the original fell through for non-TCP packets and then
        # crashed below on the undefined src_ip/dst_ip; ignore them instead.
        if not pkt_tcp:
            return
        pkt_ipv4 = pkt.get_protocol(ipv4.ipv4)
        src_ip = pkt_ipv4.src
        dst_ip = pkt_ipv4.dst
        in_port = msg.match['in_port']
        src_mac = eth.src
        src_port = pkt_tcp.src_port
        dst_port = pkt_tcp.dst_port
        if str(dst_port) != '54310':
            # not HDFS (namenode port) traffic
            return
        key = (src_ip, dst_ip, src_mac, dst_mac, dst_port)
        self.logger.debug("HDFSController: Packet-In:")
        self.logger.info("\t############################# 8031 Traffic #####################################")
        self.logger.info("\tAt %s from %s to %s from src_port %s to dst_port %s from port %s src_mac %s dst_mac %s" %
                         (self.util.hostname_Check(datapath.id), src_ip, dst_ip, src_port, dst_port, in_port, src_mac, dst_mac))
        if key not in self.ssh_learning:
            # this value is used as a timer; the entry is considered stale
            # after SSH_KEY_LEARNING_TIMER seconds
            self.ssh_learning[key] = time.time()
        else:
            if time.time() - self.ssh_learning[key] >= SSH_KEY_LEARNING_TIMER:
                self.logger.info("\t(src_ip, dst_ip, src_mac, dst_mac, dst_port, in_port) TIMEOUT from self.hdfs_learning dict!!!")
                del self.ssh_learning[key]
                self.ssh_learning[key] = time.time()
            else:
                # seen recently; flows are already being installed
                return
        src_dpid_name = self.util.hostname_Check(datapath.id)
        # find destination datapath id from the host_tracker file
        dst_dpid_name = self.util.return_dst_dpid_hostname(dst_ip, dst_mac)
        if dst_dpid_name is None:
            self.logger.info("\tcould not find destination switch..............")
            return
        self.logger.info("\tInstall HDFS flow between %s and %s" % (dst_dpid_name, src_dpid_name))
        # Now only consider two end hosts
        hosts = [dst_mac, src_mac]
        # shortest path between the two edge switches, a list of hostnames
        # like ['s1', 's2', 's3']
        shortest_path = self.util.return_shortest_path(src_dpid_name, dst_dpid_name)
        # BUGFIX: the original called install_flows_for_hosts_and_attached_switches
        # unconditionally here AND again in the else-branch below, installing
        # every host<->switch flow twice; install once, in the branch that
        # needs it.
        if len(shortest_path) == 1:
            # both hosts hang off the same switch
            self.util.install_flows_for_same_switch_v2(
                shortest_path, 'TCP', src_ip, dst_ip, src_mac, dst_mac,
                src_port, dst_port, self.dpid_datapathObj,
                HDFS_IDLE_TIMER, HDFS_HARD_TIMER, msg)
        else:
            # install flows between the hosts and their attached switches
            self.install_flows_for_hosts_and_attached_switches(
                hosts, shortest_path, src_ip, dst_ip,
                src_port, dst_port, src_mac, dst_mac, msg)
            # install flows for the intermediate switches, if any
            if len(shortest_path) > 2:
                self.util.install_flows_for_rest_of_switches(
                    shortest_path, 'TCP', HDFS_PRIORITY, dst_ip, src_ip,
                    dst_mac, src_mac, self.dpid_datapathObj,
                    SSH_IDLE_TIMER, SSH_HARD_TIMER)

    def install_flows_for_hosts_and_attached_switches(self, hosts, shortest_path,
                                                      src_ip, dst_ip, src_port, dst_port,
                                                      src_mac, dst_mac, msg):
        """Install host<->edge-switch flows for both ends of the path.

        hosts is [dst_mac, src_mac]: index 0 is handled at the head of the
        path, index 1 at the tail.  The src/dst arguments are passed swapped
        relative to the handler's view, matching the reversed flow direction
        used by the Utilites helper.
        """
        count = 0
        for h_mac in hosts:
            if count < len(hosts) and count == 0:
                # head of the path: first two switches
                self.util.install_flow_between_host_and_switch_for_TCP_UDP(
                    h_mac, 'TCP', HDFS_PRIORITY, shortest_path[count:count + 2], count,
                    dst_ip, src_ip, dst_port, src_port, dst_mac, src_mac,
                    self.dpid_datapathObj, SSH_IDLE_TIMER, SSH_HARD_TIMER, msg)
                count += 1
            elif count < len(hosts) and count == 1:
                # tail of the path: last two switches, reversed
                self.util.install_flow_between_host_and_switch_for_TCP_UDP(
                    h_mac, 'TCP', HDFS_PRIORITY,
                    list([shortest_path[len(shortest_path) - 1],
                          shortest_path[len(shortest_path) - 2]]),
                    count, dst_ip, src_ip, dst_port, src_port, dst_mac, src_mac,
                    self.dpid_datapathObj, SSH_IDLE_TIMER, SSH_HARD_TIMER, msg)
                count += 1
{ "repo_name": "umkcdcrg01/ryu_openflow", "path": "ryu/app/8031_v1.py", "copies": "1", "size": "10373", "license": "apache-2.0", "hash": -4237235051510313500, "line_mean": 46.3652968037, "line_max": 161, "alpha_frac": 0.5693627687, "autogenerated": false, "ratio": 3.4680708793045802, "config_test": true, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.453743364800458, "avg_score": null, "num_lines": null }
# 803. Bricks Falling When Hit
# Applies the hits backwards and merges connected components on the go,
# using a union-find with path compression and union by size.
# O(M*N * alpha) overall plus O(len(hits)) for replaying the hits.
class Solution:
    def hitBricks(self, grid: List[List[int]], hits: List[List[int]]) -> List[int]:
        """Return, per hit, how many bricks fall because of that hit.

        A brick falls when it is no longer connected (4-directionally,
        through other bricks) to the top row.  The trick is to apply the
        hits in reverse: start from the final board, then re-add each hit
        brick and count how much the ceiling-connected component grows.
        """
        BRICK, SPACE = 1, 0
        CEILING = (-1, -1)  # virtual node representing the top of the grid
        M = len(grid)
        N = len(grid[0])

        # One disjoint-set node per original brick, plus the ceiling.
        father = {}
        size = {}
        for x in range(M):
            for y in range(N):
                if grid[x][y] == BRICK:
                    father[(x, y)] = (x, y)
                    size[(x, y)] = 1
        father[CEILING] = CEILING
        size[CEILING] = 1

        # Board state after all hits have been applied.
        final = [row[:] for row in grid]
        for x, y in hits:
            final[x][y] = SPACE

        def find_root(t):
            # Iterative find with full path compression (the original
            # recursive version could exceed the recursion limit).
            root = t
            while father[root] != root:
                root = father[root]
            while father[t] != root:
                father[t], t = root, father[t]
            return root

        def merge(t1, t2):
            # Union by size; deterministic, unlike the original coin flip.
            r1, r2 = find_root(t1), find_root(t2)
            if r1 == r2:
                return
            if size[r1] < size[r2]:
                r1, r2 = r2, r1
            father[r2] = r1
            size[r1] += size[r2]

        def connect(x, y):
            # Link brick (x, y) to the ceiling and to adjacent bricks on
            # the current `final` board.
            for nx, ny in ((x - 1, y), (x + 1, y), (x, y - 1), (x, y + 1)):
                if nx == -1:
                    merge((x, y), CEILING)
                elif 0 <= nx < M and 0 <= ny < N and final[nx][ny] == BRICK:
                    merge((x, y), (nx, ny))

        # Build the components of the final board, including the ceiling.
        for x in range(M):
            for y in range(N):
                if final[x][y] == BRICK:
                    connect(x, y)

        # Replay the hits backwards.
        ans = []
        for x, y in reversed(hits):
            if grid[x][y] != BRICK:
                # the hit struck empty space; nothing fell
                ans.append(0)
                continue
            before = size[find_root(CEILING)]
            # restore the brick and reconnect it
            final[x][y] = BRICK
            connect(x, y)
            after = size[find_root(CEILING)]
            # growth minus the restored brick itself (never negative)
            ans.append(after - before - 1 if after > before else 0)
        # BUGFIX: the original returned reversed(ans) — an iterator, not the
        # declared List[int].
        return ans[::-1]
{ "repo_name": "digiter/Arena", "path": "803-bricks-falling-when-hit.py", "copies": "1", "size": "2751", "license": "mit", "hash": -1914627596297139700, "line_mean": 32.1445783133, "line_max": 85, "alpha_frac": 0.4063976736, "autogenerated": false, "ratio": 3.577373211963589, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.44837708855635894, "avg_score": null, "num_lines": null }
"""807. Max Increase to Keep City Skyline https://leetcode.com/problems/max-increase-to-keep-city-skyline/ In a 2 dimensional array grid, each value grid[i][j] represents the height of a building located there. We are allowed to increase the height of any number of buildings, by any amount (the amounts can be different for different buildings). Height 0 is considered to be a building as well.  At the end, the "skyline" when viewed from all four directions of the grid, i.e. top, bottom, left, and right, must be the same as the skyline of the original grid. A city's skyline is the outer contour of the rectangles formed by all the buildings when viewed from a distance. See the following example. What is the maximum total sum that the height of the buildings can be increased? Example: Input: grid = [[3,0,8,4],[2,4,5,7],[9,2,6,3],[0,3,1,0]] Output: 35 Explanation: The grid is: [ [3, 0, 8, 4], ⁠ [2, 4, 5, 7], ⁠ [9, 2, 6, 3], ⁠ [0, 3, 1, 0] ] The skyline viewed from top or bottom is: [9, 4, 8, 7] The skyline viewed from left or right is: [8, 7, 9, 3] The grid after increasing the height of buildings without affecting skylines is: gridNew = [ [8, 4, 8, 7], ⁠ [7, 4, 7, 7], ⁠ [9, 4, 8, 7], ⁠ [3, 3, 3, 3] ] Notes: 1 < grid.length = grid[0].length <= 50. All heights grid[i][j] are in the range [0, 100]. All buildings in grid[i][j] occupy the entire grid cell: that is, they are a 1 x 1 x grid[i][j] rectangular prism. """ from typing import List class Solution: def max_increase_keeping_skyline(self, grid: List[List[int]]) -> int: left_right_skyline = [] top_bottom_skyline = [] ans = 0 grid_2 = [] for i in range(len(grid[0])): temp = [] for nums in grid: top_bottom_skyline.append(max(nums)) temp.append(nums[i]) grid_2.append(temp) for nums in grid_2: left_right_skyline.append(max(nums)) for i in range(len(grid)): for j in range(len(grid[0])): ans += min(left_right_skyline[i], top_bottom_skyline[j]) - grid[i][j] return ans
{ "repo_name": "isudox/leetcode-solution", "path": "python-algorithm/leetcode/max_increase_to_keep_city_skyline.py", "copies": "1", "size": "2215", "license": "mit", "hash": -4207374421860167000, "line_mean": 28.6351351351, "line_max": 77, "alpha_frac": 0.6165070679, "autogenerated": false, "ratio": 3.020661157024793, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.4137168224924793, "avg_score": null, "num_lines": null }
#80#############################################################################


class Game:
    """State and scoring for one two-player game of Star.

    Cells are named '<spoke><ring><pos>' where the spoke character is one
    of '*star', e.g. 's21'.  The board has 50 cells (see cell_groups
    below); players alternately claim open cells, and the game is scored
    once every cell is taken.
    """

    def __init__(self, p1, p2, g_id):
        self.p1 = p1
        self.p2 = p2
        self.g_id = g_id
        self.open_cells = []
        for cg in cell_groups:
            for c in cg:
                # BUGFIX: was append(list(c)), which stored each cell name
                # as a list of single characters; move(), export_string()
                # and import_string() all work with plain name strings.
                self.open_cells.append(c)
        self.p1_cells = []
        self.p2_cells = []

    def import_string(self, bstr):
        """Load state from a 50-char string: '0' open, '1' p1, '2' p2.

        Returns the (wrong) length if bstr is not exactly 50 characters.
        """
        if len(bstr) != 50:
            return len(bstr)
        cells = []
        ls = ['*', 's', 't', 'a', 'r']
        for cg in cell_groups:
            cells += list(cg)
        # canonical cell order: by spoke (* s t a r), then ring/position
        cells.sort(key=lambda c: str(ls.index(c[0])) + c[1:])
        self.p1_cells = []
        self.p2_cells = []
        self.open_cells = []
        for i in range(50):
            if bstr[i] == '0':
                self.open_cells.append(cells[i])
            elif bstr[i] == '1':
                self.p1_cells.append(cells[i])
            else:
                self.p2_cells.append(cells[i])

    def export_string(self):
        """Serialize state to the 50-char form understood by import_string."""
        num_ls = []
        cells = []
        ls = ['*', 's', 't', 'a', 'r']
        for cg in cell_groups:
            cells += list(cg)
        cells.sort(key=lambda c: str(ls.index(c[0])) + c[1:])
        for i in range(50):
            if cells[i] in self.open_cells:
                num_ls.append('0')
            elif cells[i] in self.p1_cells:
                num_ls.append('1')
            else:  # p2
                num_ls.append('2')
        return ''.join(num_ls)

    def move(self, player, cell):
        """Claim an open cell for player (ValueError if cell is not open)."""
        self.open_cells.remove(cell)
        if player == self.p1:
            self.p1_cells.append(cell)
        else:
            self.p2_cells.append(cell)

    def is_over(self):
        return len(self.open_cells) == 0

    def get_regions(self):
        """Connected groups of cells per owner (None = still open)."""
        b = Board()
        regions = {}
        regions[self.p1] = b.get_regions(self.p1_cells)
        regions[self.p2] = b.get_regions(self.p2_cells)
        regions[None] = b.get_regions(self.open_cells)
        return regions

    def get_stars(self):
        """Regions touching at least two perimeter cells, keyed by owner."""
        regions = self.get_regions()
        stars = {self.p1: [], self.p2: []}
        for region in regions[self.p1] + regions[self.p2]:
            if (len(list(filter(lambda c: c in peris, region)))) >= 2:
                if region in regions[self.p1]:
                    stars[self.p1].append(region)
                else:
                    stars[self.p2].append(region)
        return stars

    def calc_score(self):
        """Score a finished game; returns {player: points} or None if not over."""
        if not self.is_over():
            return None
        score = {self.p1: 0, self.p2: 0}
        stars = self.get_stars()
        # Each perimeter cell scores 1 for its owner if it belongs to one
        # of the owner's stars, otherwise 1 for the opponent.
        for peri in list(cell_groups[3]):
            pp = self.p1 if peri in self.p1_cells else self.p2
            op = self.p1 if pp == self.p2 else self.p2
            in_star = False
            for star in stars[pp]:
                if peri in star:
                    in_star = True
            score[pp] += 1 if in_star else 0
            score[op] += 0 if in_star else 1  # is this really it?
        # One bonus point for whoever holds the majority of the quark cells.
        p1_qks = 0
        for quark in ['*40', 's40', 't40', 'a40', 'r40']:
            if quark in self.p1_cells:
                p1_qks += 1
        score[self.p1 if p1_qks > 2 else self.p2] += 1
        # NOTE(review): the same reward is added to *both* players, so it
        # cancels out in get_winner(); confirm this is intended.
        reward = -2 * (len(stars[self.p1]) - len(stars[self.p2]))
        score[self.p1] += reward
        score[self.p2] += reward
        return score

    def get_winner(self):
        """p1 wins strict score majorities; ties go to p2."""
        score = self.calc_score()
        if score[self.p1] > score[self.p2]:
            return self.p1
        else:
            return self.p2


# The four rings of the board, from the centre outwards; ring 4 (index 3)
# is the perimeter.
cell_groups = [['s10', 't10', 'a10', 'r10', '*10'],
               ['s20', 't20', 'a20', 'r20', '*20',
                's21', 't21', 'a21', 'r21', '*21'],
               ['s30', 't30', 'a30', 'r30', '*30',
                's31', 't31', 'a31', 'r31', '*31',
                's32', 't32', 'a32', 'r32', '*32'],
               ['s40', 't40', 'a40', 'r40', '*40',
                's41', 't41', 'a41', 'r41', '*41',
                's42', 't42', 'a42', 'r42', '*42',
                's43', 't43', 'a43', 'r43', '*43']]

# BUGFIX: Game.get_stars() referenced a module-level `peris` that was never
# defined anywhere in this module (NameError as soon as stars were checked);
# it is the perimeter ring.
peris = list(cell_groups[3])


class EdgeList:
    """List of edges for graph class."""

    def __init__(self, vertex=None):
        self.vertex = vertex
        self.edge_list = []

    def __str__(self):
        return str(self.vertex) + ': ' + str(self.edge_list)

    def add_edge(self, vtx):
        # duplicate edges are ignored
        if vtx not in self.edge_list:
            self.edge_list.append(vtx)


class Graph:
    """Generic graph class; supports directed and undirected graphs."""

    def __init__(self, is_directed=False, is_reversed=False):
        self.edge_lists = []
        self.directed = is_directed
        self.reverse = is_reversed

    def _get_edge_list(self, vert):
        # Returns the EdgeList for vert, or None if vert is unknown.
        tmp_edge_list = list(filter((lambda v: v.vertex == vert),
                                    self.edge_lists))
        if tmp_edge_list != []:
            return tmp_edge_list[0]

    def make_directed(self):
        self.directed = True

    def make_undirected(self):
        self.directed = False

    def get_verts(self):
        vt_ls = []
        for el in self.edge_lists:
            vt_ls.append(el.vertex)
        return vt_ls

    def has_vertex(self, vtx):
        tmp_verts = list(filter((lambda el: el.vertex == vtx),
                                self.edge_lists))
        return len(tmp_verts) != 0

    def has_edge(self, v1, v2):
        v1_edges = self.get_edges(v1)
        return v2 in v1_edges

    def get_edges(self, vert):
        """Neighbours of vert ([] if vert is unknown)."""
        elarr = list(filter(lambda x: x.vertex == vert, self.edge_lists))
        if len(elarr) == 0:
            return []
        el = elarr[0]
        return el.edge_list

    def add_vertex(self, vtx_name):
        self.edge_lists.append(EdgeList(vtx_name))

    def add_edge(self, vtx1, vtx2):
        """Connect two existing vertices (self-loops are ignored)."""
        if vtx1 == vtx2:
            return
        if not self.directed:
            for el in self.edge_lists:
                if el.vertex == vtx1:
                    el.add_edge(vtx2)
                if el.vertex == vtx2:
                    el.add_edge(vtx1)
        elif not self.reverse:
            for el in self.edge_lists:
                if el.vertex == vtx1:
                    el.add_edge(vtx2)
        else:
            for el in self.edge_lists:
                if el.vertex == vtx2:
                    el.add_edge(vtx1)

    def get_regions(self, vtx_ls):
        """Partition vtx_ls into connected components (BFS over the graph)."""
        vls = list(vtx_ls)
        regions = []
        while len(vls) != 0:
            not_checked = [vls.pop(0)]
            this_region = [not_checked[0]]
            while len(not_checked) != 0:
                curr = not_checked.pop(0)
                nbrs = self.get_edges(curr)
                for nbr in nbrs:
                    # only walk neighbours that belong to vtx_ls
                    if nbr in vls:
                        vls.remove(nbr)
                        if nbr not in not_checked:
                            not_checked.append(nbr)
                        if nbr not in this_region:
                            this_region.append(nbr)
            regions.append(this_region)
        return regions


class Board(Graph):
    """The Star board: a fixed undirected graph over the 50 cell names."""

    def __init__(self, big=False):
        # `big` is accepted for interface compatibility but unused; only
        # the small board is built.
        super().__init__()
        self.peris = list(cell_groups[3])
        self.quarks = ['s40', 't40', 'a40', 'r40', '*40']
        ls = ['*', 's', 't', 'a', 'r']
        # helpers to step between spokes (cyclic) and position characters
        next_one = lambda c: ls[(ls.index(c) + 1) % 5]
        prev_one = lambda c: ls[(ls.index(c) - 1) % 5]
        next_chr = lambda c: chr(ord(c) + 1)
        prev_chr = lambda c: chr(ord(c) - 1)
        for cell in cell_groups[0]:
            self.add_vertex(cell)
        for cell in cell_groups[0]:
            for other in cell_groups[0]:
                self.add_edge(cell, other)
        # first layer done: the centre ring is a clique
        for cell in cell_groups[1]:
            self.add_vertex(cell)
        for cell in self.get_verts():
            for other in self.get_verts():
                if cell[0] == other[0]:
                    self.add_edge(cell, other)
        for cell in cell_groups[1]:
            if cell[2] == '1':
                self.add_edge(cell, next_one(cell[0]) + '20')
                self.add_edge(cell, next_one(cell[0]) + '10')
        # second layer done
        for cell in cell_groups[2]:
            self.add_vertex(cell)
        for cell in cell_groups[2]:
            if cell[2] == '0':
                self.add_edge(cell, prev_one(cell[0]) + '32')
                self.add_edge(cell, cell[0] + '31')
                self.add_edge(cell, cell[0] + '20')
            elif cell[2] == '1':
                self.add_edge(cell, cell[0] + '32')
                self.add_edge(cell, cell[0] + '20')
                self.add_edge(cell, cell[0] + '21')
            else:
                self.add_edge(cell, cell[0] + '21')
                self.add_edge(cell, next_one(cell[0]) + '20')
        # third layer done (?)
        for cell in cell_groups[3]:
            self.add_vertex(cell)
        for cell in cell_groups[3]:
            if cell[2] == '0':
                self.add_edge(cell, prev_one(cell[0]) + '43')
                self.add_edge(cell, cell[0] + '41')
                self.add_edge(cell, cell[0] + '30')
            elif cell[2] == '3':
                self.add_edge(cell, cell[0] + '42')
                self.add_edge(cell, next_one(cell[0]) + '30')
                self.add_edge(cell, cell[0] + '32')
            else:
                self.add_edge(cell, cell[0:2] + next_chr(cell[2]))
                self.add_edge(cell, cell[0] + '3' + cell[2])
                self.add_edge(cell, cell[0] + '3' + prev_chr(cell[2]))
        # done with small board
        # this actually probably works in general... for larger boards
{ "repo_name": "eli173/star", "path": "game.py", "copies": "1", "size": "9697", "license": "mit", "hash": -6871080945560442000, "line_mean": 34.134057971, "line_max": 80, "alpha_frac": 0.4665360421, "autogenerated": false, "ratio": 3.4168428470754053, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.43833788891754055, "avg_score": null, "num_lines": null }
#80#############################################################################

import sqlite3
import json
from flask import Flask, request, session, g, redirect, \
    url_for, abort, render_template, flash
from contextlib import closing
import bcrypt

import game

# cfg
DATABASE = 'db.db'
# DEBUG = True

app = Flask(__name__)
app.config.from_object(__name__)
app.secret_key = b'\t\x00\x8dSAc\x1fM\x9e\x1d0!\x94\x90\xe0\x90\xda\xac\x1a\xdf\xaa3\xd5Q'


def connect_db():
    """Open a connection to the configured SQLite database."""
    return sqlite3.connect(app.config['DATABASE'])


@app.before_request
def before_request():
    g.db = connect_db()


@app.teardown_request
def teardown_request(exception):
    db = getattr(g, 'db', None)
    if db is not None:
        db.close()


def get_username(uid):
    """Map a user id to its username, or None if no such user exists."""
    qr = g.db.execute('select username from users where id=?', (uid,)).fetchall()
    if qr == []:
        return None
    return qr[0][0]


@app.route('/games')
def games():
    """List the logged-in user's games and whether they are on the wait list."""
    if 'logged_in' not in session:
        return redirect(url_for("index"))
    uname = session['username']
    uid = g.db.execute('select id from users where username=?', (uname,))
    uid = uid.fetchall()[0][0]
    games = g.db.execute('select * from games where player1=? or player2=?',
                         (uid, uid)).fetchall()
    glist = []
    for game in games:
        # games rows: (id, board, whose_turn, player1, player2)
        # BUGFIX: the original compared the player1 *id* against the
        # username string, so this branch never matched and the user's own
        # name was shown as the opponent.
        if game[3] == uid:  # 3 is p1
            glist.append((get_username(game[4]), game[2] == uid, game[0]))
        else:
            # NOTE(review): `!=` flips the turn flag relative to the branch
            # above — confirm against the games.html template.
            glist.append((get_username(game[3]), game[2] != uid, game[0]))
    app.logger.debug(glist)
    games_waiting = g.db.execute('select * from waiting where player=?',
                                 (uid,)).fetchall()
    waiting = False
    if len(games_waiting) != 0:
        waiting = True
    app.logger.debug(waiting)
    return render_template('games.html', waiting=waiting, glist=glist)


@app.route('/newgame')
def newgame():
    """Join the wait list, or start a game against someone already waiting."""
    if 'logged_in' not in session:
        return redirect(url_for("index"))
    app.logger.debug(session['username'])
    uid = g.db.execute('select id from users where username=?',
                       (session['username'],)).fetchall()[0][0]
    waiting = g.db.execute('select * from waiting').fetchall()
    for game in waiting:
        if uid == game[1]:
            # already on the wait list
            return redirect(url_for("games"))
    for game in waiting:
        opp_id = game[1]
        if opp_id != uid:
            # pair up with the first waiting opponent
            g.db.execute('delete from waiting where id=?', (game[0],))
            g.db.execute('insert into games (player1, player2, whose_turn) values (?,?,?)',
                         (uid, opp_id, uid))
            g.db.commit()
            game_id = g.db.execute('select id from games where player1=? and player2=?',
                                   (uid, opp_id)).fetchall()
            return redirect(url_for("play", game_id=game_id[0][0]))
    # nobody waiting: enqueue this user
    g.db.execute('insert into waiting (player) values (?)', (uid,))
    g.db.commit()
    return redirect(url_for("games"))


@app.route('/play/<int:game_id>')
def play(game_id):  # whose turn?
    """Render the board for one game, colouring cells by owner."""
    g.game_id = game_id
    db_gm = g.db.execute('select * from games where id=?', (game_id,)).fetchall()
    if db_gm == []:
        return redirect(url_for("games"))
    the_gm = game.Game(db_gm[0][3], db_gm[0][4], db_gm[0][0])
    the_gm.import_string(db_gm[0][1])
    app.logger.debug(db_gm[0][1])
    waiting = False
    whose_turn = db_gm[0][2]
    uid = session['uid']
    if whose_turn != uid:
        waiting = True
    # yellow = open, red = p1, blue = p2
    g.color_table = {}
    cell_list = []
    for cg in game.cell_groups:
        cell_list += cg
    for cell in cell_list:
        curr_color = "ffff00"
        if cell in the_gm.p1_cells:
            curr_color = "ff0000"
        elif cell in the_gm.p2_cells:
            curr_color = "0000ff"
        g.color_table[cell] = curr_color
    return render_template('play.html', waiting=waiting)


@app.route('/forefeit/<int:game_id>')
def forefeit(game_id):
    """Concede the game: the opponent wins and counters are updated."""
    # BUGFIX: the original ran `select (player1,player2)` — a single
    # row-value expression, not two columns — and then read indices 2 and 3
    # of the result; select the two columns and read indices 0 and 1.
    g_q = g.db.execute('select player1, player2 from games where id=?',
                       (game_id,)).fetchall()
    p1 = g_q[0][0]
    p2 = g_q[0][1]
    winner = None
    loser = None
    if p1 == session['uid']:
        winner = p2
        loser = p1
    else:
        winner = p1
        loser = p2
    g.db.execute("delete from games where id=?", (game_id,))
    g.db.execute("update users set wins=wins+1 where id=?", (winner,))
    g.db.execute("update users set losses=losses+1 where id=?", (loser,))
    g.db.commit()
    return redirect(url_for("games"))


@app.route('/submit/<int:game_id>/<move>')
def submit(game_id, move):
    """Apply a move; finish the game if over, otherwise pass the turn."""
    # get game from db
    db_gm = g.db.execute('select * from games where id=?', (game_id,))
    gdata = db_gm.fetchall()
    if gdata == []:
        return redirect(url_for("index"))
    the_gm = game.Game(gdata[0][3], gdata[0][4], game_id)
    the_gm.import_string(gdata[0][1])
    # check it is the right user's turn
    curr_user = session['username']
    user_id_q = g.db.execute('select id from users where username=?',
                             (curr_user,)).fetchall()
    if user_id_q == []:
        return redirect(url_for("play", game_id=game_id))
    app.logger.debug(move)
    uid = user_id_q[0][0]
    app.logger.debug(uid)
    app.logger.debug(gdata)
    if uid != gdata[0][2]:
        # not this user's turn
        return redirect(url_for("play", game_id=game_id))
    # check the move is valid
    app.logger.debug(the_gm.open_cells)
    if move not in the_gm.open_cells:
        return redirect(url_for("play", game_id=game_id))
    the_gm.move(uid, move)
    app.logger.debug(the_gm.is_over())
    if the_gm.is_over():
        winner = the_gm.get_winner()
        loser = the_gm.p1 if winner == the_gm.p2 else the_gm.p2
        g.db.execute('delete from games where id=?', (the_gm.g_id,))
        g.db.execute('update users set wins=wins+1 where id=?', (winner,))
        g.db.execute('update users set losses=losses+1 where id=?', (loser,))
        g.db.commit()
        return redirect(url_for("games"))
    estr = the_gm.export_string()
    app.logger.debug(game_id)
    opp_id = the_gm.p1 if the_gm.p2 == uid else the_gm.p2
    g.db.execute('update games set board=? where id=?', (estr, game_id))
    g.db.execute('update games set whose_turn=? where id=?',
                 (opp_id, game_id))
    g.db.commit()
    return redirect(url_for("play", game_id=game_id))


@app.route('/logout')
def logout():
    session.pop('logged_in', None)
    return redirect(url_for("index"))


@app.route('/login', methods=['POST'])
def login():
    """Handle both the login and register buttons of the index form."""
    if request.method == 'POST':
        user = g.db.execute('select * from users where username=?',
                            (request.form.get('username'),)).fetchall()
        user_exists = len(user) != 0
        if request.form.get('login') is not None:
            if request.form.get('username') is None:
                return redirect(url_for("index"))
            if not user_exists:
                flash(u'No account with this username exists', 'login error')
                return redirect(url_for("index"))
            pw_sql = g.db.execute('select pw_hash from users where username=?',
                                  (request.form.get('username'),))
            pw_hash = pw_sql.fetchall()[0][0]
            pw_plain = request.form.get('password').encode('UTF-8')
            # bcrypt re-hashes the candidate with the stored hash's salt
            if bcrypt.hashpw(pw_plain, pw_hash) == pw_hash:
                session['logged_in'] = True
                session['username'] = request.form.get('username')
                session['uid'] = user[0][0]
                return redirect(url_for("games"))
            else:
                flash(u'Wrong Password', 'login error')
                return redirect(url_for("index"))
        elif request.form.get('register') is not None:
            if user_exists:
                flash(u'Username already taken', 'login error')
                return redirect(url_for("index"))
            pw_plain = request.form.get('password').encode('UTF-8')
            pw_hash = bcrypt.hashpw(pw_plain, bcrypt.gensalt())
            g.db.execute('insert into users (username, pw_hash) values (?, ?)',
                         (request.form.get('username'), pw_hash))
            g.db.commit()
            return redirect(url_for("games"))


@app.route('/')
def index():
    return render_template('index.html')


if __name__ == '__main__':
    # BUGFIX: the original said `app.run` without calling it, so the
    # development server never started.
    app.run()
{ "repo_name": "eli173/star", "path": "star.py", "copies": "1", "size": "8368", "license": "mit", "hash": -7590449738419306000, "line_mean": 33.2950819672, "line_max": 112, "alpha_frac": 0.5611854685, "autogenerated": false, "ratio": 3.337854008775429, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.4399039477275429, "avg_score": null, "num_lines": null }
# Worked examples and exercises from Think Python, chapter 8 (strings).
# This is a teaching transcript: the printed output *is* the result, so the
# statements and their order are left exactly as written.

# 8.1
fruit = "banana"
letter = fruit[1]
print(letter)

letter = fruit[0]
print(letter)
print()

# 8.2
fruit = "banana"
print(len(fruit))
length = len(fruit)
last = fruit[length - 1]
print(last)
print()
print(fruit[-1])
print(fruit[-2])
print()
print(fruit[:1])
print(fruit[0])
print()

# 8.3
index = 0
while index < len(fruit):
    letter = fruit[index]
    print(letter)
    index += 1
print()

'''Exercise 1 - Write a function that takes a string as an argument and displays the letters backward, one per line.'''

# NOTE(review): the parameter name `str` shadows the built-in str type
# within this function.
def reversal_printer(str):
    # Walk from the last index down to 0, printing one character per line.
    index = len(str) - 1
    while index < len(str):
        character = str[index]
        print(character)
        index -= 1
        if index < 0:
            break

reversal_printer("Oklahoma")
print()
reversal_printer("Mississippi")
print()
reversal_printer("Roll Tide!")
print()

prefixes = "JKLMNOPQ"
suffix = 'ack'

for letter in prefixes:
    print(letter + suffix)
print()

# Exercise 2
def duck_printer():
    # Prints Jack..Pack, then special-cases the final prefix as "Quack".
    # NOTE(review): the textbook exercise also expects "Ouack" for O;
    # only Q is special-cased here.
    prefixes = "JKLMNOPQ"
    suffix = 'ack'
    index = 0
    while index < len(prefixes):
        print(prefixes[index] + suffix)
        index += 1
        if index == len(prefixes) - 1:
            print(prefixes[index] + "u" + suffix)
            break

duck_printer()
print()

# 8.4 String slices
s = "Monty Python"
print(s[0:5])
print(s[6:12])
print()

fruit = "banana"
print(fruit[:3])
print(fruit[3:])

#Exercise 3
#Given that fruit is a string, what does fruit[:] mean?
print(fruit[:], "should be from beginning to end, a copy")
print()

# 8.5 Strings are immutable
immutabilty_test_string = "Strings are Immutable"
# immutabilty_test_string[0] = "F"
new_string = "X" + immutabilty_test_string[1:]
print(new_string)
print(immutabilty_test_string)
print()

# 8.6 Searching
# Exercise 4
def find_else(word, letter, start):
    # Return the index of the first `letter` at or after `start`, else -1.
    index = start
    while index < len(word):
        if word[index] == letter:
            return index
        index = index + 1
    return -1

print(find_else("Oklahoma", "a", 1), "= first match")

# 8.7
print()
# Exercise 5
def count(word, letter):
    # Count occurrences of `letter` in `word`.
    count = 0
    for i in word:
        if i == letter:
            count += 1
    return count

print(count("Oklahomans", "a"), "results found")
print(count("sheree", "e"), "results found")
print()

# Exercise 6
def count_better(word_to_parse, letter_to_find, starting_point):
    # Count occurrences of `letter_to_find` from `starting_point` onward.
    index = starting_point
    counts = 0
    while index < len(word_to_parse):
        if word_to_parse[index] == letter_to_find:
            counts += 1
            index += 1
        else:
            index += 1
        if index > len(word_to_parse):
            break
    return counts

print(count_better("Michelle Obamaaaaaa", "a", 1), "count")
print()

# 8.8 String methods
word = "Banana"
new_word = word.upper()
print(new_word)

index = word.find("a")
print(index)
print(word.find("na"))
print(word.find("na", 3))

name = "Bob"
print(name.find("B", 1, 2))  # this is a failure because it's not in the range specified

# Exercise 7
print()
new_stringy = "banana"
print(new_stringy.count("a"))
print()
my_name_string = "sheree maria pena-dominguez"
print(my_name_string.count("e"))
print()

# Exercise 8
print(my_name_string.capitalize())
print(my_name_string.center(50, "*"))
print(my_name_string.count("e"))
my_name_string = "Sheree María Peña-Domínguez"
#print(my_name_string.decode) #looks to be 2 only, looks like I was looking at the wrong docs! whupz
# NOTE(review): "whupz" above appears to be the stray tail of this comment
# after a line-wrap; confirm it was not a separate (crashing) code line.
print("Moving On")
print(my_name_string.casefold())
print(my_name_string.encode())
print("The sum of 5 + 6 is {0} and {1}".format(5 + 6, "foo"))
print()

# 8.9 The in operator
print("a" in "Sheree Maria Pena")
print("foo" in "there is a seed in my banana")
print()

def in_both(first_word, second_word):
    # Print each letter of first_word that also occurs in second_word.
    # (Returns None, so the call below also prints "None".)
    for letter in first_word:
        if letter in second_word:
            print(letter)

print(in_both("Sheree Maria Pena", "Charles Daniel Fried"))
print()

#8.10 String comparison
words = "bananas"
if words == "bananas":
    print("OK, bananas")
print()

# NOTE(review): these branches interpolate `word` ("Banana" from 8.8),
# not `words` — probably a typo in the original transcript.
if words < 'bananas':
    print('Your word,' + word + ', comes before bananas.')
elif words > 'bananas':
    print('Your word,' + word + ', comes after bananas.')
else:
    print('All right, bananas.')

#8.13 Exercises
print()
#Exercise 10
def is_palindrome(str):
    # True if `str` reads the same backwards, ignoring spaces and case.
    cleaned_up_string = str.replace(" ", "").casefold()
    return cleaned_up_string[::-1] == cleaned_up_string[:]

print(is_palindrome("sheree maria pena"))
print(is_palindrome("tacocat"))
print(is_palindrome("Mr Owl ate my metal worm"))
print()
{ "repo_name": "UWPCE-PythonCert/IntroPython2016", "path": "students/sheree/session_02/homework/TP2-Ch8-Scratch.py", "copies": "3", "size": "4493", "license": "unlicense", "hash": -7309770340004775000, "line_mean": 18.6069868996, "line_max": 119, "alpha_frac": 0.631403118, "autogenerated": false, "ratio": 2.861695347355003, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.49930984653550037, "avg_score": null, "num_lines": null }
#81. 题目:809*??=800*??+9*??+1 其中??代表的两位数,8*??的结果为两位数,9*??的结果为3位数。求??代表的两位数,及809*??后的结果。 # #82 题目:八进制转换为十进制 def convert8to10(n): lenN = len(str(n)) sumN = 0 for i in range(lenN): sumN += 8 ** i * int(str(n)[lenN-1-i]) print('this is the 8 to 10 : %d' % sumN) convert8to10(122) #83. 题目:求0—7所能组成的奇数个数。 def odd_num(n): if n == 0: return 1 elif n == 1: return 7 else: return odd_num(n-1) * 8 def count_odd_num(): l = [] for i in range(1,9): l.append(odd_num(i-1) * 4) print(sum(l)) count_odd_num() #84 题目:连接字符串。 def join_str(split, array): print('this is to join the str: ', split.join(array)) x = ['a', 'b', 'dd', 'good', 'lucky', 'day'] join_str(',', x) #85 题目:输入一个正整数,然后判断最少几个 9 除于该数的结果为整数。 def numerator(n): x = 9 while(True): if x % n == 0: print('this is the numerator: %d' % x) break x = x * 10 + 9 numerator(13) #87. 题目:回答结果(结构体变量传递)。 class GoodDay: m = 0 n = 0 def struct_do(goodDay): goodDay.m = 20 goodDay.n = 50 x = GoodDay() x.m = 100 x.n = 20 print('this is the x.m %d and x.n %d' % (x.m, x.n)) struct_do(x) print('this is the x.m %d and x.n %d' % (x.m, x.n)) # 88. 题目:读取7个数(1—50)的整数值,每读取一个值,程序打印出该值个数的*。 def print_start(n): print(n * '*') print_start(22) # 89. 题目:某个公司采用公用电话传递数据,数据是四位的整数,在传递过程中是加密的, # 加密规则如下:每位数字都加上5,然后用和除以10的余数代替该数字,再将第一位和第四位交换,第二位和第三位交换。 def encode_num(n): x = str(n) n = '' for i in range(len(x)): n += str((int(x[i]) + 5) % 10) return int(n[::-1]) print(encode_num(1234)) # 90 题目:列表使用实例。 def do_list(): testList = ['10086', '小姐姐', '你好', '工号', [1, 2, 3, 4, 5]] print('this is the len to list : %d ' % len(testList)) print('this is the order read the element for list : ', testList[1:]) testList.append('大家好') print('add the element to list: ', testList) print('pop the last element : ', testList.pop(-1)) x = testList.pop(-1) print('pop the last element : ', x) do_list()
{ "repo_name": "cwenao/python_web_learn", "path": "base100/base100/base_81-90.py", "copies": "1", "size": "2604", "license": "apache-2.0", "hash": -3839624689703383600, "line_mean": 14.0217391304, "line_max": 86, "alpha_frac": 0.542953668, "autogenerated": false, "ratio": 1.8956999085086916, "config_test": true, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.2938653576508692, "avg_score": null, "num_lines": null }
# 832 SLoC v = FormValidator( firstname=Unicode(), surname=Unicode(required="Please enter your surname"), age=Int(greaterthan(18, "You must be at least 18 to proceed"), required=False), ) input_data = { 'firstname': u'Fred', 'surname': u'Jones', 'age': u'21', } v.process(input_data) == {'age': 21, 'firstname': u'Fred', 'surname': u'Jones'} input_data = { 'firstname': u'Fred', 'age': u'16', } v.process(input_data) # raises ValidationError # ValidationError([('surname', 'Please enter your surname'), ('age', 'You must be at least 18 to proceed')]) #assert_true # raise if not value #assert_false # raise if value #test # raise if callback(value) #minlen #maxlen #greaterthan #lessthan #notempty #matches # regex #equals #is_in looks_like_email # basic, but kudos for not using a regex! maxwords minwords CustomType # for subclassing PassThrough # return value Int # int(value) or raise ValidationError Float # as Int Decimal # as Int Unicode # optionally strip, unicode(value), no encoding Bool # default (undefined) is False by default Calculated # return callback(*source_fields) DateTime Date
{ "repo_name": "marrow/schema", "path": "example/thirdparty/formalize.py", "copies": "1", "size": "1141", "license": "mit", "hash": 364993844450605800, "line_mean": 20.9423076923, "line_max": 108, "alpha_frac": 0.6958808063, "autogenerated": false, "ratio": 3.1005434782608696, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.42964242845608697, "avg_score": null, "num_lines": null }
''' 83. Remove Duplicates from Sorted List - LeetCode
https://leetcode.com/problems/remove-duplicates-from-sorted-list/description/'''
# Bug fix: the original used Python-2-only `print x,` statements, which are a
# SyntaxError on Python 3.  The __future__ import keeps Python 2 working while
# allowing the print() function form.
from __future__ import print_function


# Definition for singly-linked list.
class ListNode(object):
    def __init__(self, x):
        self.val = x
        self.next = None


def traversal(node):
    """Print the values of a linked list on one line, then a newline."""
    while True:
        print(node.val, end=' ')
        node = node.next
        if node is None:
            print("")
            break


class Solution(object):
    def deleteDuplicates(self, head):
        """
        :type head: ListNode
        :rtype: ListNode
        Collapse runs of equal values in a sorted list, in place.
        """
        if head is None:
            return None
        current_head = head
        while current_head.next is not None:
            if current_head.val == current_head.next.val:
                # Splice out the duplicate and re-test the same position.
                current_head.next = current_head.next.next
                continue
            current_head = current_head.next
        return head


a = ListNode(1)
p = a
for i in [1, 2, 2, 3, 3, 3]:
    p.next = ListNode(i)
    p = p.next

traversal(a)
s = Solution()
traversal(s.deleteDuplicates(a))
{ "repo_name": "heyf/cloaked-octo-adventure", "path": "leetcode/083_remove-duplicates-from-sorted-list.py", "copies": "1", "size": "1053", "license": "mit", "hash": 6660080637553384000, "line_mean": 23.511627907, "line_max": 80, "alpha_frac": 0.5688509022, "autogenerated": false, "ratio": 3.6689895470383274, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.4737840449238327, "avg_score": null, "num_lines": null }
__author__ = 'Libao Jin'
__date__ = 'December 15, 2015'


# Definition for singly-linked list.
class ListNode(object):
    def __init__(self, x):
        self.val = x
        self.next = None


class Solution(object):
    def deleteDuplicates(self, head):
        """
        :type head: ListNode
        :rtype: ListNode

        Collect the values, deduplicate them, and rebuild a fresh chain.

        Bug fix: the original guard read ``if node_value == 1:`` (comparing a
        *list* to the int 1, which is always False) instead of checking its
        length, so inputs with a single distinct value skipped the rebuild
        loop and returned the traversal cursor (the last node) instead.
        """
        if head is None or head.next is None:
            return head
        # Gather every value in the list.
        values = []
        while head.next is not None:
            values.append(head.val)
            head = head.next
        values.append(head.val)
        # Distinct values in ascending order.
        node_value = sorted(set(values))
        if len(node_value) == 1:
            return ListNode(node_value[0])
        # Rebuild a chain from the distinct, sorted values.
        new_head = ListNode(node_value[0])
        node = new_head
        for value in node_value[1:]:
            node.next = ListNode(value)
            node = node.next
        return new_head


if __name__ == '__main__':
    s = Solution()
    a = ListNode(-1)
    b = ListNode(1)
    c = ListNode(2)
    d = ListNode(2)
    e = ListNode(2)
    f = ListNode(3)
    g = ListNode(4)
    h = ListNode(5)
    a.next = b
    b.next = c
    c.next = d
    d.next = e
    e.next = f
    f.next = g
    g.next = h
    t = s.deleteDuplicates(a)
    while t.next is not None:
        print(t.val)
        t = t.next
    print(t.val)
    print(s.deleteDuplicates(ListNode(5)).val)
{ "repo_name": "jinlibao/LeetCode-Solutions", "path": "solutions/083_Remove_Duplicates_from_Sorted_List.py", "copies": "2", "size": "1796", "license": "mit", "hash": -804682146169030900, "line_mean": 23.9444444444, "line_max": 57, "alpha_frac": 0.4838530067, "autogenerated": false, "ratio": 3.4671814671814674, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.9942675420383524, "avg_score": 0.0016718106995884771, "num_lines": 72 }
# 840. Magic Squares In Grid
# Difficulty: Easy
# A 3 x 3 magic square is a 3 x 3 grid filled with *distinct* numbers from 1
# to 9 such that each row, column, and both diagonals all have the same sum.
# Given a grid of integers, how many 3 x 3 "magic square" subgrids are there?
# (Each subgrid is contiguous.)
#
# Example: [[4,3,8,4],[9,5,1,9],[2,7,6,2]] -> 1  (the 438/951/276 window)
#
# Note:
# 1 <= grid.length <= 10
# 1 <= grid[0].length <= 10
# 0 <= grid[i][j] <= 15
class Solution:
    def is_magic_square(self, grid, curr_x_i, curr_x_j, curr_y_i, curr_y_j):
        """Return True iff the window [curr_x_i..curr_y_i] x
        [curr_x_j..curr_y_j] of grid is a 3x3 magic square.

        Bug fix: the original only required each cell to lie in 1..9 and the
        row/column/diagonal sums to match, so a window of all 5s was wrongly
        counted.  A magic square needs *distinct* values; with nine distinct
        cells in 1..9 that forces exactly the set {1..9}.
        """
        row_sum = dict.fromkeys(range(curr_x_i, curr_y_i + 1), 0)
        col_sum = dict.fromkeys(range(curr_x_j, curr_y_j + 1), 0)
        l_diag_sum = 0
        r_diag_sum = 0
        seen = set()  # values already used in this window
        for i in range(curr_x_i, curr_y_i + 1):
            for j in range(curr_x_j, curr_y_j + 1):
                if grid[i][j] < 1 or grid[i][j] > 9 or grid[i][j] in seen:
                    return False
                seen.add(grid[i][j])
                row_sum[i] += grid[i][j]
                col_sum[j] += grid[i][j]
                if i - curr_x_i == j - curr_x_j:
                    l_diag_sum += grid[i][j]
                if i - curr_x_i == 2 - j + curr_x_j:
                    r_diag_sum += grid[i][j]
        r_set = set(row_sum.values())
        c_set = set(col_sum.values())
        # One common sum across rows, columns and both diagonals.
        return len(r_set) == 1 and len(c_set) == 1 and (
            l_diag_sum == r_diag_sum == r_set.pop() == c_set.pop())

    def numMagicSquaresInside(self, grid):
        """
        :type grid: List[List[int]]
        :rtype: int
        Slide a 3x3 window over the grid and count magic squares.
        """
        result = 0
        curr_x_i = 0
        curr_x_j = 0
        curr_y_i = 2
        curr_y_j = 2
        if curr_y_i >= len(grid) or curr_y_j >= len(grid[0]):
            return result
        while True:
            if self.is_magic_square(grid, curr_x_i, curr_x_j, curr_y_i,
                                    curr_y_j):
                result += 1
            if curr_y_i == len(grid) - 1 and curr_y_j == len(grid[0]) - 1:
                return result
            elif curr_y_j == len(grid[0]) - 1:
                # Wrap to the start of the next band of rows.
                curr_x_i += 1
                curr_x_j = 0
                curr_y_i += 1
                curr_y_j = 2
            else:
                curr_x_j += 1
                curr_y_j += 1


class RefSolution:
    def numMagicSquaresInside(self, grid):
        """Reference implementation: brute-force every 3x3 window.

        is_magic() checks the distinct-{1..9} requirement directly.
        """
        def is_magic(sq):
            s = sum(sq[0])
            return set(x for row in sq for x in row) == set(
                range(1, 10)) and all(sum(row) == s for row in sq) and all(
                sum(sq[i][j] for i in range(3)) == s
                for j in range(3)) and sum(
                sq[i][i] for i in range(3)) == s and sum(
                sq[i][2 - i] for i in range(3)) == s

        count = 0
        for i in range(len(grid) - 2):
            for j in range(len(grid[0]) - 2):
                mgrid = [[grid[k][m] for m in range(j, j + 3)]
                         for k in range(i, i + 3)]
                if is_magic(mgrid):
                    count += 1
        return count


if __name__ == '__main__':
    # grid = [[4, 3, 8, 4], [9, 5, 1, 9], [2, 7, 6, 2]]
    # grid = [[10, 3, 5],
    #         [1, 6, 11],
    #         [7, 9, 2]]
    grid = [[3, 2, 9, 2, 7], [6, 1, 8, 4, 2], [7, 5, 3, 2, 7],
            [2, 9, 4, 9, 6], [4, 3, 8, 2, 5]]
    sol = Solution()
    print(sol.numMagicSquaresInside(grid))
    ref_sol = RefSolution()
    print(ref_sol.numMagicSquaresInside(grid))
{ "repo_name": "kingdaa/LC-python", "path": "lc/840_Magic_Squares_In_Grid.py", "copies": "1", "size": "3843", "license": "mit", "hash": -5501377790172970000, "line_mean": 29.0234375, "line_max": 77, "alpha_frac": 0.4496487119, "autogenerated": false, "ratio": 2.9538816295157573, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.8903530341415757, "avg_score": 0, "num_lines": 128 }
# 841. Keys and Rooms
# Difficulty: Medium
# There are N rooms and you start in room 0.  Each room has a distinct number
# in 0..N-1, and each room may contain keys to other rooms; rooms[i][j] = v
# opens room v.  All rooms except room 0 start locked.  Return true iff every
# room can be entered.
#
# Example: [[1],[2],[3],[]] -> true;  [[1,3],[3,0,1],[2],[0]] -> false
#
# Note:
# 1 <= rooms.length <= 1000
# 0 <= rooms[i].length <= 1000
# The number of keys in all rooms combined is at most 3000.
class Solution:
    def canVisitAllRooms(self, rooms):
        """
        :type rooms: List[List[int]]
        :rtype: bool
        Iterative reachability search from room 0 using an explicit stack.
        """
        if rooms is None:
            return 0
        seen = {0}
        stack = list(rooms[0])  # keys found in the starting room
        while stack:
            room = stack.pop()
            if room in seen:
                continue
            seen.add(room)
            stack.extend(rooms[room])
        return len(seen) == len(rooms)


class RefSolution(object):
    def dfs(self, curr_room, rooms, visited):
        # Recursive depth-first walk; marks curr_room, then explores every
        # not-yet-visited room its keys open.
        visited.add(curr_room)
        for nbr in rooms[curr_room]:
            if nbr not in visited:
                self.dfs(nbr, rooms, visited)

    def canVisitAllRooms(self, rooms):
        """
        :type rooms: List[List[int]]
        :rtype: bool
        DFS from room 0; every room must end up visited.
        """
        reached = set()
        self.dfs(0, rooms, reached)
        return len(reached) == len(rooms)


if __name__ == '__main__':
    rooms1 = [[1], [2], [3], []]
    rooms2 = [[1, 3], [3, 0, 1], [2], [0]]
    sol = Solution()
    print(sol.canVisitAllRooms(rooms1))
    print(sol.canVisitAllRooms(rooms2))
{ "repo_name": "kingdaa/LC-python", "path": "lc/841_Keys_and_Rooms.py", "copies": "1", "size": "2263", "license": "mit", "hash": -6255693191801790000, "line_mean": 26.9382716049, "line_max": 80, "alpha_frac": 0.5771100309, "autogenerated": false, "ratio": 3.1213793103448277, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.41984893412448276, "avg_score": null, "num_lines": null }
# 842. Split Array into Fibonacci Sequence # Difficulty: Medium # Given a string S of digits, such as S = "123456579", we can split it into a # Fibonacci-like sequence [123, 456, 579]. # # Formally, a Fibonacci-like sequence is a list F of non-negative integers # such that: # # 0 <= F[i] <= 2^31 - 1, (that is, each integer fits a 32-bit signed integer # type); # F.length >= 3; # and F[i] + F[i+1] = F[i+2] for all 0 <= i < F.length - 2. # Also, note that when splitting the string into pieces, each piece must not # have extra leading zeroes, except if the piece is the number 0 itself. # # Return any Fibonacci-like sequence split from S, or return [] if it cannot # be done. # # Example 1: # # Input: "123456579" # Output: [123,456,579] # Example 2: # # Input: "11235813" # Output: [1,1,2,3,5,8,13] # Example 3: # # Input: "112358130" # Output: [] # Explanation: The task is impossible. # Example 4: # # Input: "0123" # Output: [] # Explanation: Leading zeroes are not allowed, so "01", "2", "3" is not valid. # Example 5: # # Input: "1101111" # Output: [110, 1, 111] # Explanation: The output [11, 0, 11, 11] would also be accepted. # Note: # # 1 <= S.length <= 200 # S contains only digits. 
class Solution:
    def splitIntoFibonacci(self, S):
        """
        :type S: str
        :rtype: List[int]
        Backtracking split of S into a Fibonacci-like sequence of 32-bit
        non-negative integers; returns the first split found (the search
        tries shorter prefixes first) or [] if none exists.
        """
        LIMIT = 2 ** 31 - 1
        size = len(S)

        def backtrack(start, seq):
            # Consuming the whole string with at least 3 terms is a success.
            if start == size and len(seq) >= 3:
                return True
            for end in range(start, size):
                # No leading zeros (a lone "0" term is still allowed).
                if S[start] == "0" and end > start:
                    break
                term = int(S[start:end + 1])
                if term > LIMIT:
                    break
                if len(seq) >= 2:
                    expected = seq[-1] + seq[-2]
                    if term > expected:
                        break  # longer prefixes only get bigger
                    if term != expected:
                        continue
                seq.append(term)
                if backtrack(end + 1, seq):
                    return True
                seq.pop()
            return False

        answer = []
        backtrack(0, answer)
        return answer


if __name__ == '__main__':
    sol = Solution()
    for sample in ["123456579", "11235813", "112358130", "0123", "1101111"]:
        print(sol.splitIntoFibonacci(sample))
{ "repo_name": "kingdaa/LC-python", "path": "lc/842_Split_Array_into_Fibonacci_Sequence.py", "copies": "1", "size": "2475", "license": "mit", "hash": 6509765307642917000, "line_mean": 25.902173913, "line_max": 78, "alpha_frac": 0.5329292929, "autogenerated": false, "ratio": 3.0517879161528976, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.9082042950943857, "avg_score": 0.0005348516218081435, "num_lines": 92 }
# 845. Longest Mountain in Array # Difficulty: Medium # Let's call any (contiguous) subarray B (of A) a mountain if the following # properties hold: # # B.length >= 3 # There exists some 0 < i < B.length - 1 such that B[0] < B[1] < ... B[i-1] < # B[i] > B[i+1] > ... > B[B.length - 1] # (Note that B could be any subarray of A, including the entire array A.) # # Given an array A of integers, return the length of the longest mountain. # # Return 0 if there is no mountain. # # # # Example 1: # # Input: [2,1,4,7,3,2,5] # Output: 5 # Explanation: The largest mountain is [1,4,7,3,2] which has length 5. # Example 2: # # Input: [2,2,2] # Output: 0 # Explanation: There is no mountain. # # # Note: # # 0 <= A.length <= 10000 # 0 <= A[i] <= 10000 class Solution: def longestMountain(self, A): """ :type A: List[int] :rtype: int """ if A is None or len(A) < 3: return 0 up = True max_len = 0 i = j = k = 0 while j < len(A): if up: while j + 1 < len(A) and A[j] < A[j + 1]: j = j + 1 if j > i: up = False k = j else: i = j = k = j + 1 if not up: while k + 1 < len(A) and A[k] > A[k + 1]: k = k + 1 if k > j: max_len = max(max_len, k - i + 1) i = j = k else: i = j = k = k + 1 up = True return max_len def longestMountain2(self, A): up = down = res = 0 for i in range(1, len(A)): if (down and A[i] > A[i - 1]) or A[i] == A[i - 1]: up = down = 0 if A[i] > A[i - 1]: up += 1 if A[i] < A[i - 1]: down += 1 if up and down: res = max(res, up + down + 1) return res if __name__ == '__main__': sol = Solution() A1 = [2, 1, 4, 7, 3, 2, 5] A2 = [2, 2, 2] A3 = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9] print(sol.longestMountain(A1)) print(sol.longestMountain(A2)) print(sol.longestMountain(A3))
{ "repo_name": "kingdaa/LC-python", "path": "lc/845_Longest_Mountain_in_Array.py", "copies": "1", "size": "2235", "license": "mit", "hash": -7922733793334629000, "line_mean": 24.6896551724, "line_max": 77, "alpha_frac": 0.4286353468, "autogenerated": false, "ratio": 2.98, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.89086353468, "avg_score": 0, "num_lines": 87 }
# 846. Hand of Straights
# Difficulty: Medium
# Alice has a hand of cards, given as an array of integers.  She wants to
# rearrange the cards into groups of size W, each consisting of W consecutive
# cards.  Return true if and only if she can.
#
# Example 1: hand = [1,2,3,6,2,3,4,7,8], W = 3 -> true
#            ([1,2,3],[2,3,4],[6,7,8])
# Example 2: hand = [1,2,3,4,5], W = 4 -> false
#
# Note:
# 1 <= hand.length <= 10000
# 0 <= hand[i] <= 10^9
# 1 <= W <= hand.length
import collections


class Solution:
    def isNStraightHand(self, hand, W):
        """
        :type hand: List[int]
        :type W: int
        :rtype: bool
        Greedy sweep over card values in ascending order.  `open_runs` is
        the number of straights currently passing through the previous
        value; a deque of length W remembers how many new straights were
        opened at each of the last W values so they can be closed on time.
        """
        counts = collections.Counter(hand)
        window = collections.deque()
        prev_value = -1
        open_runs = 0
        for value in sorted(counts):
            # Every open straight must continue here: the count must cover
            # the open runs and the value must be consecutive.
            starts_here = counts[value] - open_runs
            if starts_here < 0 or (open_runs > 0 and value > prev_value + 1):
                return False
            window.append(starts_here)
            open_runs, prev_value = counts[value], value
            if len(window) == W:
                # Straights opened W values ago are complete; close them.
                open_runs -= window.popleft()
        return open_runs == 0


if __name__ == '__main__':
    sol = Solution()
    # print(sol.isNStraightHand([1, 2, 3, 6, 2, 3, 4, 7, 8], 3))
    # print(sol.isNStraightHand([1, 2, 3, 4, 5], 4))
    # print(sol.isNStraightHand([1, 1, 2, 2, 3, 3], 3))
    # print(sol.isNStraightHand([1,2,3,2,3,4], 3))
    print(sol.isNStraightHand([1, 2, 3], 3))
{ "repo_name": "kingdaa/LC-python", "path": "lc/846_Hand_of_Straights.py", "copies": "1", "size": "1587", "license": "mit", "hash": -1677375679698364000, "line_mean": 25.8983050847, "line_max": 77, "alpha_frac": 0.5671077505, "autogenerated": false, "ratio": 2.88021778584392, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.894732553634392, "avg_score": 0, "num_lines": 59 }
# 849. Basic Calculator III
class Solution:
    """
    @param s: the expression string
    @return: the answer
    """

    def calculate(self, s):
        """Evaluate an expression with +, -, *, / and parentheses.

        Pipeline: tokenize -> infix-to-postfix (shunting-yard) -> evaluate.
        Unary minus is not supported (unchanged from the original).

        Bug fix: division now truncates toward zero, as the problem
        specifies, instead of Python's floor division ``//`` (they differ
        for negative intermediates, e.g. (2-7)/2 -> -2, not -3).
        """
        return self._eval_postfix(self._to_postfix(self._tokenize(s)))

    def _tokenize(self, s):
        # Split into single-char operator/paren tokens and multi-digit
        # number tokens, skipping whitespace.
        tokens = []
        x = 0
        while True:
            while x < len(s) and s[x] == " ":
                x += 1
            if x == len(s):
                break
            if s[x] in ["+", "-", "*", "/", "(", ")"]:
                tokens.append(s[x])
                x += 1
            elif s[x].isdigit():
                y = x + 1
                while y < len(s) and s[y].isdigit():
                    y += 1
                tokens.append(s[x:y])
                x = y
        return tokens

    def _to_postfix(self, tokens):
        # Shunting-yard: '*' and '/' bind tighter than '+' and '-'.
        stack = []
        postfix = []
        for token in tokens:
            if token.isdigit():
                postfix.append(token)
            elif token == "(":
                stack.append(token)
            elif token == ")":
                while stack[-1] != "(":
                    postfix.append(stack.pop())
                stack.pop()  # Pops "("
            elif token in ["*", "/"]:
                while stack and (stack[-1] in ["*", "/"]):
                    postfix.append(stack.pop())
                stack.append(token)
            elif token in ["+", "-"]:
                while stack and (stack[-1] in ["+", "-", "*", "/"]):
                    postfix.append(stack.pop())
                stack.append(token)
        while stack:
            postfix.append(stack.pop())
        return postfix

    def _eval_postfix(self, postfix):
        # Standard stack evaluation of a postfix token stream.
        stack = []
        for token in postfix:
            if token.isdigit():
                stack.append(int(token))
            else:
                b = stack.pop()
                a = stack.pop()
                if token == "+":
                    c = a + b
                elif token == "-":
                    c = a - b
                elif token == "*":
                    c = a * b
                elif token == "/":
                    # Truncate toward zero.
                    c = abs(a) // abs(b)
                    if (a < 0) != (b < 0):
                        c = -c
                stack.append(c)
        return stack[-1]
{ "repo_name": "digiter/Arena", "path": "lintcode/849.py", "copies": "1", "size": "2008", "license": "mit", "hash": 8040945692250128000, "line_mean": 27.6857142857, "line_max": 68, "alpha_frac": 0.3451195219, "autogenerated": false, "ratio": 4.532731376975169, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 1, "avg_score": 0, "num_lines": 70 }
"""84. Largest Rectangle in Histogram

https://leetcode.com/problems/largest-rectangle-in-histogram/

Given n non-negative integers representing the histogram's bar height where
the width of each bar is 1, find the area of largest rectangle in the
histogram.

        _
       | |
      _| |
     |#.#|
     |#.#|    _
  _  |#.#|  _| |
 | | |#.#|_| . |
 |_|_|#.#|.|...|

Above is a histogram where width of each bar is 1, given
height = [2,1,5,6,2,3].  The largest rectangle is shown shaded (#),
which has area = 10 unit.

Example:

Input: [2,1,5,6,2,3]
Output: 10
"""
from typing import List


class Solution:
    def largest_rectangle_area_1(self, heights: List[int]) -> int:
        """Monotonic-stack solution: the stack holds indices of bars with
        non-decreasing heights; popping a bar fixes its maximal width."""
        # time complexity: O(N)
        max_area = 0
        stack = []
        i, length = 0, len(heights)
        while i < length:
            if not stack or (heights[stack[-1]] <= heights[i]):
                stack.append(i)
                i += 1
            else:
                # heights[i] is lower: the popped bar can't extend past i.
                top = stack.pop()
                cur_area = heights[top] * (i - stack[-1] - 1 if stack else i)
                max_area = max(max_area, cur_area)
        # Drain the stack: remaining bars extend to the right edge.
        while stack:
            top = stack.pop()
            cur_area = heights[top] * (i - stack[-1] - 1 if stack else i)
            max_area = max(max_area, cur_area)
        return max_area

    def largest_rectangle_area_2(self, heights: List[int]) -> int:
        """DP solution: for each bar, jump-search the nearest strictly
        shorter bar on each side, reusing previously computed borders."""
        # time complexity: O(N)
        if not heights:
            return 0
        length = len(heights)
        left_border_idx = [0] * length
        right_border_idx = [0] * length
        for i in range(length):
            p = i - 1
            # Skip whole runs of >= bars by following earlier borders.
            while p >= 0 and heights[p] >= heights[i]:
                p = left_border_idx[p]
            left_border_idx[i] = p
        for i in range(length - 1, -1, -1):
            p = i + 1
            while p < length and heights[p] >= heights[i]:
                p = right_border_idx[p]
            right_border_idx[i] = p
        max_area = 0
        for i in range(length):
            max_area = max(max_area, heights[i] *
                           (right_border_idx[i] - left_border_idx[i] - 1))
        return max_area

    def brute_force(self, heights: List[int]) -> int:
        """For each bar, scan outwards while neighbours are at least as
        tall, then take width * height."""
        # time complexity: O(N^2)
        length = len(heights)
        max_area = 0
        for i in range(length):
            left = right = i
            while left - 1 >= 0 and heights[left - 1] >= heights[i]:
                left -= 1
            while right + 1 < length and heights[right + 1] >= heights[i]:
                right += 1
            max_area = max(max_area, (right - left + 1) * heights[i])
        return max_area
{ "repo_name": "isudox/leetcode-solution", "path": "python-algorithm/leetcode/largest_rectangle_in_histogram.py", "copies": "1", "size": "2700", "license": "mit", "hash": 601026996907734500, "line_mean": 29, "line_max": 77, "alpha_frac": 0.492962963, "autogenerated": false, "ratio": 3.4005037783375314, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.4393466741337531, "avg_score": null, "num_lines": null }
#8.4 Open the file romeo.txt and read it line by line. For each line, split the line into a #list of words using the split() function. #The program should build a list of words. #For each word on each line check to see if the word is already in the list and if not append it to the list. # When the program completes, sort and print the resulting words in alphabetical order. #You can download the sample data at http://www.pythonlearn.com/code/romeo.txt fname = raw_input("Enter file name: ") fhand = None try: fhand = open(fname) except: print 'File cannot be opened.', fname exit() #list constructor lst = [] read_file = fhand.read().strip() while ( False ): listwords = read_file.split() listwords in lst lst.append(listwords) print lst #for line in read_file: # listwords = read_file.split() # # if listwords in lst == False: # lst.append(listwords) #for line in fh # sline = split() # lst.append(sline) #lst.append(b_words) #while # lst.append() #for line in fh: #print line.rstrip()
{ "repo_name": "missulmer/Pythonstudy", "path": "coursera_python_specialization/8_4.py", "copies": "1", "size": "1047", "license": "cc0-1.0", "hash": -2358535765805576700, "line_mean": 19.96, "line_max": 109, "alpha_frac": 0.6867239733, "autogenerated": false, "ratio": 3.2215384615384615, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.9307764376385845, "avg_score": 0.020099611690523295, "num_lines": 50 }
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Exercise a BK 8500 electronic load in constant-resistance (CR) mode and
# fire a transient via a bus trigger.
import serial
import bk8500functions
import time


def send_command(ser, opcode, data=()):
    """Build, checksum and send one 26-byte BK8500 command frame.

    Every frame starts with 0xAA, carries the opcode in byte 2, any payload
    bytes from byte 3 onward, and the checksum in byte 25 -- exactly the
    pattern the original script repeated inline for every command.
    """
    cmd = [0] * 26
    cmd[0] = 0xAA
    cmd[2] = opcode
    for offset, value in enumerate(data, start=3):
        cmd[offset] = value
    cmd[25] = bk8500functions.csum(cmd)
    bk8500functions.cmd8500(cmd, ser)


ser = serial.Serial()
ser.baudrate = 9600
ser.port = 'COM4'
print(ser)
ser.open()
print(ser.is_open)

# Enable remote mode.
send_command(ser, 0x20, (0x01,))
# Set voltage limit (payload bytes as in the original script; presumably
# little-endian per the 8500 protocol -- confirm against the manual).
send_command(ser, 0x22, (0x66, 0x3F))
# Set current limit.
send_command(ser, 0x24, (0xE0, 0x79))
# Set power limit.
send_command(ser, 0x26, (0xCA, 0x41, 0x03))
# Read the model, serial number and firmware version.
send_command(ser, 0x6A)
# Set CR (constant resistance) mode.
send_command(ser, 0x28, (0x03,))
# Set CR value.
send_command(ser, 0x30, (0xFF, 0xFF, 0x01))
# Set up the CR transient parameters (level/time pairs plus mode byte).
send_command(ser, 0x38, (0xFF, 0xFF, 0x05, 0x00, 0x10, 0x1F,
                         0xFF, 0xFF, 0x01, 0x00, 0x10, 0x1F, 0x01))
# Read the CR transient parameters back.
send_command(ser, 0x39)
# Set trigger source to bus.
send_command(ser, 0x58, (0x02,))
# Set mode to TRAN(sient).
send_command(ser, 0x5D, (0x02,))
# Read the input state.
send_command(ser, 0x5F)
# Enable the input.
send_command(ser, 0x21, (0x01,))

# Delay so I can see it happen!
time.sleep(4)

# Read the input state.
send_command(ser, 0x5F)
# Trigger the transient over the bus.
send_command(ser, 0x5A)

# Delay so I can see it happen!
time.sleep(4)

# Disable the input.
send_command(ser, 0x21, (0x00,))
# Read the trigger source.
send_command(ser, 0x59)

ser.close()
{ "repo_name": "BKPrecisionCorp/BK-8500-Electronic-Load", "path": "python/8500CR_Transient_BusTrig.py", "copies": "1", "size": "3306", "license": "apache-2.0", "hash": 4228848545652204500, "line_mean": 18.3333333333, "line_max": 76, "alpha_frac": 0.7398669087, "autogenerated": false, "ratio": 2.2863070539419086, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.8262701449711998, "avg_score": 0.052694502585982336, "num_lines": 171 }
# 854. K-Similar Strings
# Difficulty: Hard
# Strings A and B are K-similar (for some non-negative integer K) if we can
# swap the positions of two letters in A exactly K times so that the
# resulting string equals B.  Given two anagrams A and B, return the
# smallest K for which A and B are K-similar.
#
# Examples: ("ab","ba") -> 1; ("abc","bca") -> 2; ("abac","baca") -> 2;
#           ("aabc","abca") -> 2
# Note:
# 1 <= A.length == B.length <= 20
# A and B contain only lowercase letters from {'a'..'f'}
from collections import deque


class Solution:
    def kSimilarity(self, A, B):
        """
        :type A: str
        :type B: str
        :rtype: int
        Breadth-first search over strings: each level applies one swap that
        fixes the leftmost mismatched position, so the first level that
        reaches B gives the minimum swap count.  (Non-anagram inputs return
        -1, as in the original.)
        """
        def swapped(word, x, y):
            letters = list(word)
            letters[x], letters[y] = letters[y], letters[x]
            return ''.join(letters)

        if A == B:
            return 0
        if len(A) != len(B) or set(A) != set(B):
            return -1

        n = len(A)
        seen = {A}
        frontier = deque([A])
        swaps = 0
        while frontier:
            swaps += 1
            for _ in range(len(frontier)):
                cur = frontier.popleft()
                # Leftmost position where cur still disagrees with B.
                pos = 0
                while pos < n and cur[pos] == B[pos]:
                    pos += 1
                for j in range(pos + 1, n):
                    # Only swaps that place the right letter at `pos` and
                    # don't disturb an already-correct position.
                    if cur[j] == B[j] or cur[pos] != B[j]:
                        continue
                    candidate = swapped(cur, pos, j)
                    if candidate == B:
                        return swaps
                    if candidate not in seen:
                        seen.add(candidate)
                        frontier.append(candidate)
        return swaps
{ "repo_name": "kingdaa/LC-python", "path": "lc/854_K-Similar_Strings.py", "copies": "1", "size": "1762", "license": "mit", "hash": 5316921240372352000, "line_mean": 23.4722222222, "line_max": 75, "alpha_frac": 0.4472190692, "autogenerated": false, "ratio": 3.3058161350844277, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.42530352042844277, "avg_score": null, "num_lines": null }
# 859. Buddy Strings
# Difficulty: Easy
# Given two strings A and B of lowercase letters, return true if and only if
# we can swap two letters in A so that the result equals B.
#
# Examples:
#   ("ab","ba") -> true; ("ab","ab") -> false; ("aa","aa") -> true
#   ("aaaaaaabc","aaaaaaacb") -> true; ("","aa") -> false
#
# Note:
# 0 <= A.length <= 20000
# 0 <= B.length <= 20000
# A and B consist only of lowercase letters.
class Solution:
    def buddyStrings(self, A, B):
        """
        :type A: str
        :type B: str
        :rtype: bool
        True iff swapping exactly two letters of A yields B.

        Bug fix: the original XOR trick only verified that the mismatched
        characters XOR to zero, which also accepts impossible pairs such as
        A="ad", B="bg" (since 'a'^'b' == 'd'^'g').  The two mismatched
        positions must actually hold each other's characters.
        """
        if len(A) != len(B):
            return False
        if A == B:
            # A swap must change something -- unless some letter repeats,
            # in which case swapping the duplicates leaves A unchanged.
            return len(set(A)) < len(A)
        mismatches = [i for i in range(len(A)) if A[i] != B[i]]
        if len(mismatches) != 2:
            return False
        i, j = mismatches
        return A[i] == B[j] and A[j] == B[i]
{ "repo_name": "kingdaa/LC-python", "path": "lc/859_Buddy_Strings.py", "copies": "1", "size": "1060", "license": "mit", "hash": -5872625245243433000, "line_mean": 19.7843137255, "line_max": 76, "alpha_frac": 0.5047169811, "autogenerated": false, "ratio": 3.0113636363636362, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.9016080617463637, "avg_score": 0, "num_lines": 51 }
# 85% accuracy
# Train a CNN to classify 180x180 chroma-similarity images as cover-song
# pairs (label 1) vs non-cover pairs (label 0), with 5-fold cross-validation.
import numpy as np
import glob
import os
import re
import h5py
import scipy.misc
np.random.seed(777)
from keras.models import Sequential
from keras.layers import Dense, Dropout, Activation, Flatten, Conv2D, pooling, BatchNormalization
from keras.utils import np_utils
from sklearn.model_selection import StratifiedKFold
# NOTE(review): sklearn.cross_validation is long-deprecated and this import
# (like glob/os/re/LeavePOut/train_test_split) is unused below.
from sklearn.cross_validation import train_test_split
from sklearn.model_selection import LeavePOut

# HDF5 file holding the pair indices for training/validation.
path = './Desktop/COVER_SONG/chroma_data_training/CP_1000ms_training_s2113_d2113_170106223452.h5'
f1 = h5py.File(path)
datasetNames=[n for n in f1.keys()]
FX = f1['X']
idxDis_train = f1['idxDis_train']        # dissimilar (non-cover) pair indices
idxDis_validate = f1['idxDis_validate']
idxSim_train = f1['idxSim_train']        # similar (cover) pair indices
idxSim_validate = f1['idxSim_validate']

# Load one pre-rendered similarity image per pair; filenames are
# "<min>_<max>_S.jpg" for similar pairs and "..._D.jpg" for dissimilar ones.
X=[]
Y=[]
for i in range(np.shape(idxSim_train)[0]):
    a=[idxSim_train[i][0], idxSim_train[i][1]]
    X.append(scipy.misc.imread('./Desktop/KAKAO_ALL_PAIR_TRAIN/'+'{:0=4}'.format((int)(min(a)))+'_'+'{:0=4}'.format((int)(max(a)))+'_S.jpg'))
    Y.append(1)
for i in range(np.shape(idxDis_train)[0]):
    a=[idxDis_train[i][0], idxDis_train[i][1]]
    X.append(scipy.misc.imread('./Desktop/KAKAO_ALL_PAIR_TRAIN/'+'{:0=4}'.format((int)(min(a)))+'_'+'{:0=4}'.format((int)(max(a)))+'_D.jpg'))
    Y.append(0)

# Normalize images to [0, 1] and one-hot encode the two classes.
X = np.asarray(X)
X = X.reshape(X.shape[0], 180, 180, 1)
X = X.astype('float32')
X/=np.max(X)
Y=np_utils.to_categorical(Y,2)

# X <= N x 180 x 180 x 1 data (cover pair N1, non-cover pair N2, N1+N2 = N)
# Y <= N x (1 or 0) data as one-hot (cover pair N1, non-cover pair N2, N1+N2 = N)

# Dummy features + labels used only to drive StratifiedKFold's index split.
# NOTE(review): the dataset name suggests 2113 similar pairs but only the
# first 2112 entries are labelled 1 here -- possible off-by-one; confirm.
PP=np.zeros(4226)
YY=np.zeros(4226)
YY[0:2112]=1

seed=7
kfold = StratifiedKFold(n_splits=5, shuffle=True, random_state=seed)
iter=0
score_result = []
for train, test in kfold.split(PP, YY):
    # Fresh model per fold: stacked Conv/BatchNorm/MaxPool feature extractor
    # followed by a dense softmax classifier over the two classes.
    model = Sequential()
    model.add(Conv2D(filters=32, kernel_size=(5,5), strides=1, padding='valid', activation='relu', input_shape=(180,180,1)))
    model.add(BatchNormalization())
    model.add(Conv2D(32,kernel_size=(3,3), strides=1, padding='valid', activation='relu'))
    model.add(pooling.MaxPooling2D(pool_size=(2,2)))
    model.add(BatchNormalization())
    model.add(Conv2D(32,kernel_size=(3,3), strides=1, padding='valid', activation='relu'))
    model.add(Conv2D(16,kernel_size=(3,3), strides=1, padding='valid', activation='relu'))
    model.add(pooling.MaxPooling2D(pool_size=(2,2)))
    model.add(BatchNormalization())
    model.add(Conv2D(32,kernel_size=(3,3), strides=1, padding='valid', activation='relu'))
    model.add(Conv2D(16,kernel_size=(3,3), strides=1, padding='valid', activation='relu'))
    model.add(pooling.MaxPooling2D(pool_size=(2,2)))
    model.add(BatchNormalization())
    model.add(Dropout(0.25))
    model.add(Flatten())
    model.add(Dense(256,activation='relu'))
    model.add(BatchNormalization())
    model.add(Dropout(0.5))
    model.add(Dense(2, activation='softmax'))
    model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy'])
    model.fit(X[train], Y[train], batch_size=16, nb_epoch=100, verbose=1)
    # Evaluate on the held-out fold and keep [loss, accuracy].
    score = model.evaluate(X[test], Y[test], verbose=0)
    print(model.metrics_names)
    print(score)
    score_result.append(score)

np.savetxt('./Desktop/result2.txt',score_result)
{ "repo_name": "thkim107/sim", "path": "CNN_cover_song_NEW.py", "copies": "1", "size": "3128", "license": "mit", "hash": 1836655132479682000, "line_mean": 30.9183673469, "line_max": 138, "alpha_frac": 0.7036445013, "autogenerated": false, "ratio": 2.6088407005838197, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.38124852018838196, "avg_score": null, "num_lines": null }
# 85. Maximal Rectangle
class Solution:
    def maximalRectangle(self, matrix: List[List[str]]) -> int:
        """Area of the largest rectangle of '1's in a binary matrix.

        Row by row, the columns of consecutive '1's form histogram bars;
        two monotonic-stack sweeps find, for every bar, the nearest
        strictly lower bar on each side, which bounds the widest rectangle
        of that bar's height.
        """
        if (not matrix) or (not matrix[0]):
            return 0

        heights = [0] * len(matrix[0])
        best = 0
        for row in matrix:
            n = len(row)
            # Extend each column's run of '1's, or reset it on a '0'.
            heights = [0 if row[i] == "0" else heights[i] + 1 for i in range(n)]

            # left[i]: position of the nearest lower bar to the left (-1 if none).
            left = [-1] * n
            pending = []  # (position, height) pairs, heights strictly increasing
            for pos in range(n):
                while pending and pending[-1][1] >= heights[pos]:
                    pending.pop()
                if pending:
                    left[pos] = pending[-1][0]
                pending.append((pos, heights[pos]))

            # right[i]: position of the nearest lower bar to the right (n if none).
            right = [n] * n
            pending = []
            for pos in range(n - 1, -1, -1):
                while pending and pending[-1][1] >= heights[pos]:
                    pending.pop()
                if pending:
                    right[pos] = pending[-1][0]
                pending.append((pos, heights[pos]))

            for pos in range(n):
                best = max(best, heights[pos] * (right[pos] - left[pos] - 1))
        return best
{ "repo_name": "digiter/Arena", "path": "85-maximal-rectangle.py", "copies": "1", "size": "1205", "license": "mit", "hash": 6435183265621438000, "line_mean": 34.4411764706, "line_max": 80, "alpha_frac": 0.465560166, "autogenerated": false, "ratio": 3.7538940809968846, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.9717638705965658, "avg_score": 0.00036310820624546115, "num_lines": 34 }
"""85. Maximal Rectangle https://leetcode.com/problems/maximal-rectangle/ Given a 2D binary matrix filled with 0's and 1's, find the largest rectangle containing only 1's and return its area. Example: Input: [ ⁠ ["1","0","1","0","0"], ⁠ ["1","0","1","1","1"], ⁠ ["1","1","1","1","1"], ⁠ ["1","0","0","1","0"] ] Output: 6 """ from typing import List class Solution: def maximal_rectangle(self, matrix: List[List[str]]) -> int: """ Time complexity O(N^2) :param matrix: :return: """ if not matrix or not matrix[0]: return 0 rows, cols = len(matrix), len(matrix[0]) heights = [0] * (cols + 1) max_area = 0 for row in range(rows): stack = [] for col in range(cols + 1): if col < cols and matrix[row][col] == "1": heights[col] += 1 else: heights[col] = 0 while stack and heights[stack[-1]] >= heights[col]: height = heights[stack.pop()] width = col - stack[-1] - 1 if stack else col cur_area = height * width max_area = max(max_area, cur_area) stack.append(col) return max_area
{ "repo_name": "isudox/leetcode-solution", "path": "python-algorithm/leetcode/maximal_rectangle.py", "copies": "1", "size": "1300", "license": "mit", "hash": 6248749042626007000, "line_mean": 25.9166666667, "line_max": 76, "alpha_frac": 0.4821981424, "autogenerated": false, "ratio": 3.530054644808743, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.9512252787208744, "avg_score": 0, "num_lines": 48 }
# 8.5 # Liu Li # 18 Nov, 2015 ''' Open the file mbox-short.txt and read it line by line. When you find a line that starts with 'From ' like the following line: From stephen.marquard@uct.ac.za Sat Jan 5 09:14:16 2008 You will parse the From line using split() and print out the second word in the line (i.e. the entire address of the person who sent the message). Then print out a count at the end. Hint: make sure not to include the lines that start with 'From:'. You can download the sample data at http://www.pythonlearn.com/code/mbox-short.txt ''' # Use the file name mbox-short.txt as the file name # fname = raw_input("Enter file name: ") #if len(fname) < 1 : fname = "mbox-short.txt" import re fname = "mbox-short.txt" with open(fname) as fh: count = 0 for line in fh: if re.search("From ", line): email = re.findall('[^ ]+@[^ ]+', line)[0] print email count += 1 print "There were", count, "lines in the file with From as the first word" ######################################################################### # Desired Output # # zqian@umich.edu # rjlowe@iupui.edu # cwen@iupui.edu # cwen@iupui.edu # gsilver@umich.edu # gsilver@umich.edu # zqian@umich.edu # gsilver@umich.edu # wagnermr@iupui.edu # zqian@umich.edu # antranig@caret.cam.ac.uk # gopal.ramasammycook@gmail.com # david.horwitz@uct.ac.za # david.horwitz@uct.ac.za # david.horwitz@uct.ac.za # david.horwitz@uct.ac.za # stephen.marquard@uct.ac.za # louis@media.berkeley.edu # louis@media.berkeley.edu # ray@media.berkeley.edu # cwen@iupui.edu # cwen@iupui.edu # cwen@iupui.edu # There were 27 lines in the file with From as the first word
{ "repo_name": "ll0816/PythonForEverybody", "path": "Python-Data-Structure/Ass8.5.py", "copies": "1", "size": "1668", "license": "mit", "hash": 7285127201538093000, "line_mean": 29.3272727273, "line_max": 181, "alpha_frac": 0.6636690647, "autogenerated": false, "ratio": 2.7986577181208054, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.8958034206762907, "avg_score": 0.0008585152115796433, "num_lines": 55 }
"""8.5 Open the file mbox-short.txt and read it line by line. When you find a line that starts with 'From ' like the following line: From stephen.marquard@uct.ac.za Sat Jan 5 09:14:16 2008 You will parse the From line using split() and print out the second word in the line (i.e. the entire address of the person who sent the message). Then print out a count at the end. Hint: make sure not to include the lines that start with 'From:'. You can download the sample data at http://www.pythonlearn.com/code/mbox-short.txt""" fname = raw_input("Enter file name: ") fh = None try: fh = open(fname) except: print 'File cannot be opened.', fname exit() lst = [] #list constructor count = 0 for line in fh: if line.startswith('From:'): line = line.rstrip() words = line.split() count = count + 1 for word in words: if '@' in word: lst.append(word) for element in lst: print element print "There were %d lines in the file with From as the first word" % count
{ "repo_name": "missulmer/Pythonstudy", "path": "coursera_python_specialization/8_5.py", "copies": "1", "size": "1043", "license": "cc0-1.0", "hash": 5516316851001657000, "line_mean": 26.4473684211, "line_max": 132, "alpha_frac": 0.6615532119, "autogenerated": false, "ratio": 3.5719178082191783, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.4733471020119178, "avg_score": null, "num_lines": null }
# 86 fps 10/27/2012
#106 fps 3/1/2013
# 39 fps 10/3/2017 - no psyco support in Python3

from toast.scene_graph import Component, Scene
from toast.sprite import Sprite
from toast.animation import Animation
from toast.image_sheet import ImageSheet
from toast.resource_loader import ResourceLoader
from toast.emitter import Emitter
from toast.math.vector import Vector2D
from toast.gradient import Gradient
from toast.timer import Timer

import random
import pygame

from examples.demo_game import DemoGame

class EndGameAfter(Component):
    """Component that posts a pygame QUIT event once a countdown expires."""

    def __init__(self, milliseconds=0):
        super(EndGameAfter, self).__init__()
        # Countdown until the demo is asked to quit.
        self.__life_timer = Timer(milliseconds)

    def update(self, milliseconds=0):
        super(EndGameAfter, self).update(milliseconds)
        if self.__life_timer.is_time_up():
            # End the demo by feeding a QUIT event into pygame's event queue.
            pygame.event.post(pygame.event.Event(pygame.locals.QUIT))

class Particle(Sprite):
    """Short-lived 'puff' sprite that drifts and removes itself when expired."""

    def __init__(self, image, lifetime):
        super(Particle, self).__init__(image)
        self.lifetime = Timer(int(lifetime))
        # Drift direction from a near-vertical random angle, scaled by -1.65.
        # NOTE(review): random.randrange with float bounds is rejected on
        # Python 3 -- confirm (randint or uniform may be intended).
        self.__velocity = Vector2D.from_angle(random.randrange(80.0, 100.0)) * -1.65
        sheet = ImageSheet(ResourceLoader.load('data//puffs.png'), (32, 32))
        # Four-frame puff animation; frame durations scale with the lifetime.
        puff = [(sheet[0], int(lifetime * 0.1)),
                (sheet[1], int(lifetime * 0.15)),
                (sheet[2], int(lifetime * 0.3)),
                (sheet[3], int(lifetime * 2.0))]
        self.animation = Animation('puff', puff)
        self.add(self.animation)

    def update(self, milliseconds=0):
        super(Particle, self).update(milliseconds)
        # Frame-rate independent movement, tuned against a 60 fps baseline.
        self.position += self.__velocity * (milliseconds / 1000.0) * 60
        if self.lifetime.is_time_up():
            self.lifetime.reset()
            self.remove()

class EmitterPerformanceTest(Scene):
    """Stress-test scene: a row of emitters spawning puff particles."""

    def __init__(self):
        super(EmitterPerformanceTest, self).__init__()

        # Background: small vertical gradient scaled up to fill the scene.
        bg = Gradient.createVerticalGradient((20, 15), (255,255,255), (228, 139, 165), (111,86,117))
        bg = pygame.transform.scale(bg, (320, 240))
        self.add(Sprite(bg, (160, 120)))

        # Eight emitters spread evenly along the bottom of the scene.
        num_emitters = 8
        for i in range(num_emitters):
            e = Emitter(Particle, (ImageSheet(ResourceLoader.load('data//puffs.png'), (32, 32))[0], 1000), 40, self.onCreate)
            e.position = 40 + (i * (256 / (num_emitters - 1))), 216
            self.add(e)

        # Quit automatically after 30 seconds (1000 ms * 30).
        self.add(EndGameAfter(1000 * 30))

    def onCreate(self, emitter, particle):
        # Spawn at the emitter position with a little jitter (+/-8 x, +/-16 y).
        particle.position = Vector2D(emitter.position)
        particle.position += (random.random() - 0.5) * 2.0 * 8, (random.random() - 0.5) * 2.0 * 16
        particle.animation.play('puff', 0)
        # Roughly 30% of particles get a randomised, longer lifetime.
        if (random.random() < 0.3):
            particle.lifetime = Timer(random.randint(1000, 1800))

game = DemoGame((640, 480), EmitterPerformanceTest)
game.run()
{ "repo_name": "JoshuaSkelly/Toast", "path": "examples/emitter/emitter_performance_test.py", "copies": "2", "size": "2893", "license": "mit", "hash": -6150066314620411000, "line_mean": 35.1625, "line_max": 125, "alpha_frac": 0.6066367093, "autogenerated": false, "ratio": 3.571604938271605, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 1, "avg_score": 0.020816270845843476, "num_lines": 80 }
# 882. Reachable Nodes In Subdivided Graph
class Solution:
    def reachableNodes(self, edges: List[List[int]], maxMoves: int, n: int) -> int:
        """Count nodes reachable within maxMoves after subdividing edges.

        Each edge [u, v, w] is replaced by w intermediate nodes, so
        traversing it costs w + 1 moves. Shortest distances between the
        original nodes are found with a queue-based Bellman-Ford (SPFA);
        subdivision nodes are then counted from both endpoints of every
        edge, capped at the w nodes the edge actually contains.
        """
        # Adjacency list over the original nodes: (neighbor, subdivisions).
        adj = [[] for _ in range(n)]
        for u, v, w in edges:
            adj[u].append((v, w))
            adj[v].append((u, w))

        # Queue-based Bellman-Ford from node 0 over edge costs w + 1.
        INF = 3000 * 10**4 * 10**4 + 5
        dist = [INF] * n
        dist[0] = 0
        pending = collections.deque([0])
        while pending:
            u = pending.popleft()
            for v, w in adj[u]:
                candidate = dist[u] + w + 1
                if candidate < dist[v]:
                    dist[v] = candidate
                    pending.append(v)

        # Original nodes within the move budget.
        total = sum(1 for u in range(n) if dist[u] <= maxMoves)

        # Subdivision nodes reached from either endpoint of each edge,
        # never more than the w nodes the edge holds.
        for u, v, w in edges:
            from_u = max(0, maxMoves - dist[u])
            from_v = max(0, maxMoves - dist[v])
            total += min(w, from_u + from_v)
        return total
{ "repo_name": "digiter/Arena", "path": "882.py", "copies": "1", "size": "1427", "license": "mit", "hash": 3177672949798333000, "line_mean": 32.1395348837, "line_max": 93, "alpha_frac": 0.4435087719, "autogenerated": false, "ratio": 3.607594936708861, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.4551103708608861, "avg_score": null, "num_lines": null }
"""8-bit string definitions for Python 2/3 compatibility Defines the following which allow for dealing with Python 3 breakages: STR_IS_BYTES STR_IS_UNICODE Easily checked booleans for type identities _NULL_8_BYTE An 8-bit byte with NULL (0) value as_8_bit( x, encoding='utf-8') Returns the value as the 8-bit version unicode -- always pointing to the unicode type bytes -- always pointing to the 8-bit bytes type """ import sys STR_IS_BYTES = True if sys.version_info[:2] < (2,6): # no bytes, traditional setup... bytes = str else: bytes = bytes if sys.version_info[:2] < (3,0): # traditional setup, with bytes defined... unicode = unicode _NULL_8_BYTE = '\000' def as_8_bit( x, encoding='utf-8' ): if isinstance( x, unicode ): return x.encode( encoding ) return bytes( x ) else: # new setup, str is now unicode... STR_IS_BYTES = False _NULL_8_BYTE = bytes( '\000','latin1' ) def as_8_bit( x, encoding='utf-8' ): if isinstance( x,unicode ): return x.encode(encoding) return str(x).encode( encoding ) unicode = str STR_IS_UNICODE = not STR_IS_BYTES
{ "repo_name": "frederica07/Dragon_Programming_Process", "path": "PyOpenGL-3.0.2/OpenGL/_bytes.py", "copies": "1", "size": "1239", "license": "bsd-2-clause", "hash": 2610254569464124000, "line_mean": 24.8125, "line_max": 70, "alpha_frac": 0.602905569, "autogenerated": false, "ratio": 3.54, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.4642905569, "avg_score": null, "num_lines": null }
#8CH GPIO TOGGLE
import RPi.GPIO as GPIO

# Physical (BOARD-numbered) header pins, one per output channel.
PIN_A = 3
PIN_B = 5
PIN_C = 7
PIN_D = 11
PIN_E = 13
PIN_F = 15
PIN_G = 19
PIN_H = 21
PIN_I = 23

CH_MAX = 9  # number of output channels


class SlGpio:
    """Latched multi-channel GPIO output driver.

    Channel states are staged with set() and pushed to the hardware in one
    pass by update().

    Note: the original wrapped the method *definitions* in a class-body
    try/except KeyboardInterrupt, which could never catch interrupts raised
    while the methods actually run; that inert wrapper has been removed.
    """

    def __init__(self):
        """Configure all CH_MAX pins as outputs, staged values all False."""
        print("init")
        self.CH_MAX = CH_MAX
        self.GPIO = GPIO
        self.GPIO.setmode(GPIO.BOARD)
        # BUG FIX: the pin list previously held only 8 entries (PIN_A..PIN_H)
        # while CH_MAX is 9, so the setup loop below raised IndexError.
        # PIN_I was already defined above and belongs in the list (the
        # sibling module sl_gpio2.py includes all nine pins).
        self.ch = [PIN_A, PIN_B, PIN_C, PIN_D, PIN_E, PIN_F, PIN_G, PIN_H, PIN_I]
        self.ch_val = [False] * CH_MAX
        for var in range(CH_MAX):
            print("The pin {0} was set as OUTPUT".format(self.ch[var]))
            self.GPIO.setup(self.ch[var], GPIO.OUT)

    def set(self, ch, val):
        """Stage ``val`` for channel ``ch``.

        Returns True on success, False (after a message) for an invalid
        channel index. The pin itself is only written by update().
        """
        if ch >= CH_MAX or ch < 0:
            print("The CH is invalid")
            return False
        self.ch_val[ch] = val
        return True

    def update(self):
        """Write every staged channel value to its GPIO pin."""
        for var in range(CH_MAX):
            GPIO.output(self.ch[var], self.ch_val[var])
{ "repo_name": "DiamondOhana/jphacks", "path": "python_main/sonilab/sl_gpio.py", "copies": "2", "size": "1266", "license": "mit", "hash": 7365615152756967000, "line_mean": 24.8367346939, "line_max": 82, "alpha_frac": 0.4960505529, "autogenerated": false, "ratio": 3.028708133971292, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.4524758686871292, "avg_score": null, "num_lines": null }
#8CH GPIO TOGGLE # import RPi.GPIO as GPIO PIN_A = 3 PIN_B = 5 PIN_C = 7 PIN_D = 11 PIN_E = 13 PIN_F = 15 PIN_G = 19 PIN_H = 21 PIN_I = 23 CH_MAX = 9 class SlGpio : try: #while True : def __init__ (self) : print("init") self.CH_MAX = CH_MAX # self.GPIO = GPIO # self.GPIO.setmode(GPIO.BOARD) global PIN_A, PIN_B, PIN_C, PIN_D, PIN_E, PIN_F, PIN_G, PIN_H, PIN_I self.ch = [PIN_A, PIN_B, PIN_C, PIN_D, PIN_E, PIN_F, PIN_G, PIN_H, PIN_I] self.ch_val = [False, False, False, False, False, False, False, False, False] for var in range(CH_MAX): print("The pin {0} was set as OUTPUT".format(self.ch[var])) # self.GPIO.setup(self.ch[var],GPIO.OUT) # self.GPIO.setmode(GPIO.BOARD) # self.GPIO.setup(self.pi,GPIO.OUT) def set (self, ch, val) : if ch >= CH_MAX or ch < 0: print("The CH is invalid") return False else: self.ch_val[ch]=val return True def update(self) : for var in range(CH_MAX): if self.ch_val[var]: print "O", else: print"-", print " " # GPIO.output(self.ch[var],self.ch_val[var]) except KeyboardInterrupt : pass # GPIO.cleanup()
{ "repo_name": "DiamondOhana/jphacks", "path": "rpi_main/sonilab/sl_gpio2.py", "copies": "2", "size": "1455", "license": "mit", "hash": -6377689930922320000, "line_mean": 24.9821428571, "line_max": 89, "alpha_frac": 0.4666666667, "autogenerated": false, "ratio": 3.1357758620689653, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.9543987104512321, "avg_score": 0.011691084851328868, "num_lines": 56 }
# -8- coding: utf-8 -*- import re from urlparse import urljoin import util _titleptn = re.compile(ur''' (?P<dat>\d+\.(?:dat|cgi)) # datファイル名 (?:(?:<>)|,) # 区切り文字(<>または,) (?P<title>.+) # タイトル \((?P<num>\d{1,4})\) # レス数 ''', re.VERBOSE | re.UNICODE) class Board(object): ''' 板を管理するクラス ''' def __init__(self, board_url=None, title=u''): self.threads = [] self.title = title if board_url: self.createBoard(board_url, title) def createBoard(self, board_file, title): ''' スレッド一覧ファイルからスレッドリストを作成するクラス subject.txtの1行からスレッド情報を取得 http://info.2ch.net/wiki/index.php?monazilla%2Fdevelop%2Fsubject.txt subject.txtの仕様 ================= 0000000000.dat<>スレッドタイトル (レス数) - threads[n]['title'] : スレッドタイトル - threads[n]['url'] : http://server/board/dat/0000000000.dat ''' data, path_tokens, info = util.boardload(board_file) if title: self.title = title for l in data: matched = _titleptn.search(l) if matched: r = matched.groupdict() subject_url = util.pathjoin(path_tokens) datfile = urljoin(subject_url, 'dat/' + r['dat']) thread = dict(title = r['title'], url = datfile) else: print 'no match --> ' + l self.threads.append(thread) class Category(object): ''' カテゴリを管理するクラス name : カテゴリ名 boards : カテゴリに所属する板のリストw ''' def __init__(self, name): self.name = name self.boards = [] def __getitem__(self, key): if isinstance(key, int): return self.boards[key] else: return self.__getattribute__(key)
{ "repo_name": "ymotongpoo/restroom", "path": "python/py2ch/py2ch/bbsboard.py", "copies": "1", "size": "2109", "license": "apache-2.0", "hash": 6866934287043119000, "line_mean": 24.5479452055, "line_max": 76, "alpha_frac": 0.4975871314, "autogenerated": false, "ratio": 2.8516819571865444, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.38492690885865444, "avg_score": null, "num_lines": null }
# -*- coding: utf-8 -*-
"""
Parse requirements file, and work out whether there are any updates.
"""
import requests
from semantic_version import Version


def package_url(package):
    """Return fully-qualified URL to package on PyPI (JSON endpoint)."""
    url = u"http://pypi.python.org/pypi/%s/json" % package
    return url


def package_info(package_url):
    """Return latest package version from PyPI (as JSON)."""
    response = requests.get(package_url)
    return response.json().get('info')


def package_version(package_info):
    """Return the latest version from package_info as a semver Version."""
    return Version(package_info.get('version'))


class PackageVersion(object):
    """A specific version of a package."""

    def __init__(self, name, version_string, **kwargs):
        self.name = name
        self.version_string = version_string
        self.uploaded_at = kwargs.get('uploaded_at')

    def __unicode__(self):
        return u"Package: %s (%s)" % (self.name, self.version)

    def __str__(self):
        return unicode(self).encode('utf-8')

    @property
    def version(self):
        """Return a semantic_version.Version object."""
        return Version(self.version_string, partial=True)

    def diff(self, package_to_compare):
        """Return string representing the diff between package versions.

        We're interested in whether this is a major, minor, patch or
        'other' update. Compares the two versions and returns None when
        they are equal, otherwise a string naming the first component
        that differs - 'major', 'minor', 'patch' - or 'other'.
        """
        mine = self.version
        theirs = package_to_compare.version
        if mine == theirs:
            return None
        for level in ('major', 'minor', 'patch'):
            if getattr(mine, level) != getattr(theirs, level):
                return level
        return 'other'
{ "repo_name": "yunojuno/pypi-alerts", "path": "pypi_alerts/__init__.py", "copies": "1", "size": "1911", "license": "mit", "hash": -7940523014197847000, "line_mean": 29.3333333333, "line_max": 76, "alpha_frac": 0.6352694924, "autogenerated": false, "ratio": 4.1274298056155505, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.5262699298015551, "avg_score": null, "num_lines": null }
'''
8-compare_max_fluxes.py
==============================

AIM: Plots the comparison of maximal fluxes.

INPUT: files: - <orbit_id>_misc/error_evolution.dat
       variables: see section PARAMETERS (below)

OUTPUT: in all_figures/ : comparison for every orbit_iditude

CMD: python 8-compare_max_fluxes.py

ISSUES: <none known>

REQUIRES:- standard python libraries, specific libraries in resources/
         - Structure of the root folder:
           * <orbit_id>_flux/ --> flux files
           * <orbit_id>_figures/ --> figures
           * <orbit_id>_misc/ --> storages of data
           * all_figures/ --> comparison figures

REMARKS: <none>
'''
###########################################################################
### INCLUDES
import numpy as np
import pylab as plt

from resources.routines import *
from resources.TimeStepping import *

import parameters as param
import resources.figures as figures
from matplotlib.ticker import MaxNLocator, MultipleLocator, FormatStrFormatter

###########################################################################
### PARAMETERS

# Error threshold
p = 0.1

# Flux limitation [ph/(px s)]
rqmt_flux = 1

# Show plots and detailled analysis ?
show = True

# Fancy plots ?
fancy = True

###########################################################################
### INITIALISATION

# File name fot the computed orbit file
error_file = 'error_evolution.dat'

# Formatted folders definitions
orbit_id = 1001
folder_flux, folder_figures, folder_misc = init_folders(orbit_id)
folder_figures= 'all_figures/'

if fancy: figures.set_fancy()

###########################################################################
### Load which orbits were computed

# NOTE(review): the format string has three placeholders (%d, %d, %s) but only
# two arguments, so these calls raise TypeError as written; an angle/altitude
# argument appears to be missing. All three datasets also load the same file
# despite the 800/700/620 km names -- confirm intent.
data_800 = np.loadtxt('%d_%ddeg_misc/%s' % (orbit_id,error_file), delimiter=',')
data_700 = np.loadtxt('%d_%ddeg_misc/%s' % (orbit_id,error_file), delimiter=',')
data_620 = np.loadtxt('%d_%ddeg_misc/%s' % (orbit_id,error_file), delimiter=',')

# Maximum flux of the 700/620 km runs relative to the 800 km baseline,
# matched per orbit number; column 4 holds the maximal flux.
corrected_700 = np.zeros([np.shape(data_800)[0], 2])
corrected_620 = np.zeros([np.shape(data_800)[0], 2])

k = 0
for junk, o, junk, junk, sl, junk in data_800:
    # Use the flux of the same orbit where available, otherwise fall back to
    # the most recent earlier orbit, then normalise by the baseline flux sl.
    if np.shape(data_700[data_700[:,0]==o,1])[0] > 0:
        corrected_700[k] = o, data_700[data_700[:,0]==o,4][0]/sl
    else:
        corrected_700[k] = o, data_700[data_700[:,0]<o,4][-1]/sl

    # NOTE(review): the membership test uses data_620[:,1] while the lookups
    # use data_620[:,0] -- one of the two is likely a typo.
    if np.shape(data_620[data_620[:,1]==o,1])[0] > 0:
        corrected_620[k] = o, data_620[data_620[:,0]==o,4][0]/sl
    else:
        corrected_620[k] = o, data_620[data_620[:,0]<o,4][-1]/sl
    k += 1

# Convert orbit numbers to day-of-year, then to calendar dates for the x axis.
xx = data_800[:,1]/param.last_orbits[orbit_id]*365.
xx = figures.convert_date(xx)

fig=plt.figure()
ax=plt.subplot(111)

# zooms
ax.yaxis.set_major_locator(MultipleLocator(0.5))
ax.yaxis.set_minor_locator(MultipleLocator(0.1))
#ax.yaxis.set_major_locator(MultipleLocator(1.))
#ax.yaxis.set_minor_locator(MultipleLocator(0.5))
#ax.xaxis.set_major_locator(MultipleLocator(20.))

ax.xaxis.grid(True,'minor')
ax.yaxis.grid(True,'minor')
ax.xaxis.grid(True,'major',linewidth=2)
ax.yaxis.grid(True,'major',linewidth=2)

plt.plot(xx, corrected_620[:,1], 'b' , linewidth=2, label='620 km')
#plt.plot(xx, corrected_620[:,1], 'Darkorange' , linewidth=2, label='Worst case')
#plt.plot(xx, corrected_700[:,1], 'g', linewidth=3, label='Current INAF')
plt.plot(xx, corrected_700[:,1], 'r' , linewidth=2, label='700 km')

fig.autofmt_xdate()
plt.legend(loc=2)
#plt.ylim([0, 0.022])
plt.ylabel(r'$\mathrm{Relative\ maximum\ stray\ light\ flux\ to\ 800\ km}$')

# Saves the figure
# NOTE(review): sl_angle is never defined in this script (NameError here);
# it is presumably meant to come from the parameters module -- confirm.
fname = '%srelative_flux_%d' % (folder_figures,sl_angle)
figures.savefig(fname,fig,fancy)

fig=plt.figure()
ax=plt.subplot(111)

# zooms
ax.yaxis.set_major_locator(MultipleLocator(0.2))
ax.yaxis.set_minor_locator(MultipleLocator(0.1))
#ax.yaxis.set_major_locator(MultipleLocator(1.))
#ax.yaxis.set_minor_locator(MultipleLocator(0.5))
#ax.xaxis.set_major_locator(MultipleLocator(20.))

ax.xaxis.grid(True,'minor')
ax.yaxis.grid(True,'minor')
ax.xaxis.grid(True,'major',linewidth=2)
ax.yaxis.grid(True,'major',linewidth=2)

# NOTE(review): the first xx below is overwritten immediately by the second.
xx = data_620[:,1]/param.last_orbits[620]*365.
xx = data_620[:,1]/param.last_orbits[800]*365.
xx = figures.convert_date(xx)
#plt.plot(xx, data_620[:,4], 'b' , linewidth=2, label='620 km')
#plt.plot(xx, data_620[:,4], 'Darkorange' , linewidth=2, label='Worst case')
plt.plot(xx, data_620[:,4], 'Indigo' , linewidth=2, label=r'$28^\circ\mathrm{\ INAF}$')
#plt.plot(xx, data_620[:,4], 'Darkorange' , linewidth=2, label='Worst case')

#xx = data_700[:,1]/param.last_orbits[700]*365.
xx = data_700[:,1]/param.last_orbits[800]*365.
xx = figures.convert_date(xx)
#plt.plot(xx, data_700[:,4], 'r', linewidth=3, label='700 km')
plt.plot(xx, data_700[:,4], 'g', linewidth=3, label='Current INAF')

xx = data_800[:,1]/param.last_orbits[800]*365.
xx = figures.convert_date(xx)
plt.plot(xx, data_800[:,4], 'k' , linewidth=2, label='800 km')
#plt.plot(xx, data_800[:,4], 'k' , linewidth=2, label='RUAG')

xx = data_800[:,1]/param.last_orbits[800]*365.
xx = figures.convert_date(xx)
plt.xlim([xx[0],xx[-1]])

# Horizontal line marking the maximum admissible stray light flux.
plt.plot([xx[0],xx[-1]], [rqmt_flux, rqmt_flux], color='r', lw=3)

fig.autofmt_xdate()
plt.legend(loc=9)

locs, labels = plt.yticks()
#plt.yticks(locs, map(lambda x: r"$%g$" % (float(x) * 1e2), locs))
#fig.text(0.12, 0.91, r'$\times 10^{-2}$')
# r'$\times 10^{-2}$'
plt.ylabel(r'$\mathrm{Maximum\ stray\ light\ flux\ }\left[\frac{\mathrm{ph}}{\mathrm{px}\cdot\mathrm{s}}\right]$')

# Saves the figure
# NOTE(review): sl_angle undefined here as well -- see the note above.
fname = '%sall_fluxes_800_com%d' % (folder_figures,sl_angle)
figures.savefig(fname,fig,fancy)

if show: plt.show()
{ "repo_name": "kuntzer/SALSA-public", "path": "8_compare_max_fluxes.py", "copies": "1", "size": "5439", "license": "bsd-3-clause", "hash": -8122013035511458000, "line_mean": 30.08, "line_max": 114, "alpha_frac": 0.6390880677, "autogenerated": false, "ratio": 2.7195, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.8775440183474141, "avg_score": 0.016629576845171645, "num_lines": 175 }
# 8 (continued). si/eol/images # images is for POST, get_images is for GET # The output is weird. # "species": [{"images": [{"mediaURL": ...}, {"mediaURL": ...}, ...]}] # How do we know which image goes with which species? import sys, unittest, json sys.path.append('./') sys.path.append('../') import webapp import si_eol_get_images.SiEolImagesTester service = webapp.get_service(5004, 'si/eol/images') class TestSiEolImages(SiEolImagesTester): @classmethod def get_service(self): return service @classmethod def http_method(self): return 'GET' # Insert here: edge case tests # Insert here: inputs out of range, leading to error or long delay # Insert here: error-generating conditions # (See ../README.md) def test_example_20(self): x = self.start_request_tests(example_20) self.assert_success(x) # Insert: whether result is what it should be according to docs # There should be at least one image per species self.assertTrue(len(all_images(x)) >= len(example_20.parameters[u'species'])) def all_images(x): return [image for source in x.json()[u'species'] for image in source[u'images']] example_20 = service.get_request('POST', {u'species': [u'Catopuma badia', u'Catopuma temminckii']}) if __name__ == '__main__': webapp.main()
{ "repo_name": "jar398/tryphy", "path": "tests/test_si_eol_images.py", "copies": "1", "size": "1343", "license": "bsd-2-clause", "hash": 7508067493960265000, "line_mean": 28.8444444444, "line_max": 99, "alpha_frac": 0.6582278481, "autogenerated": false, "ratio": 3.3828715365239295, "config_test": true, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.9513148813731142, "avg_score": 0.00559011417855747, "num_lines": 45 }
# Copyright 2016 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
#     http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
# (Fix: removed a stray no-op ``8`` literal fused onto the header line.)
from cement.utils.misc import minimal_logger

from ebcli.core import io
from ebcli.core.abstractcontroller import AbstractBaseController
from ebcli.lib import elasticbeanstalk as elasticbeanstalk
from ebcli.operations import appversionops, commonops
from ebcli.resources.strings import strings, flag_text


class AppVersionController(AbstractBaseController):
    """``eb appversion`` command: manage application versions.

    With ``--delete LABEL`` the named version is removed; without options
    an interactive version listing is shown instead.
    """

    class Meta(AbstractBaseController.Meta):
        label = 'appversion'
        description = strings['appversion.info']
        arguments = [
            (['--delete', '-d'], dict(action='store',
                                      help=flag_text['appversion.delete'],
                                      metavar='VERSION_LABEL'))
        ]
        usage = 'eb appversion <lifecycle> [options ...]'

    def do_command(self):
        """Entry point: dispatch to delete mode or interactive mode."""
        self.app_name = self.get_app_name()
        # For appversion, it's fine if the environment is not defined.
        self.env_name = self.get_env_name(noerror=True)

        # If the user passed an app version label to delete, do that and stop.
        if self.app.pargs.delete is not None:
            version_label_to_delete = self.app.pargs.delete
            appversionops.delete_app_version_label(self.app_name, version_label_to_delete)
            return

        # If none of the above, enter interactive mode.
        self.interactive_list_version()

    def interactive_list_version(self):
        """Interactive mode which allows the user to see previous versions
        and choose to:
        - deploy a different version
        - delete a certain version
        Runs when the user supplies no argument to the --delete flag.
        """
        app_versions = elasticbeanstalk.get_application_versions(self.app_name)['ApplicationVersions']
        appversionops.display_versions(self.app_name, self.env_name, app_versions)
{ "repo_name": "pralexa/awsebcli", "path": "ebcli/controllers/appversion.py", "copies": "3", "size": "2295", "license": "apache-2.0", "hash": 4313240134265184000, "line_mean": 41.5, "line_max": 116, "alpha_frac": 0.7032679739, "autogenerated": false, "ratio": 4.061946902654867, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.6265214876554868, "avg_score": null, "num_lines": null }
# -*- coding: utf-8 -*-
import sys
import os
import socket
import argparse
import logging


class FileSender(object):
    """Pushes local files to a simple TCP file server.

    Wire protocol (matching the companion server): send ``"<name>\\t<size>"``,
    wait for the literal reply ``"ready"``, then stream the file bytes in
    1 KiB chunks.
    """

    logger = None  # class-wide default logger, created lazily

    def __init__(self, host, port, logger=None):
        """Remember the server address and pick a logger.

        :param host: server hostname or IP
        :param port: server TCP port
        :param logger: optional ``logging.Logger``; defaults to a shared
            stderr logger named after the class
        """
        self.host = host
        self.port = port
        self.logger = logger or self.getDefaultLogger()

    @classmethod
    def getDefaultLogger(cls):
        """Create (once) and return the shared class-level DEBUG logger."""
        if not cls.logger:
            logger = logging.getLogger(cls.__name__)
            logger.setLevel(logging.DEBUG)
            sh = logging.StreamHandler()
            sh.setLevel(logging.DEBUG)
            formatter = logging.Formatter(
                '%(levelname)s - %(asctime)s - %(name)s - %(message)s')
            sh.setFormatter(formatter)
            logger.addHandler(sh)
            cls.logger = logger
        return cls.logger

    def send(self, *files):
        """Send each named file in turn."""
        for f in files:
            self.sendfile(f)

    def sendfile(self, file):
        """Send a single file; log and return early on any failure."""
        try:
            size = os.stat(file).st_size
        except IOError as err:
            # BUG FIX: IOError has no ``.msg`` attribute, so the original
            # error path itself raised AttributeError; log the exception
            # object instead.
            self.logger.error(err)
            return

        f = open(file, 'rb')
        s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        s.connect((self.host, self.port))
        # Sockets carry bytes: encode the text header on Python 3 while
        # leaving Python 2 (where str is already bytes) untouched.
        header = '{}\t{}'.format(file, size)
        if not isinstance(header, bytes):
            header = header.encode('utf-8')
        s.send(header)
        ready = s.recv(256)
        # b'ready' == 'ready' on Python 2, so this comparison works on both;
        # the original compared str against bytes and always failed on Py3.
        if ready == b'ready':
            self.logger.info('start sending {}, {} bytes'.format(file, size))
        else:
            self.logger.error('server not responding correctly')
            f.close(), s.close()
            return

        nbytes = 0
        while True:
            buf = f.read(1024)
            s.send(buf)
            nbytes += len(buf)
            if nbytes >= size:
                break
        self.logger.info('{} bytes sent'.format(nbytes))
        f.close(), s.close()


if '__main__' == __name__:
    parser = argparse.ArgumentParser()
    parser.add_argument('file', type=str, help="file to send")
    parser.add_argument(
        '-H', '--host', dest='host', type=str, action='store',
        default='127.0.0.1', help="file server name")
    parser.add_argument(
        '-P', '--port', dest='port', type=int, action='store',
        default=8888, help="file server port")
    args = parser.parse_args()
    client = FileSender(args.host, args.port)
    client.send(args.file)
{ "repo_name": "dlutxx/memo", "path": "python/fileclient.py", "copies": "1", "size": "2374", "license": "mit", "hash": 8794118064522743000, "line_mean": 26.2873563218, "line_max": 77, "alpha_frac": 0.5261162595, "autogenerated": false, "ratio": 3.9698996655518393, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.9993717074477126, "avg_score": 0.0004597701149425287, "num_lines": 87 }
# 8 schools hierarchical model: centered vs. non-centered parameterization.
# https://docs.pymc.io/notebooks/Diagnosing_biased_Inference_with_Divergences.html

import pymc3 as pm
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
plt.style.use('seaborn-darkgrid')
from collections import defaultdict
import arviz as az

# FIX: "Runing" -> "Running" (typo in the startup message).
print('Running on PyMC3 v{}'.format(pm.__version__))

# Data of the Eight Schools Model: J treatment effects with known std errors.
J = 8
y = np.array([28., 8., -3., 7., -1., 1., 18., 12.])
sigma = np.array([15., 10., 16., 11., 9., 11., 10., 18.])

# Labels for the per-school effects (list comprehension replaces append loop).
names = ['theta {}'.format(t) for t in range(J)]
print(names)

# Plot raw data: one horizontal error bar per school.
fig, ax = plt.subplots()
y_pos = np.arange(J)
ax.errorbar(y, y_pos, xerr=sigma, fmt='o')
ax.set_yticks(y_pos)
ax.set_yticklabels(names)
ax.invert_yaxis()  # labels read top-to-bottom
plt.show()

# Centered model: school effects alpha drawn directly from N(mu_alpha, sigma_alpha).
with pm.Model() as Centered_eight:
    mu_alpha = pm.Normal('mu_alpha', mu=0, sigma=5)
    sigma_alpha = pm.HalfCauchy('sigma_alpha', beta=5)
    alpha = pm.Normal('alpha', mu=mu_alpha, sigma=sigma_alpha, shape=J)
    obs = pm.Normal('obs', mu=alpha, sigma=sigma, observed=y)

np.random.seed(0)
with Centered_eight:
    trace_centered = pm.sample(1000, chains=4)

pm.summary(trace_centered).round(2)
# Effective sample size is << 4*1000, especially for sigma_alpha.
# Also, PyMC3 gives various warnings about not mixing.

# Display the total number and percentage of divergent transitions.
diverging = trace_centered['diverging']
print('Number of Divergent Chains: {}'.format(diverging.nonzero()[0].size))
diverging_pct = diverging.nonzero()[0].size / len(trace_centered) * 100
print('Percentage of Divergent Chains: {:.1f}'.format(diverging_pct))

az.plot_autocorr(trace_centered, var_names=['mu_alpha', 'sigma_alpha'])
az.plot_forest(trace_centered, var_names="alpha", credible_interval=0.95)

# Non-Centered model: alpha defined deterministically from a unit-normal offset,
# which removes the funnel geometry that breaks the centered sampler.
with pm.Model() as NonCentered_eight:
    mu_alpha = pm.Normal('mu_alpha', mu=0, sigma=5)
    sigma_alpha = pm.HalfCauchy('sigma_alpha', beta=5)
    alpha_offset = pm.Normal('alpha_offset', mu=0, sigma=1, shape=J)
    alpha = pm.Deterministic('alpha', mu_alpha + sigma_alpha * alpha_offset)
    #alpha = pm.Normal('alpha', mu=mu_alpha, sigma=sigma_alpha, shape=J)
    obs = pm.Normal('obs', mu=alpha, sigma=sigma, observed=y)

np.random.seed(0)
with NonCentered_eight:
    trace_noncentered = pm.sample(1000, chains=4)

pm.summary(trace_noncentered).round(2)
# Things look much better: r_hat = 1, ESS ~ 4*1000

# BUG FIX: this model's variables are named 'mu_alpha' and 'sigma_alpha',
# not 'mu'/'tau' (those names are used in the newer schools8_pymc3.py);
# passing nonexistent names makes arviz raise a KeyError here.
az.plot_autocorr(trace_noncentered, var_names=['mu_alpha', 'sigma_alpha'])
az.plot_forest(trace_noncentered, var_names="alpha", combined=True,
               credible_interval=0.95)

# Plot the "funnel of hell": per-school effect vs. group scale, centered
# (left) against non-centered (right).
# Based on
# https://github.com/twiecki/WhileMyMCMCGentlySamples/blob/master/content/downloads/notebooks/GLM_hierarchical_non_centered.ipynb
for group in range(J):
    fig, axs = plt.subplots(ncols=2, sharex=True, sharey=True)
    # NOTE: 'y' deliberately shadows the data array inside this loop, as in
    # the original; the data array is not used again afterwards.
    x = pd.Series(trace_centered['alpha'][:, group], name=f'alpha {group}')
    y = pd.Series(trace_centered['sigma_alpha'], name='sigma_alpha')
    axs[0].plot(x, y, '.')
    axs[0].set(title='Centered', ylabel='sigma_alpha', xlabel=f'alpha {group}')
    x = pd.Series(trace_noncentered['alpha'][:, group], name=f'alpha {group}')
    y = pd.Series(trace_noncentered['sigma_alpha'], name='sigma_alpha')
    axs[1].plot(x, y, '.')
    axs[1].set(title='Non-centered', ylabel='sigma_alpha', xlabel=f'alpha {group}')
{ "repo_name": "probml/pyprobml", "path": "scripts/schools8_pymc3_old.py", "copies": "1", "size": "3556", "license": "mit", "hash": -1064286880298471000, "line_mean": 31.045045045, "line_max": 129, "alpha_frac": 0.6808211474, "autogenerated": false, "ratio": 2.835725677830941, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.4016546825230941, "avg_score": null, "num_lines": null }
# 8 schools hierarchical model: centered vs. non-centered parameterization.
# https://docs.pymc.io/notebooks/Diagnosing_biased_Inference_with_Divergences.html

import pymc3 as pm
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
#plt.style.use('seaborn-darkgrid')
from collections import defaultdict
import arviz as az

# NOTE(review): "Runing" typo in this runtime message left as-is.
print('Runing on PyMC3 v{}'.format(pm.__version__))

# Data of the Eight Schools Model: J treatment effects with known std errors.
J = 8
y = np.array([28., 8., -3., 7., -1., 1., 18., 12.])
sigma = np.array([15., 10., 16., 11., 9., 11., 10., 18.])
sigma2 = np.power(sigma, 2)

# Precision-weighted pooled estimate (inverse-variance weighting), not the
# plain mean.
#pooledMLE = np.mean(y)
pooledMLE = np.sum(y / sigma2) / np.sum(1/sigma2) # 7.7

# Labels for the per-school parameters.
names=[];
for t in range(8):
    names.append('theta {}'.format(t));
print(names)

# Plot raw data: one error bar per school, with the pooled MLE as a red line.
fig, ax = plt.subplots()
y_pos = np.arange(J)
ax.errorbar(y, y_pos, xerr=sigma, fmt='o')
ax.set_yticks(y_pos)
ax.set_yticklabels(names)
ax.invert_yaxis()  # labels read top-to-bottom
ax.axvline(pooledMLE, color='r', linestyle='-')
plt.savefig('../figures/hbayes_schools8_data.pdf', dpi=300)
plt.show()

# Centered model: theta drawn directly from N(mu, tau).
with pm.Model() as Centered_eight:
    mu = pm.Normal('mu', mu=0, sigma=5)
    tau = pm.HalfCauchy('tau', beta=5)
    theta = pm.Normal('theta', mu=mu, sigma=tau, shape=J)
    obs = pm.Normal('obs', mu=theta, sigma=sigma, observed=y)

np.random.seed(0)
with Centered_eight:
    trace_centered = pm.sample(1000, chains=4, cores=1)

print(pm.summary(trace_centered).round(2))
# Effective sample size is << 4*1000, especially for tau
# Also, PyMC3 gives various warnings about not mixing

# Display the total number and percentage of divergent chains
diverging = trace_centered['diverging']
print('Number of Divergent Chains: {}'.format(diverging.nonzero()[0].size))
diverging_pct = diverging.nonzero()[0].size / len(trace_centered) * 100
print('Percentage of Divergent Chains: {:.1f}'.format(diverging_pct))

az.plot_autocorr(trace_centered, var_names=['mu', 'tau']);
az.plot_forest(trace_centered, var_names="theta", credible_interval=0.95);

# Non-Centered model: theta built deterministically from a unit-normal
# offset, which avoids the funnel geometry of the centered version.
with pm.Model() as NonCentered_eight:
    mu = pm.Normal('mu', mu=0, sigma=10)
    tau = pm.HalfCauchy('tau', beta=5)
    theta_offset = pm.Normal('theta_offset', mu=0, sigma=1, shape=J)
    theta = pm.Deterministic('theta', mu + tau * theta_offset)
    #theta = pm.Normal('theta', mu=mu, sigma=tau, shape=J)
    obs = pm.Normal('obs', mu=theta, sigma=sigma, observed=y)

np.random.seed(0)
with NonCentered_eight:
    trace_noncentered = pm.sample(1000, chains=4)

print(pm.summary(trace_noncentered).round(2))
# Things look much beteter: r_hat = 1, ESS ~ 4*1000

az.plot_autocorr(trace_noncentered, var_names=['mu', 'tau']);

# Posterior means of the school effects and of the group-level mean.
post_mean = np.mean(trace_noncentered['theta'])
hyper_mean = np.mean(trace_noncentered['mu']) # 4.340815448509472

# Forest plot with a vertical line at the hyper-mean for reference.
axes = az.plot_forest(trace_noncentered, var_names="theta",
                      combined=True, credible_interval=0.95);
y_lims = axes[0].get_ylim()
axes[0].vlines(hyper_mean, *y_lims)
plt.savefig('../figures/hbayes_schools8_forest.pdf', dpi=300)

az.plot_posterior(trace_noncentered['tau'], credible_interval=0.95)
plt.savefig('../figures/hbayes_schools8_tau.pdf', dpi=300)

# Plot the "funnel of hell": per-school effect vs. group scale, centered
# (left) against non-centered (right), one figure per school.
# Based on
# https://github.com/twiecki/WhileMyMCMCGentlySamples/blob/master/content/downloads/notebooks/GLM_hierarchical_non_centered.ipynb
for group in range(J):
    #x = pd.Series(trace_centered['theta'][:, group], name=f'theta {group}')
    #y = pd.Series(trace_centered['tau'], name='tau')
    #sns.jointplot(x, y);
    fig, axs = plt.subplots(ncols=2, sharex=True, sharey=True)
    # NOTE: 'y' shadows the data array inside this loop; the data array is
    # not used again afterwards.
    x = pd.Series(trace_centered['theta'][:, group], name=f'theta {group}')
    y = pd.Series(trace_centered['tau'], name='tau')
    axs[0].plot(x, y, '.');
    axs[0].set(title='Centered', ylabel='tau', xlabel=f'theta {group}')
    x = pd.Series(trace_noncentered['theta'][:, group], name=f'theta {group}')
    y = pd.Series(trace_noncentered['tau'], name='tau')
    axs[1].plot(x, y, '.');
    axs[1].set(title='Non-centered', ylabel='tau', xlabel=f'theta {group}')
{ "repo_name": "probml/pyprobml", "path": "scripts/schools8_pymc3.py", "copies": "1", "size": "4019", "license": "mit", "hash": 6834202812326901000, "line_mean": 31.16, "line_max": 129, "alpha_frac": 0.6730529983, "autogenerated": false, "ratio": 2.739604635310157, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.39126576336101565, "avg_score": null, "num_lines": null }
# 8. si/eol/get_images
# Integration tests (Python 2) for the si/eol/get_images web service method.
# The tests exercise a live service via the project-local `webapp` helper
# module -- presumably started out-of-process; verify against ../README.md.
# TBD: reuse the eol/images tests for this method (similarly to other
# similar situations)

import sys, unittest, json
sys.path.append('./')
sys.path.append('../')
import webapp

# Handle on the service under test (port 5004, method path si/eol/get_images).
service = webapp.get_service(5004, 'si/eol/get_images')

class SiEolImagesTester(webapp.WebappTestCase):
    # Negative-path tests shared by the GET subclass below.

    def test_bad_method(self):
        # Empty POST: expect 405 Method Not Allowed.
        request = service.get_request('POST', {})
        x = self.start_request_tests(request)
        # POST method not allowed
        self.assertEqual(x.status_code, 405)
        # TBD: check for informativeness

    def test_no_parameter(self):
        # Missing the required 'species' parameter: expect 400 plus an error
        # message that mentions the missing parameter name.
        request = service.get_request('POST', {})
        x = self.start_request_tests(request)
        self.assertTrue(x.status_code == 400)
        m = x.json().get(u'message')
        self.assertTrue(u'species' in m, #informative?
                        'no "species" in "%s"' % m)

    def test_bad_parameter(self):
        """What if the supplied parameter name is wrong?  Similar to previous"""
        request = service.get_request('POST', {u'bad_parameter': []})
        x = self.start_request_tests(request)
        self.assertTrue(x.status_code == 400)
        m = x.json().get(u'message')
        self.assertTrue(u'species' in m, #informative?
                        'no "species" in "%s"' % m)

    def test_bad_value_type(self):
        """What if the value is a single species name instead of a list?
        18 seconds (!) - doc says expected response time 2s - 6s.
        76 metadata blobs are returned.  TBD: issue."""
        print 'Patience, this may take 20 seconds'
        request = service.get_request('POST', {u'species': u'Nosuchtaxonia mistakea'})
        x = self.start_request_tests(request)
        # Any 4xx status is accepted here.
        self.assertTrue(x.status_code % 100 == 4, x.status_code)
        json.dump(x.to_dict(), sys.stdout, indent=2)
        # TBD: Change this to a *correct* check for message informativeness.
        m = x.json().get(u'message')
        self.assertTrue(u'species' in m, #informative?
                        'no "species" in "%s"' % m)

    def test_bad_name(self):
        # A well-formed list containing an unknown species name: the service
        # answers 200 with an empty image list rather than an error.
        request = service.get_request('POST', {u'species': [u'Nosuchtaxonia mistakea']})
        x = self.start_request_tests(request)
        m = x.json().get(u'message')
        self.assert_success(x, m)    # gives a 200, which is acceptable
        self.assertTrue(u'species' in x.json())
        # json.dump(x.to_dict(), sys.stdout, indent=2)
        self.assertEqual(len(all_images(x)), 0, "number of images")

    # Too many species?
    # Insert here: edge case tests
    # Insert here: inputs out of range, leading to error or long delay
    # Insert here: error-generating conditions
    # (See ../README.md)


class TestSiEolGetImages(SiEolImagesTester):
    # Concrete runner: repeats the negative tests and adds GET-specific cases.

    @classmethod
    def get_service(self):
        return service

    @classmethod
    def http_method(self):
        return 'GET'

    def test_bad_method(self):
        """What if you do a GET when the service is expecting a POST?
        (Hoping for 405.)"""
        request = service.get_request('GET', {})
        x = self.start_request_tests(request)
        # GET method not allowed
        self.assertEqual(x.status_code, 405)
        # TBD: check for informativeness
        json.dump(x.to_dict(), sys.stdout, indent=2)

    def test_example_19(self):
        # Documented example request: three pipe-separated Panthera species.
        x = self.start_request_tests(example_19)
        self.assert_success(x)
        # Insert: whether result is what it should be according to docs


def all_images(x):
    # Flatten the per-species image lists in a response into one list.
    return [image
            for source in x.json()[u'species']
            for image in source[u'images']]


example_19 = service.get_request('GET', {u'species': u'Panthera leo|Panthera onca|Panthera pardus'})

if __name__ == '__main__':
    webapp.main()
{ "repo_name": "jar398/tryphy", "path": "tests/test_si_eol_get_images.py", "copies": "1", "size": "3732", "license": "bsd-2-clause", "hash": -2137508478097817900, "line_mean": 35.5882352941, "line_max": 100, "alpha_frac": 0.6146838156, "autogenerated": false, "ratio": 3.465181058495822, "config_test": true, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.9564055096942781, "avg_score": 0.003161955430608121, "num_lines": 102 }
"""8th update, removes some NOT NULL constraints to allow SqlAlchemy to
handle cascading deletes

Revision ID: 3659056767c5
Revises: 563495edabd3
Create Date: 2015-11-23 12:26:17.370260

"""

# revision identifiers, used by Alembic.
revision = '3659056767c5'
down_revision = '563495edabd3'

from alembic import op
import sqlalchemy as sa

# Both tables carry a game_id column whose nullability this revision toggles.
_TABLES = ('requests', 'scores')


def upgrade():
    # Relax game_id to nullable so SQLAlchemy-level cascades can be applied.
    for table in _TABLES:
        op.alter_column(table, 'game_id',
                        existing_type=sa.INTEGER(),
                        nullable=True)


def downgrade():
    # Restore the original NOT NULL constraints, in reverse table order.
    for table in reversed(_TABLES):
        op.alter_column(table, 'game_id',
                        existing_type=sa.INTEGER(),
                        nullable=False)
{ "repo_name": "Rdbaker/GameCenter", "path": "migrations/versions/3659056767c5_.py", "copies": "2", "size": "1030", "license": "mit", "hash": -4058641323637509600, "line_mean": 27.6111111111, "line_max": 96, "alpha_frac": 0.6262135922, "autogenerated": false, "ratio": 3.759124087591241, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.5385337679791241, "avg_score": null, "num_lines": null }
# 8 异常 # --------- 8.1 认识 Traceback --------- # a = 1/0 # print(a) # ZeroDivisionError: division by zero # --------- 8.2 按自己的方式出错 --------- # raise 语句 # raise Exception # Exception,引发一个没有任何错误信息的普通异常 # raise Exception('hyperdrive overload') # Exception: hyperdrive overload,自定义错误信息 # raise ArithmeticError # ArithmeticError # 自定义异常类(直接或间接继承 Exception) # class SomeException(Exception): # pass # --------- 8.3 捕捉异常 --------- # 诱捕或捕捉异常(最有意思的地方) # x = input('Enter the first number: ') # 12 # y = input('Enter the second number: ') # 0 # result = int(int(x) / int(y)) # print(result) # ZeroDivisionError: division by zero # 改进一、加入异常捕捉 # try: # x = input('Enter the first number: ') # 12 # y = input('Enter the second number: ') # 0 # result = int(int(x) / int(y)) # print(result) # except ZeroDivisionError: # print('The second number can\'t be zero!') # 改进二、加入功能屏蔽开关 # class MuffledCalculator: # muffled = False # # def calc(self, expr): # try: # return eval(expr) # except ZeroDivisionError: # if self.muffled: # print('Division by zero is illega!') # else: # raise # # calculator = MuffledCalculator() # # print(calculator.calc('10/2')) # calculator.muffled = False # print(calculator.calc('10/0')) # calculator.muffled = True # print(calculator.calc('10/0')) # --------- 8.4 不止一个except子句 --------- # try: # x = input('Enter the first number: ') # 12 # y = input('Enter the second number: ') # 'a' # result = int(x) / int(y) # print(result) # except ZeroDivisionError: # print('The second number can\'t be zero!') # except TypeError: # print('That wasn\'s a number, was it?') # except ValueError: # print('You should enter a number, please try again.') # --------- 8.5 用一个块捕捉两个异常 --------- # try: # x = input('Enter the first number: ') # 12 # y = input('Enter the second number: ') # 'a' # result = int(x) / int(y) # print(result) # except (ZeroDivisionError, ValueError, TypeError): # print('Your number is dogus...') # 这样的话,具体不知道发送了什么异常 # --------- 8.6 捕捉对象 --------- # 
如果希望在except子句中访问异常对象本身,可以使用两个参数 # 如果想让程序继续运行,但是又因为某种原因想记录下错误,这个功能就很有用 # try: # x = input('Enter the first number: ') # 12 # y = input('Enter the second number: ') # 'a' # result = int(x) / int(y) # print(result) # except (ZeroDivisionError, ValueError, TypeError) as e: # python 3 写法 # print(e) # --------- 8.7 真正的全捕捉 --------- # 如果想用一段代码捕捉所有异常,那么可以在except子句中忽略所有的异常类 # try: # x = input('Enter the first number: ') # 12 # y = input('Enter the second number: ') # 'a' # result = int(x) / int(y) # print(result) # except: # 这样写(实际上不推荐这样写) # print('Something wrong happend...') # --------- 8.8 万事大吉 --------- # 有些情况中,一些坏事发生时执行一段代码是很有用的 # 可以像对条件和循环语句那样,给try/except语句加个else子句 # while True: # 只要有错误发生,程序会不断要求重新输入 # try: # x = input('Enter the first number: ') # 12 # y = input('Enter the second number: ') # 'a' # result = int(x) / int(y) # print(result) # except: # print('Invaild input. Please try again.') # else: # break # while True: # try: # x = input('Enter the first number: ') # 12 # y = input('Enter the second number: ') # 'a' # result = int(x) / int(y) # print(result) # except Exception as e: # print('Invaild input:', e) # 优化:打印错误的输入 # print('Please try again.') # else: # break # --------- 8.9 最后...(Finally子句) --------- # x = None # try: # x = 1/0 # finally: # finally子句肯定会被执行,不管try子句中是否发生异常 # print('Cleaning up...') # del x # 在同一条语句中组合使用try、except、finally和else # try: # x = input('Enter the first number: ') # 12 # y = input('Enter the second number: ') # 'a' # result = int(x) / int(y) # print(result) # except Exception as e: # print('Invaild input:', e) # else: # print('That went well!') # finally: # print('Cleaning up.') # --------- 8.10 异常和函数 --------- # 异常和函数能很自然地一起工作。 # 如果异常在函数内引发而不被处理,它就会传播至(浮到)函数调用的地方。 # 如果在那里也没有处理异常,它就会继续传播,一直到达主程序(全局作用域)。 # 如果那里也没有异常处理程序,程序会带着堆栈跟踪中止 # def faulty(): # raise Exception('Something is wrong') # # # def ignore_exception(): # faulty() # # # def handle_exception(): # try: # faulty() # except: # print('Exception handled') # ignore_exception() 
# 报错 # handle_exception() # Exception handled,被try-except语句处理 # --------- 8.11 异常之禅 --------- # 如果知道某段代码可能会导致某种异常,而又不希望程序以堆栈跟踪的形式终止, # 那么就根据需要添加try-excep或者try-finally语句(或者它们的组合)进行处理。 # 有些时候,条件语句可以实现和异常处理同样地功能, # 但是条件语句可能在自然性和可读性上差些。 person1 = {'name': 'Tommy', 'age': '12', 'occupation': 'driver'} person2 = {'name': 'Tommy', 'age': '12'} # def describe_person(person): # print('Description of ', person['name']) # print('Age: ', person['age']) # if 'occupation' in person: # print('Occupation', person['occupation']) # # describe_person(person1) # describe_person(person2) def describe_person(person): print('Description of ', person['name']) print('Age: ', person['age']) try: print('Occupation', person['occupation']) except KeyError: pass describe_person(person1) describe_person(person2)
{ "repo_name": "xiezipei/beginning-python-demo", "path": "demo/except.py", "copies": "1", "size": "6548", "license": "mit", "hash": 2291597297727654100, "line_mean": 22.6769911504, "line_max": 86, "alpha_frac": 0.5751401869, "autogenerated": false, "ratio": 2.104642014162077, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.3179782201062077, "avg_score": null, "num_lines": null }
'''
8x4grid-unicornphat.py

Pygame-based editor for drawing single frames and multi-frame animations on
the Pimoroni UnicornpHAT (8x4) LED matrix.  Frames can be played back on the
hardware and exported to PNG, to the console, or to a standalone Python
script (animation8x4.py).
'''
import pygame
import sys
import math
from pygame.locals import *
from led import LED          # project class: one on-screen LED (pos, color, lit, draw, clicked)
from buttons import Button   # project class: clickable UI button (rect, click, draw)
import png # pypng
import unicornhat as uh
import copy, time

uh.set_layout(uh.PHAT)

# --- Global editor state ---
saved = True      # False once the animation has changes not yet exported
warning = False   # True while the "unsaved frames" save/quit dialog is up
pygame.display.init()
pygame.font.init()
screen = pygame.display.set_mode((530, 395), 0, 32)
pygame.display.set_caption('UnicornPHAT Grid editor')
pygame.mouse.set_visible(1)
background = pygame.Surface(screen.get_size())
background = background.convert()
background.fill((0, 51, 25))
colour = (255,0,0) # Set default colour to red
rotation = 0       # current hardware rotation in degrees (0 or 180)
#uh.rotation(rotation)
frame_number = 1   # 1-based index of the frame currently being edited
fps = 4            # playback speed in frames per second


# Palette button callbacks: each sets the global drawing colour (RGB tuple).
def setColourRed():
    global colour
    colour = (255,0,0)

def setColourBlue():
    global colour
    colour = (0,0,255)

def setColourGreen():
    global colour
    colour = (0,255,0)

def setColourPurple():
    global colour
    colour = (102,0,204)

def setColourPink():
    global colour
    colour = (255,0,255)

def setColourYellow():
    global colour
    colour = (255,255,0)

def setColourOrange():
    global colour
    colour = (255,128,0)

def setColourWhite():
    global colour
    colour = (255,255,255)

def setColourCyan():
    global colour
    colour = (0,255,255)


def clearGrid():
    # Clears the pygame LED grid and sets all the leds.lit back to False
    for led in leds:
        led.lit = False


def buildGrid():
    # Takes a grid and builds versions for exporting (png and text).
    # Returns (grid, png_grid): `grid` is an 8x8 list-of-rows of [r,g,b]
    # (the 4-row frame is duplicated into rows 0-3 and 4-7); `png_grid` is
    # the row format expected by png.Writer.
    e = [0,0,0]
    e_png = (0,0,0)   # NOTE(review): unused -- kept as-is
    grid = [
    [e,e,e,e,e,e,e,e],
    [e,e,e,e,e,e,e,e],
    [e,e,e,e,e,e,e,e],
    [e,e,e,e,e,e,e,e],
    [e,e,e,e,e,e,e,e],
    [e,e,e,e,e,e,e,e],
    [e,e,e,e,e,e,e,e],
    [e,e,e,e,e,e,e,e],
    ]
    #png_grid =[]
    #print(grid)
    png_grid = ['blank','blank','blank','blank','blank','blank','blank','blank']
    # png_grid = ['blank','blank','blank','blank']
    for led in leds:
        if led.lit:
            grid[led.pos[1]][led.pos[0]] = [led.color[0], led.color[1], led.color[2]]
            # Mirror the 4-row frame into the lower half of the 8x8 grid.
            grid[led.pos[1]+4][led.pos[0]] = [led.color[0], led.color[1], led.color[2]]
            if png_grid[led.pos[0]] == 'blank':
                png_grid[led.pos[0]] = (led.color[0], led.color[1], led.color[2])
            else:
                png_grid[led.pos[0]] = png_grid[led.pos[0]] + (led.color[0], led.color[1], led.color[2])
        else:
            if png_grid[led.pos[0]] == 'blank':
                png_grid[led.pos[0]] = (0,0,0)
            else:
                png_grid[led.pos[0]] = png_grid[led.pos[0]] + (0,0,0)
    return (grid, png_grid)


def piLoad():
    # Loads image onto matrix: pushes the currently lit LEDs to the hardware.
    uh.off()
    for led in leds:
        if led.lit:
            uh.set_pixel(led.pos[0], led.pos[1], led.color[0], led.color[1], led.color[2])
            #print str(led.pos[0])+ ' ' +str(led.pos[1]) + ' ' + str(led.color[1])
    uh.show()


def exportGrid():
    # Writes png to file (image8x4.png) and marks the editor state saved.
    global saved
    grid, png_grid = buildGrid()
    FILE=open('image8x4.png','wb')
    w = png.Writer(4,8)
    w.write(FILE,png_grid)
    FILE.close()
    saved = True


def exportCons():
    # Writes raw list to console
    grid, png_grid = buildGrid()
    print(grid)


def rotate():
    # Rotates image on the LED matrix by 180 degrees and replays the animation.
    global rotation
    if rotation == 180:
        rotation = 0
    else:
        rotation = rotation + 180
    #ap.set_rotation(rotation)
    uh.rotation(rotation)
    play()


def handleClick():
    # Dispatch a mouse click to the LED under it and/or any UI button hit.
    global saved
    global warning
    pos = pygame.mouse.get_pos()
    led = findLED(pos, leds)
    if led:
        #print('led ' + str(led.pos_y) + ' clicked')
        led.clicked(colour)
        saved = False
    for butt in buttons:
        if butt.rect.collidepoint(pos):
            butt.click()
            #print 'button clicked'
    if warning:
        # The save/quit dialog buttons are only active while it is showing.
        for butt in buttons_warn:
            if butt.rect.collidepoint(pos):
                butt.click()


def findLED(clicked_pos, leds):
    # Reads leds and checks if clicked position is in one of them;
    # returns the hit LED or None.
    x = clicked_pos[0]
    y = clicked_pos[1]
    for led in leds:
        if math.hypot(led.pos_x - x, led.pos_y - y) <= led.radius:
            return led
            #print 'hit led'
    return None


def drawEverything():
    # Redraw the whole UI: background, LEDs, buttons, labels and the
    # current-colour swatch, then flip the display.
    global warning
    screen.blit(background, (0, 0))
    #draw the leds
    for led in leds:
        led.draw()
        #print(led.pos_x,led.pos_y)
    for button in buttons:
        button.draw(screen)
    font = pygame.font.Font(None,16)
    frame_text = 'Frame '
    text = font.render(frame_text,1,(255,255,255))
    screen.blit(text, (5,5))
    frame_num_text = str(frame_number)
    text = font.render(frame_num_text,1,(255,255,255))
    screen.blit(text, (18,18))
    fps_text = 'Frame rate= ' + str(fps) +' fps'
    text = font.render(fps_text,1,(255,255,255))
    screen.blit(text, (175,10)) # done
    font = pygame.font.Font(None,18)
    export_text = 'Animation' # done
    text = font.render(export_text,1,(255,255,255))
    screen.blit(text, (445,15)) # done
    export_text = 'Single Frame'
    text = font.render(export_text,1,(255,255,255))
    screen.blit(text, (435,120)) # done
    # Swatch showing the currently selected drawing colour.
    pygame.draw.circle(screen,colour,(390,345),20,0)
    #flip the screen
    if warning:
        for button in buttons_warn:
            button.draw(screen)
    pygame.display.flip()


def load_leds_to_animation():
    # Copy the stored frame `animation[frame_number]` into the live `leds`.
    global frame_number
    global leds
    for saved_led in animation[frame_number]:
        if saved_led.lit:
            for led in leds:
                if led.pos == saved_led.pos:
                    led.color = saved_led.color
                    led.lit = True


def nextFrame():
    # Store the current frame and advance to the next one (reloading it if
    # it already exists; otherwise the current LEDs carry over).
    global frame_number
    global leds
    #print(frame_number)
    animation[frame_number] = copy.deepcopy(leds)
    #clearGrid()
    frame_number+=1
    if frame_number in animation:
        leds =[]
        for x in range(0, 8):
            for y in range(0, 4):
                led = LED(radius=20,pos=(x, y))
                #print(' x= ' + str(led.pos_x) + ' y= ' + str(led.pos_y))
                leds.append(led)
        load_leds_to_animation()


def prevFrame():
    # Store the current frame and step back one (clamped at frame 1).
    global frame_number
    global leds
    #print(frame_number)
    animation[frame_number] = copy.deepcopy(leds)
    clearGrid()
    if frame_number != 1:
        frame_number-=1
    if frame_number in animation:
        leds =[]
        for x in range(0, 8):
            for y in range(0, 4):
                led = LED(radius=20,pos=(x, y))
                leds.append(led)
        load_leds_to_animation()


def delFrame():
    # Delete the current frame (unless it is the only one) and close the
    # gap by shifting the later frames down one slot.
    global frame_number
    #print('ani length is ' + str(len(animation)) + ' frame is ' + str(frame_number))
    if len(animation) > 1:
        animation[frame_number] = copy.deepcopy(leds)
        del animation[frame_number]
        prevFrame()
        for shuffle_frame in range(frame_number+1,len(animation)):
            animation[shuffle_frame] = animation[shuffle_frame+1]
        del animation[len(animation)]


def getLitLEDs():
    # Return the (x, y) positions of all currently lit LEDs.
    points = []
    for led in leds:
        if led.lit:
            points.append(led.pos)
    return points


# Main program body - set up leds and buttons
leds = []
for x in range(0, 8):
    for y in range(0, 4):
        led = LED(radius=20,pos=(x, y))
        leds.append(led)
buttons = []        # always-active UI buttons
buttons_warn = []   # buttons of the "unsaved frames" dialog
animation={}        # frame number (1-based) -> deep-copied list of LEDs
#global frame_number


def play():
    # Play the whole animation on the hardware at `fps`, leaving the editor
    # positioned on the last frame.
    global leds
    global frame_number
    animation[frame_number] = copy.deepcopy(leds)
    #print 'length of ani is ' + str(len(animation))
    for playframe in range(1,(len(animation)+1)):
        #print(playframe)
        leds =[]
        for x in range(0, 8):
            for y in range(0, 4):
                led = LED(radius=20,pos=(x, y))
                leds.append(led)
        for saved_led in animation[playframe]:
            if saved_led.lit:
                for led in leds:
                    if led.pos == saved_led.pos:
                        led.color = saved_led.color
                        led.lit = True
        piLoad()
        time.sleep(1.0/fps)
    frame_number = len(animation)


def faster():
    # Bump the playback rate by 1 fps.
    global fps
    fps+=1


def slower():
    # Lower the playback rate by 1 fps, never below 1.
    global fps
    if fps != 1:
        fps-=1


def exportAni():
    # Write the animation out as a standalone script (animation8x4.py) that
    # replays all frames on a UnicornpHAT at the current fps.
    global saved
    FILE=open('animation8x4.py','w')
    FILE.write('import unicornhat as uh\n')
    FILE.write('uh.set_layout(uh.PHAT)\n')
    FILE.write('import time\n')
    FILE.write('FRAMES = [\n')
    global leds
    global frame_number
    animation[frame_number] = copy.deepcopy(leds)
    #print 'length of ani is ' + str(len(animation))
    for playframe in range(1,(len(animation)+1)):
        #print(playframe)
        leds =[]
        for x in range(0,8):
            for y in range(0,4):
                led = LED(radius=20,pos=(x, y))
                leds.append(led)
        for saved_led in animation[playframe]:
            if saved_led.lit:
                for led in leds:
                    if led.pos == saved_led.pos:
                        led.color = saved_led.color
                        led.lit = True
        grid, png_grid = buildGrid()
        #grid = uh.get_pixels()
        FILE.write(str(grid))
        FILE.write(',\n')
    FILE.write(']\n')
    FILE.write('for x in FRAMES:\n')
    FILE.write('\t uh.set_pixels(x)\n')
    FILE.write('\t uh.show()\n')
    FILE.write('\t time.sleep('+ str(1.0/fps) + ')\n')
    FILE.close()
    saved = True


def prog_exit():
    # Quit pygame and exit the process.
    print('exit clicked')
    global warning
    warning = False
    #clearGrid()
    pygame.quit()
    sys.exit(-1)


def save_it():
    # "Save" choice of the unsaved-frames dialog: export, then dismiss it.
    print('save clicked')
    global warning
    exportAni()
    warning = False


def quit():
    # NOTE(review): shadows the builtin quit(); kept for button wiring.
    global saved
    if saved == False:
        nosave_warn()
    else:
        prog_exit()


def importAni():
    # Rebuild the animation dict by parsing a previously exported
    # animation8x4.py (4 header lines, one frame per line, 5 trailer lines).
    global leds
    global frame_number
    with open('animation8x4.py') as ll:
        line_count = sum(1 for _ in ll)
        ll.close()
    #animation = {}
    frame_number = 1
    file = open('animation8x4.py')
    for r in range(4):
        file.readline()   # skip the header lines
    for frame in range(line_count-9):
        buff = file.readline()
        load_frame = buff.split('], [')
        #print(load_frame)
        counter = 1
        uh_leds =[]
        for f in load_frame:
            # Strip the bracket residue the split leaves on the edge cells.
            if counter == 1:
                f = f[3:]
            elif counter == 64:
                f = f[:-5]
            elif counter%8 == 0 and counter != 64:
                f = f[:-1]
            elif (counter-1)%8 == 0:
                f = f[1:]
            y = int((counter-1)/8)
            x = int((counter-1)%8)
            #print(counter,x,y)
            led = LED(radius=20,pos=(x, y))
            #print(' x= ' + str(led.pos_x) + ' y= ' + str(led.pos_y))
            #print(f)
            if f == '0, 0, 0':
                led.lit = False
            else:
                led.lit = True
                f_colours = f.split(',')
                #print(f_colours)
                led.color = [int(f_colours[0]),int(f_colours[1]),int(f_colours[2])]
            uh_leds.append(led)
            counter+=1
        # Only the first 32 cells matter: the exported 8x8 grid duplicates
        # the 4-row frame in its lower half.
        leds = []
        for pp in range(32):
            leds.append(uh_leds[pp])
        animation[frame_number] = copy.deepcopy(leds)
        frame_number+=1
        counter+=1
    file.close()
    #drawEverything()


# --- Build the button panels ---
exportAniButton = Button('Export to py', action=exportAni, pos=(425, 45), color=(153,0,0)) # done
buttons.append(exportAniButton)
importAniButton = Button('Import from file', action=importAni, pos=(425, 80 ), color=(153,0,0)) # done
buttons.append(importAniButton)
exportConsButton = Button('Export to console', action=exportCons, pos=(425, 150), color=(160,160,160)) # done
buttons.append(exportConsButton)
exportPngButton = Button('Export to PNG', action=exportGrid, pos=(425, 185), color=(160,160,160)) # done
buttons.append(exportPngButton)
RotateButton = Button('Rotate LEDs', action=rotate, pos=(425, 255), color=(205,255,255)) # done
buttons.append(RotateButton)
clearButton = Button('Clear Grid', action=clearGrid, pos=(425, 220), color=(204,255,255))# done
buttons.append(clearButton)
quitButton = Button('Quit', action=quit, pos=(425, 290), color=(96,96,96))
buttons.append(quitButton)
FasterButton = Button('+', action=faster, size=(40,30), pos=(270, 5), color=(184,138,0)) # done
buttons.append(FasterButton)
SlowerButton = Button('-', action=slower, size=(40,30), pos=(315, 5), color=(184,138,0))# done
buttons.append(SlowerButton)
PlayButton = Button('Play on LEDs', action=play, pos=(425, 340), color=(184,138,0)) # done
buttons.append(PlayButton)
# Colour palette buttons (one per colour setter above).
RedButton = Button('', action=setColourRed, size=(50,30), pos=(365, 10),hilight=(0, 200, 200),color=(255,0,0)) # done
buttons.append(RedButton)
OrangeButton = Button('', action=setColourOrange, size=(50,30), pos=(365, 45),hilight=(0, 200, 200),color=(255,128,0)) # done
buttons.append(OrangeButton)
YellowButton = Button('', action=setColourYellow, size=(50,30), pos=(365, 80),hilight=(0, 200, 200),color=(255,255,0)) # done
buttons.append(YellowButton)
GreenButton = Button('', action=setColourGreen, size=(50,30), pos=(365, 115),hilight=(0, 200, 200),color=(0,255,0)) # done
buttons.append(GreenButton)
CyanButton = Button('', action=setColourCyan, size=(50,30), pos=(365, 150),hilight=(0, 200, 200),color=(0,255,255)) # done
buttons.append(CyanButton)
BlueButton = Button('', action=setColourBlue, size=(50,30), pos=(365, 185),hilight=(0, 200, 200),color=(0,0,255)) # done
buttons.append(BlueButton)
PurpleButton = Button('', action=setColourPurple, size=(50,30), pos=(365, 220),hilight=(0, 200, 200),color=(102,0,204)) # done
buttons.append(PurpleButton)
PinkButton = Button('', action=setColourPink, size=(50,30), pos=(365, 255),hilight=(0, 200, 200),color=(255,0,255)) # done
buttons.append(PinkButton)
WhiteButton = Button('', action=setColourWhite, size=(50,30), pos=(365, 290),hilight=(0, 200, 200),color=(255,255,255)) # done
buttons.append(WhiteButton)
PrevFrameButton = Button('<-', action=prevFrame, size=(25,30), pos=(50, 5), color=(184,138,0)) # done
buttons.append(PrevFrameButton)
NextFrameButton = Button('->', action=nextFrame, size=(25,30), pos=(80, 5), color=(184,138,0)) # done
buttons.append(NextFrameButton)
DelFrame = Button('Delete', action=delFrame, size=(45,25), pos=(115, 7), color=(184,138,0)) # done
buttons.append(DelFrame)
# Dialog buttons, only drawn/clickable while `warning` is True.
saveButton = Button('Save', action=save_it, size=(60,50), pos=(150, 250),hilight=(200, 0, 0),color=(255,255,0)) # done
buttons_warn.append(saveButton)
QuitButton = Button('Quit', action=prog_exit, size=(60,50), pos=(260, 250),hilight=(200, 0, 0),color=(255,255,0)) # done
buttons_warn.append(QuitButton)


def nosave_warn():
    # Flash the "Unsaved Frames" banner and arm the save/quit dialog.
    global warning
    warning = True
    font = pygame.font.Font(None,48)
    frame_text = 'Unsaved Frames '
    for d in range(5):
        text = font.render(frame_text,1,(255,0,0))
        screen.blit(text, (100,100))
        pygame.display.flip()
        time.sleep(0.1)
        text = font.render(frame_text,1,(0,255,0))
        screen.blit(text, (100,100))
        pygame.display.flip()
        time.sleep(0.1)
    drawEverything()


# Main prog loop
while True:
    for event in pygame.event.get():
        if event.type == QUIT:
            if saved == False:
                nosave_warn()
            else:
                prog_exit()
        if event.type == MOUSEBUTTONDOWN:
            handleClick()
    #update the display
    drawEverything()
{ "repo_name": "topshed/RPi_8x8GridDraw", "path": "8x8grid-unicornphat.py", "copies": "1", "size": "15582", "license": "mit", "hash": 8078726868055155000, "line_mean": 27.9628252788, "line_max": 126, "alpha_frac": 0.575664228, "autogenerated": false, "ratio": 3.1677170156535883, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.9143741678617832, "avg_score": 0.01992791300715151, "num_lines": 538 }
'''
8x8grid-sense.py

Pygame-based editor for drawing single frames and multi-frame animations on
the Raspberry Pi Sense HAT 8x8 LED matrix.  Sibling of 8x4grid-unicornphat.py.
'''
import pygame
import sys
import math
from pygame.locals import *
from led import LED          # project class: one on-screen LED (pos, color, lit, draw, clicked)
from buttons import Button   # project class: clickable UI button (rect, click, draw)
import png # pypng
from sense_hat import SenseHat
import copy, time

# --- Global editor state ---
saved = True      # False once the animation has changes not yet exported
warning = False   # True while the "unsaved frames" save/quit dialog is up
pygame.init()
pygame.font.init()
sh=SenseHat()
screen = pygame.display.set_mode((530, 395), 0, 32)
pygame.display.set_caption('Sense HAT Grid Editor')
pygame.mouse.set_visible(1)
background = pygame.Surface(screen.get_size())
background = background.convert()
background.fill((0, 51, 25))
colour = (255,0,0) # Set default colour to red
rotation = 0       # current hardware rotation in degrees (0/90/180/270)
frame_number = 1   # 1-based index of the frame currently being edited
fps = 4            # playback speed in frames per second


# Palette button callbacks: each sets the global drawing colour (RGB tuple).
def setColourRed():
    global colour
    colour = (255,0,0)

def setColourBlue():
    global colour
    colour = (0,0,255)

def setColourGreen():
    global colour
    colour = (0,255,0)

def setColourPurple():
    global colour
    colour = (102,0,204)

def setColourPink():
    global colour
    colour = (255,0,255)

def setColourYellow():
    global colour
    colour = (255,255,0)

def setColourOrange():
    global colour
    colour = (255,128,0)

def setColourWhite():
    global colour
    colour = (255,255,255)

def setColourCyan():
    global colour
    colour = (0,255,255)


def clearGrid():
    # Clears the pygame LED grid and sets all the leds.lit back to False
    for led in leds:
        led.lit = False


def buildGrid():
    # Takes a grid and builds versions for exporting (png and text).
    # Returns (grid, png_grid): `grid` is the flat 64-element [r,g,b] list
    # that SenseHat.set_pixels() expects; `png_grid` is the row format
    # expected by png.Writer.
    e = [0,0,0]
    e_png = (0,0,0)   # NOTE(review): unused -- kept as-is
    grid = [
    e,e,e,e,e,e,e,e,
    e,e,e,e,e,e,e,e,
    e,e,e,e,e,e,e,e,
    e,e,e,e,e,e,e,e,
    e,e,e,e,e,e,e,e,
    e,e,e,e,e,e,e,e,
    e,e,e,e,e,e,e,e,
    e,e,e,e,e,e,e,e
    ]
    #png_grid =[]
    png_grid = ['blank','blank','blank','blank','blank','blank','blank','blank']
    for led in leds:
        if led.lit:
            # Flatten (x, y) into the row-major index used by set_pixels().
            val = led.pos[0] + (8 * led.pos[1])
            #print val
            grid[val] = [led.color[0], led.color[1], led.color[2]]
            if png_grid[led.pos[0]] == 'blank':
                png_grid[led.pos[0]] = (led.color[0], led.color[1], led.color[2])
            else:
                png_grid[led.pos[0]] = png_grid[led.pos[0]] + (led.color[0], led.color[1], led.color[2])
        else:
            if png_grid[led.pos[0]] == 'blank':
                png_grid[led.pos[0]] = (0,0,0)
            else:
                png_grid[led.pos[0]] = png_grid[led.pos[0]] + (0,0,0)
    return (grid, png_grid)


def piLoad():
    # Loads image onto SenseHAT matrix
    grid, grid_png = buildGrid()
    sh.set_pixels(grid)


def exportGrid():
    # Writes png to file (image8x8.png) and marks the editor state saved.
    global saved
    grid, png_grid = buildGrid()
    FILE=open('image8x8.png','wb')
    w = png.Writer(8,8)
    w.write(FILE,png_grid)
    FILE.close()
    saved = True


def exportCons():
    # Writes raw list to console
    grid, png_grid = buildGrid()
    print(grid)


def rotate():
    # Rotates image on SenseHAT LED matrix by 90 degrees (wraps at 270).
    global rotation
    if rotation == 270:
        rotation = 0
    else:
        rotation = rotation + 90
    sh.set_rotation(rotation)


def handleClick():
    # Dispatch a mouse click to the LED under it and/or any UI button hit.
    global saved
    global warning
    pos = pygame.mouse.get_pos()
    led = findLED(pos, leds)
    if led:
        #print 'led ' + str(led) + ' clicked'
        led.clicked(colour)
        saved = False
    for butt in buttons:
        if butt.rect.collidepoint(pos):
            butt.click()
            #print 'button clicked'
    if warning:
        # The save/quit dialog buttons are only active while it is showing.
        for butt in buttons_warn:
            if butt.rect.collidepoint(pos):
                butt.click()


def findLED(clicked_pos, leds):
    # Reads leds and checks if clicked position is in one of them;
    # returns the hit LED or None.
    x = clicked_pos[0]
    y = clicked_pos[1]
    for led in leds:
        if math.hypot(led.pos_x - x, led.pos_y - y) <= led.radius:
            return led
            #print 'hit led'
    return None


def drawEverything():
    # Redraw the whole UI (function continues beyond this excerpt).
    global warning
    screen.blit(background, (0, 0))
    #draw the leds
    for led in leds:
        led.draw()
    for button in buttons:
        button.draw(screen)
    font = pygame.font.Font(None,16)
    frame_text = 'Frame '
    text = font.render(frame_text,1,(255,255,255))
    screen.blit(text, (5,5))
    frame_num_text = str(frame_number)
    text = font.render(frame_num_text,1,(255,255,255))
    screen.blit(text, (18,18))
    fps_text = 'Frame rate= ' + str(fps) +' fps'
    text = font.render(fps_text,1,(255,255,255))
    screen.blit(text, (175,10))
    font = pygame.font.Font(None,18)
    export_text = 'Animation'
    text = font.render(export_text,1,(255,255,255))
    screen.blit(text, (445,15))
    export_text = 'Single Frame'
    text = font.render(export_text,1,(255,255,255))
screen.blit(text, (435,120)) pygame.draw.circle(screen,colour,(390,345),20,0) #flip the screen if warning: for button in buttons_warn: button.draw(screen) pygame.display.flip() def load_leds_to_animation(): global frame_number global leds for saved_led in animation[frame_number]: if saved_led.lit: for led in leds: if led.pos == saved_led.pos: led.color = saved_led.color led.lit = True def nextFrame(): global frame_number global leds #print(frame_number) animation[frame_number] = copy.deepcopy(leds) #clearGrid() frame_number+=1 if frame_number in animation: leds =[] for x in range(0, 8): for y in range(0, 8): led = LED(radius=20,pos=(x, y)) leds.append(led) load_leds_to_animation() def prevFrame(): global frame_number global leds #print(frame_number) animation[frame_number] = copy.deepcopy(leds) clearGrid() if frame_number != 1: frame_number-=1 if frame_number in animation: leds =[] for x in range(0, 8): for y in range(0, 8): led = LED(radius=20,pos=(x, y)) leds.append(led) load_leds_to_animation() def delFrame(): global frame_number #print('ani length is ' + str(len(animation)) + ' frame is ' + str(frame_number)) if len(animation) > 1: print('length =' + str(len(animation))) animation[frame_number] = copy.deepcopy(leds) print('deleting ' + str(frame_number)) del animation[frame_number] print('length now =' + str(len(animation))) prevFrame() for shuffle_frame in range(frame_number+1,len(animation)): print('shifting ' + str(shuffle_frame+1) + ' to be ' + str(shuffle_frame)) animation[shuffle_frame] = animation[shuffle_frame+1] print('deleting ' + str(len(animation))) del animation[len(animation)] def getLitLEDs(): points = [] for led in leds: if led.lit: points.append(led.pos) return points # Main program body - set up leds and buttons leds = [] for x in range(0, 8): for y in range(0, 8): led = LED(radius=20,pos=(x, y)) leds.append(led) buttons = [] buttons_warn = [] animation={} #global frame_number def play(): global leds global frame_number animation[frame_number] 
= copy.deepcopy(leds) #print 'length of ani is ' + str(len(animation)) for playframe in range(1,(len(animation)+1)): #print(playframe) leds =[] for x in range(0, 8): for y in range(0, 8): led = LED(radius=20,pos=(x, y)) leds.append(led) for saved_led in animation[playframe]: if saved_led.lit: for led in leds: if led.pos == saved_led.pos: led.color = saved_led.color led.lit = True piLoad() time.sleep(1.0/fps) frame_number = len(animation) def faster(): global fps fps+=1 def slower(): global fps if fps != 1: fps-=1 def exportAni(): global saved FILE=open('animation8x8.py','w') FILE.write('from sense_hat import SenseHat\n') FILE.write('import time\n') FILE.write('sh=SenseHat()\n') FILE.write('FRAMES = [\n') global leds global frame_number animation[frame_number] = copy.deepcopy(leds) #print 'length of ani is ' + str(len(animation)) for playframe in range(1,(len(animation)+1)): #print(playframe) leds =[] for x in range(0, 8): for y in range(0, 8): led = LED(radius=20,pos=(x, y)) leds.append(led) for saved_led in animation[playframe]: if saved_led.lit: for led in leds: if led.pos == saved_led.pos: led.color = saved_led.color led.lit = True grid, png_grid = buildGrid() FILE.write(str(grid)) FILE.write(',\n') FILE.write(']\n') FILE.write('for x in FRAMES:\n') FILE.write('\t sh.set_pixels(x)\n') FILE.write('\t time.sleep('+ str(1.0/fps) + ')\n') FILE.close() saved = True def prog_exit(): print('exit clicked') global warning warning = False clearGrid() pygame.quit() sys.exit() def save_it(): print('save clicked') global warning exportAni() warning = False def quit(): global saved if saved == False: nosave_warn() else: prog_exit() def importAni(): global leds global frame_number with open('animation8x8.py') as ll: line_count = sum(1 for _ in ll) ll.close() #animation = {} frame_number = 1 file = open('animation8x8.py') for r in range(4): file.readline() for frame in range(line_count-8): buff = file.readline() load_frame = buff.split('], [') counter = 1 leds =[] for f in 
load_frame: if counter == 1: f = f[2:] elif counter == 64: f = f[:-4] y = int((counter-1)/8) x = int((counter-1)%8) #print(str(counter) + ' ' + f + ' x= ' + str(x) + ' y= ' + str(y)) led = LED(radius=20,pos=(x, y)) if f == '0, 0, 0': led.lit = False else: led.lit = True f_colours = f.split(',') #print(f_colours) led.color = [int(f_colours[0]),int(f_colours[1]),int(f_colours[2])] leds.append(led) counter+=1 animation[frame_number] = copy.deepcopy(leds) frame_number+=1 counter+=1 file.close() #drawEverything() exportAniButton = Button('Export to py', action=exportAni, pos=(425, 45), color=(153,0,0)) buttons.append(exportAniButton) importAniButton = Button('Import from file', action=importAni, pos=(425, 80), color=(153,0,0)) buttons.append(importAniButton) exportConsButton = Button('Export to console', action=exportCons, pos=(425, 150), color=(160,160,160)) buttons.append(exportConsButton) exportPngButton = Button('Export to PNG', action=exportGrid, pos=(425, 185), color=(160,160,160)) buttons.append(exportPngButton) RotateButton = Button('Rotate LEDs', action=rotate, pos=(425, 255), color=(205,255,255)) buttons.append(RotateButton) clearButton = Button('Clear Grid', action=clearGrid, pos=(425, 220), color=(204,255,255)) buttons.append(clearButton) quitButton = Button('Quit', action=quit, pos=(425, 290), color=(96,96,96)) buttons.append(quitButton) FasterButton = Button('+', action=faster, size=(40,30), pos=(270, 5), color=(184,138,0)) buttons.append(FasterButton) SlowerButton = Button('-', action=slower, size=(40,30), pos=(315, 5), color=(184,138,0)) buttons.append(SlowerButton) PlayButton = Button('Play on LEDs', action=play, pos=(425, 340), color=(184,138,0)) buttons.append(PlayButton) RedButton = Button('', action=setColourRed, size=(50,30), pos=(365, 10),hilight=(0, 200, 200),color=(255,0,0)) buttons.append(RedButton) OrangeButton = Button('', action=setColourOrange, size=(50,30), pos=(365, 45),hilight=(0, 200, 200),color=(255,128,0)) buttons.append(OrangeButton) 
YellowButton = Button('', action=setColourYellow, size=(50,30), pos=(365, 80),hilight=(0, 200, 200),color=(255,255,0)) buttons.append(YellowButton) GreenButton = Button('', action=setColourGreen, size=(50,30), pos=(365, 115),hilight=(0, 200, 200),color=(0,255,0)) buttons.append(GreenButton) CyanButton = Button('', action=setColourCyan, size=(50,30), pos=(365, 150),hilight=(0, 200, 200),color=(0,255,255)) buttons.append(CyanButton) BlueButton = Button('', action=setColourBlue, size=(50,30), pos=(365, 185),hilight=(0, 200, 200),color=(0,0,255)) buttons.append(BlueButton) PurpleButton = Button('', action=setColourPurple, size=(50,30), pos=(365, 220),hilight=(0, 200, 200),color=(102,0,204)) buttons.append(PurpleButton) PinkButton = Button('', action=setColourPink, size=(50,30), pos=(365, 255),hilight=(0, 200, 200),color=(255,0,255)) buttons.append(PinkButton) WhiteButton = Button('', action=setColourWhite, size=(50,30), pos=(365, 290),hilight=(0, 200, 200),color=(255,255,255)) buttons.append(WhiteButton) PrevFrameButton = Button('<-', action=prevFrame, size=(25,30), pos=(50, 5), color=(184,138,0)) buttons.append(PrevFrameButton) NextFrameButton = Button('->', action=nextFrame, size=(25,30), pos=(80, 5), color=(184,138,0)) buttons.append(NextFrameButton) DelFrame = Button('Delete', action=delFrame, size=(45,25), pos=(115, 7), color=(184,138,0)) buttons.append(DelFrame) saveButton = Button('Save', action=save_it, size=(60,50), pos=(150, 250),hilight=(200, 0, 0),color=(255,255,0)) buttons_warn.append(saveButton) QuitButton = Button('Quit', action=prog_exit, size=(60,50), pos=(260, 250),hilight=(200, 0, 0),color=(255,255,0)) buttons_warn.append(QuitButton) def nosave_warn(): global warning warning = True font = pygame.font.Font(None,48) frame_text = 'Unsaved Frames ' for d in range(5): text = font.render(frame_text,1,(255,0,0)) screen.blit(text, (100,100)) pygame.display.flip() time.sleep(0.1) text = font.render(frame_text,1,(0,255,0)) screen.blit(text, (100,100)) 
pygame.display.flip() time.sleep(0.1) drawEverything() # Main prog loop while True: for event in pygame.event.get(): if event.type == QUIT: if saved == False: nosave_warn() else: prog_exit() if event.type == MOUSEBUTTONDOWN: handleClick() #update the display drawEverything() #print(frame_number)
{ "repo_name": "topshed/RPi_8x8GridDraw", "path": "8x8grid-sense.py", "copies": "1", "size": "14715", "license": "mit", "hash": 711211136831649300, "line_mean": 27.3526011561, "line_max": 119, "alpha_frac": 0.5858647638, "autogenerated": false, "ratio": 3.185754492314354, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.9171312195096479, "avg_score": 0.020061412203574828, "num_lines": 519 }
''' 8x8grid-unicorn.py Animation and single frame creation append for Pimoroni UnicornHat 8x8 LED matrix''' import pygame import sys import math from pygame.locals import * from led import LED from buttons import Button import png # pypng #from astro_pi import AstroPi import unicornhat as uh import copy, time saved = True warning = False pygame.display.init() pygame.font.init() #ap=AstroPi() screen = pygame.display.set_mode((530, 395), 0, 32) pygame.display.set_caption('UnicornHAT Grid editor') pygame.mouse.set_visible(1) background = pygame.Surface(screen.get_size()) background = background.convert() background.fill((0, 51, 25)) colour = (255,0,0) # Set default colour to red rotation = 0 #uh.rotation(rotation) frame_number = 1 fps = 4 def setColourRed(): global colour colour = (255,0,0) def setColourBlue(): global colour colour = (0,0,255) def setColourGreen(): global colour colour = (0,255,0) def setColourPurple(): global colour colour = (102,0,204) def setColourPink(): global colour colour = (255,0,255) def setColourYellow(): global colour colour = (255,255,0) def setColourOrange(): global colour colour = (255,128,0) def setColourWhite(): global colour colour = (255,255,255) def setColourCyan(): global colour colour = (0,255,255) def clearGrid(): # Clears the pygame LED grid and sets all the leds.lit back to False for led in leds: led.lit = False def buildGrid(): # Takes a grid and builds versions for exporting (png and text) e = [0,0,0] e_png = (0,0,0) grid = [ [e,e,e,e,e,e,e,e], [e,e,e,e,e,e,e,e], [e,e,e,e,e,e,e,e], [e,e,e,e,e,e,e,e], [e,e,e,e,e,e,e,e], [e,e,e,e,e,e,e,e], [e,e,e,e,e,e,e,e], [e,e,e,e,e,e,e,e], ] #png_grid =[] png_grid = ['blank','blank','blank','blank','blank','blank','blank','blank'] for led in leds: if led.lit: #val = led.pos[0] + (8 * led.pos[1]) val = (8* led.pos[0]) + led.pos[1] #print val grid[led.pos[1]][led.pos[0]] = [led.color[0], led.color[1], led.color[2]] if png_grid[led.pos[0]] == 'blank': png_grid[led.pos[0]] = (led.color[0], 
led.color[1], led.color[2]) else: png_grid[led.pos[0]] = png_grid[led.pos[0]] + (led.color[0], led.color[1], led.color[2]) else: if png_grid[led.pos[0]] == 'blank': png_grid[led.pos[0]] = (0,0,0) else: png_grid[led.pos[0]] = png_grid[led.pos[0]] + (0,0,0) return (grid, png_grid) def piLoad(): # Loads image onto AstroPi matrix #grid, grid_png = buildGrid() #ap.set_pixels(grid) uh.off() for led in leds: if led.lit: uh.set_pixel(led.pos[0], led.pos[1], led.color[0], led.color[1], led.color[2]) #print str(led.pos[0])+ ' ' +str(led.pos[1]) + ' ' + str(led.color[1]) uh.show() def exportGrid(): # Writes png to file global saved grid, png_grid = buildGrid() FILE=open('image8x8.png','wb') w = png.Writer(8,8) w.write(FILE,png_grid) FILE.close() saved = True def exportCons(): # Writes raw list to console grid, png_grid = buildGrid() print(grid) def rotate(): #Rotates image on AstroPi LED matrix global rotation if rotation == 270: rotation = 0 else: rotation = rotation + 90 #ap.set_rotation(rotation) uh.rotation(rotation) play() def handleClick(): global saved global warning pos = pygame.mouse.get_pos() led = findLED(pos, leds) if led: #print 'led ' + str(led) + ' clicked' led.clicked(colour) saved = False for butt in buttons: if butt.rect.collidepoint(pos): butt.click() #print 'button clicked' if warning: for butt in buttons_warn: if butt.rect.collidepoint(pos): butt.click() def findLED(clicked_pos, leds): # reads leds and checks if clicked position is in one of them x = clicked_pos[0] y = clicked_pos[1] for led in leds: if math.hypot(led.pos_x - x, led.pos_y - y) <= led.radius: return led #print 'hit led' return None def drawEverything(): global warning screen.blit(background, (0, 0)) #draw the leds for led in leds: led.draw() for button in buttons: button.draw(screen) font = pygame.font.Font(None,16) frame_text = 'Frame ' text = font.render(frame_text,1,(255,255,255)) screen.blit(text, (5,5)) frame_num_text = str(frame_number) text = 
font.render(frame_num_text,1,(255,255,255)) screen.blit(text, (18,18)) fps_text = 'Frame rate= ' + str(fps) +' fps' text = font.render(fps_text,1,(255,255,255)) screen.blit(text, (175,10)) # done font = pygame.font.Font(None,18) export_text = 'Animation' # done text = font.render(export_text,1,(255,255,255)) screen.blit(text, (445,15)) # done export_text = 'Single Frame' text = font.render(export_text,1,(255,255,255)) screen.blit(text, (435,120)) # done pygame.draw.circle(screen,colour,(390,345),20,0) #flip the screen if warning: for button in buttons_warn: button.draw(screen) pygame.display.flip() def load_leds_to_animation(): global frame_number global leds for saved_led in animation[frame_number]: if saved_led.lit: for led in leds: if led.pos == saved_led.pos: led.color = saved_led.color led.lit = True def nextFrame(): global frame_number global leds #print(frame_number) animation[frame_number] = copy.deepcopy(leds) #clearGrid() frame_number+=1 if frame_number in animation: leds =[] for x in range(0, 8): for y in range(0, 8): led = LED(radius=20,pos=(x, y)) leds.append(led) load_leds_to_animation() def prevFrame(): global frame_number global leds #print(frame_number) animation[frame_number] = copy.deepcopy(leds) clearGrid() if frame_number != 1: frame_number-=1 if frame_number in animation: leds =[] for x in range(0, 8): for y in range(0, 8): led = LED(radius=20,pos=(x, y)) leds.append(led) load_leds_to_animation() def delFrame(): global frame_number #print('ani length is ' + str(len(animation)) + ' frame is ' + str(frame_number)) if len(animation) > 1: animation[frame_number] = copy.deepcopy(leds) del animation[frame_number] prevFrame() for shuffle_frame in range(frame_number+1,len(animation)): animation[shuffle_frame] = animation[shuffle_frame+1] del animation[len(animation)] def getLitLEDs(): points = [] for led in leds: if led.lit: points.append(led.pos) return points # Main program body - set up leds and buttons leds = [] for x in range(0, 8): for y in 
range(0, 8): led = LED(radius=20,pos=(x, y)) leds.append(led) buttons = [] buttons_warn = [] animation={} #global frame_number def play(): global leds global frame_number animation[frame_number] = copy.deepcopy(leds) #print 'length of ani is ' + str(len(animation)) for playframe in range(1,(len(animation)+1)): #print(playframe) leds =[] for x in range(0, 8): for y in range(0, 8): led = LED(radius=20,pos=(x, y)) leds.append(led) for saved_led in animation[playframe]: if saved_led.lit: for led in leds: if led.pos == saved_led.pos: led.color = saved_led.color led.lit = True piLoad() time.sleep(1.0/fps) frame_number = len(animation) def faster(): global fps fps+=1 def slower(): global fps if fps != 1: fps-=1 def exportAni(): global saved FILE=open('animation8x8.py','w') FILE.write('import unicornhat as uh\n') FILE.write('import time\n') FILE.write('FRAMES = [\n') global leds global frame_number animation[frame_number] = copy.deepcopy(leds) #print 'length of ani is ' + str(len(animation)) for playframe in range(1,(len(animation)+1)): #print(playframe) leds =[] for x in range(0,8): for y in range(0,8): led = LED(radius=20,pos=(x, y)) leds.append(led) for saved_led in animation[playframe]: if saved_led.lit: for led in leds: if led.pos == saved_led.pos: led.color = saved_led.color led.lit = True grid, png_grid = buildGrid() #grid = uh.get_pixels() FILE.write(str(grid)) FILE.write(',\n') FILE.write(']\n') FILE.write('for x in FRAMES:\n') FILE.write('\t uh.set_pixels(x)\n') FILE.write('\t uh.show()\n') FILE.write('\t time.sleep('+ str(1.0/fps) + ')\n') FILE.close() saved = True def prog_exit(): print('exit clicked') global warning warning = False #clearGrid() pygame.quit() sys.exit(-1) def save_it(): print('save clicked') global warning exportAni() warning = False def quit(): global saved if saved == False: nosave_warn() else: prog_exit() def importAni(): global leds global frame_number with open('animation8x8.py') as ll: line_count = sum(1 for _ in ll) ll.close() #animation 
= {} frame_number = 1 file = open('animation8x8.py') for r in range(3): file.readline() for frame in range(line_count-8): buff = file.readline() load_frame = buff.split('], [') #print load_frame counter = 1 leds =[] for f in load_frame: if counter == 1: f = f[3:] elif counter == 64: f = f[:-5] elif counter%8 == 0 and counter != 64: f = f[:-1] elif (counter-1)%8 == 0: f = f[1:] y = int((counter-1)/8) x = int((counter-1)%8) #print(counter,x,y) #print(str(counter) + ' ' + f + ' x= ' + str(x) + ' y= ' + str(y)) led = LED(radius=20,pos=(x, y)) if f == '0, 0, 0': led.lit = False else: led.lit = True f_colours = f.split(',') #print(f_colours) led.color = [int(f_colours[0]),int(f_colours[1]),int(f_colours[2])] leds.append(led) counter+=1 animation[frame_number] = copy.deepcopy(leds) frame_number+=1 counter+=1 file.close() #drawEverything() exportAniButton = Button('Export to py', action=exportAni, pos=(425, 45), color=(153,0,0)) # done buttons.append(exportAniButton) importAniButton = Button('Import from file', action=importAni, pos=(425, 80 ), color=(153,0,0)) # done buttons.append(importAniButton) exportConsButton = Button('Export to console', action=exportCons, pos=(425, 150), color=(160,160,160)) # done buttons.append(exportConsButton) exportPngButton = Button('Export to PNG', action=exportGrid, pos=(425, 185), color=(160,160,160)) # done buttons.append(exportPngButton) RotateButton = Button('Rotate LEDs', action=rotate, pos=(425, 255), color=(205,255,255)) # done buttons.append(RotateButton) clearButton = Button('Clear Grid', action=clearGrid, pos=(425, 220), color=(204,255,255))# done buttons.append(clearButton) quitButton = Button('Quit', action=quit, pos=(425, 290), color=(96,96,96)) buttons.append(quitButton) FasterButton = Button('+', action=faster, size=(40,30), pos=(270, 5), color=(184,138,0)) # done buttons.append(FasterButton) SlowerButton = Button('-', action=slower, size=(40,30), pos=(315, 5), color=(184,138,0))# done buttons.append(SlowerButton) PlayButton 
= Button('Play on LEDs', action=play, pos=(425, 340), color=(184,138,0)) # done buttons.append(PlayButton) RedButton = Button('', action=setColourRed, size=(50,30), pos=(365, 10),hilight=(0, 200, 200),color=(255,0,0)) # done buttons.append(RedButton) OrangeButton = Button('', action=setColourOrange, size=(50,30), pos=(365, 45),hilight=(0, 200, 200),color=(255,128,0)) # done buttons.append(OrangeButton) YellowButton = Button('', action=setColourYellow, size=(50,30), pos=(365, 80),hilight=(0, 200, 200),color=(255,255,0)) # done buttons.append(YellowButton) GreenButton = Button('', action=setColourGreen, size=(50,30), pos=(365, 115),hilight=(0, 200, 200),color=(0,255,0)) # done buttons.append(GreenButton) CyanButton = Button('', action=setColourCyan, size=(50,30), pos=(365, 150),hilight=(0, 200, 200),color=(0,255,255)) # done buttons.append(CyanButton) BlueButton = Button('', action=setColourBlue, size=(50,30), pos=(365, 185),hilight=(0, 200, 200),color=(0,0,255)) # done buttons.append(BlueButton) PurpleButton = Button('', action=setColourPurple, size=(50,30), pos=(365, 220),hilight=(0, 200, 200),color=(102,0,204)) # done buttons.append(PurpleButton) PinkButton = Button('', action=setColourPink, size=(50,30), pos=(365, 255),hilight=(0, 200, 200),color=(255,0,255)) # done buttons.append(PinkButton) WhiteButton = Button('', action=setColourWhite, size=(50,30), pos=(365, 290),hilight=(0, 200, 200),color=(255,255,255)) # done buttons.append(WhiteButton) PrevFrameButton = Button('<-', action=prevFrame, size=(25,30), pos=(50, 5), color=(184,138,0)) # done buttons.append(PrevFrameButton) NextFrameButton = Button('->', action=nextFrame, size=(25,30), pos=(80, 5), color=(184,138,0)) # done buttons.append(NextFrameButton) DelFrame = Button('Delete', action=delFrame, size=(45,25), pos=(115, 7), color=(184,138,0)) # done buttons.append(DelFrame) saveButton = Button('Save', action=save_it, size=(60,50), pos=(150, 250),hilight=(200, 0, 0),color=(255,255,0)) # done 
buttons_warn.append(saveButton) QuitButton = Button('Quit', action=prog_exit, size=(60,50), pos=(260, 250),hilight=(200, 0, 0),color=(255,255,0)) # done buttons_warn.append(QuitButton) def nosave_warn(): global warning warning = True font = pygame.font.Font(None,48) frame_text = 'Unsaved Frames ' for d in range(5): text = font.render(frame_text,1,(255,0,0)) screen.blit(text, (100,100)) pygame.display.flip() time.sleep(0.1) text = font.render(frame_text,1,(0,255,0)) screen.blit(text, (100,100)) pygame.display.flip() time.sleep(0.1) drawEverything() # Main prog loop while True: for event in pygame.event.get(): if event.type == QUIT: if saved == False: nosave_warn() else: prog_exit() if event.type == MOUSEBUTTONDOWN: handleClick() #update the display drawEverything()
{ "repo_name": "topshed/RPi_8x8GridDraw", "path": "8x8grid-unicorn.py", "copies": "1", "size": "15251", "license": "mit", "hash": -1442927982785858800, "line_mean": 27.8298676749, "line_max": 126, "alpha_frac": 0.5808799423, "autogenerated": false, "ratio": 3.1588649544324774, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.42397448967324775, "avg_score": null, "num_lines": null }
# 8x8 LED arrangements for various images
from PIL import Image
from PIL import ImageDraw


def smiley(rotation=0):
    """Return an 8x8 LED-matrix smiley-face image, rotated by *rotation* degrees."""
    face = [[0, 0, 1, 1, 1, 1, 0, 0],
            [0, 1, 0, 0, 0, 0, 1, 0],
            [1, 0, 1, 0, 0, 1, 0, 1],
            [1, 0, 0, 0, 0, 0, 0, 1],
            [1, 0, 1, 0, 0, 1, 0, 1],
            [1, 0, 0, 1, 1, 0, 0, 1],
            [0, 1, 0, 0, 0, 0, 1, 0],
            [0, 0, 1, 1, 1, 1, 0, 0]]
    return matrix2image(face).rotate(rotation)


def box1():
    """Outlined outer box with a filled inner square."""
    canvas = Image.new('1', (8, 8))
    pen = ImageDraw.Draw(canvas)
    pen.rectangle((0, 0, 7, 7), outline=255, fill=0)
    pen.rectangle((2, 2, 5, 5), outline=255, fill=1)
    return canvas


def box2():
    """Outlined outer box with an outlined (unfilled) inner square."""
    canvas = Image.new('1', (8, 8))
    pen = ImageDraw.Draw(canvas)
    pen.rectangle((0, 0, 7, 7), outline=255, fill=0)
    pen.rectangle((2, 2, 5, 5), outline=255, fill=0)
    return canvas


def box3():
    """Outlined outer box, filled inner square, and both diagonals drawn."""
    canvas = Image.new('1', (8, 8))          # 8x8 1-bit color image
    pen = ImageDraw.Draw(canvas)             # draw instance for the canvas
    pen.rectangle((0, 0, 7, 7), outline=255, fill=0)
    pen.rectangle((2, 2, 5, 5), outline=255, fill=1)
    pen.line((1, 1, 6, 6), fill=255)         # top left to bottom right
    pen.line((1, 6, 6, 1), fill=255)         # top right to bottom left
    return canvas


def line1():
    """Single horizontal line across row 3 of the matrix."""
    canvas = Image.new('1', (8, 8))
    pen = ImageDraw.Draw(canvas)
    pen.line((0, 3, 7, 3), fill=255)
    return canvas


def matrix2image(matrix):
    """Convert an 8x8 list of 0/1 rows into a 1-bit PIL image.

    NOTE(review): pixels are set at (row_index, col_index), which transposes
    the matrix relative to PIL's (x, y) convention — confirm this is intended.
    """
    canvas = Image.new('1', (8, 8))
    for row_idx, row in enumerate(matrix):
        for col_idx, value in enumerate(row):
            if value == 1:
                canvas.putpixel((row_idx, col_idx), 1)
    return canvas
{ "repo_name": "kbsezginel/raspberry-pi", "path": "scripts/rpi/8x8-led-lmatrix/images8x8.py", "copies": "1", "size": "1869", "license": "bsd-3-clause", "hash": -2554022521650018000, "line_mean": 30.6779661017, "line_max": 98, "alpha_frac": 0.5157838416, "autogenerated": false, "ratio": 2.94794952681388, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.396373336841388, "avg_score": null, "num_lines": null }
"""90. Subsets II https://leetcode.com/problems/subsets-ii/ Given a collection of integers that might contain duplicates, nums, return all possible subsets (the power set). Note: The solution set must not contain duplicate subsets. Example: Input: [1,2,2] Output: [ [2], [1], [1,2,2], [2,2], [1,2], [] ] """ import copy from typing import List class Solution: def subsets_with_dup(self, nums: List[int]) -> List[List[int]]: def frequency(collection: List[int], target: int) -> int: res = 0 for i in collection: if i == target: res += 1 return res if not nums: return [[]] pre_nums = nums[:-1] pre_ans = self.subsets_with_dup(pre_nums) ans = copy.deepcopy(pre_ans) is_dup = nums[-1] in pre_nums for ele in pre_ans: if is_dup: # key tip: if previous list contains N num # and N < frequency of num in previous nums, then abandon it if frequency(ele, nums[-1]) < frequency(pre_ans[-1], nums[-1]): continue ans.append(ele + [nums[-1]]) return ans
{ "repo_name": "isudox/leetcode-solution", "path": "python-algorithm/leetcode/subsets_ii.py", "copies": "1", "size": "1207", "license": "mit", "hash": -6310278320942938000, "line_mean": 23.14, "line_max": 79, "alpha_frac": 0.5401822701, "autogenerated": false, "ratio": 3.6465256797583083, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.46867079498583086, "avg_score": null, "num_lines": null }
# 913. Cat and Mouse
from typing import List

MOUSE, CAT, DRAW = 1, 2, 0


# O(2 * n^3) states with O(n) transitions each -> O(n^4) overall.
class Solution:
    def catMouseGame(self, graph: List[List[int]]) -> int:
        """Return 1 if the mouse wins, 2 if the cat wins, 0 for a draw.

        Depth-limited minimax with memoisation: once 2 * len(graph) plies
        pass without a decision, the game is declared a draw.
        """
        cache = {}
        limit = 2 * len(graph)

        def outcome(m: int, c: int, depth: int) -> int:
            if depth == limit:
                return DRAW  # exhausted all states; this result is not cached
            key = (m, c, depth)
            if key in cache:
                return cache[key]
            if m == 0:
                cache[key] = MOUSE          # mouse reached the hole
            elif m == c:
                cache[key] = CAT            # cat caught the mouse
            else:
                mouse_moves = depth % 2 == 0
                mover_win = MOUSE if mouse_moves else CAT
                verdict = CAT if mouse_moves else MOUSE  # worst case for mover
                for nxt in (graph[m] if mouse_moves else graph[c]):
                    if not mouse_moves and nxt == 0:
                        continue  # the cat may not enter the hole
                    res = (outcome(nxt, c, depth + 1) if mouse_moves
                           else outcome(m, nxt, depth + 1))
                    if res == mover_win:
                        verdict = res       # best case; stop searching
                        break
                    if res == DRAW:
                        verdict = DRAW      # better than losing
                cache[key] = verdict
            return cache[key]

        return outcome(1, 2, 0)
{ "repo_name": "digiter/Arena", "path": "913-cat-and-mouse.py", "copies": "1", "size": "1456", "license": "mit", "hash": 4492257581581473000, "line_mean": 33.6666666667, "line_max": 76, "alpha_frac": 0.5034340659, "autogenerated": false, "ratio": 4.183908045977011, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.5187342111877011, "avg_score": null, "num_lines": null }
# 913. Cat and Mouse # O(2 * N^3) from typing import List from collections import defaultdict from queue import SimpleQueue MOUSE, CAT, DRAW = 1, 2, 0 class Solution: def catMouseGame(self, graph: List[List[int]]) -> int: state = dict() # mouse, cat, turn -> result que = SimpleQueue() def enqueue(mouse, cat, turn, result): s = (mouse, cat, turn) if not (s in state): state[s] = result que.put(s) n = len(graph) for cat in range(1, n): # The cat can not enter the hole. enqueue(0, cat, MOUSE, MOUSE) enqueue(0, cat, CAT, MOUSE) enqueue(cat, cat, MOUSE, CAT) enqueue(cat, cat, CAT, CAT) # mouse, cat, turn -> number of neighbours neighbourCount = dict() # mouse, cat, turn -> number of updates updateCount = defaultdict(int) # mouse, cat, turn -> if the current player can draw canDraw = defaultdict(bool) for mouse in range(n): for cat in range(1, n): neighbourCount[(mouse, cat, MOUSE)] = len(graph[mouse]) neighbourCount[(mouse, cat, CAT)] = len(graph[cat]) - (0 in graph[cat]) def update(mouse, cat, turn, result): s = (mouse, cat, turn) updateCount[s] += 1 if result == DRAW: canDraw[s] = True bestResult, worstResult = turn, MOUSE + CAT - turn # Quick win if result == bestResult: enqueue(mouse, cat, turn, bestResult) return # Updated from all neighbours if updateCount[s] == neighbourCount[s]: enqueue(mouse, cat, turn, DRAW if canDraw[s] else worstResult) while not que.empty(): mouse, cat, turn = que.get() result = state[(mouse, cat, turn)] prevTurn = MOUSE + CAT - turn if prevTurn == MOUSE: for prevMouse in graph[mouse]: update(prevMouse, cat, prevTurn, result) else: for prevCat in graph[cat]: if prevCat != 0: update(mouse, prevCat, prevTurn, result) end = (1, 2, MOUSE) if end in state: return state[end] return DRAW if __name__ == "__main__": grah = [[2, 5], [3], [0, 4, 5], [1, 4, 5], [2, 3], [0, 2, 3]] print(Solution().catMouseGame(grah))
{ "repo_name": "digiter/Arena", "path": "913-cat-and-mouse_2.py", "copies": "1", "size": "2532", "license": "mit", "hash": 560435874847510660, "line_mean": 31.8831168831, "line_max": 78, "alpha_frac": 0.5031595577, "autogenerated": false, "ratio": 3.7961019490254873, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.9799261506725487, "avg_score": 0, "num_lines": 77 }
"""9-1. store two attributes: restaurant_name and cousine_type make a method called describe_restaurant() that prints this two pieces of information, and a method called open_restaurant() this prints a message indicating that the restaurant is open Make an instance called restaurant from your class. Print the two attributes individually, and then call both methods""" class Restaurant(): def __init__(self, restaurant_name, cousine_type): self.restaurant_name = restaurant_name self.cousine_type = cousine_type def describe_restaurant(self): print(self.restaurant_name.title() + " is the name of the restaurant\n") print(self.cousine_type.title() + " is the type of cousine\n") def open_restaurant(self): print(self.restaurant_name.title() +" is open\n") """my_restaurant = Restaurant('El imperio', 'Cortes de carne') my_restaurant.describe_restaurant() my_restaurant.open_restaurant()""" """9-6. An ice cream stand is a specific kind of restaurant. Write a class called IceCreamStand that inherits from the Restaurant class. Add an attribute called flavors that stores a list of ice cream flavors. Write a method that displays these flavors. Create an instance of IceCreamStand, and call this method.""" class IceCreamStand(Restaurant): "Inherits from restaurant" def __init__(self,restaurant_name, cousine_type, flavors): super().__init__(restaurant_name, cousine_type) self.flavors = flavors def Listflavors(self): print("This are the ice cream flavors:\n" + self.flavors.title()+"\n") IceCream = IceCreamStand('Fancy Ice', 'Desserts', "vanilla, chocolate, peanut, strawberry") IceCream.describe_restaurant() IceCream.Listflavors()
{ "repo_name": "AnhellO/DAS_Sistemas", "path": "Ene-Jun-2018/Juan Sleiman/Tarea 2(PyCrashCourse.9-6,9-9)/Restaurant.py", "copies": "1", "size": "1743", "license": "mit", "hash": -3666402149774623000, "line_mean": 43.6923076923, "line_max": 101, "alpha_frac": 0.7234652897, "autogenerated": false, "ratio": 3.616182572614108, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.48396478623141076, "avg_score": null, "num_lines": null }
# 91. Example: time functions, part 1.
import time

print(time.ctime(time.time()))
print(time.asctime(time.localtime(time.time())))
print(time.asctime(time.gmtime(time.time())))
print(time.time())

# 92. Example: timing a loop with time.time().
start = time.time()
for i in range(10000):
    i += 1
end = time.time()
# BUGFIX: the original printed start - end, which is negative;
# elapsed time is end - start.
print(end - start)

# 93. Example: timing a loop with a high-resolution performance counter.
# BUGFIX: time.clock() was deprecated in Python 3.3 and removed in 3.8;
# time.perf_counter() is the documented replacement.
start = time.perf_counter()
for i in range(10000):
    i += 1
end = time.perf_counter()
print(end - start)

# 94. Example: a number-guessing game measuring reaction time (not implemented).

# 95. Example: format the current date/time as a readable string.
print(time.strftime('%Y-%m-%d %H:%M:%S',time.localtime()))

# 96. Example: count the occurrences of a substring in a string.
source = 'abc,bcd,abcd,aabbccddbcd'
print(source.count('bcd'))

# 97. Example: echo keyboard input into a disk file until '#' is entered.
from sys import stdout

def output_in_disk():
    """Read strings from stdin, writing each to a user-named file and
    echoing it to stdout, until the user enters '#'."""
    fileName = input('pls input the file name: \n')
    # A context manager guarantees the file is closed even on error
    # (the original leaked the handle if input() raised).
    with open(fileName, 'w') as fp:
        ch = input('pls input the context: \n')
        while ch != '#':
            fp.write(ch)
            stdout.write(ch)
            ch = input('pls go on : \n')

#output_in_disk()

# 98. Example: same as 97, but upper-case the text before writing it
# to the file "test".
def output_in_disk_with_caseup(fileName):
    """Read strings from stdin and write them upper-cased to fileName
    until the user enters '#'."""
    with open(fileName, 'w') as fp:
        ch = input('pls input the context: \n')
        while ch != '#':
            # fp.write already upper-cases, so the original's extra
            # "ch = ch.upper()" after re-reading was redundant.
            fp.write(ch.upper())
            ch = input('pls go on: \n')

# output_in_disk_with_caseup('test_gogo')

# 99. Example: write a file, read it back, print it and print its
# characters in sorted order.
# NOTE(review): the exercise asks to merge two files A and B into C;
# this implementation only writes and re-reads the single file 'gog'.
def merge_file_and_sorted():
    """Write user input to 'gog', then print its contents and the
    sorted list of its characters."""
    output_in_disk_with_caseup('gog')
    with open('gog') as fp:
        x = fp.read()
    print(x)
    # sorted() returns the same list as list(x) followed by .sort().
    l = sorted(x)
    print(l)

# merge_file_and_sorted()

# 100. Example: build a dict from two parallel lists.
x = ['a', 'b']
y = [1, 2]
print(dict(zip(x, y)))
{ "repo_name": "cwenao/python_web_learn", "path": "base100/base100/base_91-100.py", "copies": "1", "size": "2172", "license": "apache-2.0", "hash": -8833718215388849000, "line_mean": 14.4054054054, "line_max": 64, "alpha_frac": 0.6029239766, "autogenerated": false, "ratio": 1.930022573363431, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.3032946549963431, "avg_score": null, "num_lines": null }
# 9.2.1 重新一般方法和特殊的构造方法 class A: def hello(self): print('Hello, I\'m A.') class B(A): pass a = A() b = B() a.hello() # Hello, I'm A. b.hello() # Hello, I'm A. # 重写 A 的 hello 方法 class B(A): def hello(self): print('Hello, I\'m B.') b = B() b.hello() # Hello, I'm B. # 如果一个类的构造方法被重写,那么就需要调用超类(你所继承的类)的构造方法, # 否则对象可能不会被正确地初始化 class Bird: def __init__(self): self.hungry = True def eat(self): if self.hungry: print('Aaaah...') self.hungry = False else: print('No, thanks!') b = Bird() b.eat() # Aaaah... b.eat() # No, thanks! class SongBird(Bird): def __init__(self): self.sound = 'Squawk!' def sing(self): print(self.sound) sb = SongBird() sb.sing() # Squawk! # sb.eat() # AttributError: 'SongBird' object has no attribute 'hungry' # 上面新的构造方法没有任何关于初始化hungry特性的代码, # SongBird的构造方法必须调用其超类Bird的构造方法来确保进行基本的初始化。 # 有两种方法: # 1. 调用超类构造方法的未绑定版本; # 2. 使用 super 函数 # 1. 调用超类构造方法的未绑定版本 # 如果直接调用类的方法(比如 Bird.__init__),那么久没有实例会被绑定, # 这样就可以自由地提供需要的 self 参数。 # 这样的方法称为未绑定(unbound)方法。 class SongBird(Bird): def __init__(self): Bird.__init__(self) self.sound = 'Squawk!' def sing(self): print(self.sound) sb = SongBird() sb.sing() # Squawk! sb.eat() # Aaaah... sb.eat() # No, thanks! # 2. 使用 super 函数(推荐使用) # 当前的类和对象可以作为super函数的参数使用, # 调用函数返回的对象的任何方法都是调用超类的方法,而不是当前了类的方法。 class Bird: def __init__(self): self.hungry = True def eat(self): if self.hungry: print('Aaaah...') self.hungry = False else: print('No, thanks!') class SongBird(Bird): def __init__(self): super(SongBird, self).__init__() # Super 在这里 self.sound = 'Squawk!' def sing(self): print(self.sound) sb = SongBird() sb.sing() # Squawk! sb.eat() # Aaaah... sb.eat() # No, thanks!
{ "repo_name": "xiezipei/beginning-python-demo", "path": "demo/construct.py", "copies": "1", "size": "2509", "license": "mit", "hash": -6855357652077139000, "line_mean": 17.6057692308, "line_max": 74, "alpha_frac": 0.5586563307, "autogenerated": false, "ratio": 1.7703568161024703, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.7803608863585687, "avg_score": 0.005080856643356644, "num_lines": 104 }
# 9/23/2013
# Alethea Butler

import socket
import threading


class Client:
    """UDP chat client: sends typed lines to ``dest_addr`` and prints
    datagrams received on ``listen_addr`` from a background thread."""

    def __init__(self, dest_addr=("127.0.0.1", 5280), listen_addr=("127.0.0.1", 5281)):
        # One socket for sending, one (bound in recvthread) for receiving.
        self.sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        self.bsock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        self.dest_addr = dest_addr
        self.listen_addr = listen_addr
        self.running = True
        self.thread = None  # created by run(); stays None if run() is never called

    def recvthread(self):
        """Receive loop run in a background thread; the 2 s timeout lets
        it notice ``self.running`` going False."""
        self.bsock.bind(self.listen_addr)
        self.bsock.settimeout(2)
        while self.running:
            try:
                message, address = self.bsock.recvfrom(1024)
            except socket.timeout:
                continue
            print(message.decode('UTF-8'))

    def run(self):
        """Start the receiver thread, then read lines from stdin and
        send them as datagrams until EOF (Ctrl-D)."""
        self.thread = threading.Thread(target=self.recvthread)
        self.thread.start()
        self.sock.settimeout(None)
        while self.running:
            try:
                message = bytes(input("Message? "), "UTF-8")
            except EOFError:
                print('\nDone')
                self.running = False
                self.thread.join()
                break
            if message:
                sent = self.sock.sendto(message, self.dest_addr)
                print("{0} bytes sent".format(sent))
            else:
                print("no message sent")

    def __del__(self):
        # Signal the receiver loop to stop before cleaning up.
        self.running = False
        # BUGFIX: run() may never have been called, in which case
        # self.thread is still None and joining it raised AttributeError.
        if self.thread is not None:
            self.thread.join()
        # Close both sockets (the original leaked bsock).
        for s in (self.sock, self.bsock):
            try:
                s.close()
            except socket.error as e:
                print(e)


if __name__ == "__main__":
    Client().run()
{ "repo_name": "alethea/udp-chat", "path": "tinychat/client.py", "copies": "1", "size": "1579", "license": "apache-2.0", "hash": -8255308047783005000, "line_mean": 27.1964285714, "line_max": 87, "alpha_frac": 0.5281823939, "autogenerated": false, "ratio": 3.9475, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.9974667783510389, "avg_score": 0.00020292207792207794, "num_lines": 56 }
# 9/23/2013 # Charles O. Goddard import socket import sys class Server: def __init__(self, bind_addr=("0.0.0.0", 5280), client_port=5281): self.sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) self.bind_addr = bind_addr self.client_port = client_port self.running = False self.clients = [] def run(self, timeout=2.0): self.running = True self.sock.bind(self.bind_addr) self.sock.settimeout(timeout) while self.running: try: raw_message, address = self.sock.recvfrom(1024) if not address[0] in self.clients: self.clients.append(address[0]) message = raw_message.decode('UTF-8') s = "[%s]: %s" % (address[0], message) print(s) data = s.encode('UTF-8') for cl in self.clients: #if cl == address[0]: # continue self.sock.sendto(data, (cl, self.client_port)) except socket.timeout: continue except socket.error as e: print(e) break except KeyboardInterrupt: print("\nI can see when I\'m not wanted.") break def __del__(self): self.running = False try: self.sock.close() except socket.error as e: print(e)
{ "repo_name": "alethea/udp-chat", "path": "tinychat/server.py", "copies": "1", "size": "1470", "license": "apache-2.0", "hash": -404008474993909060, "line_mean": 28.4, "line_max": 70, "alpha_frac": 0.4918367347, "autogenerated": false, "ratio": 4.060773480662983, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 1, "avg_score": 0.0019047619047619048, "num_lines": 50 }
"""938. Range Sum of BST https://leetcode.com/problems/range-sum-of-bst/ Given the root node of a binary search tree, return the sum of values of all nodes with value between L and R (inclusive). The binary search tree is guaranteed to have unique values. Example 1: Input: root = [10,5,15,3,7,null,18], L = 7, R = 15 Output: 32 Example 2: Input: root = [10,5,15,3,7,13,18,1,null,6], L = 6, R = 10 Output: 23 Note: The number of nodes in the tree is at most 10000. The final answer is guaranteed to be less than 2^31. """ from common.tree_node import TreeNode class Solution: def range_sum_bst(self, root: TreeNode, l: int, r: int) -> int: """ A nerd approach which doesn't use BST. :param root: :param l: :param r: :return: """ if not root: return 0 ans = 0 if l <= root.val <= r: ans += root.val ans += self.range_sum_bst(root.left, l, r) ans += self.range_sum_bst(root.right, l, r) return ans def range_sum_bst_2(self, root: TreeNode, l: int, r: int) -> int: if not root: return 0 ans = 0 if root.val < l: ans += self.range_sum_bst_2(root.right, l, r) elif root.val > r: ans += self.range_sum_bst_2(root.left, l, r) else: ans += root.val ans += self.range_sum_bst_2(root.left, l, r) ans += self.range_sum_bst_2(root.right, l, r) return ans
{ "repo_name": "isudox/leetcode-solution", "path": "python-algorithm/leetcode/range_sum_of_bst.py", "copies": "1", "size": "1522", "license": "mit", "hash": 914627529621628200, "line_mean": 21.0579710145, "line_max": 76, "alpha_frac": 0.5551905388, "autogenerated": false, "ratio": 3.0872210953346855, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.41424116341346856, "avg_score": null, "num_lines": null }
"""9-3. Users: Make a class called User. Create two attributes called first_name and last_name, and then create several other attributes that are typically stored in a user profile. Make a method called describe_user() that prints a summary of the user’s information. Make another method called greet_user() that prints a personalized greeting to the user.""" class User(): def __init__(self, first_name, last_name, age, ID): self.first_name = first_name.title() self.last_name = last_name.title() self.age = str(age) self.ID = str(ID) def describe_user(self): print("\n" + self.first_name + " " + self.last_name) print("Age: " + self.age) print("ID: " + self.ID) def greet_user(self): print("\n¡Welcome " + self.first_name + " " + self.last_name + "!" ) """sleiman = User('juan', 'sleiman', 21, 14151301) sleiman.describe_user() sleiman.greet_user()""" """9-7. Write a class called Admin that inherits from the User class. Add an attribute, privileges, that stores a list of strings like "can add post", "can delete post", "can ban user", and so on. Write a method called show_privileges() that lists the administrator’s set of privileges. Create an instance of Admin, and call your method.""" class Admin(User): def __init__(self,first_name, last_name, age, ID): super().__init__(first_name, last_name, age, ID) # empty set of privileges. self.privileges = Privileges() """def show_privileges(self): print("\nPrivileges:") for p in self.privileges: print("* " + p)""" """sleiman = Admin('juan', 'sleiman', 21, 1451301) sleiman.describe_user() sleiman.privileges = ['can add users', 'can ban users', 'can do the Harlem Shake'] sleiman.show_privileges()""" """9-8. Privileges: Write a separate Privileges class. The class should have one attribute, privileges, that stores a list of strings as described in Exercise 9-7. Move the show_privileges() method to this class. Make a Privileges instance as an attribute in the Admin class. 
Create a new instance of Admin and use your method to show its privileges.""" class Privileges(): def __init__(self, privileges=[]): self.privileges = privileges def show_privileges(self): print("\nPrivileges:") if self.privileges: for p in self.privileges: print("* " + p) else: print("The user hasn't privileges.") sleiman = Admin('juan', 'sleiman', 21, 1451301) sleiman.describe_user() sleiman.privileges.show_privileges() print("\nNow we are adding some privileges: ") sleiman_privileges = ['can add users', 'can ban users', 'can do the Harlem Shake'] sleiman.privileges.privileges = sleiman_privileges sleiman.privileges.show_privileges()
{ "repo_name": "AnhellO/DAS_Sistemas", "path": "Ene-Jun-2018/Juan Sleiman/Tarea 2(PyCrashCourse.9-6,9-9)/UserAdmin.py", "copies": "1", "size": "2815", "license": "mit", "hash": 1341132050950069200, "line_mean": 36.972972973, "line_max": 118, "alpha_frac": 0.6647686833, "autogenerated": false, "ratio": 3.607188703465982, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.974855025439749, "avg_score": 0.004681426473698257, "num_lines": 74 }
"""94. Binary Tree Inorder Traversal https://leetcode.com/problems/binary-tree-inorder-traversal/ Given a binary tree, return the in-order traversal of its nodes' values. Example: Input: [1,null,2,3] 1 \ 2 / 3 Output: [1,3,2] Follow up: Recursive solution is trivial, could you do it iteratively? """ from typing import List from common.tree_node import TreeNode class Solution: def iterative_inorder_traversal(self, root: TreeNode) -> List[int]: """ iterative traversal :param root: :return: """ ans = [] stack = [] while root or stack: if root: stack.append(root) root = root.left else: root = stack.pop() ans.append(root.val) root = root.right return ans def recursive_inorder_traversal(self, root: TreeNode) -> List[int]: """ recursive traversal, process left if needed, then val, at last right :param root: :return: """ if not root: return [] ans = [] ans += self.recursive_inorder_traversal(root.left) ans.append(root.val) ans += self.recursive_inorder_traversal(root.right) return ans
{ "repo_name": "isudox/leetcode-solution", "path": "python-algorithm/leetcode/binary_tree_inorder_traversal.py", "copies": "1", "size": "1306", "license": "mit", "hash": -681623068723062500, "line_mean": 21.9122807018, "line_max": 76, "alpha_frac": 0.5574272588, "autogenerated": false, "ratio": 3.886904761904762, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.4944332020704762, "avg_score": null, "num_lines": null }
# 9.4 # Liu Li # 18 Nov, 2015 ''' Write a program to read through the mbox-short.txt and figure out who has the sent the greatest number of mail messages. The program looks for 'From ' lines and takes the second word of those lines as the person who sent the mail. The program creates a Python dictionary that maps the sender's mail address to a count of the number of times they appear in the file. After the dictionary is produced, the program reads through the dictionary using a maximum loop to find the most prolific committer. ''' import re # name = raw_input("Enter file:") # if len(name) < 1 : name = "mbox-short.txt" fname = "mbox-short.txt" with open(fname, 'r') as fh: dir = {} for line in fh: if re.search("From ", line): key = re.findall('[^ ]+@[^ ]+', line)[0] dir[key] = dir.get(key, 0)+ 1 key = list(dir.keys()) value = list(dir.values()) print key[value.index(max(value))], max(value) ############################################################# # Desired Output # cwen@iupui.edu 5
{ "repo_name": "ll0816/PythonForEverybody", "path": "Python-Data-Structure/Ass9.4.py", "copies": "1", "size": "1044", "license": "mit", "hash": 1909214826778944800, "line_mean": 40.76, "line_max": 498, "alpha_frac": 0.6360153257, "autogenerated": false, "ratio": 3.5631399317406145, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.4699155257440614, "avg_score": null, "num_lines": null }
#9.4 Write a program to read through the mbox-short.txt and figure out who has the sent the greatest number of mail messages. The program looks for 'From ' lines and takes the second word of those lines as the person who sent the mail. The program creates a Python dictionary that maps the sender's mail address to a count of the number of times they appear in the file. After the dictionary is produced, the program reads through the dictionary using a maximum loop to find the most prolific committer. name = raw_input('Enter file:') if len(name) == 0: name = 'mbox-short.txt' handle = open(name, 'r') #text = handle.read() sender_list = [] lst = list() counts = {} for lines in handle: if not lines.startswith("From") : continue if lines.startswith("From:") : continue lst = lines.split() lst_item = lst[1] sender_list.append(lst_item) #print sender_list,'\n\n' for name in sender_list : counts[name] = counts.get(name,0) + 1 #print counts bigcount = None bigname = None for word,count in counts.items(): if bigcount is None or count > bigcount: bigname = word bigcount = count print bigname,bigcount
{ "repo_name": "joeyb182/pynet_ansible", "path": "Coursera-UMich/9_4-mbox-short-count.py", "copies": "1", "size": "1139", "license": "apache-2.0", "hash": 1151198781344117000, "line_mean": 34.625, "line_max": 503, "alpha_frac": 0.7199297629, "autogenerated": false, "ratio": 3.4831804281345566, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.9595412791126767, "avg_score": 0.02153947998155787, "num_lines": 32 }
""" 9.4 Write a program to read through the mbox-short.txt and figure out who has the sent the greatest number of mail messages. The program looks for 'From ' lines and takes the second word of those lines as the person who sent the mail. The program creates a Python dictionary that maps the sender's mail address to a count of the number of times they appear in the file. After the dictionary is produced, the program reads through the dictionary using a maximum loop to find the most prolific committer. Desired output = cwen@iupui.edu 5 """ filename = raw_input("enter file name:") handle = None try: handle = open(filename) except: print 'File cannot be opened or read.', filename exit() counts = {} for line in handle: if line.strip().startswith('From:'): line = line.strip().lower() words = line.split() for word in words: if '@' in word: counts[word] = counts.get(word, 0) + 1 handle.close() # always close the file as soon as possible. Freeing resources asap is a best practice. email = None email_count = 0 for word,count in counts.items(): if email is None or count > email_count: email = word email_count = count print email, email_count
{ "repo_name": "missulmer/Pythonstudy", "path": "coursera_python_specialization/9_4.py", "copies": "1", "size": "1246", "license": "cc0-1.0", "hash": -6944992554957157000, "line_mean": 30.9487179487, "line_max": 134, "alpha_frac": 0.6894060995, "autogenerated": false, "ratio": 3.9182389937106916, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.5107645093210691, "avg_score": null, "num_lines": null }
#-9.5 --> 30.5/40 #Part 1: Terminology (15 points) -3.5 --> 11.5/15 #1 1pt) What is the symbol "=" used for? #That symbol is called a assignment operator, it creates new variables, and gives the variables values. #1pt # #2 3pts) Write a technical definition for 'function' #A function is a named set of code that peforms a computation. #3pt # #3 1pt) What does the keyword "return" do? #The keyword return, calls the function, after being called the function takes an argument and returns a result. This result is called a return value. # 1pt # #4 5pts) We know 5 basic data types. Write the name for each one and provide two # examples of each below # 1:int = 2 3 # 2:str = "hello" "testing" # 3:float = 2.346 3.14 # 4:bool = True False # 5:tupple = ("My name is DeeDee", "I am", 14, "years old") ("I like dogs", "I have" 3 "dogs") #5pt # #5 2pts) What is the difference between a "function definition" and a # "function call"? #A function definiton is just defining the fucntion, while a function call is the calling the actual function with numbers or strings. #0pt (reused "define" and "call" didn't really explain) # # #6 3pts) What are the 3 phases that every computer program has? What happens in # each of them # 1:input # 2:process # 3:output #1.5pt (didn't explain what happens) # #Part 2: Programming (25 points) - 6 --> 19/25 #Write a program that asks the user for the areas of 3 circles. #It should then calculate the diameter of each and the sum of the diameters #of the 3 circles. #Finally, it should produce output like this: #Circle Diameter #c1 ... #c2 ... #c3 ... #TOTALS ... 
# Hint: Radius is the square root of the area divided by pi #input import math C1 = raw_input("Area of C1: ") C2 = raw_input("Area of C2: ") C3 = raw_input("Area of C3: ") #1 pt for header line #3 pt for correct formula #1 pt for return value #1 pt for parameter name #1 pt for function name def circle_diameter(area): return math.sqrt(((area) / math.pi)) + math.sqrt(((area) / math.pi)) #NO OUTPUT FUNCTION #-1pt for header line #1pt for parameter names #1pt for return value #1pt for correct output format #-3pt for correct use of format function #NO MAIN FUNCTION #-1pt header line #1pt getting input #1pt converting input #-1pt for calling output function #2pt for correct diameter formula #1pt for variable names #-1pt for calling main print "" #processing CD1 = math.sqrt(((float(C1)) / math.pi)) + math.sqrt(((float(C1)) / math.pi)) CD2 = math.sqrt(((float(C2)) / math.pi)) + math.sqrt(((float(C2)) / math.pi)) CD3 = math.sqrt(((float(C3)) / math.pi)) + math.sqrt(((float(C3)) / math.pi)) Total = float(CD1) + float(CD2) + float(CD3) #output print "Circle Diameter" print "c1 " + str(CD1) print "c2 " + str(CD2) print "c3 " + str(CD3) print "Totals " + str(Total) #1pt explanatory comments #1pt code format
{ "repo_name": "deedee1886-cmis/deedee1886-cmis-cs2", "path": "cs2quiz1.py", "copies": "1", "size": "2885", "license": "cc0-1.0", "hash": -4936192264473424000, "line_mean": 28.7422680412, "line_max": 150, "alpha_frac": 0.6842287695, "autogenerated": false, "ratio": 2.9378818737270875, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.8952290260859663, "avg_score": 0.03396407647348466, "num_lines": 97 }
# 9.5 Properties

# An accessor is a simple method that can get or rebind an attribute
# through names such as getHeight / setHeight.

class Rectangle:
    def __init__(self):
        self.width = 0
        self.height = 0

    def setsize(self, size):
        self.width, self.height = size

    def getsize(self):
        return self.width, self.height

    def getarea(self):
        return self.width * self.height

    size = property(getsize, setsize)

r = Rectangle()
r.width = 10
r.height = 5
print(r.getsize())  # (10, 5)
print(r.getarea())  # 50
print(r.size)  # (10, 5) size looks like an ordinary attribute

# Python can hide the accessor methods so that all attributes look the
# same; attributes defined through accessors are called properties.
# Python has two mechanisms for creating properties.

# 9.5.2 Static methods and class methods
# When created, static methods and class methods are wrapped in objects
# of type staticmethod and classmethod respectively.
# A static method is defined without a self parameter and can be called
# directly on the class itself.
# A class method is defined with a cls parameter (similar to self);
# it can also be called on a concrete instance of the class,
# but cls is automatically bound to the class.

# < Python 2.4: wrap and replace the methods manually
# class MyClass:
#     def smeth():
#         print('This is a static method.')
#     smeth = staticmethod(smeth)
#
#     def cmeth(cls):
#         print('This is a class method of', cls)
#     cmeth = classmethod(cmeth)

# > Python 2.4: decorators
class MyClass:
    @staticmethod
    def smeth():
        print('This is a static method.')

    @classmethod
    def cmeth(cls):
        print('This is a class method of', cls)

MyClass.smeth()  # This is a static method.
MyClass.cmeth()  # This is a class method of <class '__main__.MyClass'>

# Static methods and class methods have never been very important in
# Python, mainly because in most cases they can be replaced by
# functions or bound methods.

# 9.5.3 __getattr__, __setattr__ and friends
# It is possible to intercept every attribute access on an object;
# this allows properties to be implemented with old-style classes
# (where the property function cannot be used).
# To run code when an attribute is accessed, these magic methods are used:
# __getattribute__(self, name)  called automatically when name is accessed (new-style classes only)
# __getattr__(self, name)  called when name is accessed and the object has no such attribute
# __setattr__(self, name, value)  called automatically when assigning to name
# __delattr__(self, name)  called automatically when deleting name

# A Rectangle class with the bookkeeping added
class RectanglePlus:
    def __init__(self):
        self.width = 0
        self.height = 0

    def __setattr__(self, name, value):
        if name == 'size':
            self.width, self.height = value
        else:
            self.__dict__[name] = value

    def __getattr__(self, name):
        if name == 'size':
            return self.width, self.height
        else:
            raise AttributeError

# 9.6 Iterators
# Only one special method is discussed here — __iter__, the basis of
# the iterator protocol.
# __iter__ returns an iterator: an object with a next method that takes
# no arguments. Calling next returns the iterator's next value; if next
# is called but the iterator has nothing to return, it raises a
# StopIteration exception.

class Fibs:
    def __init__(self):
        self.a = 0
        self.b = 1

    def __next__(self):
        self.a, self.b = self.b, self.a + self.b
        return self.a

    def __iter__(self):
        return self

# It is recommended that an iterator also implement its own __iter__
# method, so the iterator itself can be used directly in a for loop.
# Formally, an object implementing __iter__ is iterable, and an object
# implementing next is an iterator.

fibs= Fibs()
for f in fibs:
    if f > 1000:
        print(f)  # 1597
        break

# The built-in iter function obtains an iterator from an iterable object
it = iter([1, 2, 3])
print(it.__next__())  # 1
print(it.__next__())  # 2

# Use the list constructor to convert an iterator to a list explicitly
# class TestIterator:
#     value = 0
#
#     def next(self):
#         self.value += 1
#         if self.value > 10:
#             raise StopIteration
#         return self.value
#
#     def __iter__(self):
#         return self
#
# ti = TestIterator()
# list(ti)  # TypeError: iter() returned non-iterator of type 'TestIterator'

# 9.7 Generators
# Generators are a newer Python concept; for historical reasons they
# are also called simple generators.
# They can help write very elegant code (though they are never required).
# A generator is an iterator defined with ordinary function syntax.

# A list of lists
nd = [[1, 2], [3, 4], [5]]

# The function should print the numbers of the list in order:
# first iterate over all sublists of the provided nested list,
# then iterate over the elements of each sublist in order.

# New concept: the yield statement.
# Any function containing a yield statement is called a generator.
# Apart from the name, its behaviour also differs greatly from an
# ordinary function:
# instead of returning one value like return, it can produce several
# values, one at a time. Each time a value is produced (with yield) the
# function is frozen: it stops at that point and waits to be
# reactivated, then resumes execution from where it stopped.

def flatten(nested):
    for sublist in nested:
        for element in sublist:
            yield element

print(flatten(nd))  # <generator object flatten at 0x10ad59ca8>
for num in flatten(nd):
    print(num)  # 1 2 3 4 5
print(list(flatten(nd)))  # [1, 2, 3, 4, 5]

# A generator expression in a loop
g = ((i+2)**2 for i in range(2, 27))
print(g.__next__())  # 16

# 9.7.2 Recursive generators
# 9.7.3 Universal generators
# 9.7.4 Generator methods
# 9.7.5 Simulating generators
# Generators were not available in older versions of Python;
# how can ordinary functions be used to simulate them?
{ "repo_name": "xiezipei/beginning-python-demo", "path": "demo/attr.py", "copies": "1", "size": "6128", "license": "mit", "hash": -1260554104339724800, "line_mean": 18.7867298578, "line_max": 78, "alpha_frac": 0.6238620029, "autogenerated": false, "ratio": 1.5913076629813192, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.27151696658813196, "avg_score": null, "num_lines": null }
"""95. Unique Binary Search Trees II https://leetcode.com/problems/unique-binary-search-trees-ii/description/ Given an integer n, generate all structurally unique BST's (binary search trees) that store values 1 ... n. Example: Input: 3 Output: [ [1,null,3,2], [3,2,null,1], [3,1,null,null,2], [2,1,3], [1,null,2,null,3] ] Explanation: The above output corresponds to the 5 unique BST's shown below: ⁠ 1 3 3 2 1 ⁠ \ / / / \ \ ⁠ 3 2 1 1 3 2 ⁠ / / \ \ ⁠ 2 1 2 3 """ import copy from typing import List from common.tree_node import TreeNode class Solution: def generate_trees_1(self, n: int) -> List[TreeNode]: """ recursive :param n: :return: """ if n == 0: return [] if n == 1: return [TreeNode(1)] def backtrack(left: int, right: int) -> List[TreeNode]: if left > right: return [None] if left == right: return [TreeNode(left)] trees = [] for i in range(left, right + 1): for left_tree in backtrack(left, i - 1): for right_tree in backtrack(i + 1, right): root = TreeNode(i) root.left = left_tree root.right = right_tree trees.append(root) return trees return backtrack(1, n) def generate_trees_2(self, n: int) -> List[TreeNode]: """ dp :param n: :return: """ def modify_tree(node: TreeNode, offset: int): if offset == 0 or not node: return node.val += offset modify_tree(node.left, offset) modify_tree(node.right, offset) # dp[i] stores the trees of n=i if n == 0: return [] dp = [[] for _ in range(n + 2)] dp[0].append(None) dp[1].append(TreeNode(1)) for i in range(2, n + 1): for j in range(1, i + 1): for left_tree in dp[j - 1]: for right_tree in dp[i - j]: root = TreeNode(j) root.left = left_tree new_right_tree = copy.deepcopy(right_tree) modify_tree(new_right_tree, j) root.right = new_right_tree dp[i].append(root) return dp[n]
{ "repo_name": "isudox/leetcode-solution", "path": "python-algorithm/leetcode/unique_binary_search_trees_ii.py", "copies": "1", "size": "2584", "license": "mit", "hash": -8724293836029515000, "line_mean": 26.6666666667, "line_max": 73, "alpha_frac": 0.4535561601, "autogenerated": false, "ratio": 3.7021582733812948, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.9651312748264669, "avg_score": 0.0008803370433251587, "num_lines": 93 }
#9600, 8, none, stop1, noflow from __future__ import print_function import serial import time import struct import datetime import sys import os.path # Init def initOPC(ser): print("Init:") time.sleep(1) ser.write(bytearray([0x5A,0x01])) nl = ser.read(3) print(nl) time.sleep(.1) ser.write(bytearray([0x5A,0x03])) nl=ser.read(9) print(nl) time.sleep(.1) ser.write(bytearray([0x5A,0x02,0x92,0x07])) nl=ser.read(2) print(nl) time.sleep(.1) # Turn fan and laser off def fanOff(ser): ser.write(bytearray([0x61,0x03])) nl = ser.read(2) print(nl) time.sleep(.1) ser.write(bytearray([0x61,0x01])) nl = ser.read(2) print(nl) time.sleep(.1) # Turn fan and laser on def fanOn(ser): ser.write(bytearray([0x61,0x03])) nl = ser.read(2) print(nl) time.sleep(.1) ser.write(bytearray([0x61,0x00])) nl = ser.read(2) print(nl) time.sleep(.1) def combine_bytes(LSB, MSB): return (MSB << 8) | LSB def getHist(ser): ser.write(bytearray([0x61,0x30])) nl=ser.read(2) print(nl) time.sleep(.1) br = bytearray([0x61]) for i in range(0,62): br.append(0x30) ser.write(br) ans=bytearray(ser.read(1)) ans=bytearray(ser.read(62)) data={} data['Bin 0'] = combine_bytes(ans[0],ans[1]) data['Bin 1'] = combine_bytes(ans[2],ans[3]) data['Bin 2'] = combine_bytes(ans[4],ans[5]) data['Bin 3'] = combine_bytes(ans[6],ans[7]) data['Bin 4'] = combine_bytes(ans[8],ans[9]) data['Bin 5'] = combine_bytes(ans[10],ans[11]) data['Bin 6'] = combine_bytes(ans[12],ans[13]) data['Bin 7'] = combine_bytes(ans[14],ans[15]) data['Bin 8'] = combine_bytes(ans[16],ans[17]) data['Bin 9'] = combine_bytes(ans[18],ans[19]) data['Bin 10'] = combine_bytes(ans[20],ans[21]) data['Bin 11'] = combine_bytes(ans[22],ans[23]) data['Bin 12'] = combine_bytes(ans[24],ans[25]) data['Bin 13'] = combine_bytes(ans[26],ans[27]) data['Bin 14'] = combine_bytes(ans[28],ans[29]) data['Bin 15'] = combine_bytes(ans[30],ans[30]) data['period'] = struct.unpack('f',bytes(ans[44:48]))[0] data['pm1'] = struct.unpack('f',bytes(ans[50:54]))[0] data['pm2'] = 
struct.unpack('f',bytes(ans[54:58]))[0] data['pm10'] = struct.unpack('f',bytes(ans[58:]))[0] return(data) # Retrieve data def getData(ser): ser.write(bytearray([0x61,0x32])) nl=ser.read(2) time.sleep(.1) ser.write(bytearray([0x61,0x32,0x32,0x32,0x32,0x32,0x32,0x32,0x32,0x32,0x32,0x32,0x32])) ans=bytearray(ser.read(13)) b1 = ans[1:5] b2 = ans[5:9] b3 = ans[9:13] c1=struct.unpack('f',bytes(b1))[0] c2=struct.unpack('f',bytes(b2))[0] c3=struct.unpack('f',bytes(b3))[0] return([c1,c2,c3]) if __name__ == "__main__": serial_opts = { # built-in serial port is "COM1" # USB serial port is "COM4" "port": "/dev/ttyACM0", "baudrate": 9600, "parity": serial.PARITY_NONE, "bytesize": serial.EIGHTBITS, "stopbits": serial.STOPBITS_ONE, "xonxoff": False, "timeout": 1 } ofile=sys.argv[1] ser = serial.Serial(**serial_opts) ser.open() print("Init:") initOPC(ser) time.sleep(1) print("Fan Off:") fanOff(ser) time.sleep(5) print("Fan on:") fanOn(ser) time.sleep(5) print("Opening Output File:") if(not os.path.isfile(ofile)): f=open(ofile,'w+') print("time,b0,b1,b2,b3,b4,b5,b6,b7,b8,b9,b10,b11,b12,b13,b14,b15,period,pm1,pm2,pm10",file=f) else: f=open(ofile,'a') print("Looping:") for i in range(0,4320): t=getHist(ser) ts = time.time() tnow = datetime.datetime.fromtimestamp(ts).strftime('%Y-%m-%d %H:%M:%S') data=t print(tnow + "," + str(data['Bin 0']) + "," + str(data['Bin 1']) + "," + str(data['Bin 2']) + "," + str(data['Bin 3']) + "," + str(data['Bin 4']) + "," + str(data['Bin 5']) + "," + str(data['Bin 6']) + "," + str(data['Bin 7']) + "," + str(data['Bin 8']) + "," + str(data['Bin 9']) + "," + str(data['Bin 10']) + "," + str(data['Bin 11']) + "," + str(data['Bin 12']) + "," + str(data['Bin 13']) + "," + str(data['Bin 14']) + "," + str(data['Bin 15']) + "," + str(data['period']) + "," + str(data['pm1']) + "," + str(data['pm2']) + "," + str(data['pm10']) , file=f) print(tnow + "," + str(data['pm1']) + "," + str(data['period']) + "," + str(data['Bin 15'])) f.flush() time.sleep(59) 
print("Closing:") f.close() fanOff(ser) ser.close()
{ "repo_name": "ozjimbob/alphasense-opc", "path": "opc.py", "copies": "1", "size": "4239", "license": "mit", "hash": -7466551465594394000, "line_mean": 26.3483870968, "line_max": 583, "alpha_frac": 0.6013210663, "autogenerated": false, "ratio": 2.265633351149118, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.7934214995162899, "avg_score": 0.08654788445724378, "num_lines": 155 }
# 964. Least Operators to Express Number
#
# Known-incorrect attempt, kept as a record: `choice2` prices the extra
# x^(i+1) term as `1 + cost[i + 1]`, but the cheapest way to reach x^(i+1)
# may itself use subtraction rather than addition.
from functools import cache


class Solution:
    def leastOpsExpressTarget(self, x: int, target: int) -> int:
        MAX = 36
        # power[i] = x**i; cost[i] = operator count charged per ±x^i term
        # (x/x costs 2, x^i costs i multiplications for i >= 1).
        power = [1]
        cost = [2]
        for exp in range(1, MAX):
            power.append(power[-1] * x)
            cost.append(exp)

        @cache
        def dp(i, goal):
            # i is the exponent currently considered; goal what remains.
            if i == -1:
                return 0
            if goal < power[i] or goal >= power[i + 1]:
                # x^i is not the leading power of goal — skip down.
                below = dp(i - 1, goal)
                print(i, goal, below)
                return below
            # goal = quotient * x^i + remainer, 1 <= quotient < x,
            # 0 <= remainer < x^i.
            quotient, remainer = divmod(goal, power[i])
            choice1 = quotient * cost[i]
            choice2 = 1 + cost[i + 1] + (x - quotient) * cost[i]
            best = min(choice1, choice2) + dp(i - 1, remainer)
            print(i, goal, best)
            return best

        return dp(MAX - 2, target) - 1


if __name__ == "__main__":
    s = Solution()
    print(s.leastOpsExpressTarget(3, 929))  # Outputs 21, expected 19
{ "repo_name": "digiter/Arena", "path": "964-least-operators-to-express-number_2_WA.py", "copies": "1", "size": "1314", "license": "mit", "hash": 4346068954914455600, "line_mean": 32.6923076923, "line_max": 80, "alpha_frac": 0.5213089802, "autogenerated": false, "ratio": 3.2768079800498753, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.4298116960249875, "avg_score": null, "num_lines": null }
# 964. Least Operators to Express Number
# O(log(target)^2)
from functools import cache


class Solution:
    def leastOpsExpressTarget(self, x: int, target: int) -> int:
        @cache
        def min_ops(remaining):
            """Fewest operators to build `remaining` from ±x^k terms."""
            if remaining == 0:
                return 0
            if remaining < x:
                # Either add `remaining` copies of x/x, or start from one x
                # and subtract (x - remaining) copies of x/x.
                add_units = 2 * remaining - 1
                sub_units = 2 * (x - remaining)
                return min(add_units, sub_units)
            if remaining == x:
                return 0

            # Largest power of x not exceeding `remaining`:
            # base = x^(exp + 1), costing `exp` multiplications per term.
            base, exp = x, 0
            while base * x <= remaining:
                base *= x
                exp += 1
            if base == remaining:
                return exp

            # Undershoot: one x^(exp+1) term plus the leftover.
            under = exp + 1 + min_ops(remaining - base)
            # Overshoot: one x^(exp+2) term minus the excess — only when the
            # excess is strictly smaller, which also guarantees termination.
            overshoot = base * x - remaining
            if overshoot < remaining:
                return min(under, exp + 2 + min_ops(overshoot))
            return under

        return min_ops(target)
{ "repo_name": "digiter/Arena", "path": "964-least-operators-to-express-number_3.py", "copies": "1", "size": "1685", "license": "mit", "hash": 1115351946143768400, "line_mean": 32.7, "line_max": 82, "alpha_frac": 0.4462908012, "autogenerated": false, "ratio": 4.002375296912114, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.9946226710933771, "avg_score": 0.0004878774356686003, "num_lines": 50 }
# 97 through 122 import time f = open('/Users/kevin/Computer Science/project_euler/p59_text.txt', 'r') #print cipher def translate(a,b,c): length = len(cipher) temp = cipher place = 1 for i in xrange(0, length): if place == 3: temp[i] = chr(int(cipher[i]) ^ c) place = 1 continue if place == 2: temp[i] = chr(int(cipher[i]) ^ b) place = 3 continue temp[i] = chr(int(cipher[i]) ^ a) place = 2 return temp def compress(chr_arr): s = '' for i in xrange(0, len(chr_arr)): s += chr_arr[i] return s for a in xrange(97,123): for b in xrange(97,123): for c in xrange(97,123): cipher = f.readline() cipher = cipher.split(',') f.seek(0) # sets pointer of file back to beginning sol = translate(a,b,c) sol = compress(sol) if 'God' in sol and 'John' in sol: print sol s = 0 for c in sol: s += ord(c) print s quit() f.close()
{ "repo_name": "kbrose/project_euler", "path": "p50-59/p59.py", "copies": "1", "size": "1153", "license": "unlicense", "hash": -5469053161861186000, "line_mean": 22.5306122449, "line_max": 73, "alpha_frac": 0.4640069384, "autogenerated": false, "ratio": 3.5476923076923077, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.4511699246092308, "avg_score": null, "num_lines": null }
"""981. Time Based Key-Value Store https://leetcode.com/problems/time-based-key-value-store/ Create a timebased key-value store class TimeMap, that supports two operations. 1. set(string key, string value, int timestamp) Stores the key and value, along with the given timestamp. 2. get(string key, int timestamp) Returns a value such that set(key, value, timestamp_prev) was called previously, with timestamp_prev <= timestamp. If there are multiple such values, it returns the one with the largest timestamp_prev. If there are no values, it returns the empty string (""). Example 1: Input: inputs = ["TimeMap","set","get","get","set","get","get"], inputs = [[],["foo","bar",1],["foo",1],["foo",3],["foo","bar2",4],["foo",4],["foo",5]] Output: [null,null,"bar","bar",null,"bar2","bar2"] Explanation: TimeMap kv; kv.set("foo", "bar", 1); // store the key "foo" and value "bar" along with timestamp = 1 kv.get("foo", 1); // output "bar" kv.get("foo", 3); // output "bar" since there is no value corresponding to foo at timestamp 3 and timestamp 2, then the only value is at timestamp 1 ie "bar" kv.set("foo", "bar2", 4); kv.get("foo", 4); // output "bar2" kv.get("foo", 5); //output "bar2" Example 2: Input: inputs = ["TimeMap","set","set","get","get","get","get","get"], inputs = [[],["love","high",10],["love","low",20],["love",5],["love",10],["love",15],["love",20],["love",25]] Output: [null,null,null,"","high","high","low","low"] Note: All key/value strings are lowercase. All key/value strings have length in the range [1, 100] The timestamps for all TimeMap.set operations are strictly increasing. 1 <= timestamp <= 10^7 TimeMap.set and TimeMap.get functions will be called a total of 120000 times (combined) per test case. """ class TimeMap: def __init__(self): """ Initialize your data structure here. 
""" self.dic = {} def set(self, key: 'str', value: 'str', timestamp: 'int') -> 'None': if key in self.dic: self.dic[key].append({'v': value, 't': timestamp}) else: self.dic[key] = [{'v': value, 't': timestamp}] def get(self, key: 'str', timestamp: 'int') -> 'str': if key in self.dic: for kv in reversed(self.dic[key]): if timestamp >= kv['t']: return kv['v'] return "" else: return "" # Your TimeMap object will be instantiated and called as such: # obj = TimeMap() # obj.set(key,value,timestamp) # param_2 = obj.get(key,timestamp)
{ "repo_name": "isudox/leetcode-solution", "path": "python-algorithm/leetcode/time_based_key_value_store.py", "copies": "1", "size": "2561", "license": "mit", "hash": 3392176462893833000, "line_mean": 34.0821917808, "line_max": 180, "alpha_frac": 0.6149941429, "autogenerated": false, "ratio": 3.2707535121328224, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.929495884428233, "avg_score": 0.018157762150098188, "num_lines": 73 }
"""984. String Without AAA or BBB https://leetcode.com/problems/string-without-aaa-or-bbb/ Given two integers A and B, return any string S such that: S has length A + B and contains exactly A 'a' letters, and exactly B 'b' letters; The substring 'aaa' does not occur in S; The substring 'bbb' does not occur in S. Example 1: Input: A = 1, B = 2 Output: "abb" Explanation: "abb", "bab" and "bba" are all correct answers. Example 2: Input: A = 4, B = 1 Output: "aabaa" Note: 0 <= A <= 100 0 <= B <= 100 It is guaranteed such an S exists for the given A and B. """ class Solution: def str_without_3a3b(self, a, b): """ :type a: int :type b: int :rtype: str """ res = "" while a and b: if a > b: res = res + "aab" a = a - 2 b = b - 1 elif a < b: res = res + "bba" a = a - 1 b = b - 2 else: res = res + "ab" a = a - 1 b = b - 1 if a: res = res + "a" * a if b: res = res + "b" * b return res
{ "repo_name": "isudox/leetcode-solution", "path": "python-algorithm/leetcode/string_without_3a3b.py", "copies": "1", "size": "1190", "license": "mit", "hash": -1916451612155437300, "line_mean": 19.5172413793, "line_max": 60, "alpha_frac": 0.4605042017, "autogenerated": false, "ratio": 3.3055555555555554, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.9178313451836836, "avg_score": 0.017549261083743842, "num_lines": 58 }
"""98. Validate Binary Search Tree https://leetcode.com/problems/validate-binary-search-tree/description/ Given a binary tree, determine if it is a valid binary search tree (BST). Assume a BST is defined as follows: - The left subtree of a node contains only nodes with keys less than the node's key. - The right subtree of a node contains only nodes with keys greater than the node's key. - Both the left and right subtrees must also be binary search trees. Example 1: ⁠ 2 ⁠ / \ ⁠ 1 3 Input: [2,1,3] Output: true Example 2: ⁠ 5 ⁠ / \ ⁠ 1 4 / \ 3 6 Input: [5,1,4,null,null,3,6] Output: false Explanation: The root node's value is 5 but its right child's value is 4. """ from common.tree_node import TreeNode class Solution: def is_valid_bst(self, root: TreeNode) -> bool: def dfs(node: TreeNode, minimum, maximum) -> bool: if not node: return True if minimum is not None and minimum >= node.val: return False if maximum is not None and maximum <= node.val: return False return dfs(node.left, minimum, node.val) and dfs(node.right, node.val, maximum) return dfs(root, None, None) def is_valid_bst_inorder(self, root: TreeNode) -> bool: """ BST's in-order traversal sequence must be incremental. :param root: :return: """ seq = [] stack = [] while root or stack: if root: stack.append(root) root = root.left else: top_ele = stack.pop() seq.append(top_ele.val) if top_ele.right: root = top_ele.right for i in range(1, len(seq)): if seq[i - 1] >= seq[i]: return False return True
{ "repo_name": "isudox/leetcode-solution", "path": "python-algorithm/leetcode/validate_binary_search_tree.py", "copies": "1", "size": "1931", "license": "mit", "hash": 2847565672476987400, "line_mean": 24.2368421053, "line_max": 79, "alpha_frac": 0.5531803962, "autogenerated": false, "ratio": 3.6813819577735125, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.47345623539735127, "avg_score": null, "num_lines": null }
"""9-9. Battery Upgrade: Use the final version of electric_car.py from this section. Add a method to the Battery class called upgrade_battery(). This method should check the battery size and set the capacity to 85 if it isn’t already. Make an electric car with a default battery size, call get_range() once, and then call get_range() a second time after upgrading the battery. You should see an increase in the car’s range. """ class Car(object): """A simple attempt to represent a car.""" def __init__(self, make, model, year): """Initialize attributes""" self.make = make self.model = model self.year = year self.odometer_reading = 0 def get_descriptive_name(self): """Description of a car""" long_name = str(self.year) + ' ' + self.make + ' ' + self.model return long_name.title() def read_odometer(self): """Odometer""" print("This car has " + str(self.odometer_reading) + " miles on it.") def update_odometer(self, mileage): """update odometer""" if mileage >= self.odometer_reading: self.odometer_reading = mileage else: print("You can't roll back an odometer!") def increment_odometer(self, miles): """increment odometer""" self.odometer_reading += miles class Battery(object): """A simple attempt to model a battery for an electric car.""" def __init__(self, battery_size = 70): """Initialize the battery's attributes.""" self.battery_size = battery_size def describe_battery(self): """Print a statement describing the battery size.""" print("This car has a " + str(self.battery_size) + "-kWh battery.") def get_range(self): """Print a statement about the range this battery provides.""" if self.battery_size == 70: range = 240 elif self.battery_size == 85: range = 270 message = "This car can go approximately " + str(range) message += " miles on a full charge." 
print(message) def upgrade_battery(self): """change battery size""" if self.battery_size != 85: self.battery_size = 85 class ElectricCar(Car): """Represent aspects of a car, specific to electric vehicles.""" def __init__(self, make, model, year): """Initialize attributes of the parent class. Then initialize attributes specific to an electric car. """ super().__init__(make, model, year) self.battery_size = Battery() def describe_battery(self): """Print a statement describing the battery size.""" print("This car has a " + str(self.battery_size) + "-kWh battery.") def fill_gas_tank(): """Electric cars don't have gas tanks.""" print("This car doesn't need a gas tank!") my_tesla = ElectricCar('tesla', 'model s', 2016) # create electric car print("Car: " + my_tesla.get_descriptive_name()) # print description print(my_tesla.battery_size.get_range()) # get range my_tesla.battery_size.upgrade_battery() # upgrade battery print(my_tesla.battery_size.get_range()) # get range again after battery update
{ "repo_name": "AnhellO/DAS_Sistemas", "path": "Ene-Jun-2019/Angelica Rodriguez/Practica 1/battery_upgrade_9_9.py", "copies": "1", "size": "3206", "license": "mit", "hash": 8683257964822964000, "line_mean": 36.2325581395, "line_max": 79, "alpha_frac": 0.6252342286, "autogenerated": false, "ratio": 3.749414519906323, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.48746487485063233, "avg_score": null, "num_lines": null }
# 9. print_log('\n9. Build the SCHEMA request to add new schema to the ledger as a Steward\n') seq_no = 1 schema = { 'seqNo': seq_no, 'dest': steward_did, 'data': { 'id': '1', 'name': 'gvt', 'version': '1.0', 'ver': '1.0', 'attrNames': ['age', 'sex', 'height', 'name'] } } schema_data = schema['data'] print_log('Schema data: ') pprint.pprint(schema_data) print_log('Schema: ') pprint.pprint(schema) schema_request = await ledger.build_schema_request(steward_did, json.dumps(schema_data)) print_log('Schema request: ') pprint.pprint(json.loads(schema_request)) # 10. print_log('\n10. Sending the SCHEMA request to the ledger\n') schema_response = await ledger.sign_and_submit_request(pool_handle, wallet_handle, steward_did, schema_request) print_log('Schema response:') pprint.pprint(json.loads(schema_response))
{ "repo_name": "srottem/indy-sdk", "path": "docs/how-tos/save-schema-and-cred-def/python/step3.py", "copies": "4", "size": "1087", "license": "apache-2.0", "hash": -4292125829809776000, "line_mean": 37.8214285714, "line_max": 119, "alpha_frac": 0.5243790248, "autogenerated": false, "ratio": 3.7226027397260273, "config_test": false, "has_no_keywords": true, "few_assignments": false, "quality_score": 1, "avg_score": 0.004141923281151138, "num_lines": 28 }
'''
9-plot_flux.py
=========================
AIM:	Plot maps of the stray light flux or equivalent magnitude given a
	particular date

INPUT:	files:	- <orbit_id>_misc/orbits.dat
		- <orbit_id>_flux/flux_*.dat
		- resources/moon_*.dat, sun_*.dat, orbits_*.dat
	variables: see section PARAMETERS (below)

OUTPUT:	in <orbit_id>_figures/maps/ : map with the following name:
	flux_%07d.png

CMD:	python 9-plot_flux.py

ISSUES:	- Boundary zone when observing zone is centred on 360 deg

REQUIRES:- standard python libraries, specific libraries in resources/
	   (+ SciPy)
	 - Structure of the root folder:
	   * <orbit_id>_flux/         --> flux files
	   * <orbit_id>_figures/maps/ --> figures
	   * <orbit_id>_misc/         --> storages of data
	   * all_figures/             --> comparison figures

REMARKS: Python 2 script; runs everything at module level.
'''
###########################################################################
# DEFINITIONS AND INCLUDES
import numpy as np
import pylab as plt
import os
import random
import time
import copy

from scipy.interpolate import griddata
from matplotlib.patches import Rectangle, Circle

# NOTE(review): star imports below presumably provide sys, load_flux_file,
# init_folders, altitude2period, find_nearest, sort_boundary, flux2mag,
# fast_orbit2times, fast_minute2orbit, rev, right_ascension, declination
# — confirm against resources/routines and resources/TimeStepping.
from resources.routines import *
from resources.TimeStepping import *

import parameters as param
import resources.constants as const
import resources.figures as figures
from matplotlib.collections import PatchCollection
import matplotlib.path as mpath
import matplotlib.patches as mpatches
import matplotlib.lines as mlines
###########################################################################
# PARAMETERS
# Orbital elements
apogee=650
perigee=650
orbit_id = '6am_650_5_conf4e'

# First minute in data set !
minute_ini = 75#1440 * 172
minute_end = 81#1440 * 172 + 100

# File name for the output data file
orbits_file = 'orbits.dat'

# is true outputs .eps and .pdf for every step. Much slower and heavier
fancy = False

# compare the data to the flux of different stars (magnitudes) instead of
# raw stray light flux
magnitudes = False

# Show stray light contour map ?
straylight = True

# Draw boundaries ?
boundaries = False

# Speeds up by not loading the different position files, does not save,
# but shows --> for new implementation or debugging
future = True
save = False

# TODO:
# - For minute (orbit_ID 800) 363500, contour of zone better
file_orbit = 'orbit_%s.dat' % orbit_id
file_sun = 'sun_%s.dat' % orbit_id
file_moon = 'moon_%s.dat' % orbit_id

# Factor in the SL post treatment correction ?
SL_post_treat = True
# Factor in mirror efficiency for the equivalent star magnitude ?
mirror_correction = False
###########################################################################
# CONSTANTS AND PHYSICAL PARAMETERS
period = altitude2period(apogee,perigee)
# Colour-scale limits for the stray light flux maps.
sl_min = 1e-9
sl_max = 0.1
###########################################################################
# INITIALISATION
file_flux = 'flux_'

# Formatted folders definitions
folder_flux, folder_figures, folder_misc = init_folders(orbit_id)
folder_figures= '%s_figures/maps/' % (orbit_id)

#params = {'backend': 'ps','axes.labelsize': 14,'text.fontsize': 18,'legend.fontsize': 18,'xtick.labelsize': 14,'ytick.labelsize': 14,'text.usetex': True}
#plt.rcParams.update(params)
from matplotlib import rc
rc('font',**{'family':'serif','serif':['Palatino'],'size':14})
rc('text', usetex=True)

print '\nObservability Map Plotting'
print 'ORBIT ID:', orbit_id
print '-------------------------------------------'

if not os.path.isdir(folder_figures):
	print '\tError: figure folder %s does not exists.' % (folder_figures)
	exit()

ra = dec = S_sl=np.zeros(1)

# load the positions of the Moon, Sun and Earth (skipped in `future` mode)
if not future :
	sys.stdout.write("Loading orbit file...\t\t\t")
	sys.stdout.flush()
	# Delimiter varies between generated files; try both.
	try:
		sat = np.loadtxt('resources/'+file_orbit, delimiter='\t')
	except ValueError:
		sat = np.loadtxt('resources/'+file_orbit, delimiter=' ')
	# apply time conditions
	sat = sat[sat[:,0] >= minute_ini]
	sat = sat[sat[:,0] <= minute_end]
	print "Done."

	sys.stdout.write("Loading Sun file...\t\t\t")
	sys.stdout.flush()
	try:
		sun = np.loadtxt('resources/'+file_sun, delimiter=' ')
	except ValueError:
		sun = np.loadtxt('resources/'+file_sun, delimiter='\t')
	sun = sun[sun[:,0] >= minute_ini]
	sun = sun[sun[:,0] <= minute_end]
	print "Done."

	sys.stdout.write("Loading Moon file...\t\t\t")
	sys.stdout.flush()
	try:
		moon = np.loadtxt('resources/'+file_moon, delimiter=',')
	except ValueError:
		moon = np.loadtxt('resources/'+file_moon, delimiter=' ')
	moon = moon[moon[:,0] >= minute_ini]
	moon = moon[moon[:,0] <= minute_end]
	print "Done."

################################################################################
# Prepare the RA/DEC grid (cell corners in ras/decs, centres in ras2/decs2)
n_alpha = param.resx
n_delta = param.resy

ra_i = 0
ra_f = 2.*np.pi
dec_i = -np.pi/2.
dec_f = np.pi/2.

ra_step = (ra_f-ra_i)/n_alpha
dec_step = (dec_f-dec_i)/n_delta

iterable = (ra_i + i*ra_step for i in range(n_alpha))
ras = np.fromiter(iterable, np.float)-np.pi

iterable = (dec_i + i*dec_step for i in range(n_delta))
decs = np.fromiter(iterable, np.float)

ra_grid, dec_grid = np.meshgrid(ras, decs)
grid_points0 = np.zeros(np.shape(ra_grid))

iterable = (ra_i + ra_step/2 + i*ra_step for i in range(n_alpha))
ras2 = np.fromiter(iterable, np.float)-np.pi

iterable = (dec_i + dec_step/2 + i*dec_step for i in range(n_delta))
decs2 = np.fromiter(iterable, np.float)

################################################################################
# Prepares the list of minutes covered by the computed orbits
sys.stdout.write("Loading computed orbits...\t\t")
sys.stdout.flush()
orbits = np.loadtxt(folder_misc+orbits_file,dtype='i4')

list_minutes = -1. * np.ones( ( np.shape(orbits)[0] + 2 ) * period )

id_min = 0
times = np.loadtxt('resources/minute_table_%s.dat' % orbit_id, delimiter=',',dtype='Int32')
for ii, orbit_current in enumerate(orbits[:,0]):
	t_ini, t_end, a_ini, a_end = fast_orbit2times(times,orbit_current,orbit_id)
	for minute in range(a_ini, a_end+1):
		list_minutes[id_min] = int(minute)
		id_min += 1

list_minutes = list_minutes[list_minutes > -1]

# apply time conditions
list_minutes = list_minutes[list_minutes >= minute_ini]
list_minutes = list_minutes[list_minutes <= minute_end]
print 'Done.'

################################################################################
def draw_boundaries(ax,xx,yy,ra_step,dec_step, force=False):
	# Draw the outer limit of the observed zone as a closed matplotlib
	# path; returns the points left over by sort_boundary (if any).
	# sort the outer points right to draw the limits correctly
	xx, yy, ox, oy = sort_boundary(xx,yy,ra_step,dec_step, force)

	# the last point is the first to close the path
	xx = np.append(xx,xx[0])
	yy = np.append(yy,yy[0])

	# Format the vertices of the path correctly ie [(x1,y1), (x2,y2), ..., (x1, y1)]
	verts = np.array(zip(xx,yy))

	# Create the path and say which points are the 1st, vertices or last point
	Path = mpath.Path
	codes = [Path.MOVETO]
	for ii in range(1, len(xx)-1):
		codes.append(Path.LINETO)
	codes.append(Path.CLOSEPOLY)

	path = mpath.Path(verts+[0,0], codes)
	patch = mpatches.PathPatch(path, facecolor='none', edgecolor='black',zorder=100)
	patch = ax.add_patch(patch)

	return ox, oy
################################################################################

if not magnitudes:
	from matplotlib import colors, ticker, cm

################################################################################
# Loops on every time step to get the data
for id_min, minute in enumerate(list_minutes):
	minute = int(minute)
	sys.stdout.write("Plotting stray light map "+str(minute)+'...\t')
	sys.stdout.flush()

	# Loads the data. As it's produced by fortran, it may be a bit sketchy;
	# we read line by line (slower than the native numpy text reader) and
	# filter out all spaces afterwards.
	try:
		ra, dec, S_sl = load_flux_file(minute, file_flux, folder=folder_flux)
		# Apply the flux correction (SL post-treatment removal and the
		# mirror efficiency)
		if mirror_correction:
			S_sl /= param.mirror_efficiency
		if SL_post_treat:
			S_sl *= (1.0 - param.SL_post_treat_reduction)
		S_sl *= param.SL_QE
		# Now we compare the flux with the magnitude of the star.
		if magnitudes:
			S_sl = flux2mag(S_sl,param.ppm_threshold)
		# Data is loaded and processed. --> We do a plot
		doplot = True
	except IOError:
		# There is no file: nothing to plot for this minute.
		# NOTE(review): the bare `raise` makes the fallback below
		# unreachable — presumably left from debugging; confirm intent.
		raise
		S_sl=np.zeros(1)
		ra = dec = S_sl
		doplot = False
		sys.stdout.write("Warning: No point\t")
		sys.stdout.flush()

	# Start plotting for this minute (Mollweide all-sky projection)
	plt.figure()
	ax = plt.subplot(111, projection="mollweide")
	ax.grid(True)
	ax.set_xticklabels([r'$30^{\circ}$',r'$60^{\circ}$',r'$90^{\circ}$',r'$120^{\circ}$',r'$150^{\circ}$',r'$180^{\circ}$',r'$210^{\circ}$',r'$240^{\circ}$',r'$270^{\circ}$',r'$300^{\circ}$',r'$330^{\circ}$'])
	ax.set_xlabel(r'$\alpha$')
	ax.set_ylabel(r'$\delta$')

	extent = (-np.pi,np.pi,-np.pi/2.,np.pi/2.)

	if doplot:
		# Contour levels: magnitudes on a linear scale, flux on a log scale.
		if magnitudes:
			vf = np.linspace(param.magnitude_min,param.magnitude_max+0.2, (param.magnitude_max-param.magnitude_min+1)*2, endpoint=True)
			v = np.linspace(param.magnitude_min,param.magnitude_max, (param.magnitude_max-param.magnitude_min+1), endpoint=True)
			t = map(figures.format_mag, v)
		else:
			vf = np.logspace(np.log10(sl_min), np.log10(sl_max), np.log10(sl_max)-np.log10(sl_min)*2, endpoint=True)
			v = np.logspace(np.log10(sl_min), np.log10(sl_max), np.log10(sl_max)-np.log10(sl_min)+1, endpoint=True)
			t = map(figures.format_log10, v)

		# prepare the surface plot
		xi = np.linspace(-np.pi,np.pi,n_alpha*2)
		yi = np.linspace(-np.pi/2,np.pi/2,n_delta*2)

		# grid the data.
		# Sometimes if the points are too well aligned, griddata hangs up
		# as it uses 2d interpolation; introduce a small random noise on
		# the positions. See
		# http://stackoverflow.com/questions/10886971/orbit_idernatives-to-scipy-interpolate-griddata-that-dont-hang-on-aligned-points
		ra = ra + np.random.random(ra.shape[0]) * 1e-6 - np.pi
		dec = dec + np.random.random(ra.shape[0]) * 1e-6

		zi = griddata((ra, dec), S_sl, (xi[None,:], yi[:,None]), method='cubic')

		# cosmetics: mask any interpolated cell with no data point nearby
		for ii, rag in enumerate(xi) :
			a = np.where(abs(ra-rag)<0.1)[0]
			if np.shape(a) == 0: continue # if no a in array, continue
			for jj, decg in enumerate(yi) :
				b = np.where(abs(dec-decg)<0.1)[0]
				# if no dec in array, continue
				if np.shape(b)[0] == 0: continue
				# if not close enough, mask value
				if np.shape(np.intersect1d(a,b))[0] == 0: zi[jj,ii] = np.nan

		if straylight:
			# complete the holes in the interpolation with coloured cells
			cmap = plt.cm.jet
			w = ra_step
			h = dec_step
			for x, y, c_sl in zip(ra, dec, S_sl):
				if magnitudes:
					cc = (c_sl-param.magnitude_min)/(param.magnitude_max+0.2-param.magnitude_min)
				else:
					cc = (np.log10(c_sl)-np.log10(sl_min))/(np.log10(sl_max)-np.log10(sl_min))
				ax.add_artist(Rectangle(xy=(x-w/2,y-h/2), color=cmap(cc), width=w, height=h, zorder=0))

		# make a plot of the centers of the cells
		scat=plt.plot(ra,dec,'o',c='k', markersize=2)

		if straylight:
			if magnitudes:
				CS = plt.contour(xi,yi,zi,vf,linewidths=0.5,colors='k',extent=extent)
				CS = plt.contourf(xi,yi,zi,vf,cmap=plt.cm.jet,extent=extent)
			else:
				CS = plt.contour(xi,yi,zi,vf,linewidths=0.5,colors='k',extent=extent,locator=ticker.LogLocator())
				CS = plt.contourf(xi,yi,zi,vf,cmap=plt.cm.jet,extent=extent,locator=ticker.LogLocator())
			cbar = plt.colorbar(CS, ticks=v, orientation='horizontal',shrink=.8)
			cbar.set_ticklabels(t)
			l,b,w,h = plt.gca().get_position().bounds
			ll,bb,ww,hh = cbar.ax.get_position().bounds
			cbar.ax.set_position([ll, bb+0.1, ww, hh])
			if magnitudes:
				cbar.set_label(r'$m_V$')
			else:
				cbar.set_label(r'$\mathrm{Stray\ light\ flux}\ [\frac{\mathrm{ph}}{\mathrm{px}\cdot s}]$')

		##################################################################
		# Get the outer limit of the data
		# make deep copy to avoid pointer issues from one image to the next
		grid_points = copy.deepcopy(grid_points0)
		grid_points2 = copy.deepcopy(grid_points0)

		# For each point, find out what are the neighbouring points on the
		# (half) grid
		for ii, ra_ in enumerate(ra):
			dec_ = dec[ii]
			id_ra = find_nearest(ras,ra_+ra_step/2)
			id_dec = find_nearest(decs,dec_+dec_step/2)
			grid_points[id_dec,id_ra] = 1
			id_ra = find_nearest(ras,ra_+ra_step/2)
			id_dec = find_nearest(decs,dec_-dec_step/2)
			grid_points[id_dec,id_ra] = 1
			id_ra = find_nearest(ras,ra_-ra_step/2)
			id_dec = find_nearest(decs,dec_+dec_step/2)
			grid_points[id_dec,id_ra] = 1
			id_ra = find_nearest(ras,ra_-ra_step/2)
			id_dec = find_nearest(decs,dec_-dec_step/2)
			grid_points[id_dec,id_ra] = 1

		if boundaries:
			# Size of "board"
			X = n_alpha-1
			Y = n_delta-1
			# Create the list of all interesting points' neighbours
			neighbours = lambda x, y : [(x2, y2) for x2 in range(x-1, x+2) for y2 in range(y-1, y+2) if -1 < x <= X and -1 < y <= Y and (x != x2 or y != y2)]
			# restrict to only outer points
			for ii in range(0, n_alpha):
				for jj in range(0, n_delta):
					if grid_points[jj,ii]>0. :
						v = [grid_points[neighbours(ii,jj)[kk][1], neighbours(ii,jj)[kk][0]] for kk in range( 0, len( neighbours(ii,jj) ) )]
						# If there is a grid point which is not neighbour to
						# a stray light calculation point, then it is an
						# outer cell point.
						if 0 in v: grid_points2[jj,ii] = 1

			# restrict the list of the points in the cell grid to only
			# outer points
			ra_grid2 = ra_grid[np.where(grid_points2>0.)]
			dec_grid2 = dec_grid[np.where(grid_points2>0.)]

			# change the name of the variable for clarity
			xx = ra_grid2
			yy = dec_grid2
			del ra_grid2, dec_grid2, grid_points, grid_points2

			ox, oy = draw_boundaries(ax,xx,yy,ra_step,dec_step)
			if np.shape(ox)[0] > 2 and future:
				ox, oy = draw_boundaries(ax,ox,oy,ra_step,dec_step,True)
			del xx, yy, ox, oy

		if not future:
			# find the position of the satellite in the orbit to compute
			# RA, DEC of the Earth and the Sun with respect to the sat's
			# centre.
			# EARTH
			id_sat = find_nearest(sat[:,0],minute)
			x = -1*sat[id_sat,1]
			y = -1*sat[id_sat,2]
			z = -1*sat[id_sat,3]
			ra_sat = rev( right_ascension(x,y) )
			dec_sat= declination(x,y,z)
			circle = Circle((ra_sat-np.pi,dec_sat), 0.07, facecolor='blue', edgecolor='none', alpha=1, zorder=5)
			ax.add_patch(circle)

			# SUN
			id_sat = find_nearest(sun[:,0],minute)
			x = sun[id_sat,1]
			y = sun[id_sat,2]
			z = sun[id_sat,3]
			ra_sun = rev( right_ascension(x,y) )
			dec_sun= declination(x,y,z)
			plt.plot(ra_sun - np.pi,dec_sun,'o',color="yellow", markersize=8, zorder=5)

			# MOON
			id_sat = find_nearest(moon[:,0],minute)
			x = moon[id_sat,1]
			y = moon[id_sat,2]
			z = moon[id_sat,3]
			ra_moon = rev( right_ascension(x,y) )
			dec_moon= declination(x,y,z)
			circle = Circle((ra_moon - np.pi,dec_moon), 0.03, facecolor='white', edgecolor='black', linewidth=0.5, alpha=1, zorder=5)
			ax.add_patch(circle)

			# reduce the size of the arrays to speed up the plots
			sat = sat[sat[:,0] >= minute]
			sun = sun[sun[:,0] >= minute]
			moon = moon[moon[:,0] >= minute]
	else:
		# No data for this minute: draw an empty scatter just to create a
		# consistent colorbar, then remove the points.
		v = np.linspace(param.magnitude_min,param.magnitude_max+0.2, (param.magnitude_max-param.magnitude_min+1)*2, endpoint=True)
		scat=plt.scatter(ra,dec,c=S_sl, s=2,vmin=param.magnitude_min, vmax=v[-1])
		t = np.linspace(param.magnitude_min,param.magnitude_max, (param.magnitude_max-param.magnitude_min+1), endpoint=True)
		plt.colorbar(ticks=t)
		scat.remove()

	plt.grid(True)

	# add the time and the orbit number.
	# convert epoch to matplotlib float format
	labels = minute * 60. + const.timestamp_2018_01_01
	# to human readable date
	pre = time.gmtime(labels)
	labels = figures.format_second(pre)
	orbit_current = fast_minute2orbit(times,minute,orbit_id)
	plt.text(-0.1, 1.0,'%s' % labels, transform = ax.transAxes)
	plt.text(-0.1, 0.9,r'$\mathrm{orbit}\ %d$' % orbit_current, transform = ax.transAxes)
#	plt.text(-0.1, 0.8,r'$\mathrm{id}\ %d$' % orbit_id, transform = ax.transAxes)

	if future:
		plt.show()
		exit()
	if not save:
		plt.show()

	if magnitudes:
		fname = '%s/flux_%07d' % (folder_figures, minute)
	else:
		fname = '%s/straylight_%07d' % (folder_figures, minute)

	if (fancy and save):
		# eps -> pdf -> cropped pdf -> png pipeline for publication figures
		plt.savefig(fname+'.eps')
		os.system("epstopdf "+fname+".eps")
		os.system('pdfcrop '+fname+'.pdf')
		os.system('mv '+fname+'-crop.pdf '+fname+'.pdf')
		os.system('pdftocairo -png '+fname+'.pdf'+' '+fname)
	if save:
		plt.savefig(fname+'.png', dpi=param.dpi)

	plt.close()
	del ra, dec, S_sl
print "Done."
{ "repo_name": "kuntzer/SALSA-public", "path": "9_plot_flux.py", "copies": "1", "size": "16932", "license": "bsd-3-clause", "hash": 8266739750742710000, "line_mean": 32.2652259332, "line_max": 225, "alpha_frac": 0.6182967163, "autogenerated": false, "ratio": 2.844758064516129, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.8775725576144229, "avg_score": 0.037465840934380046, "num_lines": 509 }
#!/usr/bin/env python
"""Build and submit Notre Dame CRC (SGE) job scripts.

For each input filename, writes <name>.sh from DEFAULT_SUB_FILE and
submits it with qsub.  The default job command is a Gaussian09 run on
<name>.com.
"""
import argparse
import sys
import os
import subprocess as sp

# csh submission template.  Placeholders, in order:
#   1: CRC NetID (mail target, "#$ -M")    2: job name ("#$ -N")
#   3: queue ("#$ -q")                     4: parallel-environment line
#   5: the job command itself
DEFAULT_SUB_FILE = '''#!/bin/csh
#$ -M {}@nd.edu
#$ -m ae
#$ -N {}
#$ -q {}
#$ -r n
{}
module load schrodinger/2015u3
module load gaussian/09D01
module load tinker
setenv SCHRODINGER_TEMP_PROJECT "~/.schrodtmp"
setenv SCHRODINGER_TMPDIR "~/.schrodtmp"
setenv SCHRODINGER_JOBDB2 "~/.schrodtmp"
{}'''

#Change the default user here
defaultuser='arosale4'

def CRC_qsub(job_name, USER, QUEUE, CPU, COMMAND):
    ## Writes the submission file with all the appropriate options: job name,
    ## queue, processors, and the job command.
    # BUG FIX: the template's first placeholder is the mail user
    # ("#$ -M {}@nd.edu") and the second is the job name ("#$ -N {}").
    # The original passed (job_name, USER, ...) in the wrong order, so mail
    # went to <jobname>@nd.edu and the job was named after the user.
    with open(job_name + '.sh', 'w') as submission_file:
        submission_file.write(
            DEFAULT_SUB_FILE.format(USER, job_name, QUEUE, CPU,
                                    COMMAND.format(job_name + '.com')))

def queue(opts):
    ## Queue option of the CRC. I think I can only use long and debug, which
    ## long is the default.
    if opts.queue:
        return opts.queue
    return 'long'

def processors(opts):
    ## Sets the number of processors to request from the CRC. Default is to
    ## use 8 processors. When there is no argument to follow ("-pe") then
    ## this section is removed to allow for only one processor. An additional
    ## argument will just write that argument, e.g. "-pe -pe smp 16" would
    ## add "#$ -pe smp 16" to the submission script.
    if opts.processors == 'default':
        return '#$ -pe smp 8'
    if opts.processors == 'none':
        # A single space keeps the template line present but inert.
        return ' '
    return '#$ ' + opts.processors

def command(opts):
    ## Sets the actual command to accomplish. By default it will do a
    ## Gaussian job. Example of an alternative is
    ## "--command bmin -WAIT conf_search".
    if opts.command:
        return opts.command
    return 'g09 {}'

def main(args):
    """Parse arguments, write one .sh per input file, and qsub each one."""
    parser = return_parser()
    opts = parser.parse_args(args)
    QUEUE = queue(opts)
    CPU = processors(opts)
    COMMAND = command(opts)
    if opts.username:
        USER = opts.username
    else:
        USER = defaultuser
    for filename in opts.filename:
        # Strip the extension: job/script names are based on the stem.
        run_file = os.path.splitext(filename)[0]
        CRC_qsub(run_file, USER, QUEUE, CPU, COMMAND)
        sp.call('qsub {}.sh'.format(run_file), shell=True)
#        print('This is where you would run the following command')
#        print('>>>>> qsub {}.sh'.format(run_file))

def return_parser():
    """Return the argparse parser for this tool (kept separate for reuse)."""
    parser = argparse.ArgumentParser(
        description='To fill out later')
    parser.add_argument(
        'filename', type=str, nargs='+', help='Filename')
    parser.add_argument(
        '-q','--queue', type=str, help='"long" or "debug"')
    parser.add_argument(
        '-pe','--processors', type=str, nargs='?', const='none',
        default='default',
        help='No option string = default smp 8; \n'
        'Option string but no argument = no multiple processing; and \n'
        'Option string with argument = "#$" + argument')
    parser.add_argument(
        '-c','--command', type=str, help='Command that are being ran. The \
        default will be to perform a g09 calcualtion on <filename>')
    parser.add_argument(
        '-u','--username', type=str, help='Notre Dame CRC user name. Probably \
        a NetID.')
    return parser

if __name__ == '__main__':
    main(sys.argv[1:])
##################################
{ "repo_name": "arosale4/q2mm", "path": "tools/submit.py", "copies": "3", "size": "3435", "license": "mit", "hash": -8008412664737023000, "line_mean": 29.9459459459, "line_max": 79, "alpha_frac": 0.6154294032, "autogenerated": false, "ratio": 3.544891640866873, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.5660321044066873, "avg_score": null, "num_lines": null }
# # a="00014#" # # print(a.isdigit()) # # print(a.isnumeric()) # # print(a.isalnum()) # # a = {"name":"a","count":3} # print(type(a)) # b = a.keys() # c = a.values() # print(b) # print(c) # for i in b: # print(i) # for i in c: # print(i) # print(type(b)) # print(type(c)) # d = set(b) # print(type(d)) # print(d) # import os # print(__file__) # b = os.path.dirname(os.path.abspath(__file__)) # print(b) # c = os.path.join(b,"info.txt") # print(c) # with open(c,'r') as f: # for i in f.readlines(): # print(i) # i = {"name":1,"sex":0} # l = 0 # while l < 1: # a = [] # a.append(i) # i = {"name":2,"sex":1} # a.append(i) # print(a) # l += 1 # from collections import Counter # a="asfasdgagqw3re" # b = Counter(a) # print(b) # print(b.items()) # for i,h in b.items(): # print(i,h) #有序字典 # import collections # a = collections.OrderedDict() # a['a']='a' # a['b']='b' # a['c']='c' # # a.update({'a':'aa','d':'dd'}) # print(a) #默认字典就是定义values的值是什么类型 # import collections # l = [1,2,3,4,5,6] # a= collections.defaultdict(list) # for i in l: # a['k1'].append(i) # print(a) #可命名元祖 # import collections # MytupleClass = collections.namedtuple('mytuple',['x','y','z']) # a = MytupleClass(11,22,33) # print(a) ## mytuple(x=11, y=22, z=33) #队列,像是一个列表 # import collections # def c(): # pass # a = collections.deque() # a.append({'b':1,'c':2}) # a.append({'e':1}) # a.append(1) # a.appendleft(10) # a.append(c) # a.extend([111,'sfa',10]) # print(a) # print(a.index(10)) # a.rotate(2) # print(a) # print(a.pop()) # print(a) # # # # b = [] # b.append({'b':1,'c':2}) # b.append({'e':1}) # b.append(1) # b.extend([10,1,'sdf']) # b.append(c) # print(b) # print(b.index(10)) # print(b.pop(0)) # print(b) # # import collections # # a = dict([('name',1),('sex',0),('age',18)]) # for i in a.items(): # print(i) # # b = collections.OrderedDict([('name',1),('sex',0),('age',18)]) # for i in b.items(): # print(i) # import json # a = '{"name":"ds","other":{"count":1,"age":18}}' # b = json.loads(a) # print(b) # a = ["name"] # 
b = str(a[0:]) # print(b) # a = {} # a is dict print("\033[4;31;")
{ "repo_name": "dianshen/python_day", "path": "day3/test.py", "copies": "1", "size": "2216", "license": "apache-2.0", "hash": -7510194399769710000, "line_mean": 15.1804511278, "line_max": 64, "alpha_frac": 0.5269516729, "autogenerated": false, "ratio": 2.1370407149950346, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.8146050865540306, "avg_score": 0.003588304470945857, "num_lines": 133 }
A =[0,2,0,2,0,0,3,0,0,4] B =[0,0,0,0,5,0,2,0,0,8] def dot_product_unsorted(A,B): # T O(mn) # without order resA = {} resB = {} for i in range(len(A)): if A[i] != 0: resA[i] = A[i] for j in range(len(B)): if B[j] != 0: resB[j] = B[j] res = 0 for each in resA: if each in resB: res += resA[each] * resB[each] print res def dot_product_sorted(A,B): # O(min(m,n)) # with order resA = [] resB = [] for i in range(len(A)): if A[i]: resA.append((i,A[i])) for j in range(len(B)): if B[j]: resB.append((j,B[j])) res = 0 i1 = 0 i2 = 0 while i1 < len(resA) and i2 < len(resB): if resA[i1][0] == resB[i2][0]: res += resA[i1][1] * resB[i2][1] i1 += 1 i2 += 1 elif resA[i1][0] > resB[i2][0]: i2 += 1 else: i1 += 1 print res def binarysearch(array, start, end, target): while start + 1 < end: mid = start + (end - start) / 2 pair = array[mid] if pair[0] == target: return mid elif pair[0] < target: start = mid else: end = mid if array[end][0] == target: return end return start def dot_product3(B,A): # if A is so large # O(mlgn) if resA and resB given resA = [] resB = [] for i in range(len(A)): if A[i]: resA.append((i,A[i])) for j in range(len(B)): if B[j]: resB.append((j,B[j])) i = 0 j = 0 res = 0 print resA, resB while i < len(resA): pairA = resA[i] i += 1 j = binarysearch(resB, j, len(resB)-1, pairA[0]) pairB = resB[j] j += 1 print pairA,pairB if pairA[0] == pairB[0]: res += pairA[1] * pairB[1] print res dot_product_unsorted(A,B) dot_product_sorted(A,B) dot_product3(A,B)
{ "repo_name": "youhusky/Facebook_Prepare", "path": "dot_product.py", "copies": "1", "size": "1995", "license": "mit", "hash": -288516040476354800, "line_mean": 20.6847826087, "line_max": 56, "alpha_frac": 0.4426065163, "autogenerated": false, "ratio": 2.755524861878453, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.856916864520751, "avg_score": 0.025792546594188663, "num_lines": 92 }
a = "{0[ ]:X>+10d}" a = "{0[ ]!s:X>+10d}" a = "{0[ ]:Xd>+10d}" #invalid a : source.python : source.python = : keyword.operator.assignment.python, source.python : source.python " : punctuation.definition.string.begin.python, source.python, string.quoted.single.python {0[ ] : constant.character.format.placeholder.other.python, meta.format.brace.python, source.python, string.quoted.single.python :X>+10d : constant.character.format.placeholder.other.python, meta.format.brace.python, source.python, storage.type.format.python, string.quoted.single.python } : constant.character.format.placeholder.other.python, meta.format.brace.python, source.python, string.quoted.single.python " : punctuation.definition.string.end.python, source.python, string.quoted.single.python a : source.python : source.python = : keyword.operator.assignment.python, source.python : source.python " : punctuation.definition.string.begin.python, source.python, string.quoted.single.python {0[ ] : constant.character.format.placeholder.other.python, meta.format.brace.python, source.python, string.quoted.single.python !s : constant.character.format.placeholder.other.python, meta.format.brace.python, source.python, storage.type.format.python, string.quoted.single.python :X>+10d : constant.character.format.placeholder.other.python, meta.format.brace.python, source.python, storage.type.format.python, string.quoted.single.python } : constant.character.format.placeholder.other.python, meta.format.brace.python, source.python, string.quoted.single.python " : punctuation.definition.string.end.python, source.python, string.quoted.single.python a : source.python : source.python = : keyword.operator.assignment.python, source.python : source.python " : punctuation.definition.string.begin.python, source.python, string.quoted.single.python {0[ ] : constant.character.format.placeholder.other.python, meta.format.brace.python, source.python, string.quoted.single.python : : constant.character.format.placeholder.other.python, 
meta.format.brace.python, source.python, storage.type.format.python, string.quoted.single.python Xd>+10d} : constant.character.format.placeholder.other.python, meta.format.brace.python, source.python, string.quoted.single.python " : punctuation.definition.string.end.python, source.python, string.quoted.single.python : source.python # : comment.line.number-sign.python, punctuation.definition.comment.python, source.python invalid : comment.line.number-sign.python, source.python
{ "repo_name": "MagicStack/MagicPython", "path": "test/strings/format1.py", "copies": "1", "size": "2832", "license": "mit", "hash": 9073153414131596000, "line_mean": 75.5405405405, "line_max": 164, "alpha_frac": 0.6938559322, "autogenerated": false, "ratio": 3.791164658634538, "config_test": false, "has_no_keywords": true, "few_assignments": false, "quality_score": 0.4985020590834538, "avg_score": null, "num_lines": null }
a = 100 b = 200 print(a) # 100 print(b) # 200 a, b = 100, 200 print(a) # 100 print(b) # 200 a, b, c = 0.1, 100, 'string' print(a) # 0.1 print(b) # 100 print(c) # string a = 100, 200 print(a) print(type(a)) # (100, 200) # <class 'tuple'> # a, b = 100, 200, 300 # ValueError: too many values to unpack (expected 2) # a, b, c = 100, 200 # ValueError: not enough values to unpack (expected 3, got 2) a, *b = 100, 200, 300 print(a) print(type(a)) # 100 # <class 'int'> print(b) print(type(b)) # [200, 300] # <class 'list'> *a, b = 100, 200, 300 print(a) print(type(a)) # [100, 200] # <class 'list'> print(b) print(type(b)) # 300 # <class 'int'> a = b = 100 print(a) # 100 print(b) # 100 a = 200 print(a) # 200 print(b) # 100 a = b = c = 'string' print(a) # string print(b) # string print(c) # string a = b = [0, 1, 2] print(a is b) # True a[0] = 100 print(a) # [100, 1, 2] print(b) # [100, 1, 2] b = [0, 1, 2] a = b print(a is b) # True a[0] = 100 print(a) # [100, 1, 2] print(b) # [100, 1, 2] a = [0, 1, 2] b = [0, 1, 2] print(a is b) # False a[0] = 100 print(a) # [100, 1, 2] print(b) # [0, 1, 2]
{ "repo_name": "nkmk/python-snippets", "path": "notebook/multi_variables_values.py", "copies": "1", "size": "1135", "license": "mit", "hash": -4459949238109588000, "line_mean": 7.7984496124, "line_max": 61, "alpha_frac": 0.5339207048, "autogenerated": false, "ratio": 1.9269949066213923, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.7960915611421393, "avg_score": 0, "num_lines": 129 }
"""Python basics walkthrough: control flow, containers, functions and
argument kinds, slicing, iteration, comprehensions, generators and
higher-order functions.  Later definitions deliberately shadow earlier
ones (e.g. ``person``, ``add``, ``str2Int``) -- definition order matters.

Fix: ``Iterable`` must be imported from ``collections.abc``; the old
``from collections import Iterable`` alias was removed in Python 3.10.
Chinese comments translated to English.
"""

a = 100
if a >= 0:
    print(a)
else:
    print(-a)

print(True and True)
print(10 / 3)

classmates = ['Michael', 'Bod', 'Tracy']
print(len(classmates))
classmates.insert(1, 'Jack')

if a >= 3:
    print('adult')
elif a >= 5:
    print('child')
else:
    pass

for name in classmates:
    print(name)

# dict
d = {'Micheal': 98, 'Bob': 45}
print(d['Micheal'])
print(d.get('Thomas', -1))

# set
s = {1, 2, 3}
s.add(3)
s.remove(3)

# list  (NOTE(review): this is actually a set literal; kept as-is since it
# is overwritten before use in the slicing section below)
L = {1, 2, 3}

# Functions
def m_abs(x):
    """Absolute value with an explicit type check (TypeError otherwise)."""
    if not isinstance(x, (int, float)):
        raise TypeError('类型错误')  # message means "type error"
    if x >= 0:
        return x
    else:
        return -x

def nop():
    pass

# m_abs('a')

# Default parameters -- defaults must point at immutable objects
def power(x, n=2):
    """Return x ** n computed by repeated multiplication (n >= 0)."""
    s = 1
    while n > 0:
        n = n - 1
        s = s * x
    return s

def add_end(L=None):
    """Append 'END' to L; uses the None-sentinel idiom so the default
    list is not shared between calls."""
    if L is None:
        L = []
    L.append('END')
    return L

# Variadic positional parameters
def calc(*numbers):
    """Return the sum of squares of all arguments."""
    sum = 0
    for n in numbers:
        sum += n * n
    return sum

# Use *nums to pass all elements of a list as separate arguments
nums = [1, 2, 3]
calc(*nums)
# calc(nums)

# Keyword parameters
def person(name, age, **kw):
    print('name', name, 'age', age, 'other', kw)

person(name='Jovi', age=25)
person(name='Jovi', age=25, city='SH')
person(name='Jovi', age=25, gender='M', city='SH')

extra = {'city': 'SH', 'job': 'Engineer'}
person(name='Jovi', age=25, **extra)

def person(name, age, **kw):
    if 'city' in kw:
        pass
    if 'jopb' in kw:
        pass
    print('name', name, 'age', age, 'other', kw)

person('Jovi', 25, city='SH', addr='Zhangjiang')

# Named keyword-only parameters: everything after the bare * must be
# passed by keyword, restricted to the listed names
def person(name, age, *, city, job):
    print(name, age, city, job)

person('Jovi', 25, city='SH', job='Engineer')

# When a *args parameter already exists, the following named keyword
# parameters need no separate *
def person(name, age, *args, city, job):
    print(name, age, args, city, job)

# Keyword-only parameters may have defaults
def person(name, age, *, city='SH', job):
    print(name, age, city, job)

person('Jovi', 25, job='Engineer')

# Slicing
L = ['Micheal', 'Spark', 'Bob', 'Jack']
print(L[: 3])
print(L[-1])

# Iteration
d = {'a': 1, 'b': 2, 'c': 3}
for key in d:
    print(key)

for value in d.values():
    print(value)

for k, v in d.items():
    print(k, ':', v)

# FIX: Iterable lives in collections.abc (removed from collections in 3.10)
from collections.abc import Iterable

print(isinstance('abv', Iterable))

for i, value in enumerate(['a', 'b', 'c']):
    print(i, value)

# List comprehensions
list(range(1, 11))
var = [x * x for x in range(1, 11)]
var = [x * x for x in range(1, 11) if x % 2 == 0]
[m + n for m in 'ABC' for n in 'ZYX']

import os
d = [d for d in os.listdir('..')]
print(d)

# Generators
g = (x * x for x in range(10))
for n in g:
    print(n)

def fib(max):
    """Yield the first ``max`` Fibonacci numbers; the generator's return
    value ('done') is carried by StopIteration."""
    n, a, b = 0, 0, 1
    while n < max:
        yield b
        a, b = b, a + b
        n = n + 1
    return 'done'

def odd():
    print('step 1')
    yield 1
    print('step 2')
    yield 2
    print('step 3')
    yield 3

g = fib(6)
while True:
    try:
        x = next(g)
        print('g', x)
    except StopIteration as e:
        print('Generator return Value', e.value)
        break

# Pascal's triangle
def triangles(line):
    """Endlessly yield successive rows of Pascal's triangle.
    (The ``line`` parameter is unused -- callers bound the loop themselves.)"""
    L = [1]
    while True:
        yield L
        L = [1] + [L[i] + L[i + 1] for i in range(len(L) - 1)] + [1]

n = 0
for t in triangles(10):
    print(t)
    n = n + 1
    if n == 10:
        break

# Iterators
isinstance((x for x in range(10)), Iterable)

# Higher-order functions
def add(x, y, f):
    return f(x) + f(y)

# map
def f(x):
    return x * x

r = map(f, [1, 2, 3, 4])
print(list(r))
list(map(str, [1, 2, 3]))

from functools import reduce

def add(x, y):
    return x + y

reduce(add, [1, 3, 5, 7, 9])

def str2Int(s):
    """Parse a decimal digit string into an int via reduce."""
    def fn(x, y):
        return x * 10 + y
    def char2Num(s):
        return {'0': 0, '1': 1, '2': 2, '3': 3, '4': 4, '5': 5, '6': 6,
                '7': 7, '8': 8, '9': 9}[s]
    return reduce(fn, map(char2Num, s))

# Using a lambda expression
def char2Num(s):
    return {'0': 0, '1': 1, '2': 2, '3': 3, '4': 4, '5': 5, '6': 6,
            '7': 7, '8': 8, '9': 9}[s]

def str2Int(s):
    return reduce(lambda x, y: x * 10 + y, map(char2Num, s))
{ "repo_name": "zhayangtao/HelloPython", "path": "python01/PythonBasic.py", "copies": "1", "size": "4166", "license": "apache-2.0", "hash": 7217849806142647000, "line_mean": 14.3307086614, "line_max": 98, "alpha_frac": 0.520287622, "autogenerated": false, "ratio": 2.267909143855562, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.3288196765855562, "avg_score": null, "num_lines": null }
#a10-ipython-inventory.py
"""Build an Ansible-style dynamic inventory from an A10 load balancer.

Reads this host's host_vars for the pool-name -> group mapping, logs in
to the A10 aXAPI (v2) over TLSv1, fetches every SLB service group, and
collects the active members of the groups we deploy to.

Fixes: host_vars now loaded with yaml.safe_load (consistent with the
credentials file, and no arbitrary-tag execution); files opened with
context managers; the builtin name ``file`` is no longer shadowed; the
secrets path no longer contains a double slash.
"""
import sys
import yaml
import json
import os
import socket
from requests.adapters import HTTPAdapter
from urllib3.poolmanager import PoolManager
import ssl
import requests
from xml.dom.minidom import parse, parseString  # NOTE(review): unused here

__author__ = 'ACMEsEngTeam'

hostname = socket.getfqdn()
# Per-host vars; e.g. host_vars['dynamic_lb_pool_match']['web'] ->
# 'app_tpl_dynamic_srvgrp'.
with open('IRIS/host_vars/' + hostname, 'r') as host_vars_stream:
    host_vars = yaml.safe_load(host_vars_stream)

# read the credentials
with open(os.path.join('scripts', 'secrets'), 'r') as stream:
    creds = yaml.safe_load(stream)
username = creds['username']
password = creds['password']
loadbalancer = creds['loadbalancer']
server_list = ''

class MyAdapter(HTTPAdapter):
    # Force TLSv1 when talking to the appliance's management interface.
    def init_poolmanager(self, connections, maxsize, block=False):
        self.poolmanager = PoolManager(num_pools=connections,
                                       maxsize=maxsize,
                                       block=block,
                                       ssl_version=ssl.PROTOCOL_TLSv1)

s = requests.Session()
s.mount('https://', MyAdapter())

# Authenticate and grab a session id.  verify=False: presumably the
# appliance uses a self-signed certificate -- NOTE(review): consider
# pinning the CA instead of disabling verification.
url = ("https://" + loadbalancer + "/services/rest/V2/?method=authenticate"
       "&username=" + username + "&password=" + password + "&format=json")
r = s.get(url, verify=False)
data = json.loads(r.text)
session_id = data['session_id']

#method="slb.server.fetchStatistics"
#url = "https://lb2.dc-iris.acme.ir/services/rest/V2/?&session_id=" + session_id + "&format=json&method="+method+"&name=web1"

# Fetch every service group configured on the appliance.
method = "slb.service_group.getAll"
url = ("https://" + loadbalancer + "/services/rest/V2/?&session_id="
       + session_id + "&format=json&method=" + method)
r = s.get(url, verify=False)
sblall = json.loads(r.text)

sg = []
active = []
inventory = {}
inventory['_meta'] = {}
inventory['_meta']['hostvars'] = {}
hostvar = {}
sgl = sblall['service_group_list']

# Only service groups listed in dynamic_lb_pool_match become inventory
# groups; only members whose status is truthy (1 == up) are included.
for pool in sblall['service_group_list']:
    if pool['name'] in host_vars['dynamic_lb_pool_match']:
        pool_name = host_vars['dynamic_lb_pool_match'][pool['name']]
        inventory[pool_name] = []
        for member in pool['member_list']:
            if member['status']:  # checking status 1
                inventory[pool_name].append(member['server'])
                inventory['_meta']['hostvars'][member['server']] = {
                    'pool_type': 'dynamic'}

# Example result:
# {'abl': ['abl2', 'abl1'], 'web': ['web3', 'web2', 'web1'], ...}
output = json.dumps(inventory, sort_keys=False, indent=1)
# output = output + '\n'
# print output.replace('"empty"','')   # (py2) final emission, left disabled
{ "repo_name": "otsuarez/starmeup", "path": "release/scripts/inventory-a10-python.py", "copies": "1", "size": "3325", "license": "mit", "hash": -757005643157798700, "line_mean": 31.9207920792, "line_max": 126, "alpha_frac": 0.6565413534, "autogenerated": false, "ratio": 3.095903165735568, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.42524445191355675, "avg_score": null, "num_lines": null }
# a = [[1,2],[1,2,3]] # print(a) # # b = (1,2,3) # print(b) # print(a[0]) # print(b[0]) # b[0]=4 # d = {"key":{"key2":"bob"}} # d['key3'] = 4 # print(d.get("key4",0)) # x=4 # if x>3: # print(x) # elif x == 3: # print(x+1) # else: # print(x-1) # x=0 # while x<4: # print(x) # x+=1 def add(x,y): return x+y # # print(add(1,2)) # print(add(1.0,3.0)) # print(add("1",3.0)) # print(add("Hi ","bob")) # # b = " a fun sentence for an example " # print(b.rstrip().lstrip() + "hello") class Human: species="H. sapiens" #hopefully def __init__(self, name): self.name=name self.age=0 def say(self,msg): return "{name}:{message}".format(name=self.name, message=msg) class Student(Human): def __init__(self, name, year): Human.__init__(self,name) self.year = year def say(self,msg): return "{name}:{year}:{message}".format(name=self.name, year=self.year, message=msg) H = Human("bob") H2 = Student("Chris",2018) print(H2.say("hello"))
{ "repo_name": "CSUChico-CINS465/CINS465-Fall2017-Lecture-Examples", "path": "day2python.py", "copies": "1", "size": "1028", "license": "mit", "hash": 7501667041130019000, "line_mean": 17.6909090909, "line_max": 92, "alpha_frac": 0.5262645914, "autogenerated": false, "ratio": 2.3962703962703964, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.8366696899232811, "avg_score": 0.011167617687517312, "num_lines": 55 }
a=[[1,2,3,4,5],[1,2,3,4,5],[1,2,3,4,5]] b=[[1,2,3,4],[3,4,5,5]] def convMatrix(a, b, mode='full'): if mode == 'full': row=len(a)+len(b) - 1 col=len(a[0])+len(b[0]) - 1 c= [[0 for i in range(col)] for i in range(row)] for i in range(len(a)): for j in range(len(a[0])): for m in range(len(b)): for n in range(len(b[0])): c[i+m][j+n] += a[i][j] * b[m][n] return c if mode == 'same': row=len(a) col=len(a[0]) c= [[0 for i in range(col)] for i in range(row)] for i in range(len(a)): for j in range(len(a[0])): for m in range(len(b)): for n in range(len(b[0])): if (0 <= i+m-len(b)/2 < row and 0 <= j+n-len(b[0])/2 < col): c[i+m-len(b)/2][j+n-len(b[0])/2] += a[i][j] * b[m][n] return c if mode == 'valid': row=len(a)-len(b) + 1 col=len(a[0])-len(b[0]) + 1 c= [[0 for i in range(col)] for i in range(row)] for i in range(len(a)): for j in range(len(a[0])): for m in range(len(b)): for n in range(len(b[0])): r = i-len(b)+m+1 co = j+n-len(b[0])+1 if (0 <= r < row and 0 <= co < col): c[r][co] += a[i][j] * b[m][n] return c def convArray(a, b, mode='full'): if mode == 'full': c=[0 for i in range(len(a) + len(b) - 1)] for i, value in enumerate(a): for j, key in enumerate(b): c[i+j] += a[i] * b[j] return c if mode == 'same': c=[0 for i in range(len(a))] for i, value in enumerate(a): for j, key in enumerate(b): if (i+j-len(b)/2>=0 and i+j-len(b)/2<len(a)): c[i+j-len(b)/2] += a[i] * b[j] return c if mode == 'valid': c=[0 for i in range(len(a) - len(b)+1)] for i, value in enumerate(a): for j, key in enumerate(b): if (i+j-len(b)+1>=0 and i+j-len(b)+1 < len(c)): c[i+j-len(b)+1] += a[i] * b[j] return c print convArray(a[0], b[0],'full') print convArray(a[0], b[0],'same') print convArray(a[0], b[0],'valid') print convMatrix(a, b,'full') print convMatrix(a, b,'same') print convMatrix(a, b,'valid')
{ "repo_name": "Dengjianping/AlgorithmsPractice", "path": "Misc/convm.py", "copies": "1", "size": "2496", "license": "mit", "hash": -1117267013272603100, "line_mean": 34.1690140845, "line_max": 84, "alpha_frac": 0.405849359, "autogenerated": false, "ratio": 2.8428246013667424, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.37486739603667424, "avg_score": null, "num_lines": null }
a = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12] # pprint (a) # # a2d = [a, a, a, a] # pprint(a2d) # [[1, 2, 3, 4, 5, 6, 7, 8, 9, 10], # [1, 2, 3, 4, 5, 6, 7, 8, 9, 10], # [1, 2, 3, 4, 5, 6, 7, 8, 9, 10], # [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]] # # #indexing 2d lists # N =122 # a2d = [ list(range(i*N, i*N + N)) for i in range(N) ] # print(a2d) # # slicing # N = 999 # a = list(range(N)) # print(a[99:105]) #slicing 2 # print(a[2:5]) # # Iteration on lists - using index and element access # a = list(range(N)) # for i in range(len(a)): # print(i, a[i]) # use readlines to read a line a time # filename = "data/data.txt" # with open(filename) as f: # for line in f.readlines(): # process line # values = line.strip().split() # # read the whole file into a buffer # buffer = open(filename).read() # # for line in buffer.split('\n'): # process line # # import urllib.request # help(urllib.request.urlopen) # Help on function urlopen in module urllib.request: # # urlopen(url, data=None, timeout=<object object at 0x10185f2c0>, *, cafile=None, capath=None, cadefault=False, context=None) # # request # # uri = "http://claritytrec.ucd.ie/~alawlor/comp30670/input_assign3.txt" # req = urllib.request.urlopen(uri) # # buffer = req.read().decode('utf-8') # # def read_uri(fname): # if fname.startswith('http'): # use urllib.request.urlopen(uri) # else: # use open(uri) # return ... # # import argparse # # parser = argparse.ArgumentParser() # parser.add_argument('--input', help='input help') # args = parser.parse_args() # # filename = args.input
{ "repo_name": "JosephJamesDoyle87/software", "path": "tests/test5.py", "copies": "1", "size": "1742", "license": "mit", "hash": -8672902668140046000, "line_mean": 23.2753623188, "line_max": 125, "alpha_frac": 0.543053961, "autogenerated": false, "ratio": 2.422809457579972, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.34658634185799725, "avg_score": null, "num_lines": null }
a={'1': 2,'3': 4} print(a.get('3')) b = [1, 2] b.extend('hey') b.extend([]) print(b) ada_family = { 'Judith Blunt-Lytton': ['Anne Isabella Blunt', 'Wilfrid Scawen Blunt'], 'Ada King-Milbanke': ['Ralph King-Milbanke', 'Fanny Heriot'], 'Ralph King-Milbanke': ['Augusta Ada King', 'William King-Noel'], 'Anne Isabella Blunt': ['Augusta Ada King', 'William King-Noel'], 'Byron King-Noel': ['Augusta Ada King', 'William King-Noel'], 'Augusta Ada King': ['Anne Isabella Milbanke', 'George Gordon Byron'], 'George Gordon Byron': ['Catherine Gordon', 'Captain John Byron'], 'John Byron': ['Vice-Admiral John Byron', 'Sophia Trevannion'] } def ancestors(genealogy, person): #if genealogy.get(person)==None: #return [] #if genealogy.get(genealogy[person][0])==[] and genealogy.get(genealogy[person][1])==[]: #return genealogy[person] #else: #return genealogy[person].extend(ancestors(genealogy,genealogy[person][0])).extend(ancestors(genealogy,genealogy[person][1])) if person not in genealogy: return [] else: #print(person) #print(genealogy[person]) for entry in genealogy[person]: #genealogy[person].extend(ancestors(genealogy,entry)) if entry in genealogy: for x in genealogy[entry]: if x not in genealogy[person]: genealogy[person].extend(ancestors(genealogy,entry)) return genealogy[person] print(ancestors(ada_family, 'Dave')) print(ancestors(ada_family, 'Augusta Ada King')) #>>> ['Anne Isabella Milbanke', 'George Gordon Byron', # 'Catherine Gordon','Captain John Byron'] print(ancestors(ada_family, 'Judith Blunt-Lytton')) #>>> ['Anne Isabella Blunt', 'Wilfrid Scawen Blunt', 'Augusta Ada King', # 'William King-Noel', 'Anne Isabella Milbanke', 'George Gordon Byron', # 'Catherine Gordon', 'Captain John Byron']
{ "repo_name": "coodoing/udacity-searchengine", "path": "course6_Star1.py", "copies": "1", "size": "2019", "license": "apache-2.0", "hash": -341841668800345340, "line_mean": 42.8913043478, "line_max": 133, "alpha_frac": 0.6087171867, "autogenerated": false, "ratio": 2.827731092436975, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.3936448279136975, "avg_score": null, "num_lines": null }
a = [1,2,3,4] while a: print(a.pop()) # 4 3 2 1 if []: print("True") else: print("False") # False if [1,2]: print("True") else: print("False") # True # all type of value for example integer, it is also object. print(type(3)) # <class 'int'> a = 3 b = 3 print(a is b) # True. Compare referenced address print(a == b) # True. Compare Value import sys print(sys.getrefcount(3)) # 57 print(sys.getrefcount(a)) # 57 c = 3 print(sys.getrefcount(3)) # 58 # 3 object is referenced if it is declared. # a lot of technique to declare variables a,b = ('python', 'life') print(a, b) # python life (a, b) = 'python', 'life' print(a, b) # python life [a, b] = ['python', 'life'] print(a, b) # python life a = b = 'python' print(a, b) # python python a = 3 b = 5 a,b = b,a print(a, b) # 5, 3 del(a) del(b) print(sys.getrefcount(3)) # 56 a = [1, 2, 3] b = a del a[0] print(b) # [2, 3] # call by reference a = [1, 2, 3] b = a[:] del a[0] print(b) # [1, 2, 3] print(b is a) # False from copy import copy a = [1, 2, 3] b = copy(a) del a[0] print(b) # [1, 2, 3] print(b is a) # False
{ "repo_name": "novemberde/lang-practice", "path": "python3/dataTypeFeature.py", "copies": "1", "size": "1145", "license": "mit", "hash": 60967998261459720, "line_mean": 13.6923076923, "line_max": 59, "alpha_frac": 0.5589519651, "autogenerated": false, "ratio": 2.3559670781893005, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.34149190432893006, "avg_score": null, "num_lines": null }
a=[1,2,3]
a.append(230)
print(a)

def explode_list(p, n):
    """Return a list where each element of p appears n consecutive times.

    explode_list([1, 2, 3], 2) -> [1, 1, 2, 2, 3, 3]; n <= 0 yields [].
    """
    # A nested comprehension replaces the manual while-counter loop; for
    # n <= 0 the inner range is empty, so the result is [] as before.
    return [entry for entry in p for _ in range(n)]

print (explode_list([1, 2, 3], 2))
print (explode_list([1, 0, 1], 0))
print (explode_list(["super"], 5))

def reverse_index(dict):
    """Invert a mapping: map each value to the list of keys that held it.

    Keys appear in the buckets in the mapping's iteration order.  The
    parameter is (unfortunately) named `dict`, shadowing the builtin;
    the name is kept so the signature stays backward compatible.
    """
    result = {}
    for key in dict:
        # setdefault creates the empty bucket the first time a value is seen.
        result.setdefault(dict[key], []).append(key)
    return result

winners_by_year = {
    1930: 'Uruguay',
    1934: 'Italy',
    1938: 'Italy',
    1950: 'Uruguay',
    1954: 'West Germany',
    1958: 'Brazil',
    1962: 'Brazil',
    1966: 'England',
    1970: 'Brazil',
    1974: 'West Germany',
    1978: 'Argentina',
    1982: 'Italy',
    1986: 'Argentina',
    1990: 'West Germany',
    1994: 'Brazil',
    1998: 'France',
    2002: 'Brazil',
    2006: 'Italy',
    2010: 'Spain'
}

wins_by_country = reverse_index(winners_by_year)
print (wins_by_country['Brazil'])
print (wins_by_country['England'])

def is_list(p):
    """Return True when p is a list (including list subclasses)."""
    return isinstance(p, list)

def same_structure(a, b):
    """Return True when a and b have exactly the same nesting structure.

    Two non-lists always match; a list never matches a non-list; two
    lists match when they have equal length and structurally matching
    elements, checked recursively.
    """
    if not is_list(a) and not is_list(b):
        return True
    if is_list(a) != is_list(b):
        # Exactly one of the two is a list.
        return False
    if len(a) != len(b):
        return False
    return all(same_structure(x, y) for x, y in zip(a, b))

print (same_structure(3, 7))
#>>> True
print (same_structure([1, 0, 1], [2, 1, 2]))
#>>> True
print (same_structure([1, [0], 1], [2, 5, 3]))
#>>> False
print (same_structure([1, [2, [3, [4, 5]]]], ['a', ['b', ['c', ['d', 'e']]]]))
#>>> True
print (same_structure([1, [2, [3, [4, 5]]]], ['a', ['b', ['c', ['de']]]]))
#>>> False
{ "repo_name": "coodoing/udacity-searchengine", "path": "course7Ex.py", "copies": "1", "size": "2084", "license": "apache-2.0", "hash": 7702970810344630000, "line_mean": 25.7179487179, "line_max": 78, "alpha_frac": 0.5081573896, "autogenerated": false, "ratio": 2.906555090655509, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.3914712480255509, "avg_score": null, "num_lines": null }
# Demo of formatted string literals (f-strings): basic substitution,
# alignment/padding, number bases, precision, nesting, raw-f combination,
# expressions, dict access, and the `=` debug specifier.
# Expected output is shown in the trailing comments.
# NOTE(review): the `{i=}` examples near the end require Python 3.8+,
# and the "backslash in expression" SyntaxError noted below applies to
# Python < 3.12 (the restriction was lifted in 3.12) -- confirm the
# targeted interpreter version.

a = 123
b = 'abc'

print('{} and {}'.format(a, b))
# 123 and abc

print('{first} and {second}'.format(first=a, second=b))
# 123 and abc

# f-strings accept the same lowercase/uppercase prefix and any quote style.
print(f'{a} and {b}')
# 123 and abc

print(F'{a} and {b}')
# 123 and abc

print(f"{a} and {b}")
# 123 and abc

print(f'''{a} and {b}''')
# 123 and abc

print(f"""{a} and {b}""")
# 123 and abc

# Alignment: fill char `_`, then > (right), ^ (center), < (left), width 8.
s = 'abc'

print(f'right : {s:_>8}')
print(f'center: {s:_^8}')
print(f'left : {s:_<8}')
# right : _____abc
# center: __abc___
# left : abc_____

i = 1234

print(f'zero padding: {i:08}')
# zero padding: 00001234

print(f'comma: {i:,}')
# comma: 1,234

# Integer bases; the # variant adds the 0b/0o/0x prefix.
print(f'bin: {i:b}')
print(f'oct: {i:o}')
print(f'hex: {i:x}')
# bin: 10011010010
# oct: 2322
# hex: 4d2

print(f'bin: {i:#b}')
print(f'oct: {i:#o}')
print(f'hex: {i:#x}')
# bin: 0b10011010010
# oct: 0o2322
# hex: 0x4d2

# .3f = 3 decimal places; .3g = 3 significant digits overall.
f = 12.3456

print(f'digit(decimal): {f:.3f}')
print(f'digit(all) : {f:.3g}')
# digit(decimal): 12.346
# digit(all) : 12.3

print(f'exponent: {f:.3e}')
# exponent: 1.235e+01

f = 0.123

print(f'percent: {f:.2%}')
# percent: 12.30%

# Doubled braces produce literal braces.
n = 123

print(f'{{}}-{n}-{{{n}}}')
# {}-123-{123}

# Format specs can themselves be nested replacement fields.
n = 123
i = 8

print('{n:0{i}}'.format(n=n, i=i))
# 00000123

print(f'{n:0{i}}')
# 00000123

f = 1.2345

for i in range(5):
    print(f'{f:.{i}f}')
# 1
# 1.2
# 1.23
# 1.234
# 1.2345

# Raw strings keep backslashes literal; rf/fr combines both behaviours.
print('x\ty')
# x y

print(r'x\ty')
# x\ty

x = 'XXX'
y = 'YYY'

print(f'{x}\t{y}')
# XXX YYY

print(rf'{x}\t{y}')
# XXX\tYYY

print(fr'{x}\t{y}')
# XXX\tYYY

# Unlike str.format, f-strings evaluate arbitrary expressions.
a = 3
b = 4

# print('{a} + {b} = {a + b}'.format(a=a, b=b))
# KeyError: 'a + b'

print(f'{a} + {b} = {a + b}')
# 3 + 4 = 7

print(f'{a} * {b} = {a * b}')
# 3 * 4 = 12

print(f'{a} / {b} = {a / b:.2e}')
# 3 / 4 = 7.50e-01

# Dict access: the key must be quoted with the *other* quote character.
d = {'key1': 3, 'key2': 4}

print('{0[key1]}, {0[key2]}'.format(d))
# 3, 4

# print('{0["key1"]}, {0["key2"]}'.format(d))
# KeyError: '"key1"'

print(f'{d["key1"]}, {d["key2"]}')
# 3, 4

# print(f'{d[key1]}, {d[key2]}')
# NameError: name 'key1' is not defined

# print(f'{d['key1']}, {d['key2']}')
# SyntaxError: invalid syntax

print(f"{d['key1']}, {d['key2']}")
# 3, 4

# print(f'{d[\'key1\']}, {d[\'key2\']}')
# SyntaxError: f-string expression part cannot include a backslash

# The `=` specifier (3.8+) echoes the expression text, incl. whitespace.
i = 123

print(f'{i=}')
# i=123

print(f'{i = }')
# i = 123

print(f'{ i = }')
# i = 123

print(f'{i = :#b}')
# i = 0b1111011

print(f'{i * 2 = }')
# i * 2 = 246

l = [0, 1, 2]

print(f'{l = }, {l[0] = }')
# l = [0, 1, 2], l[0] = 0

d = {'key1': 3, 'key2': 4}

print(f'{d = }, {d["key1"] = }')
# d = {'key1': 3, 'key2': 4}, d["key1"] = 3
{ "repo_name": "nkmk/python-snippets", "path": "notebook/f_strings.py", "copies": "1", "size": "2486", "license": "mit", "hash": 974334587496819100, "line_mean": 13.0451977401, "line_max": 66, "alpha_frac": 0.4951729686, "autogenerated": false, "ratio": 1.9792993630573248, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.29744723316573246, "avg_score": null, "num_lines": null }
""" a1emp11.py -- Example of simple update transaction Simple Update of Record given by ISN in File with fields AX, AY, C1 DBID and FNR $Date: 2008-08-22 13:32:56 +0200 (Fri, 22 Aug 2008) $ $Rev: 57 $ """ # Copyright 2004-2008 Software AG # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import datetime import binascii import string import adabas import adabas.dump from adabas.api import * from adabas.datamap import * FNR=11 DBID=8 c1=Adabas(fbl=64,rbl=64) c1.dbid=DBID c1.cb.fnr=FNR print datetime.datetime.now(), 'Program started' try: c1.open(mode=UPD) c1.dumpcb=c1.dumprb=1 c1.cb.cid='abcd' c1.cb.isn=499 c1.fb.value='AX1,AY1,C1.' c1.rb.value='000000000000A' c1.update(hold=1) c1.dumpcb=c1.dumprb=0 c1.et() c1.close() except DatabaseError, (line, apa): print line dump.dump(apa.acb, header='Control Block') raise # print datetime.datetime.now(), 'Program stopped'
{ "repo_name": "flavio-casacurta/Nat2Py", "path": "Adabas/demo/a1emp11.py", "copies": "1", "size": "1447", "license": "mit", "hash": 6744283283382787000, "line_mean": 20.9242424242, "line_max": 75, "alpha_frac": 0.7083621285, "autogenerated": false, "ratio": 2.977366255144033, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.9122420048460698, "avg_score": 0.012661667036667037, "num_lines": 66 }
# NOTE(review): Python 2 script exercising a broad mix of constructs
# (class with methods, nested conditionals, loops, augmented and bitwise
# assignment, expression statements, mixed-type prints).  Judging by its
# deliberately odd shape it appears to be a test fixture for a
# translator/compiler -- confirm before altering any statement.  Only
# comments were added here; the code itself is untouched.
a = 1
b = 2
c = 3

nf1 = 1.99
nf2 = 2.87

print nf1 + nf2

class novaClasse():
    x = 10
    y = 10

    def func1(self):
        return 1 + 1

    def func2(self):
        return self.x + self.y

if a > 0:
    if b > 0:
        print "hello world"
    else:
        print "hello"

# Bare string expression statements -- evaluated and discarded.
' '
'world'
'\"escape\"'

c = 10

print 'this is', ' a ', ' long print ', 1 + 1 + 2.5, 2, '!', nf2, a, 'end' + " " + '!'

def square(xx, a):
    return xx * 10

if b < 1 and c != 4:
    if a > 2 or b == 2:
        if nf1 < 0:
            if nf2 != 9 and not "hello" == "world":
                print "hello"
            else:
                print "world"

a = 2 & 1 | 3
a = 10

def line(a, b = 1):
    a = 'hello'

for i in range (2,3):
    for j in [1, 2, 3]:
        if i == j:
            print "soma: ", i + j

for i in [1, 2, 3]:
    if 3 == 3:
        if 2 == 2:
            if 1 == 1:
                a ^= i

a = 5
c = 1

while a * c < 10:
    c += 1
    if c == 2: # a nice comment
        a *= 2

print "\"dedent\"" # another comment

z = 1
while z:
    z += 1
    z *= 2
    z = z + 1 + 2 + 3
    if z > 100:
        break

for yy in 'string':
    if yy == 'i':
        print 'o'
    else:
        print yy

print "That's all folks!"
{ "repo_name": "brenolf/python-to-c", "path": "teste/ok/big-code.py", "copies": "1", "size": "1289", "license": "mit", "hash": -1116310325815253900, "line_mean": 13.988372093, "line_max": 86, "alpha_frac": 0.3840186191, "autogenerated": false, "ratio": 2.851769911504425, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.3735788530604425, "avg_score": null, "num_lines": null }
# Demo script: truthiness in `if`, comparison operators, `for` over
# lists/tuples/ranges, `print(end=...)`, and a nested list comprehension.
# The trailing comments show the expected output.

a = 1
if a:
    print("True")
else:
    print("False")
# True

# Zero and empty containers are all falsy.
if 0:
    print("True")
else:
    print("False")

if "":
    print("True")
else:
    print("False")

if []:
    print("True")
else:
    print("False")

if ():
    print("True")
else:
    print("False")

if {}:
    print("True")
else:
    print("False")
# False False False False False

# Operator
# >=, >, <, <=, ==, !=, in, not in

prompt = """
1. Add
2. Del
3. List
4. Quit

Enter Number: """

# Interactive menu loop (disabled so the script runs unattended):
# number = 0
# while number != 4:
#     print(prompt)
#     number = int(input())
#
#     # Escape from while
#     if number == 5: break
#     elif number == 6: continue

test_list = [1,2,3,4]
for i in test_list:
    print(i)
# 1 2 3 4

# Tuple unpacking directly in the loop header.
a = [(1,2), (3,4), (5,6)]
for (first, last) in a:
    print(first + last)
# 3 7 11

for i in range(10):
    print(i)
# 0 ~ 9

for i in range(2, 4):
    print(i)
# 2 3

a = [1,2,3,4,5]
for i in range(len(a)):
    print(i)
# 0 1 2 3 4

# end=" " suppresses the newline so the next print continues the line.
print("Print one line to next print.", end=" ")
print("hi")
# Print one line to next print. hi

# List comprehension with two nested for clauses.
result = [x*y for x in range(1, 3) for y in range(4, 7)]
print(result)
# [4, 5, 6, 8, 10, 12]
{ "repo_name": "novemberde/lang-practice", "path": "python3/controlStatement.py", "copies": "1", "size": "1153", "license": "mit", "hash": -2362008325936123400, "line_mean": 13.425, "line_max": 47, "alpha_frac": 0.5238508239, "autogenerated": false, "ratio": 2.5452538631346577, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.35691046870346577, "avg_score": null, "num_lines": null }
# Demonstrations of Python's conditional expression (ternary):
#     <value-if-true> if <condition> else <value-if-false>
# The trailing comments show the expected output of each print.

num = 1
parity = 'even' if num % 2 == 0 else 'odd'
print(parity)
# odd

num = 2
parity = 'even' if num % 2 == 0 else 'odd'
print(parity)
# even

# Either branch may be an arbitrary expression.
num = 1
scaled = num * 2 if num % 2 == 0 else num * 3
print(scaled)
# 3

num = 2
scaled = num * 2 if num % 2 == 0 else num * 3
print(scaled)
# 4

# A conditional expression can even choose between two calls...
num = 1
print('even') if num % 2 == 0 else print('odd')
# odd

# ...which is equivalent to this plain if/else statement.
num = 1
if num % 2 == 0:
    print('even')
else:
    print('odd')
# odd

# The condition itself may be compound.
num = -2
verdict = 'negative and even' if num < 0 and num % 2 == 0 else 'positive or odd'
print(verdict)
# negative and even

num = -1
verdict = 'negative and even' if num < 0 and num % 2 == 0 else 'positive or odd'
print(verdict)
# positive or odd

# Conditional expressions nest; the else side binds to the rest.
num = 2
sign = 'negative' if num < 0 else 'positive' if num > 0 else 'zero'
print(sign)
# positive

num = 0
sign = 'negative' if num < 0 else 'positive' if num > 0 else 'zero'
print(sign)
# zero

num = -2
sign = 'negative' if num < 0 else 'positive' if num > 0 else 'zero'
print(sign)
# negative

# Parentheses on the else side match the default grouping...
sign = 'negative' if num < 0 else ('positive' if num > 0 else 'zero')
print(sign)
# negative

# ...while grouping the left side changes the meaning entirely.
sign = ('negative' if num < 0 else 'positive') if num > 0 else 'zero'
print(sign)
# zero

# Conditional expressions inside list comprehensions.
tagged = ['even' if k % 2 == 0 else k for k in range(10)]
print(tagged)
# ['even', 1, 'even', 3, 'even', 5, 'even', 7, 'even', 9]

boosted = [k * 10 if k % 2 == 0 else k for k in range(10)]
print(boosted)
# [0, 1, 20, 3, 40, 5, 60, 7, 80, 9]

# ...and as the whole body of a tiny classifier function.
def get_odd_even(x):
    return 'even' if x % 2 == 0 else 'odd'

print(get_odd_even(1))
# odd
print(get_odd_even(2))
# even
{ "repo_name": "nkmk/python-snippets", "path": "notebook/conditional_expressions.py", "copies": "1", "size": "1444", "license": "mit", "hash": -7078073800931998000, "line_mean": 17.05, "line_max": 75, "alpha_frac": 0.5789473684, "autogenerated": false, "ratio": 2.5069444444444446, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.8582436831050337, "avg_score": 0.0006909963588215822, "num_lines": 80 }
A = 20.0 def __calculate_extrs(alist): tgS = [ (0, 0.0) ] for i in range(1, len(alist)-1): m_l = alist[i-1].m m = alist[i].m m_r = alist[i+1].m g_l = alist[i-1].g g = alist[i].g g_r = alist[i+1].g dM_l = float(m - m_l) dG_l = float(g - g_l) dM_r = float(m_r - m) dG_r = float(g_r - g) dMdG_l = dM_l / dG_l dMdG_r = dM_r / dG_r tg_l = dMdG_l / A tg_r = dMdG_r / A tg_point = (tg_l - tg_r) / (1 + tg_l * tg_r) tgS += [ (i, tg_point) ] extrs = [] for i in range(1, len(tgS)-1): number_l, tg_l = tgS[i-1] number , tg = tgS[i ] number_r, tg_r = tgS[i+1] if tg>tg_l and tg>tg_r: extrs += [ (number, tg) ] if len(extrs)==0: extrs = tgS if len(extrs)<len(alist): extrs += [ (len(extrs), 0.0) ] assert len(extrs)==len(alist) return extrs def get_primary_alignments(alist): extrs = __calculate_extrs(alist) extrs.sort(key=lambda value: value[1], reverse=True) result = [] for number, value in extrs: result += [alist[number]] return result def get_primary_points(alist): return __calculate_extrs(alist) def calculate_gops(alist): minGOP = [ 0.0 ] * len(alist) maxGOP = [ 99999999.9 ] * len(alist) g = 0 while g < len(alist)-1: g += 1 for c in range(g-1, -1, -1): # range [g-1, g-2, ... , 0] deltaW = alist[g].m - alist[c].m deltaG = alist[g].g - alist[c].g nS = float(deltaW) / float(deltaG) if nS < maxGOP[c]: t = c break else: maxGOP[c] = None minGOP[c] = None maxGOP[g] = nS minGOP[t] = nS result = [] for i in range(0, len(alist)): result += [(minGOP[i],maxGOP[i])] return result def alignment_to_string(source, mutant, alignment, other=None, weights=None, width=60): line1 = "" line2 = "" line3 = "" line4 = "" line5 = "" for i1, i2 in alignment: assert i1!=-1 or i2!=-1 if i1!=-1 and i2!=-1: line1 += source[i1] line3 += mutant[i2] if source[i1]==mutant[i2]: line2 += "|" else: a = source[i1] b = source[i2] w = None if not weights is None: if weights.has_key((a,b)): w = weights[(a,b)] elif weights.has_key((b,a)): w = weights[(b,a)] if not w is None: if w>=0: line2 += ":" else: line2 += "." 
else: line2 += " " elif i1==-1: line1 += "-" line2 += " " line3 += mutant[i2] elif i2==-1: line1 += source[i1] line2 += " " line3 += "-" if not other is None: if (i1, i2) in other: line4 += "*" else: line4 += " " result = "" if width>0: fpe = 0 spe = 0 fp = 0 sp = 0 index = 0 while len(line1)>0: line1_part = line1[0:width-20] line2_part = line2[0:width-20] line3_part = line3[0:width-20] if not other is None: line4_part = line4[0:width-20] if not weights is None: line5_part = line5[0:width-20] line5_part = line5[0:width-20] fp = fpe + 1 sp = spe + 1 fpe = fp + len(line1_part)-line1_part.count("-") spe = sp + len(line3_part)-line3_part.count("-") result += str(fp).rjust(6)+" "*2+line1_part.ljust(width-20) + \ " "+ str(fpe)+"\n" result += " "*8+line2_part.ljust(width-20) + "\n" result += str(sp).rjust(6)+" "*2+line3_part.ljust(width-20) + \ " "+ str(spe)+"\n" if not other is None: result += " "*8+line4_part+"\n" if not weights is None: result += " "*8+line5_part+"\n" result += "\n" sp = spe fp = fpe line1 = line1[width-20:] line2 = line2[width-20:] line3 = line3[width-20:] if not other is None: line4 = line4[width-20:] if not weights is None: line5 = line5[width-20:] index += width-20 else: result = line1+"\n"+line2+"\n"+line3+"\n" if not other is None: result += line4 + "\n" if not weights is None: result += line5 + "\n" result += "\n" return result def get_common_part(alignments_list): assert len(alignments_list)>0 result = None for al in alignments_list: if result is None: result = set(al) else: result = result & set(al) return result
{ "repo_name": "victor-yacovlev/parca", "path": "python/parca/_util.py", "copies": "1", "size": "5371", "license": "mit", "hash": 7624221201117107000, "line_mean": 29.1741573034, "line_max": 75, "alpha_frac": 0.4075591138, "autogenerated": false, "ratio": 3.323638613861386, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.4231197727661386, "avg_score": null, "num_lines": null }
# NOTE(review): Python 2 regression test (apparently for the Skulpt
# interpreter, given the repo layout) exercising list.sort() and
# sorted() with the Python 2 positional (cmp, key, reverse) signature.
# Do not modernize: the 3-argument sorted() calls are the behaviour
# under test.  Only comments were added; the statements are untouched.
a = [2,1,-4,3,0,6]
a.sort()
print a

# key function via positional argument (py2: sorted(iterable, cmp, key)).
b = "rksdubtheynjmpwqzlfiovxgac"
print sorted(b,None,lambda x: ord(x))

c = [2,1,-4,3,0,6]
print sorted(c)
# cmp function as positional argument: descending numeric order.
print sorted(c, lambda x, y: y - x);

class Test:
    def __init__(self, id, value):
        self.id = id
        self.value = value

    def __repr__(self):
        return "id: " + str(self.id) + " value: " + self.value

d = [ Test(4, "test"), Test(3, "test"), Test(6, "test"), Test(1, "test"), Test(2, "test"), Test(9, "test"), Test(0, "test") ]
# cmp + key + reverse all positional at once.
print sorted(d, lambda x, y: y - x, lambda x: x.id, True)

print c
print sorted(c, None, None, True)
c.sort(reverse=True)
print c
c.sort()
print c
c.sort(lambda x, y: y - x, lambda x: pow(x, 2), True)
print c

L = [7, 3, -2, 4]
d = {'a': 5, 'b': 9}

def g(k):
    return d[k]

print(g('a'))
# Sorting dict keys by their mapped values via a key function.
print(sorted(d.keys(), None, g))
print(sorted(d.keys(), None, None))
print(sorted(d.keys(), None, lambda x: d[x]))

def myabs(x):
    return abs(x)

# Equivalent key functions: named wrapper, lambda wrapper, builtin.
print(sorted(L, None, myabs))
print(sorted(L, None, lambda x: myabs(x)))
print(sorted(L, None, lambda x: abs(x)))
print(sorted(L, None, abs))
print(sorted(L, key=lambda x: -x, reverse=True))
print(sorted(L, key=lambda x: -x))
{ "repo_name": "ArcherSys/ArcherSys", "path": "skulpt/test/run/t488.py", "copies": "1", "size": "1161", "license": "mit", "hash": -134992657396039360, "line_mean": 20.1272727273, "line_max": 125, "alpha_frac": 0.5857019811, "autogenerated": false, "ratio": 2.3313253012048194, "config_test": true, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.8319907039751141, "avg_score": 0.019424048510735814, "num_lines": 55 }
# a**2 + b**2 = c**2 # a + b + c = 1000 # a < b < c import time class BreakOut(Exception):pass start = time.time() try: for a in xrange(1,1000): for b in xrange(1, 1000): for c in xrange(1,1000): if (a+b+c==1000): if (a**2+b**2==c**2): print "Product = %d"%(a*b*c) print "A: %d B: %d C: %d"%(a,b,c) raise BreakOut except BreakOut: pass # Product = 31875000 # A: 200 B: 375 C: 425 # Sol 1 takes: 16.16309380531311 elapsed = time.time() print "Sol 1 takes: %r" % (elapsed - start) start2 = time.time() try: for r in xrange(1,1000): for s in xrange(1, 1000): for t in xrange(1,1000): if r**2 == 2*s*t: x = r+s y = r+t z = r+s+t if x+y+z == 1000: print "Answer found!" print "Product= %d"%(x*y*z) print " x: %d, y: %d, z: %d " % (x, y, z) raise BreakOut except BreakOut: pass elapsed2 = time.time() print "Sol 2 takes: %r" % (elapsed2 - start2) # Answer found! # Product= 31875000 # x: 200, y: 375, z: 425 # Sol 2 takes: 23.333422899246216 # Euclid's formula # M&N # A = M**2 - N**2 # B = (M*N)*2 # C = M**2 + N**2 start3 = time.time() m = 0 notfound = True while notfound: n = 0 while n<m: if (m>n): a = (m**2) - (n**2) b = (m*n)*2 c = (m**2) + (n**2) if (a+b+c == 1000): print "Product = %d"%(a*b*c) print "A: %d B: %d C: %d"%(a,b,c) notfound = False break n+=1 m+=1 elapsed3 = time.time() print "Sol 3 takes: %r" % (elapsed3 - start3) # Product = 31875000 # A: 375 B: 200 C: 425 # Sol takes: 0.0003540515899658203 # daaaaaamn son where'd ya find this
{ "repo_name": "jamtot/PyProjectEuler", "path": "9 - Special Pythagorean triplet/tripthag.py", "copies": "1", "size": "1961", "license": "mit", "hash": 7943153303065229000, "line_mean": 22.3452380952, "line_max": 65, "alpha_frac": 0.4375318715, "autogenerated": false, "ratio": 2.8420289855072465, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.37795608570072464, "avg_score": null, "num_lines": null }