# Dataset Viewer
Each row pairs a commit from an open-source Python repository with the file it touched, the before/after contents, and several diff representations. The viewer reports the following columns (for string columns, the minimum and maximum lengths observed):

| Column | Type | Min length | Max length |
| --- | --- | --- | --- |
| `commit` | string | 40 | 40 |
| `old_file` | string | 4 | 118 |
| `new_file` | string | 4 | 118 |
| `old_contents` | string | 10 | 2.94k |
| `new_contents` | string | 21 | 3.18k |
| `subject` | string | 16 | 444 |
| `message` | string | 17 | 2.63k |
| `lang` | class (1 value) | n/a | n/a |
| `license` | class (13 values) | n/a | n/a |
| `repos` | string | 5 | 43k |
| `ndiff` | string | 52 | 3.32k |
| `instruction` | string | 16 | 444 |
| `content` | string | 133 | 4.32k |
| `fuzzy_diff` | string | 16 | 3.18k |

Several columns are derived: in the rows shown below, `instruction` duplicates `subject`; `content` concatenates `old_contents`, the instruction, and `new_contents` under `## Code Before` / `## Instruction` / `## Code After` headings; and `fuzzy_diff` is an abridged form of `ndiff`. The sample rows are therefore listed with their distinct metadata plus the full `ndiff`, which contains both the before and after file states.
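A quick way to poke at rows like the ones below is the `datasets` library. A minimal sketch; the dataset ID is a placeholder, since the preview does not name the repository:

```python
from datasets import load_dataset

# Hypothetical dataset ID -- substitute the actual repository name.
ds = load_dataset("someuser/python-commit-instructions", split="train")

row = ds[0]
print(row["commit"], row["old_file"], row["license"])
print(row["subject"])
print(row["content"][:200])  # the '## Code Before ...' concatenation
```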
---

**commit** `2a0a29effa48caf5d95ed892d85cee235ebe1624` · **lang** Python · **license** mit
**old_file / new_file:** `lamvery/utils.py`
**repos:** marcy-terui/lamvery,marcy-terui/lamvery
**subject / instruction / message:** Fix error when import lamvery in function

**ndiff:**

```diff
 import os
 import sys
 import re
 import shlex
 import subprocess
-from termcolor import cprint

 ENV_PATTERN = re.compile('^(?P<name>[^\s]+)\s*=\s*(?P<value>.+)$')


 def previous_alias(alias):
     return '{}-pre'.format(alias)


 def parse_env_args(env):
     if not isinstance(env, list):
         return None

     ret = {}
     for e in env:
         matches = ENV_PATTERN.match(e)
         if matches is None:
             raise Exception(
                 'The format of "env" option must be "NAME=VALUE": {}'.format(e))
         name = matches.group('name')
         value = matches.group('value')
         k, v = shlex.split('{} {}'.format(name, value))
         ret[k] = v
     return ret


 def run_commands(commands, working_dir=os.getcwd()):
     cwd = os.getcwd()
     os.chdir(working_dir)
     for c in commands:
         try:
             subprocess.check_output(
                 c, stderr=subprocess.STDOUT, shell=True)
         except subprocess.CalledProcessError as e:
             os.chdir(cwd)
             raise Exception(e.output)
     os.chdir(cwd)


 def confirm_overwrite(path):
     ret = True
     if os.path.exists(path):
-        cprint('Overwrite {}? [y/n]: '.format(path), 'yellow', file=sys.stderr, end="")
+        print('Overwrite {}? [y/n]: '.format(path))
         y_n = sys.stdin.readline()
         if not y_n.startswith('y'):
             ret = False
     return ret
```
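One plausible reading of this fix (the commit message does not spell it out): the module-level `termcolor` import made merely importing `lamvery.utils` raise `ImportError` wherever the dependency was missing, even for callers that never prompt for confirmation; switching to the built-in `print` removes the hard dependency. A hedged sketch of the failure mode:

```python
# Hypothetical illustration: a top-level third-party import makes the whole
# module unimportable when the dependency is absent.
try:
    from termcolor import cprint  # what the old module required at import time
except ImportError:
    # Fallback in the spirit of the fix: plain print, no colour.
    def cprint(text, *args, **kwargs):
        print(text)

cprint('Overwrite /tmp/x? [y/n]: ', 'yellow')
```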
---

**commit** `293d50438fab81e74ab4559df7a4f7aa7cfd8f03` · **lang** Python · **license** mit
**old_file / new_file:** `etcdocker/container.py`
**repos:** CloudBrewery/docrane
**subject / instruction / message:** Convert port list to dict

**ndiff:**

```diff
+import ast
 import docker

 from etcdocker import util


 class Container:
     def __init__(self, name, params):
         self.name = name
         self.params = params

     def set_or_create_param(self, key, value):
         self.params[key] = value

     def ensure_running(self, force_restart=False):
         # Ensure container is running with specified params
         containers = util.get_containers()
         found = False

         for pc in containers:
             if "/%s" % self.name in pc['Names']:
                 found = True
                 full_image = "%s:%s" % (
                     self.params.get('image'), self.params.get('tag'))
                 if (pc['Status'].startswith('Up') and
                         pc['Image'] == full_image and
                         not force_restart):
                     return
                 break

         client = docker.Client()

         # Start our container
         if found:
             # Shut down old container first
             client.stop(self.name, 5)
             client.remove_container(self.name)

+        # Convert our ports into a dict if necessary
+        ports = ast.literal_eval(self.params.get('ports'))
+
         # Create container with specified args
         client.create_container(
             image=self.params.get('image'),
             detach=True,
             volumes_from=self.params.get('volumes_from'),
             volumes=self.params.get('volumes'),
+            ports=ports.keys(),
             name=self.name)

         # Start 'er up
         client.start(
             container=self.name,
-            port_bindings=self.params.get('ports'),
+            port_bindings=ports,
             privileged=self.params.get('privileged'))
```
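The pivotal call above is `ast.literal_eval`, which safely parses a Python-literal string, as a value fetched from etcd would be, into a real dict without resorting to `eval`. A small sketch with an invented port mapping:

```python
import ast

# The exact string format stored in etcd is an assumption for illustration.
raw = "{'8080/tcp': 8080, '443/tcp': 443}"
ports = ast.literal_eval(raw)

print(list(ports.keys()))  # ['8080/tcp', '443/tcp'] -> create_container(ports=...)
print(ports)               # full mapping            -> client.start(port_bindings=...)
```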
---

**commit** `6d72a1d3b4bd2e1a11e2fb9744353e5d2d9c8863` · **lang** Python · **license** bsd-3-clause
**old_file / new_file:** `setup.py`
**repos:** stefanv/lulu
**subject / instruction / message:** Add NumPy includes dir for Cython builds.

**ndiff:**

```diff
 from distutils.core import setup
 from distutils.extension import Extension
 from Cython.Distutils import build_ext
+import numpy
+
+
+def cext(name):
+    return Extension(name, [name + ".pyx"],
+                     include_dirs=[numpy.get_include()])

 setup(cmdclass = {'build_ext': build_ext},
+      ext_modules = [cext('lulu_base'), cext('ccomp')])
-      ext_modules = [Extension("lulu_base", ["lulu_base.pyx"]),
-                     Extension("ccomp", ["ccomp.pyx"])])
+
```
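For context: Cython sources that `cimport numpy` compile C code against NumPy's headers, and `numpy.get_include()` is the supported way to locate them at build time. A minimal check:

```python
import numpy

# Prints the directory containing numpy/arrayobject.h and friends;
# the exact path varies by installation.
print(numpy.get_include())
```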
---

**commit** `14a085f787f5fe80a0737d97515b71adaf05d1cd` · **lang** Python · **license** isc
**old_file / new_file:** `checker/checker/contest.py`
**repos:** fausecteam/ctf-gameserver,fausecteam/ctf-gameserver,fausecteam/ctf-gameserver,fausecteam/ctf-gameserver,fausecteam/ctf-gameserver
**subject / instruction / message:** Fix double-encoding of binary blobs

**ndiff:**

```diff
 from checker.abstract import AbstractChecker

 import base64
 import sys
 import codecs


 class ContestChecker(AbstractChecker):
     def __init__(self, tick, team, service, ip):
         AbstractChecker.__init__(self, tick, team, service, ip)

     def _rpc(self, function, *args):
         sys.stdout.write("%s %s\n" % (function, " ".join(args)))
         sys.stdout.flush()
         return sys.stdin.readline().strip()

     def get_flag(self, tick, payload=None):
         if payload is None:
             return self._rpc("FLAG", str(tick))
         else:
             payload = codecs.encode(payload, 'hex').decode('latin-1')
             return self._rpc("FLAG", str(tick), payload)

     def store_blob(self, ident, blob):
         data = base64.b64encode(blob)
-        return self._rpc("STORE", ident, base64.b64encode(data).decode('latin-1'))
+        return self._rpc("STORE", ident, data.decode('latin-1'))

     def retrieve_blob(self, ident):
         data = self._rpc("RETRIEVE", ident)
         return base64.b64decode(data)
```
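The bug reproduces in isolation: `data` was already base64-encoded, so encoding it a second time meant a single decode on the receiving side recovered base64 text rather than the original bytes. A quick demonstration:

```python
import base64

blob = b'\x00\xffbinary payload'
once = base64.b64encode(blob)
twice = base64.b64encode(once)           # the old, double-encoded value

print(base64.b64decode(once) == blob)    # True  -- fixed behaviour
print(base64.b64decode(twice) == blob)   # False -- decodes to `once`, not blob
```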
---

**commit** `e50aee5973a2593546d1308b5ba77cd0905dd2be` · **lang** Python · **license** agpl-3.0
**old_file / new_file:** `app/models.py`
**repos:** rschiang/ntu-weather,rschiang/ntu-weather
**subject / instruction / message:** Fix excessive fields in conversion

**ndiff:**

```diff
 import dataclasses

 from ntuweather import Weather
 from sqlalchemy import Table, Column, DateTime, Integer, Float
 from sqlalchemy.ext.declarative import declarative_base

 Base = declarative_base()


 class WeatherData(Base):
     """Represents a weather record saved in the database."""
     __tablename__ = 'weather_data'

     id = Column(Integer, primary_key=True)
     date = Column(DateTime(timezone=True), index=True)
     temperature = Column(Float)
     pressure = Column(Float)
     humidity = Column(Float)
     wind_speed = Column(Float)
     wind_direction = Column(Integer)
     rain_per_hour = Column(Float)
     rain_per_minute = Column(Float)
     ground_temperature = Column(Float)

     def __repr__(self):
         return f"<WeatherData(date='{self.date.isoformat()}', temperature={self.temperature})>"

     def weather(self):
         self_dict = {field.name: self.__dict__.get(field.name)
                      for field in dataclasses.fields(Weather)}
         return Weather(**self_dict)

     @classmethod
     def fromweather(cls, weather):
         fields = dataclasses.asdict(weather)
         del fields['provider']  # We don’t store provider name as there would be only one.
+        del fields['valid']  # We only store valid weather data, hence.
         return cls(**fields)
```
---

**commit** `da59d4334eb1a6f77bd0a9599614a6289ef843e4` · **lang** Python · **license** mit
**old_file / new_file:** `pytest-server-fixtures/tests/integration/test_mongo_server.py`
**repos:** manahl/pytest-plugins,manahl/pytest-plugins
**subject / instruction:** Revert "fix deprecation warnings in mongo"
**message:** Revert "fix deprecation warnings in mongo"
This reverts commit 5d449ff9376e7c0a3c78f2b2d631ab0ecd08fe81.

**ndiff:**

```diff
 import pytest


 def test_mongo_server(mongo_server):
     assert mongo_server.check_server_up()
     assert mongo_server.delete
-    mongo_server.api.db.test.insert_one({'a': 'b', 'c': 'd'})
+    mongo_server.api.db.test.insert({'a': 'b', 'c': 'd'})
     assert mongo_server.api.db.test.find_one({'a': 'b'}, {'_id': False}) == {'a': 'b', 'c': 'd'}


 @pytest.mark.parametrize('count', range(3))
 def test_mongo_server_multi(count, mongo_server):
     coll = mongo_server.api.some_database.some_collection
-    assert coll.count_documents({}) == 0
+    assert coll.count() == 0
-    coll.insert_one({'a': 'b'})
+    coll.insert({'a': 'b'})
-    assert coll.count_documents({}) == 1
+    assert coll.count() == 1
```
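For reference, and as an assumption about motivation rather than anything stated in the commit: `insert()` and `count()` are the pre-3.x PyMongo spellings, deprecated in favour of `insert_one()` and `count_documents()` and removed outright in PyMongo 4, so the reverted test only passes against an older client. Side by side:

```python
def exercise_old_api(coll):
    # Spelling restored by the revert; requires PyMongo < 4.
    coll.insert({'a': 'b'})
    return coll.count()

def exercise_new_api(coll):
    # The equivalent calls on modern PyMongo.
    coll.insert_one({'a': 'b'})
    return coll.count_documents({})
```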
---

**commit** `a1bcb99691f5a0238f6a34a5579df3e89e8d6823` · **lang** Python · **license** agpl-3.0
**old_file / new_file:** `child_sync_gp/model/project_compassion.py`
**repos:** CompassionCH/compassion-switzerland,ndtran/compassion-switzerland,MickSandoz/compassion-switzerland,eicher31/compassion-switzerland,Secheron/compassion-switzerland,CompassionCH/compassion-switzerland,Secheron/compassion-switzerland,CompassionCH/compassion-switzerland,MickSandoz/compassion-switzerland,ecino/compassion-switzerland,ndtran/compassion-switzerland,eicher31/compassion-switzerland,ecino/compassion-switzerland,ecino/compassion-switzerland,eicher31/compassion-switzerland
**subject / instruction / message:** Fix bug in write project.

**ndiff:**

```diff
 from openerp.osv import orm

 from . import gp_connector


 class project_compassion(orm.Model):
     _inherit = 'compassion.project'

     def write(self, cr, uid, ids, vals, context=None):
         """Update Project in GP."""
         res = super(project_compassion, self).write(cr, uid, ids, vals,
                                                     context)
+        if not isinstance(ids, list):
+            ids = [ids]
         gp_connect = gp_connector.GPConnect()
         for project in self.browse(cr, uid, ids, context):
             gp_connect.upsert_project(uid, project)
         return res
```
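The guard matters because the old OpenERP ORM lets call sites pass either a single id or a list of ids, while iterating `browse()` results needs a list. The normalization in isolation:

```python
def normalize_ids(ids):
    # Mirrors the added guard: accept 7 or [7, 8] alike.
    if not isinstance(ids, list):
        ids = [ids]
    return ids

print(normalize_ids(7))       # [7]
print(normalize_ids([7, 8]))  # [7, 8]
```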
---

**commit** `8235a217b50520093d549115fe09a8d4ff5e9191` · **lang** Python · **license** bsd-3-clause
**old_file / new_file:** `webmanager/default_settings.py`
**repos:** weijia/webmanager,weijia/webmanager,weijia/webmanager
**subject / instruction / message:** Fix provider oauth2 warning by import provider before oauth2 as described in the manual

**ndiff:**

```diff
 INSTALLED_APPS += (
+    'provider',
+    'provider.oauth2',
     'simplemenu',
     'webmanager',
     'bootstrapform',
     'userenabootstrap',
     'userena',
     # 'social_auth',
-    'provider.oauth2',
 )

 TEMPLATE_CONTEXT_PROCESSORS += (
     'django.contrib.auth.context_processors.auth',
 )

 AUTHENTICATION_BACKENDS += (
     'userena.backends.UserenaAuthenticationBackend',
     'django.contrib.auth.backends.ModelBackend',
     'guardian.backends.ObjectPermissionBackend'
 )

 ANONYMOUS_USER_ID = -1
 AUTH_PROFILE_MODULE = 'webmanager.MyProfile'
 USERENA_SIGNIN_REDIRECT_URL = '/accounts/%(username)s/'
 LOGIN_URL = '/accounts/signin/'
 LOGOUT_URL = '/accounts/signout/'
 #EMAIL_BACKEND = 'django.core.mail.backends.dummy.EmailBackend'
 USERENA_ACTIVATION_REQUIRED = False
 USERENA_SIGNIN_AFTER_SIGNUP = True
```
---

**commit** `3fbca600b1b90ad3499d941e178aae89d1c7df70` · **lang** Python · **license** cc0-1.0
**old_file / new_file:** `regulations/generator/layers/external_citation.py`
**repos:** 18F/regulations-site,18F/regulations-site,tadhg-ohiggins/regulations-site,eregs/regulations-site,18F/regulations-site,tadhg-ohiggins/regulations-site,eregs/regulations-site,tadhg-ohiggins/regulations-site,eregs/regulations-site,18F/regulations-site,tadhg-ohiggins/regulations-site,eregs/regulations-site
**subject / instruction / message:** Make external citations Python3 compatible

**ndiff:**

```diff
 from django.template import loader

-import utils
+from regulations.generator.layers import utils
 from regulations.generator.layers.base import SearchReplaceLayer


 class ExternalCitationLayer(SearchReplaceLayer):
     shorthand = 'external'
     data_source = 'external-citations'

     def __init__(self, layer):
         self.layer = layer
         self.template = loader.get_template(
             'regulations/layers/external_citation.html')

     def replacements_for(self, text, data):
         yield utils.render_template(self.template, data)
```
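The one-line change works because Python 2 resolved the bare `import utils` relative to the containing package, whereas Python 3 treats it as absolute and raises `ModuleNotFoundError`. Assuming the repository's package layout, either spelling below is Python 3 compatible:

```python
# Absolute import, as in the commit:
from regulations.generator.layers import utils

# An explicit relative import would also work from inside the package:
# from . import utils
```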
---

**commit** `30f8317838a2e984e54fe22042fd3ffff10f82e6` · **lang** Python · **license** apache-2.0
**old_file / new_file:** `waterbutler/core/streams/file.py`
**repos:** RCOSDP/waterbutler,felliott/waterbutler,rdhyee/waterbutler,CenterForOpenScience/waterbutler,TomBaxter/waterbutler,Johnetordoff/waterbutler
**subject / instruction / message:** Update FileStreamReader for new python 3.5 async

**ndiff:**

```diff
 import os
-import asyncio

 from waterbutler.core.streams import BaseStream


 class FileStreamReader(BaseStream):

     def __init__(self, file_pointer):
         super().__init__()
         self.file_gen = None
         self.file_pointer = file_pointer
         self.read_size = None
         self.content_type = 'application/octet-stream'

     @property
     def size(self):
         cursor = self.file_pointer.tell()
         self.file_pointer.seek(0, os.SEEK_END)
         ret = self.file_pointer.tell()
         self.file_pointer.seek(cursor)
         return ret

     def close(self):
         self.file_pointer.close()
         self.feed_eof()

     def read_as_gen(self):
         self.file_pointer.seek(0)
         while True:
-            data = self.file_pointer.read(self.read_size)
+            chunk = self.file_pointer.read(self.read_size)
-            if not data:
+            if not chunk:
-                break
+                self.feed_eof()
+                chunk = b''
-            yield data
+            yield chunk

     async def _read(self, size):
         self.file_gen = self.file_gen or self.read_as_gen()
-        # add sleep of 0 so read will yield and continue in next io loop iteration
-        await asyncio.sleep(0)
         self.read_size = size
-        try:
-            return next(self.file_gen)
+        return next(self.file_gen)
-        except StopIteration:
-            self.feed_eof()
-            return b''
```
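A reading of why the reader was restructured (not stated in the commit): under Python 3.5's coroutine rules, a `StopIteration` escaping `_read()` would surface as a `RuntimeError`, so the rewritten generator never exhausts itself; at EOF it marks the stream finished and keeps yielding `b''`. The pattern in isolation:

```python
import io

def read_as_gen(fp, size):
    # Never raises StopIteration: after EOF it yields b'' forever.
    # (The real reader also calls self.feed_eof() at that point.)
    fp.seek(0)
    while True:
        chunk = fp.read(size)
        if not chunk:
            chunk = b''
        yield chunk

gen = read_as_gen(io.BytesIO(b'abcdef'), 4)
print(next(gen), next(gen), next(gen))  # b'abcd' b'ef' b''
```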
---

**commit** `30044f8272557dbd367eab3dbe7c1ba1076484e9` · **lang** Python · **license** apache-2.0
**old_file / new_file:** `readux/pages/models.py`
**repos:** emory-libraries/readux,emory-libraries/readux,emory-libraries/readux
**subject / instruction:** Enable video content for cms pages
**message:** Enable video content for cms pages [#110289088]

**ndiff:**

```diff
 from django.db import models

 # Create your models here.
 from django.utils.translation import ugettext_lazy as _
 from feincms.module.page.models import Page
 from feincms.content.richtext.models import RichTextContent
 from feincms.content.medialibrary.models import MediaFileContent
+from feincms.content.video.models import VideoContent

 # Page.register_extensions('datepublisher', 'translations')  # Example set of extensions
 # Page.register_extensions('changedate')  # in docs but not available

 Page.register_templates({
     'title': _('Standard template'),
     'path': 'pages/base.html',
     'regions': (
         ('main', _('Main content area')),
         # ('sidebar', _('Sidebar'), 'inherited'),
     ),
 })

 Page.create_content_type(RichTextContent)
 Page.create_content_type(MediaFileContent, TYPE_CHOICES=(
     ('default', _('default')),
     ('lightbox', _('lightbox')),
 ))
+
+Page.create_content_type(VideoContent)
```
---

**commit** `65e6c8466482464333e77a2892fd0ac33ab5c3cb` · **lang** Python · **license** bsd-3-clause
**old_file / new_file:** `q_and_a/apps/token_auth/views.py`
**repos:** DemocracyClub/candidate_questions,DemocracyClub/candidate_questions,DemocracyClub/candidate_questions
**subject / instruction / message:** Fix indent, PEP-8 style and remove dup import.

**ndiff:**

```diff
 from django.views.generic import RedirectView
 from django.views.generic.detail import SingleObjectMixin
-from django.contrib.auth import login, authenticate, login
+from django.contrib.auth import login, authenticate
 from django.core.exceptions import PermissionDenied
 from django.core.urlresolvers import reverse


 class BaseAuthView(SingleObjectMixin, RedirectView):

     def get_redirect_url(self, *args, **kwargs):
-        if not self.request.user.is_authenticated() \
+        if (not self.request.user.is_authenticated()
-           and not hasattr(self.request.user, 'organisation_id'):
+                and not hasattr(self.request.user, 'organisation_id')):
             auth_user = authenticate(auth_token=self.kwargs['token'])
             if not auth_user:
                 raise PermissionDenied()
             login(self.request, auth_user)

         return reverse('organisation_questions')
```
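The styling half of the change swaps a backslash continuation for PEP 8's preferred implicit continuation inside parentheses, which survives trailing whitespace and re-indentation. In isolation, with a stand-in user object:

```python
def needs_token_login(user):
    # Parenthesised multi-line condition, as in the rewritten view.
    return (not user.is_authenticated()
            and not hasattr(user, 'organisation_id'))
```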
b4d9fb47e040b199f88cffb4a0b761c443f390b4
dduplicated/cli.py
dduplicated/cli.py
from os import path as opath, getcwd from pprint import pprint from sys import argv from dduplicated import commands def get_paths(params): paths = [] for param in params: path = opath.join(getcwd(), param) if opath.exists(path) and opath.isdir(path) and not opath.islink(path): paths.append(path) return paths def main(): params = argv processed_files = [] # Remove the command name del params[0] if len(params) == 0 or "help" in params: commands.help() elif "detect" in params: processed_files = commands.detect(get_paths(params)) elif "delete" in params: processed_files = commands.delete(commands.detect(get_paths(params))) elif "link" in params: processed_files = commands.link(commands.detect(get_paths(params))) else: commands.help() if len(processed_files) > 0: pprint(processed_files) else: print("No duplicates found") print("Great! Bye!") exit(0)
from os import path as opath, getcwd from pprint import pprint from sys import argv from dduplicated import commands def get_paths(params): paths = [] for param in params: path = opath.join(getcwd(), param) if opath.exists(path) and opath.isdir(path) and not opath.islink(path): paths.append(path) return paths def main(): params = argv processed_files = [] # Remove the command name del params[0] if len(params) == 0 or "help" in params: commands.help() elif "detect" in params: processed_files = commands.detect(get_paths(params)) elif "delete" in params: processed_files = commands.delete(commands.detect(get_paths(params))) elif "link" in params: processed_files = commands.link(commands.detect(get_paths(params))) else: commands.help() exit() if len(processed_files) > 0: pprint(processed_files) else: print("No duplicates found") print("Great! Bye!") exit(0)
Update in output to terminal.
Update in output to terminal. Signed-off-by: messiasthi <8562fc1efba9a3c99753c749fdfb1b6932b70fbf@gmail.com>
Python
mit
messiasthi/dduplicated-cli
from os import path as opath, getcwd from pprint import pprint from sys import argv + from dduplicated import commands def get_paths(params): paths = [] for param in params: path = opath.join(getcwd(), param) if opath.exists(path) and opath.isdir(path) and not opath.islink(path): paths.append(path) return paths def main(): params = argv processed_files = [] # Remove the command name del params[0] if len(params) == 0 or "help" in params: commands.help() elif "detect" in params: processed_files = commands.detect(get_paths(params)) elif "delete" in params: processed_files = commands.delete(commands.detect(get_paths(params))) elif "link" in params: processed_files = commands.link(commands.detect(get_paths(params))) else: commands.help() + exit() if len(processed_files) > 0: pprint(processed_files) else: print("No duplicates found") print("Great! Bye!") exit(0)
Update in output to terminal.
## Code Before: from os import path as opath, getcwd from pprint import pprint from sys import argv from dduplicated import commands def get_paths(params): paths = [] for param in params: path = opath.join(getcwd(), param) if opath.exists(path) and opath.isdir(path) and not opath.islink(path): paths.append(path) return paths def main(): params = argv processed_files = [] # Remove the command name del params[0] if len(params) == 0 or "help" in params: commands.help() elif "detect" in params: processed_files = commands.detect(get_paths(params)) elif "delete" in params: processed_files = commands.delete(commands.detect(get_paths(params))) elif "link" in params: processed_files = commands.link(commands.detect(get_paths(params))) else: commands.help() if len(processed_files) > 0: pprint(processed_files) else: print("No duplicates found") print("Great! Bye!") exit(0) ## Instruction: Update in output to terminal. ## Code After: from os import path as opath, getcwd from pprint import pprint from sys import argv from dduplicated import commands def get_paths(params): paths = [] for param in params: path = opath.join(getcwd(), param) if opath.exists(path) and opath.isdir(path) and not opath.islink(path): paths.append(path) return paths def main(): params = argv processed_files = [] # Remove the command name del params[0] if len(params) == 0 or "help" in params: commands.help() elif "detect" in params: processed_files = commands.detect(get_paths(params)) elif "delete" in params: processed_files = commands.delete(commands.detect(get_paths(params))) elif "link" in params: processed_files = commands.link(commands.detect(get_paths(params))) else: commands.help() exit() if len(processed_files) > 0: pprint(processed_files) else: print("No duplicates found") print("Great! Bye!") exit(0)
...
from sys import argv

from dduplicated import commands
...
        commands.help()
        exit()
...
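The record above dispatches subcommands through an if/elif chain and, after the fix, exits right after printing help for an unknown command. The same routing is often written as a dictionary lookup; the sketch below is a minimal illustration with stand-in handlers, not the actual dduplicated API:

import sys

def detect(paths): return []   # stand-in handlers, assumed for illustration
def delete(paths): return []
def link(paths): return []

DISPATCH = {'detect': detect, 'delete': delete, 'link': link}

def main(argv):
    args = argv[1:]
    handler = next((DISPATCH[a] for a in args if a in DISPATCH), None)
    if handler is None:
        print('usage: dduplicated [detect|delete|link] PATH...')
        sys.exit(1)  # stop immediately, mirroring the exit() added in the patch
    return handler(args)

if __name__ == '__main__':
    main(sys.argv)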
536211012be24a20c34ef0af1fcc555672129354
byceps/util/system.py
byceps/util/system.py
import os

CONFIG_ENV_VAR_NAME = 'BYCEPS_CONFIG'


def get_config_env_name_from_env(*, default=None):
    """Return the configuration environment name set via environment
    variable.

    Raise an exception if it isn't set.
    """
    env = os.environ.get(CONFIG_ENV_VAR_NAME)

    if env is None:
        if default is None:
            raise Exception(
                "No configuration environment was specified via the '{}' "
                "environment variable.".format(CONFIG_ENV_VAR_NAME))

        env = default

    return env
import os

CONFIG_ENV_VAR_NAME = 'BYCEPS_CONFIG'


def get_config_env_name_from_env():
    """Return the configuration environment name set via environment
    variable.

    Raise an exception if it isn't set.
    """
    env = os.environ.get(CONFIG_ENV_VAR_NAME)

    if not env:
        raise Exception(
            "No configuration environment was specified via the '{}' "
            "environment variable.".format(CONFIG_ENV_VAR_NAME))

    return env
Remove default argument from function that reads the configuration name from the environment
Remove default argument from function that reads the configuration name from the environment
Python
bsd-3-clause
homeworkprod/byceps,m-ober/byceps,m-ober/byceps,m-ober/byceps,homeworkprod/byceps,homeworkprod/byceps
import os

CONFIG_ENV_VAR_NAME = 'BYCEPS_CONFIG'


- def get_config_env_name_from_env(*, default=None):
+ def get_config_env_name_from_env():
    """Return the configuration environment name set via environment
    variable.

    Raise an exception if it isn't set.
    """
    env = os.environ.get(CONFIG_ENV_VAR_NAME)

+     if not env:
-     if env is None:
-         if default is None:
-             raise Exception(
+         raise Exception(
-                 "No configuration environment was specified via the '{}' "
+             "No configuration environment was specified via the '{}' "
-                 "environment variable.".format(CONFIG_ENV_VAR_NAME))
+             "environment variable.".format(CONFIG_ENV_VAR_NAME))
- 
-         env = default

    return env
Remove default argument from function that reads the configuration name from the environment
## Code Before:
import os

CONFIG_ENV_VAR_NAME = 'BYCEPS_CONFIG'


def get_config_env_name_from_env(*, default=None):
    """Return the configuration environment name set via environment
    variable.

    Raise an exception if it isn't set.
    """
    env = os.environ.get(CONFIG_ENV_VAR_NAME)

    if env is None:
        if default is None:
            raise Exception(
                "No configuration environment was specified via the '{}' "
                "environment variable.".format(CONFIG_ENV_VAR_NAME))

        env = default

    return env

## Instruction:
Remove default argument from function that reads the configuration name from the environment

## Code After:
import os

CONFIG_ENV_VAR_NAME = 'BYCEPS_CONFIG'


def get_config_env_name_from_env():
    """Return the configuration environment name set via environment
    variable.

    Raise an exception if it isn't set.
    """
    env = os.environ.get(CONFIG_ENV_VAR_NAME)

    if not env:
        raise Exception(
            "No configuration environment was specified via the '{}' "
            "environment variable.".format(CONFIG_ENV_VAR_NAME))

    return env
// ... existing code ...


def get_config_env_name_from_env():
    """Return the configuration environment name set via environment
// ... modified code ...

    if not env:
        raise Exception(
            "No configuration environment was specified via the '{}' "
            "environment variable.".format(CONFIG_ENV_VAR_NAME))

// ... rest of the code ...
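The byceps change above also tightens the check from `is None` to a falsy test, so an empty `BYCEPS_CONFIG` is rejected along with an unset one. A minimal standalone sketch of that pattern, with names assumed for illustration rather than taken from byceps:

import os

def require_env(name):
    value = os.environ.get(name)
    if not value:  # rejects both an unset variable and an empty string
        raise RuntimeError("environment variable {!r} must be set".format(name))
    return value

# Example usage (shell): BYCEPS_CONFIG=production python app.py
# config_name = require_env('BYCEPS_CONFIG')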
a8a56f20dd76f61ec1ea6e99037490922d5cbcb1
setup.py
setup.py
from distutils.core import setup

setup(
    name='grammpy',
    version='1.1.1',
    packages=['grammpy', 'grammpy.Grammars', 'grammpy.exceptions'],
    url='https://github.com/PatrikValkovic/grammpy',
    license='GNU General Public License v3.0',
    author='Patrik Valkovic',
    download_url='https://github.com/PatrikValkovic/grammpy/archive/v1.0.1.tar.gz',
    author_email='patrik.valkovic@hotmail.cz',
    description='Package for representing formal grammars.'
)
from distutils.core import setup

setup(
    name='grammpy',
    version='1.1.1',
    packages=['grammpy', 'grammpy.Grammars', 'grammpy.exceptions', 'grammpy.Rules'],
    url='https://github.com/PatrikValkovic/grammpy',
    license='GNU General Public License v3.0',
    author='Patrik Valkovic',
    download_url='https://github.com/PatrikValkovic/grammpy/archive/v1.0.1.tar.gz',
    author_email='patrik.valkovic@hotmail.cz',
    description='Package for representing formal grammars.'
)
FIX missing Rules directory in package
FIX missing Rules directory in package
Python
mit
PatrikValkovic/grammpy
from distutils.core import setup

setup(
    name='grammpy',
    version='1.1.1',
-     packages=['grammpy', 'grammpy.Grammars', 'grammpy.exceptions'],
+     packages=['grammpy', 'grammpy.Grammars', 'grammpy.exceptions', 'grammpy.Rules'],
    url='https://github.com/PatrikValkovic/grammpy',
    license='GNU General Public License v3.0',
    author='Patrik Valkovic',
    download_url='https://github.com/PatrikValkovic/grammpy/archive/v1.0.1.tar.gz',
    author_email='patrik.valkovic@hotmail.cz',
    description='Package for representing formal grammars.'
)
FIX missing Rules directory in package
## Code Before:
from distutils.core import setup

setup(
    name='grammpy',
    version='1.1.1',
    packages=['grammpy', 'grammpy.Grammars', 'grammpy.exceptions'],
    url='https://github.com/PatrikValkovic/grammpy',
    license='GNU General Public License v3.0',
    author='Patrik Valkovic',
    download_url='https://github.com/PatrikValkovic/grammpy/archive/v1.0.1.tar.gz',
    author_email='patrik.valkovic@hotmail.cz',
    description='Package for representing formal grammars.'
)

## Instruction:
FIX missing Rules directory in package

## Code After:
from distutils.core import setup

setup(
    name='grammpy',
    version='1.1.1',
    packages=['grammpy', 'grammpy.Grammars', 'grammpy.exceptions', 'grammpy.Rules'],
    url='https://github.com/PatrikValkovic/grammpy',
    license='GNU General Public License v3.0',
    author='Patrik Valkovic',
    download_url='https://github.com/PatrikValkovic/grammpy/archive/v1.0.1.tar.gz',
    author_email='patrik.valkovic@hotmail.cz',
    description='Package for representing formal grammars.'
)
...
    version='1.1.1',
    packages=['grammpy', 'grammpy.Grammars', 'grammpy.exceptions', 'grammpy.Rules'],
    url='https://github.com/PatrikValkovic/grammpy',
...
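The grammpy bug above — a subpackage missing from a hand-maintained `packages=` list — is the kind of omission `setuptools.find_packages()` avoids by discovering subpackages from the filesystem. A hedged sketch, assuming a conventional package layout; grammpy itself sticks with distutils and an explicit list:

from setuptools import setup, find_packages

setup(
    name='grammpy',
    version='1.1.1',
    packages=find_packages(exclude=['tests*']),  # would pick up grammpy.Rules automatically
)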
6c564ebe538d2723cc5f9397e09e5945796a257e
pyelevator/message.py
pyelevator/message.py
import msgpack
import logging

from .constants import FAILURE_STATUS


class MessageFormatError(Exception):
    pass


class Request(object):
    """Handler objects for frontend->backend objects messages"""
    def __new__(cls, *args, **kwargs):
        content = {
            'DB_UID': kwargs.pop('db_uid'),
            'COMMAND': kwargs.pop('command'),
            'ARGS': kwargs.pop('args'),
        }
        return msgpack.packb(content)


class Response(object):
    def __init__(self, raw_message):
        self.error = None
        errors_logger = logging.getLogger("errors_logger")
        message = msgpack.unpackb(raw_message)

        try:
            self.status = message.pop('STATUS')
            self._datas = message.pop('DATAS')
        except KeyError:
            errors_logger.exception("Invalid response message : %s" % message)
            raise MessageFormatError("Invalid response message")

        self._handle_failures()

    @property
    def datas(self):
        if hasattr(self, '_datas') and self._datas is not None:
            if (len(self._datas) == 1):
                return self._datas[0]
            return self._datas

    def _handle_failures(self):
        if self.status == FAILURE_STATUS:
            self.error = {
                'code': int(self.datas[0]),
                'msg': self.datas[1],
            }
import msgpack
import logging

from .constants import FAILURE_STATUS


class MessageFormatError(Exception):
    pass


class Request(object):
    """Handler objects for frontend->backend objects messages"""
    def __new__(cls, *args, **kwargs):
        content = {
            'DB_UID': kwargs.pop('db_uid'),
            'COMMAND': kwargs.pop('command'),
            'ARGS': kwargs.pop('args'),
        }
        return msgpack.packb(content)


class Response(object):
    def __init__(self, raw_message):
        self.error = None
        errors_logger = logging.getLogger("errors_logger")
        message = msgpack.unpackb(raw_message)

        try:
            self.status = message.pop('STATUS')
            self._datas = message.pop('DATAS')
        except KeyError:
            errors_logger.exception("Invalid response message : %s" % message)
            raise MessageFormatError("Invalid response message")

        self._handle_failures()

    @property
    def datas(self):
        if hasattr(self, '_datas') and self._datas is not None:
            if (len(self._datas) == 1) and not isinstance(self._datas[0], (tuple, list)):
                return self._datas[0]
            return self._datas

    def _handle_failures(self):
        if self.status == FAILURE_STATUS:
            self.error = {
                'code': int(self.datas[0]),
                'msg': self.datas[1],
            }
Fix : Range of len(1) have to be a tuple of tuples
Fix : Range of len(1) have to be a tuple of tuples
Python
mit
oleiade/py-elevator
import msgpack
import logging

from .constants import FAILURE_STATUS


class MessageFormatError(Exception):
    pass


class Request(object):
    """Handler objects for frontend->backend objects messages"""
    def __new__(cls, *args, **kwargs):
        content = {
            'DB_UID': kwargs.pop('db_uid'),
            'COMMAND': kwargs.pop('command'),
            'ARGS': kwargs.pop('args'),
        }
        return msgpack.packb(content)


class Response(object):
    def __init__(self, raw_message):
        self.error = None
        errors_logger = logging.getLogger("errors_logger")
        message = msgpack.unpackb(raw_message)

        try:
            self.status = message.pop('STATUS')
            self._datas = message.pop('DATAS')
        except KeyError:
            errors_logger.exception("Invalid response message : %s" % message)
            raise MessageFormatError("Invalid response message")

        self._handle_failures()

    @property
    def datas(self):
        if hasattr(self, '_datas') and self._datas is not None:
-             if (len(self._datas) == 1):
+             if (len(self._datas) == 1) and not isinstance(self._datas[0], (tuple, list)):
                return self._datas[0]
            return self._datas

    def _handle_failures(self):
        if self.status == FAILURE_STATUS:
            self.error = {
                'code': int(self.datas[0]),
                'msg': self.datas[1],
            }
Fix : Range of len(1) have to be a tuple of tuples
## Code Before:
import msgpack
import logging

from .constants import FAILURE_STATUS


class MessageFormatError(Exception):
    pass


class Request(object):
    """Handler objects for frontend->backend objects messages"""
    def __new__(cls, *args, **kwargs):
        content = {
            'DB_UID': kwargs.pop('db_uid'),
            'COMMAND': kwargs.pop('command'),
            'ARGS': kwargs.pop('args'),
        }
        return msgpack.packb(content)


class Response(object):
    def __init__(self, raw_message):
        self.error = None
        errors_logger = logging.getLogger("errors_logger")
        message = msgpack.unpackb(raw_message)

        try:
            self.status = message.pop('STATUS')
            self._datas = message.pop('DATAS')
        except KeyError:
            errors_logger.exception("Invalid response message : %s" % message)
            raise MessageFormatError("Invalid response message")

        self._handle_failures()

    @property
    def datas(self):
        if hasattr(self, '_datas') and self._datas is not None:
            if (len(self._datas) == 1):
                return self._datas[0]
            return self._datas

    def _handle_failures(self):
        if self.status == FAILURE_STATUS:
            self.error = {
                'code': int(self.datas[0]),
                'msg': self.datas[1],
            }

## Instruction:
Fix : Range of len(1) have to be a tuple of tuples

## Code After:
import msgpack
import logging

from .constants import FAILURE_STATUS


class MessageFormatError(Exception):
    pass


class Request(object):
    """Handler objects for frontend->backend objects messages"""
    def __new__(cls, *args, **kwargs):
        content = {
            'DB_UID': kwargs.pop('db_uid'),
            'COMMAND': kwargs.pop('command'),
            'ARGS': kwargs.pop('args'),
        }
        return msgpack.packb(content)


class Response(object):
    def __init__(self, raw_message):
        self.error = None
        errors_logger = logging.getLogger("errors_logger")
        message = msgpack.unpackb(raw_message)

        try:
            self.status = message.pop('STATUS')
            self._datas = message.pop('DATAS')
        except KeyError:
            errors_logger.exception("Invalid response message : %s" % message)
            raise MessageFormatError("Invalid response message")

        self._handle_failures()

    @property
    def datas(self):
        if hasattr(self, '_datas') and self._datas is not None:
            if (len(self._datas) == 1) and not isinstance(self._datas[0], (tuple, list)):
                return self._datas[0]
            return self._datas

    def _handle_failures(self):
        if self.status == FAILURE_STATUS:
            self.error = {
                'code': int(self.datas[0]),
                'msg': self.datas[1],
            }
// ... existing code ...
        if hasattr(self, '_datas') and self._datas is not None:
            if (len(self._datas) == 1) and not isinstance(self._datas[0], (tuple, list)):
                return self._datas[0]
// ... rest of the code ...
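The isinstance guard added to py-elevator matters because a single range result is itself a sequence: unconditionally unwrapping a length-1 list would hand back the inner (key, value) pair instead of a one-element list of pairs. A self-contained illustration of the fixed behaviour, independent of the Response class:

def unwrap(datas):
    # only unwrap a lone scalar; a lone tuple/list stays wrapped
    if len(datas) == 1 and not isinstance(datas[0], (tuple, list)):
        return datas[0]
    return datas

assert unwrap(['value']) == 'value'
assert unwrap([('key', 'value')]) == [('key', 'value')]  # a single range row survives intact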
464bc1b511415459e99700b94101776d00b23796
indra/pre_assemble_for_db/pre_assemble_script.py
indra/pre_assemble_for_db/pre_assemble_script.py
import indra.tools.assemble_corpus as ac


def process_statements(stmts):
    stmts = ac.map_grounding(stmts)
    stmts = ac.map_sequence(stmts)
    stmts = ac.run_preassembly(stmts, return_toplevel=False)
    return stmts
import indra.tools.assemble_corpus as ac
from indra.db.util import get_statements, insert_pa_stmts


def process_statements(stmts, num_procs=1):
    stmts = ac.map_grounding(stmts)
    stmts = ac.map_sequence(stmts)
    stmts = ac.run_preassembly(stmts, return_toplevel=False,
                               poolsize=num_procs)
    return stmts


def preassemble_db_stmts(db, num_procs, *clauses):
    """Run pre-assembly on a set of statements in the database."""
    stmts = get_statements(clauses, db=db, do_stmt_count=False)
    pa_stmts = process_statements(stmts, num_procs)
    insert_pa_stmts(db, pa_stmts)
    return pa_stmts
Create function to handle full pipeline.
Create function to handle full pipeline.
Python
bsd-2-clause
bgyori/indra,johnbachman/indra,bgyori/indra,sorgerlab/belpy,pvtodorov/indra,johnbachman/indra,sorgerlab/indra,bgyori/indra,johnbachman/belpy,pvtodorov/indra,sorgerlab/belpy,johnbachman/belpy,sorgerlab/indra,sorgerlab/belpy,johnbachman/indra,johnbachman/belpy,sorgerlab/indra,pvtodorov/indra,pvtodorov/indra
import indra.tools.assemble_corpus as ac
+ from indra.db.util import get_statements, insert_pa_stmts
+ 

- def process_statements(stmts):
+ def process_statements(stmts, num_procs=1):
    stmts = ac.map_grounding(stmts)
    stmts = ac.map_sequence(stmts)
-     stmts = ac.run_preassembly(stmts, return_toplevel=False)
+     stmts = ac.run_preassembly(stmts, return_toplevel=False,
+                                poolsize=num_procs)
    return stmts
+ 
+ 
+ def preassemble_db_stmts(db, num_procs, *clauses):
+     """Run pre-assembly on a set of statements in the database."""
+     stmts = get_statements(clauses, db=db, do_stmt_count=False)
+     pa_stmts = process_statements(stmts, num_procs)
+     insert_pa_stmts(db, pa_stmts)
+     return pa_stmts
+ 
Create function to handle full pipeline.
## Code Before:
import indra.tools.assemble_corpus as ac


def process_statements(stmts):
    stmts = ac.map_grounding(stmts)
    stmts = ac.map_sequence(stmts)
    stmts = ac.run_preassembly(stmts, return_toplevel=False)
    return stmts

## Instruction:
Create function to handle full pipeline.

## Code After:
import indra.tools.assemble_corpus as ac
from indra.db.util import get_statements, insert_pa_stmts


def process_statements(stmts, num_procs=1):
    stmts = ac.map_grounding(stmts)
    stmts = ac.map_sequence(stmts)
    stmts = ac.run_preassembly(stmts, return_toplevel=False,
                               poolsize=num_procs)
    return stmts


def preassemble_db_stmts(db, num_procs, *clauses):
    """Run pre-assembly on a set of statements in the database."""
    stmts = get_statements(clauses, db=db, do_stmt_count=False)
    pa_stmts = process_statements(stmts, num_procs)
    insert_pa_stmts(db, pa_stmts)
    return pa_stmts
// ... existing code ...
import indra.tools.assemble_corpus as ac
from indra.db.util import get_statements, insert_pa_stmts


def process_statements(stmts, num_procs=1):
    stmts = ac.map_grounding(stmts)
// ... modified code ...
    stmts = ac.map_sequence(stmts)
    stmts = ac.run_preassembly(stmts, return_toplevel=False,
                               poolsize=num_procs)
    return stmts


def preassemble_db_stmts(db, num_procs, *clauses):
    """Run pre-assembly on a set of statements in the database."""
    stmts = get_statements(clauses, db=db, do_stmt_count=False)
    pa_stmts = process_statements(stmts, num_procs)
    insert_pa_stmts(db, pa_stmts)
    return pa_stmts
// ... rest of the code ...
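The new `preassemble_db_stmts` above composes fetch, transform and store into one call and forwards arbitrary filter clauses via `*clauses`. The shape of that pipeline, reduced to stand-in functions (not the indra API):

def fetch(*clauses):
    return list(clauses)                    # stand-in for get_statements

def process(items, num_procs=1):
    return [str(i).upper() for i in items]  # stand-in for process_statements

def store(items):
    print('storing', items)                 # stand-in for insert_pa_stmts

def run_pipeline(num_procs, *clauses):
    items = fetch(*clauses)
    processed = process(items, num_procs)
    store(processed)
    return processed

run_pipeline(2, 'a', 'b')  # prints: storing ['A', 'B']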
5b7a1a40ea43834feb5563f566d07bd5b31c589d
tests/test-recipes/metadata/always_include_files_glob/run_test.py
tests/test-recipes/metadata/always_include_files_glob/run_test.py
import os
import sys
import json


def main():
    prefix = os.environ['PREFIX']
    info_file = os.path.join(prefix, 'conda-meta',
                             'always_include_files_regex-0.1-0.json')
    with open(info_file, 'r') as fh:
        info = json.load(fh)

    if sys.platform == 'darwin':
        assert set(info['files']) == {'lib/libpng.dylib', 'lib/libpng16.16.dylib', 'lib/libpng16.dylib'}
    elif sys.platform.startswith('linux'):
        assert set(info['files']) == {'lib/libpng.so', 'lib/libpng16.so', 'lib/libpng16.so.16', 'lib/libpng16.so.16.17.0'}

if __name__ == '__main__':
    main()
import os
import sys
import json


def main():
    prefix = os.environ['PREFIX']
    info_file = os.path.join(prefix, 'conda-meta',
                             'always_include_files_regex-0.1-0.json')
    with open(info_file, 'r') as fh:
        info = json.load(fh)

    if sys.platform == 'darwin':
        assert set(info['files']) == {'lib/libpng.dylib', 'lib/libpng16.16.dylib', 'lib/libpng16.dylib'}, info['files']
    elif sys.platform.startswith('linux'):
        assert set(info['files']) == {'lib/libpng.so', 'lib/libpng16.so', 'lib/libpng16.so.16', 'lib/libpng16.so.16.17.0'}, info['files']

if __name__ == '__main__':
    main()
Add error messages to the asserts
Add error messages to the asserts
Python
bsd-3-clause
ilastik/conda-build,shastings517/conda-build,frol/conda-build,dan-blanchard/conda-build,mwcraig/conda-build,mwcraig/conda-build,dan-blanchard/conda-build,ilastik/conda-build,sandhujasmine/conda-build,rmcgibbo/conda-build,sandhujasmine/conda-build,shastings517/conda-build,rmcgibbo/conda-build,shastings517/conda-build,dan-blanchard/conda-build,mwcraig/conda-build,ilastik/conda-build,rmcgibbo/conda-build,sandhujasmine/conda-build,frol/conda-build,frol/conda-build
import os
import sys
import json


def main():
    prefix = os.environ['PREFIX']
    info_file = os.path.join(prefix, 'conda-meta',
                             'always_include_files_regex-0.1-0.json')
    with open(info_file, 'r') as fh:
        info = json.load(fh)

    if sys.platform == 'darwin':
-         assert set(info['files']) == {'lib/libpng.dylib', 'lib/libpng16.16.dylib', 'lib/libpng16.dylib'}
+         assert set(info['files']) == {'lib/libpng.dylib', 'lib/libpng16.16.dylib', 'lib/libpng16.dylib'}, info['files']
    elif sys.platform.startswith('linux'):
-         assert set(info['files']) == {'lib/libpng.so', 'lib/libpng16.so', 'lib/libpng16.so.16', 'lib/libpng16.so.16.17.0'}
+         assert set(info['files']) == {'lib/libpng.so', 'lib/libpng16.so', 'lib/libpng16.so.16', 'lib/libpng16.so.16.17.0'}, info['files']

if __name__ == '__main__':
    main()
Add error messages to the asserts
## Code Before:
import os
import sys
import json


def main():
    prefix = os.environ['PREFIX']
    info_file = os.path.join(prefix, 'conda-meta',
                             'always_include_files_regex-0.1-0.json')
    with open(info_file, 'r') as fh:
        info = json.load(fh)

    if sys.platform == 'darwin':
        assert set(info['files']) == {'lib/libpng.dylib', 'lib/libpng16.16.dylib', 'lib/libpng16.dylib'}
    elif sys.platform.startswith('linux'):
        assert set(info['files']) == {'lib/libpng.so', 'lib/libpng16.so', 'lib/libpng16.so.16', 'lib/libpng16.so.16.17.0'}

if __name__ == '__main__':
    main()

## Instruction:
Add error messages to the asserts

## Code After:
import os
import sys
import json


def main():
    prefix = os.environ['PREFIX']
    info_file = os.path.join(prefix, 'conda-meta',
                             'always_include_files_regex-0.1-0.json')
    with open(info_file, 'r') as fh:
        info = json.load(fh)

    if sys.platform == 'darwin':
        assert set(info['files']) == {'lib/libpng.dylib', 'lib/libpng16.16.dylib', 'lib/libpng16.dylib'}, info['files']
    elif sys.platform.startswith('linux'):
        assert set(info['files']) == {'lib/libpng.so', 'lib/libpng16.so', 'lib/libpng16.so.16', 'lib/libpng16.so.16.17.0'}, info['files']

if __name__ == '__main__':
    main()
...
    if sys.platform == 'darwin':
        assert set(info['files']) == {'lib/libpng.dylib', 'lib/libpng16.16.dylib', 'lib/libpng16.dylib'}, info['files']
    elif sys.platform.startswith('linux'):
        assert set(info['files']) == {'lib/libpng.so', 'lib/libpng16.so', 'lib/libpng16.so.16', 'lib/libpng16.so.16.17.0'}, info['files']
...
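The conda-build test change leans on the second operand of `assert`: when the condition is falsy, the expression after the comma becomes the AssertionError's message, so a failing run shows the actual file list instead of a bare traceback. A two-line refresher:

files = {'lib/libpng.so'}
try:
    assert files == {'lib/libpng.dylib'}, files
except AssertionError as err:
    print(err)  # prints {'lib/libpng.so'} — the data you need to debug the failure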
114eae527cce97423ec5cc5896a4728dc0764d2c
chunsabot/modules/images.py
chunsabot/modules/images.py
import os
import json
import shutil
import subprocess
import string
import random

from chunsabot.database import Database
from chunsabot.botlogic import brain


RNN_PATH = Database.load_config('rnn_library_path')
MODEL_PATH = os.path.join(RNN_PATH, "models/checkpoint_v1.t7_cpu.t7")


def id_generator(size=12, chars=string.ascii_lowercase + string.digits):
    return ''.join(random.choice(chars) for _ in range(size))


@brain.route("@image")
def add_image_description(msg, extras):
    attachment = extras['attachment']
    if not attachment:
        return None

    path = os.path.join(brain.__temppath__, id_generator(), 'image_processing')
    if not os.path.isdir(path):
        os.mkdir(path)
    # Moving to temp path
    img = shutil.move(attachment, path)
    img_folder = os.path.dirname(img)

    result = subprocess.run(
        "th {}/eval.lua -model {} -gpuid -1 -image_folder {} -batch_size 1"\
        .format(RNN_PATH, MODEL_PATH, img_folder)
    )
    os.rmdir(img_folder)

    result_message = None
    with open(os.path.join(result, "vis/vis.json"), 'r') as output:
        json_output = json.loads(output)
        result_message = json_output[0]['caption']

    return result_message
import os
import json
import shutil
import subprocess
import string
import random

from chunsabot.database import Database
from chunsabot.botlogic import brain


RNN_PATH = Database.load_config('rnn_library_path')
MODEL_PATH = os.path.join(RNN_PATH, "models/checkpoint_v1.t7_cpu.t7")


def id_generator(size=12, chars=string.ascii_lowercase + string.digits):
    return ''.join(random.choice(chars) for _ in range(size))


@brain.route("@image")
def add_image_description(msg, extras):
    attachment = extras['attachment']
    if not attachment:
        return None

    path = os.path.join(brain.__temppath__, "{}_{}".format(id_generator(), 'image_processing'))
    if not os.path.isdir(path):
        os.mkdir(path)
    # Moving to temp path
    img = shutil.move(attachment, path)
    img_folder = os.path.dirname(img)

    result = subprocess.run(
        "th {}/eval.lua -model {} -gpuid -1 -image_folder {} -batch_size 1"\
        .format(RNN_PATH, MODEL_PATH, img_folder)
    )
    os.rmdir(img_folder)

    result_message = None
    with open(os.path.join(result, "vis/vis.json"), 'r') as output:
        json_output = json.loads(output)
        result_message = json_output[0]['caption']

    return result_message
Fix some confusion of creating folders
Fix some confusion of creating folders
Python
mit
susemeee/Chunsabot-framework
import os
import json
import shutil
import subprocess
import string
import random

from chunsabot.database import Database
from chunsabot.botlogic import brain


RNN_PATH = Database.load_config('rnn_library_path')
MODEL_PATH = os.path.join(RNN_PATH, "models/checkpoint_v1.t7_cpu.t7")


def id_generator(size=12, chars=string.ascii_lowercase + string.digits):
    return ''.join(random.choice(chars) for _ in range(size))


@brain.route("@image")
def add_image_description(msg, extras):
    attachment = extras['attachment']
    if not attachment:
        return None

-     path = os.path.join(brain.__temppath__, id_generator(), 'image_processing')
+     path = os.path.join(brain.__temppath__, "{}_{}".format(id_generator(), 'image_processing'))
    if not os.path.isdir(path):
        os.mkdir(path)
    # Moving to temp path
    img = shutil.move(attachment, path)
    img_folder = os.path.dirname(img)

    result = subprocess.run(
        "th {}/eval.lua -model {} -gpuid -1 -image_folder {} -batch_size 1"\
        .format(RNN_PATH, MODEL_PATH, img_folder)
    )
    os.rmdir(img_folder)

    result_message = None
    with open(os.path.join(result, "vis/vis.json"), 'r') as output:
        json_output = json.loads(output)
        result_message = json_output[0]['caption']

    return result_message
Fix some confusion of creating folders
## Code Before:
import os
import json
import shutil
import subprocess
import string
import random

from chunsabot.database import Database
from chunsabot.botlogic import brain


RNN_PATH = Database.load_config('rnn_library_path')
MODEL_PATH = os.path.join(RNN_PATH, "models/checkpoint_v1.t7_cpu.t7")


def id_generator(size=12, chars=string.ascii_lowercase + string.digits):
    return ''.join(random.choice(chars) for _ in range(size))


@brain.route("@image")
def add_image_description(msg, extras):
    attachment = extras['attachment']
    if not attachment:
        return None

    path = os.path.join(brain.__temppath__, id_generator(), 'image_processing')
    if not os.path.isdir(path):
        os.mkdir(path)
    # Moving to temp path
    img = shutil.move(attachment, path)
    img_folder = os.path.dirname(img)

    result = subprocess.run(
        "th {}/eval.lua -model {} -gpuid -1 -image_folder {} -batch_size 1"\
        .format(RNN_PATH, MODEL_PATH, img_folder)
    )
    os.rmdir(img_folder)

    result_message = None
    with open(os.path.join(result, "vis/vis.json"), 'r') as output:
        json_output = json.loads(output)
        result_message = json_output[0]['caption']

    return result_message

## Instruction:
Fix some confusion of creating folders

## Code After:
import os
import json
import shutil
import subprocess
import string
import random

from chunsabot.database import Database
from chunsabot.botlogic import brain


RNN_PATH = Database.load_config('rnn_library_path')
MODEL_PATH = os.path.join(RNN_PATH, "models/checkpoint_v1.t7_cpu.t7")


def id_generator(size=12, chars=string.ascii_lowercase + string.digits):
    return ''.join(random.choice(chars) for _ in range(size))


@brain.route("@image")
def add_image_description(msg, extras):
    attachment = extras['attachment']
    if not attachment:
        return None

    path = os.path.join(brain.__temppath__, "{}_{}".format(id_generator(), 'image_processing'))
    if not os.path.isdir(path):
        os.mkdir(path)
    # Moving to temp path
    img = shutil.move(attachment, path)
    img_folder = os.path.dirname(img)

    result = subprocess.run(
        "th {}/eval.lua -model {} -gpuid -1 -image_folder {} -batch_size 1"\
        .format(RNN_PATH, MODEL_PATH, img_folder)
    )
    os.rmdir(img_folder)

    result_message = None
    with open(os.path.join(result, "vis/vis.json"), 'r') as output:
        json_output = json.loads(output)
        result_message = json_output[0]['caption']

    return result_message
# ... existing code ...
    path = os.path.join(brain.__temppath__, "{}_{}".format(id_generator(), 'image_processing'))
    if not os.path.isdir(path):
# ... rest of the code ...
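The chunsabot fix above collapses a nested `<random>/image_processing` path into a single `<random>_image_processing` directory name. The standard library can mint a unique working directory directly; a sketch using `tempfile` as an alternative, not what the project adopted:

import shutil
import tempfile

work_dir = tempfile.mkdtemp(suffix='_image_processing')  # unique name, already created
try:
    pass  # move the attachment here and run the captioning model
finally:
    shutil.rmtree(work_dir, ignore_errors=True)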
c266fbd7a3478d582dc0d6c88fc5e3d8b7a8f62f
survey/views/survey_result.py
survey/views/survey_result.py
import datetime
import os

from django.http.response import HttpResponse
from django.shortcuts import get_object_or_404

from survey.management.survey2csv import Survey2CSV
from survey.models import Survey


def serve_result_csv(request, pk):
    survey = get_object_or_404(Survey, pk=pk)
    try:
        latest_answer = survey.latest_answer_date()
        csv_modification_time = os.path.getmtime(Survey2CSV.file_name(survey))
        csv_time = datetime.datetime.fromtimestamp(csv_modification_time)
        csv_time = csv_time.replace(tzinfo=latest_answer.tzinfo)
        if latest_answer > csv_time:
            # If the file was generated before the last answer, generate it.
            Survey2CSV.generate_file(survey)
    except OSError:
        # If the file do not exist, generate it.
        Survey2CSV.generate_file(survey)
    with open(Survey2CSV.file_name(survey), 'r') as f:
        response = HttpResponse(f.read(), content_type='text/csv')
        response['mimetype='] = 'application/force-download'
        cd = u'attachment; filename="{}.csv"'.format(survey.name)
        response['Content-Disposition'] = cd
        return response
import datetime
import os

from django.http.response import HttpResponse
from django.shortcuts import get_object_or_404

from survey.management.survey2csv import Survey2CSV
from survey.models import Survey


def serve_result_csv(request, pk):
    survey = get_object_or_404(Survey, pk=pk)
    try:
        latest_answer = survey.latest_answer_date()
        csv_modification_time = os.path.getmtime(Survey2CSV.file_name(survey))
        csv_time = datetime.datetime.fromtimestamp(csv_modification_time)
        csv_time = csv_time.replace(tzinfo=latest_answer.tzinfo)
        if latest_answer > csv_time:
            # If the file was generated before the last answer, generate it.
            Survey2CSV.generate_file(survey)
    except OSError:
        # If the file do not exist, generate it.
        Survey2CSV.generate_file(survey)
    with open(Survey2CSV.file_name(survey), 'r') as f:
        response = HttpResponse(f.read(), content_type='text/csv')
        cd = u'attachment; filename="{}.csv"'.format(survey.name)
        response['Content-Disposition'] = cd
        return response
Fix - Apache error AH02429
Fix - Apache error AH02429

Response header name 'mimetype=' contains invalid characters, aborting request
Python
agpl-3.0
Pierre-Sassoulas/django-survey,Pierre-Sassoulas/django-survey,Pierre-Sassoulas/django-survey
import datetime
import os

from django.http.response import HttpResponse
from django.shortcuts import get_object_or_404

from survey.management.survey2csv import Survey2CSV
from survey.models import Survey


def serve_result_csv(request, pk):
    survey = get_object_or_404(Survey, pk=pk)
    try:
        latest_answer = survey.latest_answer_date()
        csv_modification_time = os.path.getmtime(Survey2CSV.file_name(survey))
        csv_time = datetime.datetime.fromtimestamp(csv_modification_time)
        csv_time = csv_time.replace(tzinfo=latest_answer.tzinfo)
        if latest_answer > csv_time:
            # If the file was generated before the last answer, generate it.
            Survey2CSV.generate_file(survey)
    except OSError:
        # If the file do not exist, generate it.
        Survey2CSV.generate_file(survey)
    with open(Survey2CSV.file_name(survey), 'r') as f:
        response = HttpResponse(f.read(), content_type='text/csv')
-         response['mimetype='] = 'application/force-download'
        cd = u'attachment; filename="{}.csv"'.format(survey.name)
        response['Content-Disposition'] = cd
        return response
Fix - Apache error AH02429
## Code Before:
import datetime
import os

from django.http.response import HttpResponse
from django.shortcuts import get_object_or_404

from survey.management.survey2csv import Survey2CSV
from survey.models import Survey


def serve_result_csv(request, pk):
    survey = get_object_or_404(Survey, pk=pk)
    try:
        latest_answer = survey.latest_answer_date()
        csv_modification_time = os.path.getmtime(Survey2CSV.file_name(survey))
        csv_time = datetime.datetime.fromtimestamp(csv_modification_time)
        csv_time = csv_time.replace(tzinfo=latest_answer.tzinfo)
        if latest_answer > csv_time:
            # If the file was generated before the last answer, generate it.
            Survey2CSV.generate_file(survey)
    except OSError:
        # If the file do not exist, generate it.
        Survey2CSV.generate_file(survey)
    with open(Survey2CSV.file_name(survey), 'r') as f:
        response = HttpResponse(f.read(), content_type='text/csv')
        response['mimetype='] = 'application/force-download'
        cd = u'attachment; filename="{}.csv"'.format(survey.name)
        response['Content-Disposition'] = cd
        return response

## Instruction:
Fix - Apache error AH02429

## Code After:
import datetime
import os

from django.http.response import HttpResponse
from django.shortcuts import get_object_or_404

from survey.management.survey2csv import Survey2CSV
from survey.models import Survey


def serve_result_csv(request, pk):
    survey = get_object_or_404(Survey, pk=pk)
    try:
        latest_answer = survey.latest_answer_date()
        csv_modification_time = os.path.getmtime(Survey2CSV.file_name(survey))
        csv_time = datetime.datetime.fromtimestamp(csv_modification_time)
        csv_time = csv_time.replace(tzinfo=latest_answer.tzinfo)
        if latest_answer > csv_time:
            # If the file was generated before the last answer, generate it.
            Survey2CSV.generate_file(survey)
    except OSError:
        # If the file do not exist, generate it.
        Survey2CSV.generate_file(survey)
    with open(Survey2CSV.file_name(survey), 'r') as f:
        response = HttpResponse(f.read(), content_type='text/csv')
        cd = u'attachment; filename="{}.csv"'.format(survey.name)
        response['Content-Disposition'] = cd
        return response
...
        response = HttpResponse(f.read(), content_type='text/csv')
        cd = u'attachment; filename="{}.csv"'.format(survey.name)
...
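Apache's AH02429 complains about invalid header *names*: `response['mimetype='] = ...` created a header literally called `mimetype=`, trailing equals sign included. Forcing a download needs only a content type plus Content-Disposition; a minimal Django-style sketch (assumes Django is installed, view wiring omitted):

from django.http import HttpResponse

def csv_download(data, name):
    response = HttpResponse(data, content_type='text/csv')
    response['Content-Disposition'] = 'attachment; filename="{}.csv"'.format(name)
    return response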
3a6d76201104b928c1b9053317c9e61804814ff5
pyresticd.py
pyresticd.py
import os
import getpass
import time

from twisted.internet import task
from twisted.internet import reactor

# Configuration

timeout = 3600*24*3  # Period
restic_command = "/home/mebus/restic"  # your restic command here

# Program


def do_restic_backup():
    print "\nStarting Backup at " + str(time.ctime())
    os.system(restic_command)

print "\nRestic Scheduler\n----------------------------\n"
print "Timout ist: " + str(timeout)
restic_password = getpass.getpass(prompt="Please enter the restic encryption password: ")
os.environ["RESTIC_PASSWORD"] = restic_password

l = task.LoopingCall(do_restic_backup)
l.start(timeout)

reactor.run()
import os
import getpass
import time

from twisted.internet import task
from twisted.internet import reactor

# Configuration

timeout = 3600*24*3  # Period
restic_command = "/home/mebus/restic"  # your restic command here

# Program


def do_restic_backup():
    print('Starting Backup at {}'.format(time.ctime()))
    os.system(restic_command)

print('Restic Scheduler')
print('-' * 30)
print('Timeout: {}'.format(timeout))
restic_password = getpass.getpass(prompt="Please enter the restic encryption password: ")
os.environ["RESTIC_PASSWORD"] = restic_password

l = task.LoopingCall(do_restic_backup)
l.start(timeout)

reactor.run()
Use py3-style print and string-formatting
Use py3-style print and string-formatting
Python
mit
Mebus/pyresticd,Mebus/pyresticd
import os
import getpass
import time

from twisted.internet import task
from twisted.internet import reactor

- # Configuration 
+ # Configuration

timeout = 3600*24*3  # Period
restic_command = "/home/mebus/restic"  # your restic command here

# Program

+ 
def do_restic_backup():
-     print "\nStarting Backup at " + str(time.ctime())
+     print('Starting Backup at {}'.format(time.ctime()))
    os.system(restic_command)

- print "\nRestic Scheduler\n----------------------------\n"
- print "Timout ist: " + str(timeout)
+ print('Restic Scheduler')
+ print('-' * 30)
+ print('Timeout: {}'.format(timeout))
restic_password = getpass.getpass(prompt="Please enter the restic encryption password: ")
os.environ["RESTIC_PASSWORD"] = restic_password

l = task.LoopingCall(do_restic_backup)
l.start(timeout)

reactor.run()
Use py3-style print and string-formatting
## Code Before:
import os
import getpass
import time

from twisted.internet import task
from twisted.internet import reactor

# Configuration

timeout = 3600*24*3  # Period
restic_command = "/home/mebus/restic"  # your restic command here

# Program


def do_restic_backup():
    print "\nStarting Backup at " + str(time.ctime())
    os.system(restic_command)

print "\nRestic Scheduler\n----------------------------\n"
print "Timout ist: " + str(timeout)
restic_password = getpass.getpass(prompt="Please enter the restic encryption password: ")
os.environ["RESTIC_PASSWORD"] = restic_password

l = task.LoopingCall(do_restic_backup)
l.start(timeout)

reactor.run()

## Instruction:
Use py3-style print and string-formatting

## Code After:
import os
import getpass
import time

from twisted.internet import task
from twisted.internet import reactor

# Configuration

timeout = 3600*24*3  # Period
restic_command = "/home/mebus/restic"  # your restic command here

# Program


def do_restic_backup():
    print('Starting Backup at {}'.format(time.ctime()))
    os.system(restic_command)

print('Restic Scheduler')
print('-' * 30)
print('Timeout: {}'.format(timeout))
restic_password = getpass.getpass(prompt="Please enter the restic encryption password: ")
os.environ["RESTIC_PASSWORD"] = restic_password

l = task.LoopingCall(do_restic_backup)
l.start(timeout)

reactor.run()
# ... existing code ...
# Configuration
# ... modified code ...
def do_restic_backup():
    print('Starting Backup at {}'.format(time.ctime()))
    os.system(restic_command)
...
print('Restic Scheduler')
print('-' * 30)
print('Timeout: {}'.format(timeout))
restic_password = getpass.getpass(prompt="Please enter the restic encryption password: ")
# ... rest of the code ...
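The pyresticd migration swaps Python 2 print statements and `+ str(...)` concatenation for `print()` with `str.format`. On Python 3.6+ the same lines read more directly as f-strings — a stylistic alternative to what the project chose, shown here as a sketch:

import time

timeout = 3600 * 24 * 3
print(f'Starting Backup at {time.ctime()}')
print('Restic Scheduler')
print('-' * 30)
print(f'Timeout: {timeout}')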
5d332259e16758bc43201073db91409390be9134
UM/Operations/GroupedOperation.py
UM/Operations/GroupedOperation.py
from . import Operation


## An operation that groups several other operations together.
#
# The intent of this operation is to hide an underlying chain of operations
# from the user if they correspond to only one interaction with the user, such
# as an operation applied to multiple scene nodes or a re-arrangement of
# multiple items in the scene.
class GroupedOperation(Operation.Operation):
    ## Creates a new grouped operation.
    #
    # The grouped operation is empty after its initialisation.
    def __init__(self):
        super().__init__()
        self._children = []

    ## Adds an operation to this group.
    #
    # The operation will be undone together with the rest of the operations in
    # this group.
    # Note that when the order matters, the operations are undone in reverse
    # order as the order in which they are added.
    def addOperation(self, op):
        self._children.append(op)

    ## Removes an operation from this group.
    def removeOperation(self, index):
        del self._children[index]

    ## Undo all operations in this group.
    #
    # The operations are undone in reverse order as the order in which they
    # were added.
    def undo(self):
        for op in reversed(self._children):
            op.undo()

    ## Redoes all operations in this group.
    def redo(self):
        for op in self._children:
            op.redo()
from . import Operation


## An operation that groups several other operations together.
#
# The intent of this operation is to hide an underlying chain of operations
# from the user if they correspond to only one interaction with the user, such
# as an operation applied to multiple scene nodes or a re-arrangement of
# multiple items in the scene.
class GroupedOperation(Operation.Operation):
    ## Creates a new grouped operation.
    #
    # The grouped operation is empty after its initialisation.
    def __init__(self):
        super().__init__()
        self._children = []

    ## Adds an operation to this group.
    #
    # The operation will be undone together with the rest of the operations in
    # this group.
    # Note that when the order matters, the operations are undone in reverse
    # order as the order in which they are added.
    def addOperation(self, op):
        self._children.append(op)

    ## Undo all operations in this group.
    #
    # The operations are undone in reverse order as the order in which they
    # were added.
    def undo(self):
        for op in reversed(self._children):
            op.undo()

    ## Redoes all operations in this group.
    def redo(self):
        for op in self._children:
            op.redo()
Remove removeOperation from grouped operation
Remove removeOperation from grouped operation

This function is never used and actually should never be used. The operation may not be modified after it is used, so removing an operation from the list makes no sense.
Python
agpl-3.0
onitake/Uranium,onitake/Uranium
from . import Operation


## An operation that groups several other operations together.
#
# The intent of this operation is to hide an underlying chain of operations
# from the user if they correspond to only one interaction with the user, such
# as an operation applied to multiple scene nodes or a re-arrangement of
# multiple items in the scene.
class GroupedOperation(Operation.Operation):
    ## Creates a new grouped operation.
    #
    # The grouped operation is empty after its initialisation.
    def __init__(self):
        super().__init__()
        self._children = []

    ## Adds an operation to this group.
    #
    # The operation will be undone together with the rest of the operations in
    # this group.
    # Note that when the order matters, the operations are undone in reverse
    # order as the order in which they are added.
    def addOperation(self, op):
        self._children.append(op)

-     ## Removes an operation from this group.
-     def removeOperation(self, index):
-         del self._children[index]
- 
    ## Undo all operations in this group.
    #
    # The operations are undone in reverse order as the order in which they
    # were added.
    def undo(self):
        for op in reversed(self._children):
            op.undo()

    ## Redoes all operations in this group.
    def redo(self):
        for op in self._children:
            op.redo()
Remove removeOperation from grouped operation
## Code Before:
from . import Operation


## An operation that groups several other operations together.
#
# The intent of this operation is to hide an underlying chain of operations
# from the user if they correspond to only one interaction with the user, such
# as an operation applied to multiple scene nodes or a re-arrangement of
# multiple items in the scene.
class GroupedOperation(Operation.Operation):
    ## Creates a new grouped operation.
    #
    # The grouped operation is empty after its initialisation.
    def __init__(self):
        super().__init__()
        self._children = []

    ## Adds an operation to this group.
    #
    # The operation will be undone together with the rest of the operations in
    # this group.
    # Note that when the order matters, the operations are undone in reverse
    # order as the order in which they are added.
    def addOperation(self, op):
        self._children.append(op)

    ## Removes an operation from this group.
    def removeOperation(self, index):
        del self._children[index]

    ## Undo all operations in this group.
    #
    # The operations are undone in reverse order as the order in which they
    # were added.
    def undo(self):
        for op in reversed(self._children):
            op.undo()

    ## Redoes all operations in this group.
    def redo(self):
        for op in self._children:
            op.redo()

## Instruction:
Remove removeOperation from grouped operation

## Code After:
from . import Operation


## An operation that groups several other operations together.
#
# The intent of this operation is to hide an underlying chain of operations
# from the user if they correspond to only one interaction with the user, such
# as an operation applied to multiple scene nodes or a re-arrangement of
# multiple items in the scene.
class GroupedOperation(Operation.Operation):
    ## Creates a new grouped operation.
    #
    # The grouped operation is empty after its initialisation.
    def __init__(self):
        super().__init__()
        self._children = []

    ## Adds an operation to this group.
    #
    # The operation will be undone together with the rest of the operations in
    # this group.
    # Note that when the order matters, the operations are undone in reverse
    # order as the order in which they are added.
    def addOperation(self, op):
        self._children.append(op)

    ## Undo all operations in this group.
    #
    # The operations are undone in reverse order as the order in which they
    # were added.
    def undo(self):
        for op in reversed(self._children):
            op.undo()

    ## Redoes all operations in this group.
    def redo(self):
        for op in self._children:
            op.redo()
# ... existing code ...

    ## Undo all operations in this group.
    #
# ... rest of the code ...
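GroupedOperation undoes its children in reverse order, which is what lets a group behave like a single atomic operation. A toy demonstration of why that ordering matters, with stand-in operations rather than Uranium's Operation API:

class AppendOp:
    def __init__(self, log, value):
        self.log, self.value = log, value

    def redo(self):
        self.log.append(self.value)

    def undo(self):
        # popping is only safe because undo runs in reverse order of redo
        assert self.log.pop() == self.value

log = []
ops = [AppendOp(log, 1), AppendOp(log, 2)]
for op in ops:
    op.redo()            # log is now [1, 2]
for op in reversed(ops):
    op.undo()            # pops 2, then 1
assert log == []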
3864ef6773000d516ee6542a11db3c3b636d5b49
test/framework/killer.py
test/framework/killer.py
from __future__ import print_function

import sys, os, signal, time, subprocess32


def _killer(pid, sleep_time, num_kills):
    print("\nKiller going to sleep for", sleep_time, "seconds")
    time.sleep(sleep_time)
    print("\nKiller woke up")

    for ii in range(0, num_kills):
        os.kill(pid, signal.SIGTERM)
        print("\nKiller sent", ii + 1, "of", num_kills, "SIGTERM signals to ", pid)
        time.sleep(1)


if __name__ == '__main__':
    _killer(int(sys.argv[1]), float(sys.argv[2]), int(sys.argv[3]))


def kill(sleep_time, num_kills):
    """Kill this process"""
    pid = os.getpid()
    print("kill, pid:", pid)
    subprocess32.Popen([sys.executable, __file__, repr(pid), repr(sleep_time), repr(num_kills)])
from __future__ import print_function

import sys, os, signal, time, subprocess32

sys.path.append('../../..')
from jenkinsflow.mocked import hyperspeed


def _killer(pid, sleep_time, num_kills):
    print("\nKiller going to sleep for", sleep_time, "seconds")
    time.sleep(sleep_time)
    print("\nKiller woke up")

    for ii in range(0, num_kills):
        os.kill(pid, signal.SIGTERM)
        print("\nKiller sent", ii + 1, "of", num_kills, "SIGTERM signals to ", pid)
        hyperspeed.sleep(1)


if __name__ == '__main__':
    _killer(int(sys.argv[1]), float(sys.argv[2]), int(sys.argv[3]))


def kill(sleep_time, num_kills):
    """Kill this process"""
    pid = os.getpid()
    print("kill, pid:", pid)
    subprocess32.Popen([sys.executable, __file__, repr(pid), repr(sleep_time), repr(num_kills)])
Prepare kill test for mock - use hyperspeed
Prepare kill test for mock - use hyperspeed
Python
bsd-3-clause
lhupfeldt/jenkinsflow,lechat/jenkinsflow,lhupfeldt/jenkinsflow,lhupfeldt/jenkinsflow,lechat/jenkinsflow,lhupfeldt/jenkinsflow,lechat/jenkinsflow,lechat/jenkinsflow
from __future__ import print_function

import sys, os, signal, time, subprocess32
+ 
+ sys.path.append('../../..')
+ from jenkinsflow.mocked import hyperspeed


def _killer(pid, sleep_time, num_kills):
    print("\nKiller going to sleep for", sleep_time, "seconds")
    time.sleep(sleep_time)
    print("\nKiller woke up")

    for ii in range(0, num_kills):
        os.kill(pid, signal.SIGTERM)
        print("\nKiller sent", ii + 1, "of", num_kills, "SIGTERM signals to ", pid)
-         time.sleep(1)
+         hyperspeed.sleep(1)


if __name__ == '__main__':
    _killer(int(sys.argv[1]), float(sys.argv[2]), int(sys.argv[3]))


def kill(sleep_time, num_kills):
    """Kill this process"""
    pid = os.getpid()
    print("kill, pid:", pid)
    subprocess32.Popen([sys.executable, __file__, repr(pid), repr(sleep_time), repr(num_kills)])
Prepare kill test for mock - use hyperspeed
## Code Before:
from __future__ import print_function

import sys, os, signal, time, subprocess32


def _killer(pid, sleep_time, num_kills):
    print("\nKiller going to sleep for", sleep_time, "seconds")
    time.sleep(sleep_time)
    print("\nKiller woke up")

    for ii in range(0, num_kills):
        os.kill(pid, signal.SIGTERM)
        print("\nKiller sent", ii + 1, "of", num_kills, "SIGTERM signals to ", pid)
        time.sleep(1)


if __name__ == '__main__':
    _killer(int(sys.argv[1]), float(sys.argv[2]), int(sys.argv[3]))


def kill(sleep_time, num_kills):
    """Kill this process"""
    pid = os.getpid()
    print("kill, pid:", pid)
    subprocess32.Popen([sys.executable, __file__, repr(pid), repr(sleep_time), repr(num_kills)])

## Instruction:
Prepare kill test for mock - use hyperspeed

## Code After:
from __future__ import print_function

import sys, os, signal, time, subprocess32

sys.path.append('../../..')
from jenkinsflow.mocked import hyperspeed


def _killer(pid, sleep_time, num_kills):
    print("\nKiller going to sleep for", sleep_time, "seconds")
    time.sleep(sleep_time)
    print("\nKiller woke up")

    for ii in range(0, num_kills):
        os.kill(pid, signal.SIGTERM)
        print("\nKiller sent", ii + 1, "of", num_kills, "SIGTERM signals to ", pid)
        hyperspeed.sleep(1)


if __name__ == '__main__':
    _killer(int(sys.argv[1]), float(sys.argv[2]), int(sys.argv[3]))


def kill(sleep_time, num_kills):
    """Kill this process"""
    pid = os.getpid()
    print("kill, pid:", pid)
    subprocess32.Popen([sys.executable, __file__, repr(pid), repr(sleep_time), repr(num_kills)])
// ... existing code ...
import sys, os, signal, time, subprocess32

sys.path.append('../../..')
from jenkinsflow.mocked import hyperspeed
// ... modified code ...
        print("\nKiller sent", ii + 1, "of", num_kills, "SIGTERM signals to ", pid)
        hyperspeed.sleep(1)
// ... rest of the code ...
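The `sys.path.append('../../..')` added above resolves relative to the process's working directory, which works here only because the test suite chdirs first. A more location-independent variant anchors the path to the file itself — a general pattern, not a change this project made:

import os
import sys

_REPO_ROOT = os.path.join(os.path.dirname(os.path.abspath(__file__)), '..', '..', '..')
sys.path.append(_REPO_ROOT)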
f2005fadb9fb2e2bcad32286a9d993c291c1992e
lazyblacksmith/models/api/industry_index.py
lazyblacksmith/models/api/industry_index.py
from . import db

from lazyblacksmith.models import Activity


class IndustryIndex(db.Model):
    solarsystem_id = db.Column(
        db.Integer,
        db.ForeignKey('solar_system.id'),
        primary_key=True
    )
    solarsystem = db.relationship('SolarSystem', backref=db.backref('indexes'))
    activity = db.Column(db.Integer, primary_key=True, autoincrement=False)
    cost_index = db.Column(
        db.Numeric(
            precision=20,
            scale=19,
            decimal_return_scale=19,
            asdecimal=False
        ),
        nullable=True)

    @classmethod
    def activity_string_to_activity(cls, activity_string):
        if activity_string == 'invention':
            return Activity.INVENTION
        if activity_string == 'manufacturing':
            return Activity.MANUFACTURING
        if activity_string == 'researching_time_efficiency':
            return Activity.RESEARCH_TIME_EFFICIENCY
        if activity_string == 'researching_material_efficiency':
            return Activity.RESEARCH_MATERIAL_EFFICIENCY
        if activity_string == 'copying':
            return Activity.COPYING
from . import db

from lazyblacksmith.models import Activity


class IndustryIndex(db.Model):
    solarsystem_id = db.Column(
        db.Integer,
        db.ForeignKey('solar_system.id'),
        primary_key=True
    )
    solarsystem = db.relationship('SolarSystem', backref=db.backref('indexes'))
    activity = db.Column(db.Integer, primary_key=True, autoincrement=False)
    cost_index = db.Column(
        db.Numeric(
            precision=20,
            scale=19,
            decimal_return_scale=19,
            asdecimal=False
        ),
        nullable=True)

    @classmethod
    def activity_string_to_activity(cls, activity_string):
        if activity_string == 'invention':
            return Activity.INVENTION
        if activity_string == 'manufacturing':
            return Activity.MANUFACTURING
        if activity_string == 'researching_time_efficiency':
            return Activity.RESEARCH_TIME_EFFICIENCY
        if activity_string == 'researching_material_efficiency':
            return Activity.RESEARCH_MATERIAL_EFFICIENCY
        if activity_string == 'copying':
            return Activity.COPYING
        if activity_string == 'reaction':
            return Activity.REACTIONS
Fix celery task for industry indexes by adding missing field
Fix celery task for industry indexes by adding missing field
Python
bsd-3-clause
Kyria/LazyBlacksmith,Kyria/LazyBlacksmith,Kyria/LazyBlacksmith,Kyria/LazyBlacksmith
from . import db

from lazyblacksmith.models import Activity


class IndustryIndex(db.Model):
    solarsystem_id = db.Column(
        db.Integer,
        db.ForeignKey('solar_system.id'),
        primary_key=True
    )
    solarsystem = db.relationship('SolarSystem', backref=db.backref('indexes'))
    activity = db.Column(db.Integer, primary_key=True, autoincrement=False)
    cost_index = db.Column(
        db.Numeric(
            precision=20,
            scale=19,
            decimal_return_scale=19,
            asdecimal=False
        ),
        nullable=True)

    @classmethod
    def activity_string_to_activity(cls, activity_string):
        if activity_string == 'invention':
            return Activity.INVENTION
        if activity_string == 'manufacturing':
            return Activity.MANUFACTURING
        if activity_string == 'researching_time_efficiency':
            return Activity.RESEARCH_TIME_EFFICIENCY
        if activity_string == 'researching_material_efficiency':
            return Activity.RESEARCH_MATERIAL_EFFICIENCY
        if activity_string == 'copying':
            return Activity.COPYING
+         if activity_string == 'reaction':
+             return Activity.REACTIONS
Fix celery task for industry indexes by adding missing field
## Code Before:
from . import db

from lazyblacksmith.models import Activity


class IndustryIndex(db.Model):
    solarsystem_id = db.Column(
        db.Integer,
        db.ForeignKey('solar_system.id'),
        primary_key=True
    )
    solarsystem = db.relationship('SolarSystem', backref=db.backref('indexes'))
    activity = db.Column(db.Integer, primary_key=True, autoincrement=False)
    cost_index = db.Column(
        db.Numeric(
            precision=20,
            scale=19,
            decimal_return_scale=19,
            asdecimal=False
        ),
        nullable=True)

    @classmethod
    def activity_string_to_activity(cls, activity_string):
        if activity_string == 'invention':
            return Activity.INVENTION
        if activity_string == 'manufacturing':
            return Activity.MANUFACTURING
        if activity_string == 'researching_time_efficiency':
            return Activity.RESEARCH_TIME_EFFICIENCY
        if activity_string == 'researching_material_efficiency':
            return Activity.RESEARCH_MATERIAL_EFFICIENCY
        if activity_string == 'copying':
            return Activity.COPYING

## Instruction:
Fix celery task for industry indexes by adding missing field

## Code After:
from . import db

from lazyblacksmith.models import Activity


class IndustryIndex(db.Model):
    solarsystem_id = db.Column(
        db.Integer,
        db.ForeignKey('solar_system.id'),
        primary_key=True
    )
    solarsystem = db.relationship('SolarSystem', backref=db.backref('indexes'))
    activity = db.Column(db.Integer, primary_key=True, autoincrement=False)
    cost_index = db.Column(
        db.Numeric(
            precision=20,
            scale=19,
            decimal_return_scale=19,
            asdecimal=False
        ),
        nullable=True)

    @classmethod
    def activity_string_to_activity(cls, activity_string):
        if activity_string == 'invention':
            return Activity.INVENTION
        if activity_string == 'manufacturing':
            return Activity.MANUFACTURING
        if activity_string == 'researching_time_efficiency':
            return Activity.RESEARCH_TIME_EFFICIENCY
        if activity_string == 'researching_material_efficiency':
            return Activity.RESEARCH_MATERIAL_EFFICIENCY
        if activity_string == 'copying':
            return Activity.COPYING
        if activity_string == 'reaction':
            return Activity.REACTIONS
// ... existing code ...
            return Activity.COPYING
        if activity_string == 'reaction':
            return Activity.REACTIONS
// ... rest of the code ...
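Each new activity in lazyblacksmith's mapper above costs another if-statement. A dictionary keeps the mapping declarative and makes unknown names fail loudly in one place; the constants below are stand-ins for illustration, not the real Activity values:

INVENTION, MANUFACTURING, COPYING, REACTIONS = range(4)  # stand-in constants

ACTIVITY_BY_NAME = {
    'invention': INVENTION,
    'manufacturing': MANUFACTURING,
    'copying': COPYING,
    'reaction': REACTIONS,
}

def activity_string_to_activity(name):
    return ACTIVITY_BY_NAME[name]  # KeyError pinpoints an unsupported activity name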
497313620772c1cb0d520be1a0024c12ca02742e
tests/python_tests/fontset_test.py
tests/python_tests/fontset_test.py
from nose.tools import *
from utilities import execution_path
import os, mapnik

def setup():
    # All of the paths used are relative, if we run the tests
    # from another directory we need to chdir()
    os.chdir(execution_path('.'))

def test_loading_fontset_from_map():
    m = mapnik.Map(256,256)
    mapnik.load_map(m,'../data/good_maps/fontset.xml',True)
    fs = m.find_fontset('book-fonts')
    eq_(len(fs.names),2)
    eq_(list(fs.names),['DejaVu Sans Book','DejaVu Sans Oblique'])

if __name__ == "__main__":
    setup()
    [eval(run)() for run in dir() if 'test_' in run]
from nose.tools import *
from utilities import execution_path
import os, mapnik

def setup():
    # All of the paths used are relative, if we run the tests
    # from another directory we need to chdir()
    os.chdir(execution_path('.'))

def test_loading_fontset_from_map():
    m = mapnik.Map(256,256)
    mapnik.load_map(m,'../data/good_maps/fontset.xml',True)
    fs = m.find_fontset('book-fonts')
    eq_(len(fs.names),2)
    eq_(list(fs.names),['DejaVu Sans Book','DejaVu Sans Oblique'])

def test_loading_fontset_from_python():
    m = mapnik.Map(256,256)
    fset = mapnik.FontSet('my-set')
    fset.add_face_name('Comic Sans')
    fset.add_face_name('Papyrus')
    m.append_fontset('my-set', fset)
    sty = mapnik.Style()
    rule = mapnik.Rule()
    tsym = mapnik.TextSymbolizer()
    tsym.fontset = fset
    rule.symbols.append(tsym)
    sty.rules.append(rule)
    m.append_style('Style',sty)
    serialized_map = mapnik.save_map_to_string(m)
    eq_('fontset-name="my-set"' in serialized_map,True)

if __name__ == "__main__":
    setup()
    [eval(run)() for run in dir() if 'test_' in run]
Add a test (currently failing) ensuring that named fontsets created in python are propertly serialized
Add a test (currently failing) ensuring that named fontsets created in python are propertly serialized
Python
lgpl-2.1
Mappy/mapnik,qianwenming/mapnik,tomhughes/mapnik,jwomeara/mapnik,pnorman/mapnik,davenquinn/python-mapnik,yiqingj/work,pnorman/mapnik,Mappy/mapnik,yohanboniface/python-mapnik,mapycz/python-mapnik,jwomeara/mapnik,Mappy/mapnik,yiqingj/work,strk/mapnik,kapouer/mapnik,Mappy/mapnik,qianwenming/mapnik,lightmare/mapnik,garnertb/python-mapnik,strk/mapnik,cjmayo/mapnik,lightmare/mapnik,Uli1/mapnik,mapycz/python-mapnik,zerebubuth/mapnik,manz/python-mapnik,CartoDB/mapnik,rouault/mapnik,stefanklug/mapnik,garnertb/python-mapnik,yohanboniface/python-mapnik,cjmayo/mapnik,manz/python-mapnik,pramsey/mapnik,tomhughes/mapnik,mapnik/mapnik,kapouer/mapnik,qianwenming/mapnik,yiqingj/work,mapnik/python-mapnik,garnertb/python-mapnik,rouault/mapnik,pnorman/mapnik,naturalatlas/mapnik,Uli1/mapnik,tomhughes/mapnik,strk/mapnik,manz/python-mapnik,mbrukman/mapnik,davenquinn/python-mapnik,pramsey/mapnik,whuaegeanse/mapnik,Airphrame/mapnik,tomhughes/python-mapnik,tomhughes/python-mapnik,tomhughes/python-mapnik,lightmare/mapnik,zerebubuth/mapnik,zerebubuth/mapnik,Airphrame/mapnik,mbrukman/mapnik,CartoDB/mapnik,mapycz/mapnik,sebastic/python-mapnik,rouault/mapnik,kapouer/mapnik,Uli1/mapnik,mbrukman/mapnik,mapnik/mapnik,stefanklug/mapnik,mapnik/python-mapnik,lightmare/mapnik,Uli1/mapnik,kapouer/mapnik,jwomeara/mapnik,whuaegeanse/mapnik,CartoDB/mapnik,mapycz/mapnik,qianwenming/mapnik,davenquinn/python-mapnik,whuaegeanse/mapnik,yiqingj/work,strk/mapnik,pnorman/mapnik,rouault/mapnik,mapnik/python-mapnik,mbrukman/mapnik,pramsey/mapnik,naturalatlas/mapnik,cjmayo/mapnik,naturalatlas/mapnik,yohanboniface/python-mapnik,sebastic/python-mapnik,tomhughes/mapnik,cjmayo/mapnik,stefanklug/mapnik,jwomeara/mapnik,mapnik/mapnik,qianwenming/mapnik,whuaegeanse/mapnik,Airphrame/mapnik,mapnik/mapnik,Airphrame/mapnik,mapycz/mapnik,pramsey/mapnik,sebastic/python-mapnik,stefanklug/mapnik,naturalatlas/mapnik
from nose.tools import *
from utilities import execution_path
import os, mapnik

def setup():
    # All of the paths used are relative, if we run the tests
    # from another directory we need to chdir()
    os.chdir(execution_path('.'))

def test_loading_fontset_from_map():
    m = mapnik.Map(256,256)
    mapnik.load_map(m,'../data/good_maps/fontset.xml',True)
    fs = m.find_fontset('book-fonts')
    eq_(len(fs.names),2)
    eq_(list(fs.names),['DejaVu Sans Book','DejaVu Sans Oblique'])

+ def test_loading_fontset_from_python():
+     m = mapnik.Map(256,256)
+     fset = mapnik.FontSet('my-set')
+     fset.add_face_name('Comic Sans')
+     fset.add_face_name('Papyrus')
+     m.append_fontset('my-set', fset)
+     sty = mapnik.Style()
+     rule = mapnik.Rule()
+     tsym = mapnik.TextSymbolizer()
+     tsym.fontset = fset
+     rule.symbols.append(tsym)
+     sty.rules.append(rule)
+     m.append_style('Style',sty)
+     serialized_map = mapnik.save_map_to_string(m)
+     eq_('fontset-name="my-set"' in serialized_map,True)

if __name__ == "__main__":
    setup()
    [eval(run)() for run in dir() if 'test_' in run]
Add a test (currently failing) ensuring that named fontsets created in python are propertly serialized
## Code Before:
from nose.tools import *
from utilities import execution_path
import os, mapnik

def setup():
    # All of the paths used are relative, if we run the tests
    # from another directory we need to chdir()
    os.chdir(execution_path('.'))

def test_loading_fontset_from_map():
    m = mapnik.Map(256,256)
    mapnik.load_map(m,'../data/good_maps/fontset.xml',True)
    fs = m.find_fontset('book-fonts')
    eq_(len(fs.names),2)
    eq_(list(fs.names),['DejaVu Sans Book','DejaVu Sans Oblique'])

if __name__ == "__main__":
    setup()
    [eval(run)() for run in dir() if 'test_' in run]

## Instruction:
Add a test (currently failing) ensuring that named fontsets created in python are propertly serialized

## Code After:
from nose.tools import *
from utilities import execution_path
import os, mapnik

def setup():
    # All of the paths used are relative, if we run the tests
    # from another directory we need to chdir()
    os.chdir(execution_path('.'))

def test_loading_fontset_from_map():
    m = mapnik.Map(256,256)
    mapnik.load_map(m,'../data/good_maps/fontset.xml',True)
    fs = m.find_fontset('book-fonts')
    eq_(len(fs.names),2)
    eq_(list(fs.names),['DejaVu Sans Book','DejaVu Sans Oblique'])

def test_loading_fontset_from_python():
    m = mapnik.Map(256,256)
    fset = mapnik.FontSet('my-set')
    fset.add_face_name('Comic Sans')
    fset.add_face_name('Papyrus')
    m.append_fontset('my-set', fset)
    sty = mapnik.Style()
    rule = mapnik.Rule()
    tsym = mapnik.TextSymbolizer()
    tsym.fontset = fset
    rule.symbols.append(tsym)
    sty.rules.append(rule)
    m.append_style('Style',sty)
    serialized_map = mapnik.save_map_to_string(m)
    eq_('fontset-name="my-set"' in serialized_map,True)

if __name__ == "__main__":
    setup()
    [eval(run)() for run in dir() if 'test_' in run]
...
def test_loading_fontset_from_python():
    m = mapnik.Map(256,256)
    fset = mapnik.FontSet('my-set')
    fset.add_face_name('Comic Sans')
    fset.add_face_name('Papyrus')
    m.append_fontset('my-set', fset)
    sty = mapnik.Style()
    rule = mapnik.Rule()
    tsym = mapnik.TextSymbolizer()
    tsym.fontset = fset
    rule.symbols.append(tsym)
    sty.rules.append(rule)
    m.append_style('Style',sty)
    serialized_map = mapnik.save_map_to_string(m)
    eq_('fontset-name="my-set"' in serialized_map,True)
...
85c509913cc9a6b22036c33eccb07277b39260e3
pygraphc/anomaly/AnomalyScore.py
pygraphc/anomaly/AnomalyScore.py
import csv

from pygraphc.abstraction.ClusterAbstraction import ClusterAbstraction
from pygraphc.clustering.ClusterUtility import ClusterUtility


class AnomalyScore(object):
    """A class to calculate anomaly score in a cluster.
    """
    def __init__(self, graph, clusters, filename):
        """The constructor of class AnomalyScore.

        Parameters
        ----------
        graph       : graph
        clusters    : dict[list]
        filename    : str
        """
        self.graph = graph
        self.clusters = clusters
        self.filename = filename
        self.property = {}
        self.abstraction = {}

    def write_property(self):
        """Write cluster property to a file.
        """
        # get cluster abstraction and its properties
        self.abstraction = ClusterAbstraction.dp_lcs(self.graph, self.clusters)
        self.property = ClusterUtility.get_cluster_property(self.graph, self.clusters)

        # write to csv
        f = open(self.filename + '_anomaly.csv', 'wt')
        writer = csv.writer(f)

        # set header
        header = ('cluster_id', 'cluster_abstraction') + tuple(self.property[0].keys())
        writer.writerow(header)

        # write data
        for cluster_id, abstract in self.abstraction.iteritems():
            row = (cluster_id, abstract) + tuple(self.property[cluster_id].values())
            writer.writerow(row)
import csv
from pygraphc.abstraction.ClusterAbstraction import ClusterAbstraction
from pygraphc.clustering.ClusterUtility import ClusterUtility


class AnomalyScore(object):
    """A class to calculate anomaly score in a cluster.
    """
    def __init__(self, graph, clusters, filename):
        """The constructor of class AnomalyScore.

        Parameters
        ----------
        graph       : graph
            A graph to be analyzed for its anomaly.
        clusters    : dict[list]
            Dictionary of list containing node identifier for each clusters.
        filename    : str
            Filename for anomaly detection result.
        """
        self.graph = graph
        self.clusters = clusters
        self.filename = filename
        self.property = {}
        self.abstraction = {}

    def write_property(self):
        """Write cluster property to a file.
        """
        # get cluster abstraction and its properties
        self.abstraction = ClusterAbstraction.dp_lcs(self.graph, self.clusters)
        self.property = ClusterUtility.get_cluster_property(self.graph, self.clusters)

        # write to csv
        f = open(self.filename + '_anomaly.csv', 'wt')
        writer = csv.writer(f)

        # set header
        header = ('cluster_id', 'cluster_abstraction') + tuple(self.property[0].keys())
        writer.writerow(header)

        # write data
        for cluster_id, abstract in self.abstraction.iteritems():
            row = (cluster_id, abstract) + tuple(self.property[cluster_id].values())
            writer.writerow(row)
Add description of Parameters section in docstring
Add description of Parameters section in docstring
Python
mit
studiawan/pygraphc
import csv
-
from pygraphc.abstraction.ClusterAbstraction import ClusterAbstraction
from pygraphc.clustering.ClusterUtility import ClusterUtility


class AnomalyScore(object):
    """A class to calculate anomaly score in a cluster.
    """
    def __init__(self, graph, clusters, filename):
        """The constructor of class AnomalyScore.

        Parameters
        ----------
        graph       : graph
+            A graph to be analyzed for its anomaly.
        clusters    : dict[list]
+            Dictionary of list containing node identifier for each clusters.
        filename    : str
+            Filename for anomaly detection result.
        """
        self.graph = graph
        self.clusters = clusters
        self.filename = filename
        self.property = {}
        self.abstraction = {}

    def write_property(self):
        """Write cluster property to a file.
        """
        # get cluster abstraction and its properties
        self.abstraction = ClusterAbstraction.dp_lcs(self.graph, self.clusters)
        self.property = ClusterUtility.get_cluster_property(self.graph, self.clusters)

        # write to csv
        f = open(self.filename + '_anomaly.csv', 'wt')
        writer = csv.writer(f)

        # set header
        header = ('cluster_id', 'cluster_abstraction') + tuple(self.property[0].keys())
        writer.writerow(header)

        # write data
        for cluster_id, abstract in self.abstraction.iteritems():
            row = (cluster_id, abstract) + tuple(self.property[cluster_id].values())
            writer.writerow(row)
Add description of Parameters section in docstring
## Code Before:
import csv

from pygraphc.abstraction.ClusterAbstraction import ClusterAbstraction
from pygraphc.clustering.ClusterUtility import ClusterUtility


class AnomalyScore(object):
    """A class to calculate anomaly score in a cluster.
    """
    def __init__(self, graph, clusters, filename):
        """The constructor of class AnomalyScore.

        Parameters
        ----------
        graph       : graph
        clusters    : dict[list]
        filename    : str
        """
        self.graph = graph
        self.clusters = clusters
        self.filename = filename
        self.property = {}
        self.abstraction = {}

    def write_property(self):
        """Write cluster property to a file.
        """
        # get cluster abstraction and its properties
        self.abstraction = ClusterAbstraction.dp_lcs(self.graph, self.clusters)
        self.property = ClusterUtility.get_cluster_property(self.graph, self.clusters)

        # write to csv
        f = open(self.filename + '_anomaly.csv', 'wt')
        writer = csv.writer(f)

        # set header
        header = ('cluster_id', 'cluster_abstraction') + tuple(self.property[0].keys())
        writer.writerow(header)

        # write data
        for cluster_id, abstract in self.abstraction.iteritems():
            row = (cluster_id, abstract) + tuple(self.property[cluster_id].values())
            writer.writerow(row)

## Instruction:
Add description of Parameters section in docstring

## Code After:
import csv
from pygraphc.abstraction.ClusterAbstraction import ClusterAbstraction
from pygraphc.clustering.ClusterUtility import ClusterUtility


class AnomalyScore(object):
    """A class to calculate anomaly score in a cluster.
    """
    def __init__(self, graph, clusters, filename):
        """The constructor of class AnomalyScore.

        Parameters
        ----------
        graph       : graph
            A graph to be analyzed for its anomaly.
        clusters    : dict[list]
            Dictionary of list containing node identifier for each clusters.
        filename    : str
            Filename for anomaly detection result.
        """
        self.graph = graph
        self.clusters = clusters
        self.filename = filename
        self.property = {}
        self.abstraction = {}

    def write_property(self):
        """Write cluster property to a file.
        """
        # get cluster abstraction and its properties
        self.abstraction = ClusterAbstraction.dp_lcs(self.graph, self.clusters)
        self.property = ClusterUtility.get_cluster_property(self.graph, self.clusters)

        # write to csv
        f = open(self.filename + '_anomaly.csv', 'wt')
        writer = csv.writer(f)

        # set header
        header = ('cluster_id', 'cluster_abstraction') + tuple(self.property[0].keys())
        writer.writerow(header)

        # write data
        for cluster_id, abstract in self.abstraction.iteritems():
            row = (cluster_id, abstract) + tuple(self.property[cluster_id].values())
            writer.writerow(row)
...
import csv
from pygraphc.abstraction.ClusterAbstraction import ClusterAbstraction
...
        graph       : graph
            A graph to be analyzed for its anomaly.
        clusters    : dict[list]
            Dictionary of list containing node identifier for each clusters.
        filename    : str
            Filename for anomaly detection result.
        """
...
7539a5445d24193395eed5dc658a4e69d8782736
buffpy/tests/test_profile.py
buffpy/tests/test_profile.py
from nose.tools import eq_
from mock import MagicMock, patch

from buffpy.models.profile import Profile, PATHS

mocked_response = {
  'name': 'me',
  'service': 'twiter',
  'id': 1
}

def test_profile_schedules_getter():
  '''
    Test schedules gettering from buffer api
  '''

  mocked_api = MagicMock()
  mocked_api.get.return_value = '123'

  profile = Profile(mocked_api, mocked_response)

  eq_(profile.schedules, '123')
  mocked_api.get.assert_called_once_with(url = PATHS['GET_SCHEDULES'] % 1)

def test_profile_schedules_setter():
  '''
    Test schedules setter from buffer api
  '''

  mocked_api = MagicMock()
  mocked_api.get.return_value = '123'

  profile = Profile(mocked_api, mocked_response)
  profile.schedules = {
    'times': ['mo']
  }

  mocked_api.post.assert_called_once_with(url=PATHS['UPDATE_SCHEDULES'] % 1,
                                          data='schedules[0][times][]=mo&')

def test_profile_updates():
  '''
    Test updates relationship with a profile
  '''

  mocked_api = MagicMock()

  with patch('buffpy.models.profile.Updates') as mocked_updates:
    profile = Profile(api=mocked_api, raw_response={'id': 1})

    updates = profile.updates

    mocked_updates.assert_called_once_with(api=mocked_api, profile_id=1)
from unittest.mock import MagicMock, patch

from buffpy.models.profile import Profile, PATHS


mocked_response = {
    "name": "me",
    "service": "twiter",
    "id": 1
}


def test_profile_schedules_getter():
    """ Should retrieve profiles from buffer's API. """

    mocked_api = MagicMock()
    mocked_api.get.return_value = "123"

    profile = Profile(mocked_api, mocked_response)

    assert profile.schedules == "123"
    mocked_api.get.assert_called_once_with(url=PATHS["GET_SCHEDULES"].format("1"))


def test_profile_schedules_setter():
    """ Should update profile's schedules. """

    mocked_api = MagicMock()
    mocked_api.get.return_value = "123"

    profile = Profile(mocked_api, mocked_response)
    profile.schedules = {
        "times": ["mo"]
    }

    mocked_api.post.assert_called_once_with(
        url=PATHS["UPDATE_SCHEDULES"].format("1"),
        data="schedules[0][times][]=mo&")


def test_profile_updates():
    """ Should properly call buffer's updates. """

    mocked_api = MagicMock()

    with patch("buffpy.models.profile.Updates") as mocked_updates:
        profile = Profile(api=mocked_api, raw_response={"id": 1})

        assert profile.updates
        mocked_updates.assert_called_once_with(api=mocked_api, profile_id=1)
Migrate profile tests to pytest
Migrate profile tests to pytest
Python
mit
vtemian/buffpy
-from nose.tools import eq_
-from mock import MagicMock, patch
+from unittest.mock import MagicMock, patch

from buffpy.models.profile import Profile, PATHS

+
mocked_response = {
-  'name': 'me',
+    "name": "me",
-  'service': 'twiter',
+    "service": "twiter",
-  'id': 1
+    "id": 1
}

+
def test_profile_schedules_getter():
+    """ Should retrieve profiles from buffer's API. """
-  '''
-    Test schedules gettering from buffer api
-  '''

-  mocked_api = MagicMock()
+    mocked_api = MagicMock()
-  mocked_api.get.return_value = '123'
+    mocked_api.get.return_value = "123"

-  profile = Profile(mocked_api, mocked_response)
+    profile = Profile(mocked_api, mocked_response)

-  eq_(profile.schedules, '123')
+    assert profile.schedules == "123"
-  mocked_api.get.assert_called_once_with(url = PATHS['GET_SCHEDULES'] % 1)
+    mocked_api.get.assert_called_once_with(url=PATHS["GET_SCHEDULES"].format("1"))

+
def test_profile_schedules_setter():
+    """ Should update profile's schedules. """
-  '''
-    Test schedules setter from buffer api
-  '''

-  mocked_api = MagicMock()
+    mocked_api = MagicMock()
-  mocked_api.get.return_value = '123'
+    mocked_api.get.return_value = "123"

-  profile = Profile(mocked_api, mocked_response)
+    profile = Profile(mocked_api, mocked_response)
-  profile.schedules = {
+    profile.schedules = {
-    'times': ['mo']
+        "times": ["mo"]
-  }
+    }

-  mocked_api.post.assert_called_once_with(url=PATHS['UPDATE_SCHEDULES'] % 1,
+    mocked_api.post.assert_called_once_with(
+        url=PATHS["UPDATE_SCHEDULES"].format("1"),
-                                          data='schedules[0][times][]=mo&')
+        data="schedules[0][times][]=mo&")

+
def test_profile_updates():
+    """ Should properly call buffer's updates. """
-  '''
-    Test updates relationship with a profile
-  '''

-  mocked_api = MagicMock()
+    mocked_api = MagicMock()

-  with patch('buffpy.models.profile.Updates') as mocked_updates:
+    with patch("buffpy.models.profile.Updates") as mocked_updates:
-    profile = Profile(api=mocked_api, raw_response={'id': 1})
+        profile = Profile(api=mocked_api, raw_response={"id": 1})

-    updates = profile.updates
+        assert profile.updates
-    mocked_updates.assert_called_once_with(api=mocked_api, profile_id=1)
+        mocked_updates.assert_called_once_with(api=mocked_api, profile_id=1)
Migrate profile tests to pytest
## Code Before:
from nose.tools import eq_
from mock import MagicMock, patch

from buffpy.models.profile import Profile, PATHS

mocked_response = {
  'name': 'me',
  'service': 'twiter',
  'id': 1
}

def test_profile_schedules_getter():
  '''
    Test schedules gettering from buffer api
  '''

  mocked_api = MagicMock()
  mocked_api.get.return_value = '123'

  profile = Profile(mocked_api, mocked_response)

  eq_(profile.schedules, '123')
  mocked_api.get.assert_called_once_with(url = PATHS['GET_SCHEDULES'] % 1)

def test_profile_schedules_setter():
  '''
    Test schedules setter from buffer api
  '''

  mocked_api = MagicMock()
  mocked_api.get.return_value = '123'

  profile = Profile(mocked_api, mocked_response)
  profile.schedules = {
    'times': ['mo']
  }

  mocked_api.post.assert_called_once_with(url=PATHS['UPDATE_SCHEDULES'] % 1,
                                          data='schedules[0][times][]=mo&')

def test_profile_updates():
  '''
    Test updates relationship with a profile
  '''

  mocked_api = MagicMock()

  with patch('buffpy.models.profile.Updates') as mocked_updates:
    profile = Profile(api=mocked_api, raw_response={'id': 1})

    updates = profile.updates

    mocked_updates.assert_called_once_with(api=mocked_api, profile_id=1)

## Instruction:
Migrate profile tests to pytest

## Code After:
from unittest.mock import MagicMock, patch

from buffpy.models.profile import Profile, PATHS


mocked_response = {
    "name": "me",
    "service": "twiter",
    "id": 1
}


def test_profile_schedules_getter():
    """ Should retrieve profiles from buffer's API. """

    mocked_api = MagicMock()
    mocked_api.get.return_value = "123"

    profile = Profile(mocked_api, mocked_response)

    assert profile.schedules == "123"
    mocked_api.get.assert_called_once_with(url=PATHS["GET_SCHEDULES"].format("1"))


def test_profile_schedules_setter():
    """ Should update profile's schedules. """

    mocked_api = MagicMock()
    mocked_api.get.return_value = "123"

    profile = Profile(mocked_api, mocked_response)
    profile.schedules = {
        "times": ["mo"]
    }

    mocked_api.post.assert_called_once_with(
        url=PATHS["UPDATE_SCHEDULES"].format("1"),
        data="schedules[0][times][]=mo&")


def test_profile_updates():
    """ Should properly call buffer's updates. """

    mocked_api = MagicMock()

    with patch("buffpy.models.profile.Updates") as mocked_updates:
        profile = Profile(api=mocked_api, raw_response={"id": 1})

        assert profile.updates
        mocked_updates.assert_called_once_with(api=mocked_api, profile_id=1)
// ... existing code ...
from unittest.mock import MagicMock, patch
// ... modified code ...
mocked_response = {
    "name": "me",
    "service": "twiter",
    "id": 1
}
...
def test_profile_schedules_getter():
    """ Should retrieve profiles from buffer's API. """

    mocked_api = MagicMock()
    mocked_api.get.return_value = "123"

    profile = Profile(mocked_api, mocked_response)

    assert profile.schedules == "123"
    mocked_api.get.assert_called_once_with(url=PATHS["GET_SCHEDULES"].format("1"))
...
def test_profile_schedules_setter():
    """ Should update profile's schedules. """

    mocked_api = MagicMock()
    mocked_api.get.return_value = "123"

    profile = Profile(mocked_api, mocked_response)
    profile.schedules = {
        "times": ["mo"]
    }

    mocked_api.post.assert_called_once_with(
        url=PATHS["UPDATE_SCHEDULES"].format("1"),
        data="schedules[0][times][]=mo&")
...
def test_profile_updates():
    """ Should properly call buffer's updates. """

    mocked_api = MagicMock()

    with patch("buffpy.models.profile.Updates") as mocked_updates:
        profile = Profile(api=mocked_api, raw_response={"id": 1})

        assert profile.updates
        mocked_updates.assert_called_once_with(api=mocked_api, profile_id=1)
// ... rest of the code ...
959897478bbda18f02aa6e38f2ebdd837581f1f0
tests/test_sct_verify_signature.py
tests/test_sct_verify_signature.py
from os.path import join, dirname

from utlz import flo

from ctutlz.sct.verification import verify_signature


def test_verify_signature():
    basedir = join(dirname(__file__), 'data', 'test_sct_verify_signature')

    signature_input = \
        open(flo('{basedir}/signature_input_valid.bin'), 'rb').read()
    signature = open(flo('{basedir}/signature.der'), 'rb').read()
    pubkey = open(flo('{basedir}/pubkey.pem'), 'rb').read()

    got_verified, got_output, got_cmd_res = \
        verify_signature(signature_input, signature, pubkey)

    assert got_verified is True
    assert got_output == 'Verified OK\n'
    assert got_cmd_res.exitcode == 0

    signature_input = b'some invalid signature input'

    got_verified, got_output, got_cmd_res = \
        verify_signature(signature_input, signature, pubkey)

    assert got_verified is False
    assert got_output == 'Verification Failure\n'
    assert got_cmd_res.exitcode == 1
from os.path import join, dirname

from utlz import flo

from ctutlz.sct.verification import verify_signature


def test_verify_signature():
    basedir = join(dirname(__file__), 'data', 'test_sct_verify_signature')

    signature_input = \
        open(flo('{basedir}/signature_input_valid.bin'), 'rb').read()
    signature = open(flo('{basedir}/signature.der'), 'rb').read()
    pubkey = open(flo('{basedir}/pubkey.pem'), 'rb').read()

    assert verify_signature(signature_input, signature, pubkey) is True

    signature_input = b'some invalid signature input'

    assert verify_signature(signature_input, signature, pubkey) is False
Fix test for changed SctVerificationResult
Fix test for changed SctVerificationResult
Python
mit
theno/ctutlz,theno/ctutlz
from os.path import join, dirname

from utlz import flo

from ctutlz.sct.verification import verify_signature


def test_verify_signature():
    basedir = join(dirname(__file__), 'data', 'test_sct_verify_signature')

    signature_input = \
        open(flo('{basedir}/signature_input_valid.bin'), 'rb').read()
    signature = open(flo('{basedir}/signature.der'), 'rb').read()
    pubkey = open(flo('{basedir}/pubkey.pem'), 'rb').read()

-    got_verified, got_output, got_cmd_res = \
-        verify_signature(signature_input, signature, pubkey)
+    assert verify_signature(signature_input, signature, pubkey) is True
-
-    assert got_verified is True
-    assert got_output == 'Verified OK\n'
-    assert got_cmd_res.exitcode == 0

    signature_input = b'some invalid signature input'

-    got_verified, got_output, got_cmd_res = \
-        verify_signature(signature_input, signature, pubkey)
+    assert verify_signature(signature_input, signature, pubkey) is False
-    assert got_verified is False
-    assert got_output == 'Verification Failure\n'
-    assert got_cmd_res.exitcode == 1
-
Fix test for changed SctVerificationResult
## Code Before:
from os.path import join, dirname

from utlz import flo

from ctutlz.sct.verification import verify_signature


def test_verify_signature():
    basedir = join(dirname(__file__), 'data', 'test_sct_verify_signature')

    signature_input = \
        open(flo('{basedir}/signature_input_valid.bin'), 'rb').read()
    signature = open(flo('{basedir}/signature.der'), 'rb').read()
    pubkey = open(flo('{basedir}/pubkey.pem'), 'rb').read()

    got_verified, got_output, got_cmd_res = \
        verify_signature(signature_input, signature, pubkey)

    assert got_verified is True
    assert got_output == 'Verified OK\n'
    assert got_cmd_res.exitcode == 0

    signature_input = b'some invalid signature input'

    got_verified, got_output, got_cmd_res = \
        verify_signature(signature_input, signature, pubkey)

    assert got_verified is False
    assert got_output == 'Verification Failure\n'
    assert got_cmd_res.exitcode == 1

## Instruction:
Fix test for changed SctVerificationResult

## Code After:
from os.path import join, dirname

from utlz import flo

from ctutlz.sct.verification import verify_signature


def test_verify_signature():
    basedir = join(dirname(__file__), 'data', 'test_sct_verify_signature')

    signature_input = \
        open(flo('{basedir}/signature_input_valid.bin'), 'rb').read()
    signature = open(flo('{basedir}/signature.der'), 'rb').read()
    pubkey = open(flo('{basedir}/pubkey.pem'), 'rb').read()

    assert verify_signature(signature_input, signature, pubkey) is True

    signature_input = b'some invalid signature input'

    assert verify_signature(signature_input, signature, pubkey) is False
# ... existing code ...
    assert verify_signature(signature_input, signature, pubkey) is True
# ... modified code ...
    assert verify_signature(signature_input, signature, pubkey) is False
# ... rest of the code ...
dbec204b242ab643de162046ba73dca32043c6c2
space-age/space_age.py
space-age/space_age.py
class SpaceAge(object):
    def __init__(self, seconds):
        self.seconds = seconds

    @property
    def years(self):
        return self.seconds/31557600

    def on_earth(self):
        return round(self.years, 2)

    def on_mercury(self):
        return round(self.years/0.2408467, 2)

    def on_venus(self):
        return round(self.years/0.6151976, 2)

    def on_mars(self):
        return round(self.years/1.8808158, 2)

    def on_jupiter(self):
        return round(self.years/11.862615, 2)

    def on_saturn(self):
        return round(self.years/29.447498, 2)

    def on_uranus(self):
        return round(self.years/84.016846, 2)

    def on_neptune(self):
        return round(self.years/164.79132, 2)
class SpaceAge(object):
    YEARS = {"on_earth": 1,
             "on_mercury": 0.2408467,
             "on_venus": 0.61519726,
             "on_mars": 1.8808158,
             "on_jupiter": 11.862615,
             "on_saturn": 29.447498,
             "on_uranus": 84.016846,
             "on_neptune": 164.79132}

    def __init__(self, seconds):
        self.seconds = seconds

    @property
    def years(self):
        return self.seconds/31557600

    def __getattr__(self, on_planet):
        if on_planet in SpaceAge.YEARS:
            return lambda: round(self.years/SpaceAge.YEARS[on_planet], 2)
        else:
            raise AttributeError
Implement __getattr__ to reduce code
Implement __getattr__ to reduce code
Python
agpl-3.0
CubicComet/exercism-python-solutions
class SpaceAge(object):
+    YEARS = {"on_earth": 1,
+             "on_mercury": 0.2408467,
+             "on_venus": 0.61519726,
+             "on_mars": 1.8808158,
+             "on_jupiter": 11.862615,
+             "on_saturn": 29.447498,
+             "on_uranus": 84.016846,
+             "on_neptune": 164.79132}
+
    def __init__(self, seconds):
        self.seconds = seconds

    @property
    def years(self):
        return self.seconds/31557600

-    def on_earth(self):
-        return round(self.years, 2)
+    def __getattr__(self, on_planet):
+        if on_planet in SpaceAge.YEARS:
+            return lambda: round(self.years/SpaceAge.YEARS[on_planet], 2)
+        else:
+            raise AttributeError
-    def on_mercury(self):
-        return round(self.years/0.2408467, 2)
-
-    def on_venus(self):
-        return round(self.years/0.6151976, 2)
-
-    def on_mars(self):
-        return round(self.years/1.8808158, 2)
-
-    def on_jupiter(self):
-        return round(self.years/11.862615, 2)
-
-    def on_saturn(self):
-        return round(self.years/29.447498, 2)
-
-    def on_uranus(self):
-        return round(self.years/84.016846, 2)
-
-    def on_neptune(self):
-        return round(self.years/164.79132, 2)
-
Implement __getattr__ to reduce code
## Code Before:
class SpaceAge(object):
    def __init__(self, seconds):
        self.seconds = seconds

    @property
    def years(self):
        return self.seconds/31557600

    def on_earth(self):
        return round(self.years, 2)

    def on_mercury(self):
        return round(self.years/0.2408467, 2)

    def on_venus(self):
        return round(self.years/0.6151976, 2)

    def on_mars(self):
        return round(self.years/1.8808158, 2)

    def on_jupiter(self):
        return round(self.years/11.862615, 2)

    def on_saturn(self):
        return round(self.years/29.447498, 2)

    def on_uranus(self):
        return round(self.years/84.016846, 2)

    def on_neptune(self):
        return round(self.years/164.79132, 2)

## Instruction:
Implement __getattr__ to reduce code

## Code After:
class SpaceAge(object):
    YEARS = {"on_earth": 1,
             "on_mercury": 0.2408467,
             "on_venus": 0.61519726,
             "on_mars": 1.8808158,
             "on_jupiter": 11.862615,
             "on_saturn": 29.447498,
             "on_uranus": 84.016846,
             "on_neptune": 164.79132}

    def __init__(self, seconds):
        self.seconds = seconds

    @property
    def years(self):
        return self.seconds/31557600

    def __getattr__(self, on_planet):
        if on_planet in SpaceAge.YEARS:
            return lambda: round(self.years/SpaceAge.YEARS[on_planet], 2)
        else:
            raise AttributeError
# ... existing code ...
class SpaceAge(object):
    YEARS = {"on_earth": 1,
             "on_mercury": 0.2408467,
             "on_venus": 0.61519726,
             "on_mars": 1.8808158,
             "on_jupiter": 11.862615,
             "on_saturn": 29.447498,
             "on_uranus": 84.016846,
             "on_neptune": 164.79132}

    def __init__(self, seconds):
# ... modified code ...
    def __getattr__(self, on_planet):
        if on_planet in SpaceAge.YEARS:
            return lambda: round(self.years/SpaceAge.YEARS[on_planet], 2)
        else:
            raise AttributeError
# ... rest of the code ...
3c65881633daee8d5b19760e5c887dce25ab69c3
froide/helper/db_utils.py
froide/helper/db_utils.py
from django.db import IntegrityError
from django.template.defaultfilters import slugify


def save_obj_with_slug(obj, attribute='title', **kwargs):
    obj.slug = slugify(getattr(obj, attribute))
    return save_obj_unique(obj, 'slug', **kwargs)


def save_obj_unique(obj, attr, count=0, postfix_format='-{count}'):
    klass = obj.__class__
    MAX_COUNT = 10000  # max 10 thousand loops
    base_attr = getattr(obj, attr)
    initial_count = count
    first_round = count == 0
    postfix = ''
    while True:
        try:
            while initial_count - count < MAX_COUNT:
                if not first_round:
                    postfix = postfix_format.format(count=count)
                if not klass.objects.filter(**{
                        attr: getattr(obj, attr) + postfix
                        }).exists():
                    break
                if first_round:
                    first_round = False
                    count = klass.objects.filter(**{
                        '%s__startswith' % attr: base_attr
                    }).count()
                else:
                    count += 1
            setattr(obj, attr, base_attr + postfix)
            obj.save()
        except IntegrityError:
            if count - initial_count < MAX_COUNT:
                first_round = False
                count = klass.objects.filter(**{
                    '%s__startswith' % attr: base_attr
                }).count()
            else:
                raise
        else:
            break
from django.db import IntegrityError
from django.template.defaultfilters import slugify


def save_obj_with_slug(obj, attribute='title', **kwargs):
    obj.slug = slugify(getattr(obj, attribute))
    return save_obj_unique(obj, 'slug', **kwargs)


def save_obj_unique(obj, attr, count=0, postfix_format='-{count}'):
    klass = obj.__class__
    MAX_COUNT = 10000  # max 10 thousand loops
    base_attr = getattr(obj, attr)
    initial_count = count
    first_round = count == 0
    postfix = ''
    while True:
        try:
            while initial_count - count < MAX_COUNT:
                if not first_round:
                    postfix = postfix_format.format(count=count)
                if not klass.objects.filter(**{
                        attr: getattr(obj, attr) + postfix
                        }).exists():
                    break
                if first_round:
                    first_round = False
                    count = max(
                        klass.objects.filter(**{
                            '%s__startswith' % attr: base_attr
                        }).count(),
                        initial_count
                    )
                else:
                    count += 1
            setattr(obj, attr, base_attr + postfix)
            obj.save()
        except IntegrityError:
            if count - initial_count < MAX_COUNT:
                if first_round:
                    first_round = False
                    count = max(
                        klass.objects.filter(**{
                            '%s__startswith' % attr: base_attr
                        }).count(),
                        initial_count
                    )
                count += 1
            else:
                raise
        else:
            break
Fix bad initial count in slug creation helper
Fix bad initial count in slug creation helper
Python
mit
stefanw/froide,stefanw/froide,stefanw/froide,fin/froide,fin/froide,fin/froide,fin/froide,stefanw/froide,stefanw/froide
from django.db import IntegrityError
from django.template.defaultfilters import slugify


def save_obj_with_slug(obj, attribute='title', **kwargs):
    obj.slug = slugify(getattr(obj, attribute))
    return save_obj_unique(obj, 'slug', **kwargs)


def save_obj_unique(obj, attr, count=0, postfix_format='-{count}'):
    klass = obj.__class__
    MAX_COUNT = 10000  # max 10 thousand loops
    base_attr = getattr(obj, attr)
    initial_count = count
    first_round = count == 0
    postfix = ''
    while True:
        try:
            while initial_count - count < MAX_COUNT:
                if not first_round:
                    postfix = postfix_format.format(count=count)
                if not klass.objects.filter(**{
                        attr: getattr(obj, attr) + postfix
                        }).exists():
                    break
                if first_round:
                    first_round = False
+                    count = max(
-                    count = klass.objects.filter(**{
+                        klass.objects.filter(**{
-                        '%s__startswith' % attr: base_attr
+                            '%s__startswith' % attr: base_attr
-                    }).count()
+                        }).count(),
+                        initial_count
+                    )
                else:
                    count += 1
            setattr(obj, attr, base_attr + postfix)
            obj.save()
        except IntegrityError:
            if count - initial_count < MAX_COUNT:
+                if first_round:
-                first_round = False
+                    first_round = False
+                    count = max(
-                count = klass.objects.filter(**{
+                        klass.objects.filter(**{
-                    '%s__startswith' % attr: base_attr
+                            '%s__startswith' % attr: base_attr
-                }).count()
+                        }).count(),
+                        initial_count
+                    )
+                count += 1
            else:
                raise
        else:
            break
Fix bad initial count in slug creation helper
## Code Before:
from django.db import IntegrityError
from django.template.defaultfilters import slugify


def save_obj_with_slug(obj, attribute='title', **kwargs):
    obj.slug = slugify(getattr(obj, attribute))
    return save_obj_unique(obj, 'slug', **kwargs)


def save_obj_unique(obj, attr, count=0, postfix_format='-{count}'):
    klass = obj.__class__
    MAX_COUNT = 10000  # max 10 thousand loops
    base_attr = getattr(obj, attr)
    initial_count = count
    first_round = count == 0
    postfix = ''
    while True:
        try:
            while initial_count - count < MAX_COUNT:
                if not first_round:
                    postfix = postfix_format.format(count=count)
                if not klass.objects.filter(**{
                        attr: getattr(obj, attr) + postfix
                        }).exists():
                    break
                if first_round:
                    first_round = False
                    count = klass.objects.filter(**{
                        '%s__startswith' % attr: base_attr
                    }).count()
                else:
                    count += 1
            setattr(obj, attr, base_attr + postfix)
            obj.save()
        except IntegrityError:
            if count - initial_count < MAX_COUNT:
                first_round = False
                count = klass.objects.filter(**{
                    '%s__startswith' % attr: base_attr
                }).count()
            else:
                raise
        else:
            break

## Instruction:
Fix bad initial count in slug creation helper

## Code After:
from django.db import IntegrityError
from django.template.defaultfilters import slugify


def save_obj_with_slug(obj, attribute='title', **kwargs):
    obj.slug = slugify(getattr(obj, attribute))
    return save_obj_unique(obj, 'slug', **kwargs)


def save_obj_unique(obj, attr, count=0, postfix_format='-{count}'):
    klass = obj.__class__
    MAX_COUNT = 10000  # max 10 thousand loops
    base_attr = getattr(obj, attr)
    initial_count = count
    first_round = count == 0
    postfix = ''
    while True:
        try:
            while initial_count - count < MAX_COUNT:
                if not first_round:
                    postfix = postfix_format.format(count=count)
                if not klass.objects.filter(**{
                        attr: getattr(obj, attr) + postfix
                        }).exists():
                    break
                if first_round:
                    first_round = False
                    count = max(
                        klass.objects.filter(**{
                            '%s__startswith' % attr: base_attr
                        }).count(),
                        initial_count
                    )
                else:
                    count += 1
            setattr(obj, attr, base_attr + postfix)
            obj.save()
        except IntegrityError:
            if count - initial_count < MAX_COUNT:
                if first_round:
                    first_round = False
                    count = max(
                        klass.objects.filter(**{
                            '%s__startswith' % attr: base_attr
                        }).count(),
                        initial_count
                    )
                count += 1
            else:
                raise
        else:
            break
...
                    first_round = False
                    count = max(
                        klass.objects.filter(**{
                            '%s__startswith' % attr: base_attr
                        }).count(),
                        initial_count
                    )
                else:
...
            if count - initial_count < MAX_COUNT:
                if first_round:
                    first_round = False
                    count = max(
                        klass.objects.filter(**{
                            '%s__startswith' % attr: base_attr
                        }).count(),
                        initial_count
                    )
                count += 1
            else:
...
b06f0e17541f7d424e73fd200ae10db0722b1a5a
organizer/views.py
organizer/views.py
from django.shortcuts import (
    get_object_or_404, render)

from .forms import TagForm
from .models import Startup, Tag


def startup_detail(request, slug):
    startup = get_object_or_404(
        Startup, slug__iexact=slug)
    return render(
        request,
        'organizer/startup_detail.html',
        {'startup': startup})


def startup_list(request):
    return render(
        request,
        'organizer/startup_list.html',
        {'startup_list': Startup.objects.all()})


def tag_create(request):
    if request.method == 'POST':
        form = TagForm(request.POST)
        if form.is_valid():
            # create new object from data
            # show webpage for new object
            pass
        else:  # empty data or invalid data
            # show bound HTML form (with errors)
            pass
    else:  # request.method != 'POST'
        # show unbound HTML form
        pass


def tag_detail(request, slug):
    tag = get_object_or_404(
        Tag, slug__iexact=slug)
    return render(
        request,
        'organizer/tag_detail.html',
        {'tag': tag})


def tag_list(request):
    return render(
        request,
        'organizer/tag_list.html',
        {'tag_list': Tag.objects.all()})
from django.shortcuts import (
    get_object_or_404, redirect, render)

from .forms import TagForm
from .models import Startup, Tag


def startup_detail(request, slug):
    startup = get_object_or_404(
        Startup, slug__iexact=slug)
    return render(
        request,
        'organizer/startup_detail.html',
        {'startup': startup})


def startup_list(request):
    return render(
        request,
        'organizer/startup_list.html',
        {'startup_list': Startup.objects.all()})


def tag_create(request):
    if request.method == 'POST':
        form = TagForm(request.POST)
        if form.is_valid():
            new_tag = form.save()
            return redirect(new_tag)
        else:  # empty data or invalid data
            # show bound HTML form (with errors)
            pass
    else:  # request.method != 'POST'
        # show unbound HTML form
        pass


def tag_detail(request, slug):
    tag = get_object_or_404(
        Tag, slug__iexact=slug)
    return render(
        request,
        'organizer/tag_detail.html',
        {'tag': tag})


def tag_list(request):
    return render(
        request,
        'organizer/tag_list.html',
        {'tag_list': Tag.objects.all()})
Create and redirect to Tag in tag_create().
Ch09: Create and redirect to Tag in tag_create().
Python
bsd-2-clause
jambonrose/DjangoUnleashed-1.8,jambonrose/DjangoUnleashed-1.8
from django.shortcuts import (
-    get_object_or_404, render)
+    get_object_or_404, redirect, render)

from .forms import TagForm
from .models import Startup, Tag


def startup_detail(request, slug):
    startup = get_object_or_404(
        Startup, slug__iexact=slug)
    return render(
        request,
        'organizer/startup_detail.html',
        {'startup': startup})


def startup_list(request):
    return render(
        request,
        'organizer/startup_list.html',
        {'startup_list': Startup.objects.all()})


def tag_create(request):
    if request.method == 'POST':
        form = TagForm(request.POST)
        if form.is_valid():
+            new_tag = form.save()
+            return redirect(new_tag)
-            # create new object from data
-            # show webpage for new object
-            pass
        else:  # empty data or invalid data
            # show bound HTML form (with errors)
            pass
    else:  # request.method != 'POST'
        # show unbound HTML form
        pass


def tag_detail(request, slug):
    tag = get_object_or_404(
        Tag, slug__iexact=slug)
    return render(
        request,
        'organizer/tag_detail.html',
        {'tag': tag})


def tag_list(request):
    return render(
        request,
        'organizer/tag_list.html',
        {'tag_list': Tag.objects.all()})
Create and redirect to Tag in tag_create().
## Code Before:
from django.shortcuts import (
    get_object_or_404, render)

from .forms import TagForm
from .models import Startup, Tag


def startup_detail(request, slug):
    startup = get_object_or_404(
        Startup, slug__iexact=slug)
    return render(
        request,
        'organizer/startup_detail.html',
        {'startup': startup})


def startup_list(request):
    return render(
        request,
        'organizer/startup_list.html',
        {'startup_list': Startup.objects.all()})


def tag_create(request):
    if request.method == 'POST':
        form = TagForm(request.POST)
        if form.is_valid():
            # create new object from data
            # show webpage for new object
            pass
        else:  # empty data or invalid data
            # show bound HTML form (with errors)
            pass
    else:  # request.method != 'POST'
        # show unbound HTML form
        pass


def tag_detail(request, slug):
    tag = get_object_or_404(
        Tag, slug__iexact=slug)
    return render(
        request,
        'organizer/tag_detail.html',
        {'tag': tag})


def tag_list(request):
    return render(
        request,
        'organizer/tag_list.html',
        {'tag_list': Tag.objects.all()})

## Instruction:
Create and redirect to Tag in tag_create().

## Code After:
from django.shortcuts import (
    get_object_or_404, redirect, render)

from .forms import TagForm
from .models import Startup, Tag


def startup_detail(request, slug):
    startup = get_object_or_404(
        Startup, slug__iexact=slug)
    return render(
        request,
        'organizer/startup_detail.html',
        {'startup': startup})


def startup_list(request):
    return render(
        request,
        'organizer/startup_list.html',
        {'startup_list': Startup.objects.all()})


def tag_create(request):
    if request.method == 'POST':
        form = TagForm(request.POST)
        if form.is_valid():
            new_tag = form.save()
            return redirect(new_tag)
        else:  # empty data or invalid data
            # show bound HTML form (with errors)
            pass
    else:  # request.method != 'POST'
        # show unbound HTML form
        pass


def tag_detail(request, slug):
    tag = get_object_or_404(
        Tag, slug__iexact=slug)
    return render(
        request,
        'organizer/tag_detail.html',
        {'tag': tag})


def tag_list(request):
    return render(
        request,
        'organizer/tag_list.html',
        {'tag_list': Tag.objects.all()})
# ... existing code ...
from django.shortcuts import (
    get_object_or_404, redirect, render)
# ... modified code ...
        if form.is_valid():
            new_tag = form.save()
            return redirect(new_tag)
        else:  # empty data or invalid data
# ... rest of the code ...