commit
stringlengths 40
40
| old_file
stringlengths 4
118
| new_file
stringlengths 4
118
| old_contents
stringlengths 0
2.94k
| new_contents
stringlengths 1
4.43k
| subject
stringlengths 15
444
| message
stringlengths 16
3.45k
| lang
stringclasses 1
value | license
stringclasses 13
values | repos
stringlengths 5
43.2k
| prompt
stringlengths 17
4.58k
| response
stringlengths 1
4.43k
| prompt_tagged
stringlengths 58
4.62k
| response_tagged
stringlengths 1
4.43k
| text
stringlengths 132
7.29k
| text_tagged
stringlengths 173
7.33k
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
ee49f4f592cf04199f9d82c2da2af9e34dd1d9d4
|
avwx_api/views.py
|
avwx_api/views.py
|
"""
Michael duPont - michael@mdupont.com
avwx_api.views - Routes and views for the Quart application
"""
# pylint: disable=W0702
# stdlib
from dataclasses import asdict
# library
import avwx
from quart import Response, jsonify
from quart_openapi.cors import crossdomain
# module
from avwx_api import app
# Static Web Pages
@app.route('/')
@app.route('/home')
async def home() -> Response:
"""
Returns static home page
"""
return await app.send_static_file('html/home.html')
# API Routing Errors
@app.route('/api')
async def no_report() -> Response:
"""
Returns no report msg
"""
return jsonify({'error': 'No report type given'}), 400
@app.route('/api/metar')
@app.route('/api/taf')
async def no_station() -> Response:
"""
Returns no station msg
"""
return jsonify({'error': 'No station given'}), 400
@app.route('/api/station/<string:station>')
@crossdomain(origin='*')
async def station_endpoint(station: str) -> Response:
"""
Returns raw station info if available
"""
station = station.upper()
data = avwx.Station.from_icao(station)
if data:
return jsonify(asdict(data))
return jsonify({'error': f'Station ident "{station}" not found'})
|
"""
Michael duPont - michael@mdupont.com
avwx_api.views - Routes and views for the Quart application
"""
# pylint: disable=W0702
# stdlib
from dataclasses import asdict
# library
import avwx
from quart import Response, jsonify
from quart_openapi.cors import crossdomain
# module
from avwx_api import app
# Static Web Pages
@app.route('/')
@app.route('/home')
async def home() -> Response:
"""
Returns static home page
"""
return await app.send_static_file('html/home.html')
# API Routing Errors
@app.route('/api')
async def no_report() -> Response:
"""
Returns no report msg
"""
return jsonify({'error': 'No report type given'}), 400
@app.route('/api/metar')
@app.route('/api/taf')
async def no_station() -> Response:
"""
Returns no station msg
"""
return jsonify({'error': 'No station given'}), 400
@app.route('/api/station/<string:station>')
@crossdomain(origin='*')
async def station_endpoint(station: str) -> Response:
"""
Returns raw station info if available
"""
station = station.upper()
try:
return jsonify(asdict(avwx.Station.from_icao(station)))
except avwx.exceptions.BadStation:
return jsonify({
'error': f'Station ident "{station}" not found. Email me if data is missing :)'
})
|
Add error handling to station endpoint
|
Add error handling to station endpoint
|
Python
|
mit
|
flyinactor91/AVWX-API,flyinactor91/AVWX-API,flyinactor91/AVWX-API
|
"""
Michael duPont - michael@mdupont.com
avwx_api.views - Routes and views for the Quart application
"""
# pylint: disable=W0702
# stdlib
from dataclasses import asdict
# library
import avwx
from quart import Response, jsonify
from quart_openapi.cors import crossdomain
# module
from avwx_api import app
# Static Web Pages
@app.route('/')
@app.route('/home')
async def home() -> Response:
"""
Returns static home page
"""
return await app.send_static_file('html/home.html')
# API Routing Errors
@app.route('/api')
async def no_report() -> Response:
"""
Returns no report msg
"""
return jsonify({'error': 'No report type given'}), 400
@app.route('/api/metar')
@app.route('/api/taf')
async def no_station() -> Response:
"""
Returns no station msg
"""
return jsonify({'error': 'No station given'}), 400
@app.route('/api/station/<string:station>')
@crossdomain(origin='*')
async def station_endpoint(station: str) -> Response:
"""
Returns raw station info if available
"""
station = station.upper()
data = avwx.Station.from_icao(station)
if data:
return jsonify(asdict(data))
return jsonify({'error': f'Station ident "{station}" not found'})
Add error handling to station endpoint
|
"""
Michael duPont - michael@mdupont.com
avwx_api.views - Routes and views for the Quart application
"""
# pylint: disable=W0702
# stdlib
from dataclasses import asdict
# library
import avwx
from quart import Response, jsonify
from quart_openapi.cors import crossdomain
# module
from avwx_api import app
# Static Web Pages
@app.route('/')
@app.route('/home')
async def home() -> Response:
"""
Returns static home page
"""
return await app.send_static_file('html/home.html')
# API Routing Errors
@app.route('/api')
async def no_report() -> Response:
"""
Returns no report msg
"""
return jsonify({'error': 'No report type given'}), 400
@app.route('/api/metar')
@app.route('/api/taf')
async def no_station() -> Response:
"""
Returns no station msg
"""
return jsonify({'error': 'No station given'}), 400
@app.route('/api/station/<string:station>')
@crossdomain(origin='*')
async def station_endpoint(station: str) -> Response:
"""
Returns raw station info if available
"""
station = station.upper()
try:
return jsonify(asdict(avwx.Station.from_icao(station)))
except avwx.exceptions.BadStation:
return jsonify({
'error': f'Station ident "{station}" not found. Email me if data is missing :)'
})
|
<commit_before>"""
Michael duPont - michael@mdupont.com
avwx_api.views - Routes and views for the Quart application
"""
# pylint: disable=W0702
# stdlib
from dataclasses import asdict
# library
import avwx
from quart import Response, jsonify
from quart_openapi.cors import crossdomain
# module
from avwx_api import app
# Static Web Pages
@app.route('/')
@app.route('/home')
async def home() -> Response:
"""
Returns static home page
"""
return await app.send_static_file('html/home.html')
# API Routing Errors
@app.route('/api')
async def no_report() -> Response:
"""
Returns no report msg
"""
return jsonify({'error': 'No report type given'}), 400
@app.route('/api/metar')
@app.route('/api/taf')
async def no_station() -> Response:
"""
Returns no station msg
"""
return jsonify({'error': 'No station given'}), 400
@app.route('/api/station/<string:station>')
@crossdomain(origin='*')
async def station_endpoint(station: str) -> Response:
"""
Returns raw station info if available
"""
station = station.upper()
data = avwx.Station.from_icao(station)
if data:
return jsonify(asdict(data))
return jsonify({'error': f'Station ident "{station}" not found'})
<commit_msg>Add error handling to station endpoint<commit_after>
|
"""
Michael duPont - michael@mdupont.com
avwx_api.views - Routes and views for the Quart application
"""
# pylint: disable=W0702
# stdlib
from dataclasses import asdict
# library
import avwx
from quart import Response, jsonify
from quart_openapi.cors import crossdomain
# module
from avwx_api import app
# Static Web Pages
@app.route('/')
@app.route('/home')
async def home() -> Response:
"""
Returns static home page
"""
return await app.send_static_file('html/home.html')
# API Routing Errors
@app.route('/api')
async def no_report() -> Response:
"""
Returns no report msg
"""
return jsonify({'error': 'No report type given'}), 400
@app.route('/api/metar')
@app.route('/api/taf')
async def no_station() -> Response:
"""
Returns no station msg
"""
return jsonify({'error': 'No station given'}), 400
@app.route('/api/station/<string:station>')
@crossdomain(origin='*')
async def station_endpoint(station: str) -> Response:
"""
Returns raw station info if available
"""
station = station.upper()
try:
return jsonify(asdict(avwx.Station.from_icao(station)))
except avwx.exceptions.BadStation:
return jsonify({
'error': f'Station ident "{station}" not found. Email me if data is missing :)'
})
|
"""
Michael duPont - michael@mdupont.com
avwx_api.views - Routes and views for the Quart application
"""
# pylint: disable=W0702
# stdlib
from dataclasses import asdict
# library
import avwx
from quart import Response, jsonify
from quart_openapi.cors import crossdomain
# module
from avwx_api import app
# Static Web Pages
@app.route('/')
@app.route('/home')
async def home() -> Response:
"""
Returns static home page
"""
return await app.send_static_file('html/home.html')
# API Routing Errors
@app.route('/api')
async def no_report() -> Response:
"""
Returns no report msg
"""
return jsonify({'error': 'No report type given'}), 400
@app.route('/api/metar')
@app.route('/api/taf')
async def no_station() -> Response:
"""
Returns no station msg
"""
return jsonify({'error': 'No station given'}), 400
@app.route('/api/station/<string:station>')
@crossdomain(origin='*')
async def station_endpoint(station: str) -> Response:
"""
Returns raw station info if available
"""
station = station.upper()
data = avwx.Station.from_icao(station)
if data:
return jsonify(asdict(data))
return jsonify({'error': f'Station ident "{station}" not found'})
Add error handling to station endpoint"""
Michael duPont - michael@mdupont.com
avwx_api.views - Routes and views for the Quart application
"""
# pylint: disable=W0702
# stdlib
from dataclasses import asdict
# library
import avwx
from quart import Response, jsonify
from quart_openapi.cors import crossdomain
# module
from avwx_api import app
# Static Web Pages
@app.route('/')
@app.route('/home')
async def home() -> Response:
"""
Returns static home page
"""
return await app.send_static_file('html/home.html')
# API Routing Errors
@app.route('/api')
async def no_report() -> Response:
"""
Returns no report msg
"""
return jsonify({'error': 'No report type given'}), 400
@app.route('/api/metar')
@app.route('/api/taf')
async def no_station() -> Response:
"""
Returns no station msg
"""
return jsonify({'error': 'No station given'}), 400
@app.route('/api/station/<string:station>')
@crossdomain(origin='*')
async def station_endpoint(station: str) -> Response:
"""
Returns raw station info if available
"""
station = station.upper()
try:
return jsonify(asdict(avwx.Station.from_icao(station)))
except avwx.exceptions.BadStation:
return jsonify({
'error': f'Station ident "{station}" not found. Email me if data is missing :)'
})
|
<commit_before>"""
Michael duPont - michael@mdupont.com
avwx_api.views - Routes and views for the Quart application
"""
# pylint: disable=W0702
# stdlib
from dataclasses import asdict
# library
import avwx
from quart import Response, jsonify
from quart_openapi.cors import crossdomain
# module
from avwx_api import app
# Static Web Pages
@app.route('/')
@app.route('/home')
async def home() -> Response:
"""
Returns static home page
"""
return await app.send_static_file('html/home.html')
# API Routing Errors
@app.route('/api')
async def no_report() -> Response:
"""
Returns no report msg
"""
return jsonify({'error': 'No report type given'}), 400
@app.route('/api/metar')
@app.route('/api/taf')
async def no_station() -> Response:
"""
Returns no station msg
"""
return jsonify({'error': 'No station given'}), 400
@app.route('/api/station/<string:station>')
@crossdomain(origin='*')
async def station_endpoint(station: str) -> Response:
"""
Returns raw station info if available
"""
station = station.upper()
data = avwx.Station.from_icao(station)
if data:
return jsonify(asdict(data))
return jsonify({'error': f'Station ident "{station}" not found'})
<commit_msg>Add error handling to station endpoint<commit_after>"""
Michael duPont - michael@mdupont.com
avwx_api.views - Routes and views for the Quart application
"""
# pylint: disable=W0702
# stdlib
from dataclasses import asdict
# library
import avwx
from quart import Response, jsonify
from quart_openapi.cors import crossdomain
# module
from avwx_api import app
# Static Web Pages
@app.route('/')
@app.route('/home')
async def home() -> Response:
"""
Returns static home page
"""
return await app.send_static_file('html/home.html')
# API Routing Errors
@app.route('/api')
async def no_report() -> Response:
"""
Returns no report msg
"""
return jsonify({'error': 'No report type given'}), 400
@app.route('/api/metar')
@app.route('/api/taf')
async def no_station() -> Response:
"""
Returns no station msg
"""
return jsonify({'error': 'No station given'}), 400
@app.route('/api/station/<string:station>')
@crossdomain(origin='*')
async def station_endpoint(station: str) -> Response:
"""
Returns raw station info if available
"""
station = station.upper()
try:
return jsonify(asdict(avwx.Station.from_icao(station)))
except avwx.exceptions.BadStation:
return jsonify({
'error': f'Station ident "{station}" not found. Email me if data is missing :)'
})
|
c5c12e1f5aaeb56921b69cbb64a7d6a1b7585936
|
languages_plus/admin.py
|
languages_plus/admin.py
|
from django.contrib import admin
from .models import Language, CultureCode
class LanguageAdmin(admin.ModelAdmin):
list_display = ('name_en', 'name_native', 'iso_639_1', 'iso_639_2T', 'iso_639_2B', 'iso_639_2T',
'iso_639_3', 'iso_639_6', 'notes')
list_display_links = ('name_en',)
class CultureCodeAdmin(admin.ModelAdmin):
list_display = ('code', 'language', 'country')
list_display_links = ('code',)
admin.site.register(Language, LanguageAdmin)
admin.site.register(CultureCode, CultureCodeAdmin)
|
from django.contrib import admin
from .models import Language, CultureCode
class LanguageAdmin(admin.ModelAdmin):
list_display = ('name_en', 'name_native', 'iso_639_1', 'iso_639_2T', 'iso_639_2B', 'iso_639_2T',
'iso_639_3', 'iso_639_6', 'notes')
list_display_links = ('name_en',)
search_fields = ('name_en', 'name_native')
class CultureCodeAdmin(admin.ModelAdmin):
list_display = ('code', 'language', 'country')
list_display_links = ('code',)
search_fields = ('code', 'language', 'country')
admin.site.register(Language, LanguageAdmin)
admin.site.register(CultureCode, CultureCodeAdmin)
|
Define `search_fields` for Admin classes
|
Define `search_fields` for Admin classes
This enables the search box on the admin change list page [1], and can
be used by other apps like django-autocomplete-light [2].
1: https://docs.djangoproject.com/en/1.7/ref/contrib/admin/#django.contrib.admin.ModelAdmin.search_fields
2: https://github.com/yourlabs/django-autocomplete-light/pull/361
|
Python
|
mit
|
cordery/django-languages-plus
|
from django.contrib import admin
from .models import Language, CultureCode
class LanguageAdmin(admin.ModelAdmin):
list_display = ('name_en', 'name_native', 'iso_639_1', 'iso_639_2T', 'iso_639_2B', 'iso_639_2T',
'iso_639_3', 'iso_639_6', 'notes')
list_display_links = ('name_en',)
class CultureCodeAdmin(admin.ModelAdmin):
list_display = ('code', 'language', 'country')
list_display_links = ('code',)
admin.site.register(Language, LanguageAdmin)
admin.site.register(CultureCode, CultureCodeAdmin)
Define `search_fields` for Admin classes
This enables the search box on the admin change list page [1], and can
be used by other apps like django-autocomplete-light [2].
1: https://docs.djangoproject.com/en/1.7/ref/contrib/admin/#django.contrib.admin.ModelAdmin.search_fields
2: https://github.com/yourlabs/django-autocomplete-light/pull/361
|
from django.contrib import admin
from .models import Language, CultureCode
class LanguageAdmin(admin.ModelAdmin):
list_display = ('name_en', 'name_native', 'iso_639_1', 'iso_639_2T', 'iso_639_2B', 'iso_639_2T',
'iso_639_3', 'iso_639_6', 'notes')
list_display_links = ('name_en',)
search_fields = ('name_en', 'name_native')
class CultureCodeAdmin(admin.ModelAdmin):
list_display = ('code', 'language', 'country')
list_display_links = ('code',)
search_fields = ('code', 'language', 'country')
admin.site.register(Language, LanguageAdmin)
admin.site.register(CultureCode, CultureCodeAdmin)
|
<commit_before>from django.contrib import admin
from .models import Language, CultureCode
class LanguageAdmin(admin.ModelAdmin):
list_display = ('name_en', 'name_native', 'iso_639_1', 'iso_639_2T', 'iso_639_2B', 'iso_639_2T',
'iso_639_3', 'iso_639_6', 'notes')
list_display_links = ('name_en',)
class CultureCodeAdmin(admin.ModelAdmin):
list_display = ('code', 'language', 'country')
list_display_links = ('code',)
admin.site.register(Language, LanguageAdmin)
admin.site.register(CultureCode, CultureCodeAdmin)
<commit_msg>Define `search_fields` for Admin classes
This enables the search box on the admin change list page [1], and can
be used by other apps like django-autocomplete-light [2].
1: https://docs.djangoproject.com/en/1.7/ref/contrib/admin/#django.contrib.admin.ModelAdmin.search_fields
2: https://github.com/yourlabs/django-autocomplete-light/pull/361<commit_after>
|
from django.contrib import admin
from .models import Language, CultureCode
class LanguageAdmin(admin.ModelAdmin):
list_display = ('name_en', 'name_native', 'iso_639_1', 'iso_639_2T', 'iso_639_2B', 'iso_639_2T',
'iso_639_3', 'iso_639_6', 'notes')
list_display_links = ('name_en',)
search_fields = ('name_en', 'name_native')
class CultureCodeAdmin(admin.ModelAdmin):
list_display = ('code', 'language', 'country')
list_display_links = ('code',)
search_fields = ('code', 'language', 'country')
admin.site.register(Language, LanguageAdmin)
admin.site.register(CultureCode, CultureCodeAdmin)
|
from django.contrib import admin
from .models import Language, CultureCode
class LanguageAdmin(admin.ModelAdmin):
list_display = ('name_en', 'name_native', 'iso_639_1', 'iso_639_2T', 'iso_639_2B', 'iso_639_2T',
'iso_639_3', 'iso_639_6', 'notes')
list_display_links = ('name_en',)
class CultureCodeAdmin(admin.ModelAdmin):
list_display = ('code', 'language', 'country')
list_display_links = ('code',)
admin.site.register(Language, LanguageAdmin)
admin.site.register(CultureCode, CultureCodeAdmin)
Define `search_fields` for Admin classes
This enables the search box on the admin change list page [1], and can
be used by other apps like django-autocomplete-light [2].
1: https://docs.djangoproject.com/en/1.7/ref/contrib/admin/#django.contrib.admin.ModelAdmin.search_fields
2: https://github.com/yourlabs/django-autocomplete-light/pull/361from django.contrib import admin
from .models import Language, CultureCode
class LanguageAdmin(admin.ModelAdmin):
list_display = ('name_en', 'name_native', 'iso_639_1', 'iso_639_2T', 'iso_639_2B', 'iso_639_2T',
'iso_639_3', 'iso_639_6', 'notes')
list_display_links = ('name_en',)
search_fields = ('name_en', 'name_native')
class CultureCodeAdmin(admin.ModelAdmin):
list_display = ('code', 'language', 'country')
list_display_links = ('code',)
search_fields = ('code', 'language', 'country')
admin.site.register(Language, LanguageAdmin)
admin.site.register(CultureCode, CultureCodeAdmin)
|
<commit_before>from django.contrib import admin
from .models import Language, CultureCode
class LanguageAdmin(admin.ModelAdmin):
list_display = ('name_en', 'name_native', 'iso_639_1', 'iso_639_2T', 'iso_639_2B', 'iso_639_2T',
'iso_639_3', 'iso_639_6', 'notes')
list_display_links = ('name_en',)
class CultureCodeAdmin(admin.ModelAdmin):
list_display = ('code', 'language', 'country')
list_display_links = ('code',)
admin.site.register(Language, LanguageAdmin)
admin.site.register(CultureCode, CultureCodeAdmin)
<commit_msg>Define `search_fields` for Admin classes
This enables the search box on the admin change list page [1], and can
be used by other apps like django-autocomplete-light [2].
1: https://docs.djangoproject.com/en/1.7/ref/contrib/admin/#django.contrib.admin.ModelAdmin.search_fields
2: https://github.com/yourlabs/django-autocomplete-light/pull/361<commit_after>from django.contrib import admin
from .models import Language, CultureCode
class LanguageAdmin(admin.ModelAdmin):
list_display = ('name_en', 'name_native', 'iso_639_1', 'iso_639_2T', 'iso_639_2B', 'iso_639_2T',
'iso_639_3', 'iso_639_6', 'notes')
list_display_links = ('name_en',)
search_fields = ('name_en', 'name_native')
class CultureCodeAdmin(admin.ModelAdmin):
list_display = ('code', 'language', 'country')
list_display_links = ('code',)
search_fields = ('code', 'language', 'country')
admin.site.register(Language, LanguageAdmin)
admin.site.register(CultureCode, CultureCodeAdmin)
|
2b08ce1d980ff01c2f0ac258aaba52f2ca758427
|
beethoven/urls.py
|
beethoven/urls.py
|
from django.conf.urls import patterns, include, url
from django.contrib import admin
urlpatterns = patterns(
'',
url(r'^$', 'beethoven.views.index', name='index'),
url(r'^admin/', include(admin.site.urls)),
url(r'^', include('allauth.urls')),
url(r'^', include('courses.urls', namespace='courses')),
url(r'^', include('quizzes.urls', namespace='quizzes')),
url(r'^api/', include('quizzes.api_urls', namespace='quizzes_api'))
)
|
from django.conf.urls import patterns, include, url
from django.contrib import admin
from beethoven import settings
urlpatterns = patterns(
'',
url(r'^$', 'beethoven.views.index', name='index'),
url(r'^admin/', include(admin.site.urls)),
url(r'^', include('allauth.urls')),
url(r'^', include('courses.urls', namespace='courses')),
url(r'^', include('quizzes.urls', namespace='quizzes')),
url(r'^api/', include('quizzes.api_urls', namespace='quizzes_api'))
)
if not settings.PRODUCTION:
urlpatterns += patterns(
'',
(r'^static/(?P<path>.*)$',
'django.views.static.serve',
{'document_root': settings.STATIC_ROOT})
)
|
Fix static file 404 error
|
Fix static file 404 error
|
Python
|
mit
|
lockhawksp/beethoven,lockhawksp/beethoven
|
from django.conf.urls import patterns, include, url
from django.contrib import admin
urlpatterns = patterns(
'',
url(r'^$', 'beethoven.views.index', name='index'),
url(r'^admin/', include(admin.site.urls)),
url(r'^', include('allauth.urls')),
url(r'^', include('courses.urls', namespace='courses')),
url(r'^', include('quizzes.urls', namespace='quizzes')),
url(r'^api/', include('quizzes.api_urls', namespace='quizzes_api'))
)
Fix static file 404 error
|
from django.conf.urls import patterns, include, url
from django.contrib import admin
from beethoven import settings
urlpatterns = patterns(
'',
url(r'^$', 'beethoven.views.index', name='index'),
url(r'^admin/', include(admin.site.urls)),
url(r'^', include('allauth.urls')),
url(r'^', include('courses.urls', namespace='courses')),
url(r'^', include('quizzes.urls', namespace='quizzes')),
url(r'^api/', include('quizzes.api_urls', namespace='quizzes_api'))
)
if not settings.PRODUCTION:
urlpatterns += patterns(
'',
(r'^static/(?P<path>.*)$',
'django.views.static.serve',
{'document_root': settings.STATIC_ROOT})
)
|
<commit_before>from django.conf.urls import patterns, include, url
from django.contrib import admin
urlpatterns = patterns(
'',
url(r'^$', 'beethoven.views.index', name='index'),
url(r'^admin/', include(admin.site.urls)),
url(r'^', include('allauth.urls')),
url(r'^', include('courses.urls', namespace='courses')),
url(r'^', include('quizzes.urls', namespace='quizzes')),
url(r'^api/', include('quizzes.api_urls', namespace='quizzes_api'))
)
<commit_msg>Fix static file 404 error<commit_after>
|
from django.conf.urls import patterns, include, url
from django.contrib import admin
from beethoven import settings
urlpatterns = patterns(
'',
url(r'^$', 'beethoven.views.index', name='index'),
url(r'^admin/', include(admin.site.urls)),
url(r'^', include('allauth.urls')),
url(r'^', include('courses.urls', namespace='courses')),
url(r'^', include('quizzes.urls', namespace='quizzes')),
url(r'^api/', include('quizzes.api_urls', namespace='quizzes_api'))
)
if not settings.PRODUCTION:
urlpatterns += patterns(
'',
(r'^static/(?P<path>.*)$',
'django.views.static.serve',
{'document_root': settings.STATIC_ROOT})
)
|
from django.conf.urls import patterns, include, url
from django.contrib import admin
urlpatterns = patterns(
'',
url(r'^$', 'beethoven.views.index', name='index'),
url(r'^admin/', include(admin.site.urls)),
url(r'^', include('allauth.urls')),
url(r'^', include('courses.urls', namespace='courses')),
url(r'^', include('quizzes.urls', namespace='quizzes')),
url(r'^api/', include('quizzes.api_urls', namespace='quizzes_api'))
)
Fix static file 404 errorfrom django.conf.urls import patterns, include, url
from django.contrib import admin
from beethoven import settings
urlpatterns = patterns(
'',
url(r'^$', 'beethoven.views.index', name='index'),
url(r'^admin/', include(admin.site.urls)),
url(r'^', include('allauth.urls')),
url(r'^', include('courses.urls', namespace='courses')),
url(r'^', include('quizzes.urls', namespace='quizzes')),
url(r'^api/', include('quizzes.api_urls', namespace='quizzes_api'))
)
if not settings.PRODUCTION:
urlpatterns += patterns(
'',
(r'^static/(?P<path>.*)$',
'django.views.static.serve',
{'document_root': settings.STATIC_ROOT})
)
|
<commit_before>from django.conf.urls import patterns, include, url
from django.contrib import admin
urlpatterns = patterns(
'',
url(r'^$', 'beethoven.views.index', name='index'),
url(r'^admin/', include(admin.site.urls)),
url(r'^', include('allauth.urls')),
url(r'^', include('courses.urls', namespace='courses')),
url(r'^', include('quizzes.urls', namespace='quizzes')),
url(r'^api/', include('quizzes.api_urls', namespace='quizzes_api'))
)
<commit_msg>Fix static file 404 error<commit_after>from django.conf.urls import patterns, include, url
from django.contrib import admin
from beethoven import settings
urlpatterns = patterns(
'',
url(r'^$', 'beethoven.views.index', name='index'),
url(r'^admin/', include(admin.site.urls)),
url(r'^', include('allauth.urls')),
url(r'^', include('courses.urls', namespace='courses')),
url(r'^', include('quizzes.urls', namespace='quizzes')),
url(r'^api/', include('quizzes.api_urls', namespace='quizzes_api'))
)
if not settings.PRODUCTION:
urlpatterns += patterns(
'',
(r'^static/(?P<path>.*)$',
'django.views.static.serve',
{'document_root': settings.STATIC_ROOT})
)
|
690d00e7b8f4021ff43e50a2b41ede50745ee4ae
|
fabfile.py
|
fabfile.py
|
from armstrong.dev.tasks import *
settings = {
'DEBUG': True,
'INSTALLED_APPS': (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'armstrong.core.arm_access',
'armstrong.core.arm_content',
'armstrong.core.arm_sections',
'armstrong.apps.content',
'south',
),
'SITE_ID': 1,
}
main_app = "content"
tested_apps = (main_app, )
|
from armstrong.dev.tasks import *
settings = {
'DEBUG': True,
'INSTALLED_APPS': (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'armstrong.core.arm_access',
'armstrong.core.arm_content',
'armstrong.core.arm_sections',
'armstrong.apps.content',
'south',
'taggit',
),
'SITE_ID': 1,
}
main_app = "content"
tested_apps = (main_app, )
|
Add missing installed app for testing
|
Add missing installed app for testing
|
Python
|
apache-2.0
|
armstrong/armstrong.apps.content,armstrong/armstrong.apps.content
|
from armstrong.dev.tasks import *
settings = {
'DEBUG': True,
'INSTALLED_APPS': (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'armstrong.core.arm_access',
'armstrong.core.arm_content',
'armstrong.core.arm_sections',
'armstrong.apps.content',
'south',
),
'SITE_ID': 1,
}
main_app = "content"
tested_apps = (main_app, )
Add missing installed app for testing
|
from armstrong.dev.tasks import *
settings = {
'DEBUG': True,
'INSTALLED_APPS': (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'armstrong.core.arm_access',
'armstrong.core.arm_content',
'armstrong.core.arm_sections',
'armstrong.apps.content',
'south',
'taggit',
),
'SITE_ID': 1,
}
main_app = "content"
tested_apps = (main_app, )
|
<commit_before>from armstrong.dev.tasks import *
settings = {
'DEBUG': True,
'INSTALLED_APPS': (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'armstrong.core.arm_access',
'armstrong.core.arm_content',
'armstrong.core.arm_sections',
'armstrong.apps.content',
'south',
),
'SITE_ID': 1,
}
main_app = "content"
tested_apps = (main_app, )
<commit_msg>Add missing installed app for testing<commit_after>
|
from armstrong.dev.tasks import *
settings = {
'DEBUG': True,
'INSTALLED_APPS': (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'armstrong.core.arm_access',
'armstrong.core.arm_content',
'armstrong.core.arm_sections',
'armstrong.apps.content',
'south',
'taggit',
),
'SITE_ID': 1,
}
main_app = "content"
tested_apps = (main_app, )
|
from armstrong.dev.tasks import *
settings = {
'DEBUG': True,
'INSTALLED_APPS': (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'armstrong.core.arm_access',
'armstrong.core.arm_content',
'armstrong.core.arm_sections',
'armstrong.apps.content',
'south',
),
'SITE_ID': 1,
}
main_app = "content"
tested_apps = (main_app, )
Add missing installed app for testingfrom armstrong.dev.tasks import *
settings = {
'DEBUG': True,
'INSTALLED_APPS': (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'armstrong.core.arm_access',
'armstrong.core.arm_content',
'armstrong.core.arm_sections',
'armstrong.apps.content',
'south',
'taggit',
),
'SITE_ID': 1,
}
main_app = "content"
tested_apps = (main_app, )
|
<commit_before>from armstrong.dev.tasks import *
settings = {
'DEBUG': True,
'INSTALLED_APPS': (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'armstrong.core.arm_access',
'armstrong.core.arm_content',
'armstrong.core.arm_sections',
'armstrong.apps.content',
'south',
),
'SITE_ID': 1,
}
main_app = "content"
tested_apps = (main_app, )
<commit_msg>Add missing installed app for testing<commit_after>from armstrong.dev.tasks import *
settings = {
'DEBUG': True,
'INSTALLED_APPS': (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'armstrong.core.arm_access',
'armstrong.core.arm_content',
'armstrong.core.arm_sections',
'armstrong.apps.content',
'south',
'taggit',
),
'SITE_ID': 1,
}
main_app = "content"
tested_apps = (main_app, )
|
6d0fa6dda7613e734ce958f88bc0eaf55cfddf3c
|
st2common/st2common/persistence/pack.py
|
st2common/st2common/persistence/pack.py
|
# Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from st2common.models.db.pack import pack_access
from st2common.persistence import base
__all__ = [
'Pack'
]
class Pack(base.Access):
impl = pack_access
@classmethod
def _get_impl(cls):
return cls.impl
|
# Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from st2common.models.db.pack import pack_access
from st2common.persistence import base
__all__ = [
'Pack',
'ConfigSchema'
]
class Pack(base.Access):
impl = pack_access
@classmethod
def _get_impl(cls):
return cls.impl
class ConfigSchema(base.Access):
impl = pack_access
@classmethod
def _get_impl(cls):
return cls.impl
|
Add persistance class for ConfigSchema.
|
Add persistance class for ConfigSchema.
|
Python
|
apache-2.0
|
pixelrebel/st2,Plexxi/st2,emedvedev/st2,lakshmi-kannan/st2,StackStorm/st2,punalpatel/st2,Plexxi/st2,peak6/st2,StackStorm/st2,Plexxi/st2,pixelrebel/st2,StackStorm/st2,Plexxi/st2,punalpatel/st2,nzlosh/st2,emedvedev/st2,peak6/st2,emedvedev/st2,punalpatel/st2,peak6/st2,lakshmi-kannan/st2,tonybaloney/st2,nzlosh/st2,tonybaloney/st2,pixelrebel/st2,StackStorm/st2,tonybaloney/st2,lakshmi-kannan/st2,nzlosh/st2,nzlosh/st2
|
# Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from st2common.models.db.pack import pack_access
from st2common.persistence import base
__all__ = [
'Pack'
]
class Pack(base.Access):
impl = pack_access
@classmethod
def _get_impl(cls):
return cls.impl
Add persistance class for ConfigSchema.
|
# Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from st2common.models.db.pack import pack_access
from st2common.persistence import base
__all__ = [
'Pack',
'ConfigSchema'
]
class Pack(base.Access):
impl = pack_access
@classmethod
def _get_impl(cls):
return cls.impl
class ConfigSchema(base.Access):
impl = pack_access
@classmethod
def _get_impl(cls):
return cls.impl
|
<commit_before># Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from st2common.models.db.pack import pack_access
from st2common.persistence import base
__all__ = [
'Pack'
]
class Pack(base.Access):
impl = pack_access
@classmethod
def _get_impl(cls):
return cls.impl
<commit_msg>Add persistance class for ConfigSchema.<commit_after>
|
# Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from st2common.models.db.pack import pack_access
from st2common.persistence import base
__all__ = [
'Pack',
'ConfigSchema'
]
class Pack(base.Access):
impl = pack_access
@classmethod
def _get_impl(cls):
return cls.impl
class ConfigSchema(base.Access):
impl = pack_access
@classmethod
def _get_impl(cls):
return cls.impl
|
# Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from st2common.models.db.pack import pack_access
from st2common.persistence import base
__all__ = [
'Pack'
]
class Pack(base.Access):
impl = pack_access
@classmethod
def _get_impl(cls):
return cls.impl
Add persistance class for ConfigSchema.# Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from st2common.models.db.pack import pack_access
from st2common.persistence import base
__all__ = [
'Pack',
'ConfigSchema'
]
class Pack(base.Access):
impl = pack_access
@classmethod
def _get_impl(cls):
return cls.impl
class ConfigSchema(base.Access):
impl = pack_access
@classmethod
def _get_impl(cls):
return cls.impl
|
<commit_before># Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from st2common.models.db.pack import pack_access
from st2common.persistence import base
__all__ = [
'Pack'
]
class Pack(base.Access):
impl = pack_access
@classmethod
def _get_impl(cls):
return cls.impl
<commit_msg>Add persistance class for ConfigSchema.<commit_after># Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from st2common.models.db.pack import pack_access
from st2common.persistence import base
__all__ = [
'Pack',
'ConfigSchema'
]
class Pack(base.Access):
impl = pack_access
@classmethod
def _get_impl(cls):
return cls.impl
class ConfigSchema(base.Access):
impl = pack_access
@classmethod
def _get_impl(cls):
return cls.impl
|
f7e218b72a09615259b4d77e9169f5237a4cae32
|
mopidy/core/mixer.py
|
mopidy/core/mixer.py
|
from __future__ import absolute_import, unicode_literals
import logging
logger = logging.getLogger(__name__)
class MixerController(object):
pykka_traversable = True
def __init__(self, mixer):
self._mixer = mixer
self._volume = None
self._mute = False
def get_volume(self):
"""Get the volume.
Integer in range [0..100] or :class:`None` if unknown.
The volume scale is linear.
"""
if self._mixer:
return self._mixer.get_volume().get()
else:
# For testing
return self._volume
def set_volume(self, volume):
"""Set the volume.
The volume is defined as an integer in range [0..100].
The volume scale is linear.
"""
if self._mixer:
self._mixer.set_volume(volume)
else:
# For testing
self._volume = volume
def get_mute(self):
"""Get mute state.
:class:`True` if muted, :class:`False` otherwise.
"""
if self._mixer:
return self._mixer.get_mute().get()
else:
# For testing
return self._mute
def set_mute(self, mute):
"""Set mute state.
:class:`True` to mute, :class:`False` to unmute.
"""
mute = bool(mute)
if self._mixer:
self._mixer.set_mute(mute)
else:
# For testing
self._mute = mute
|
from __future__ import absolute_import, unicode_literals
import logging
logger = logging.getLogger(__name__)
class MixerController(object):
pykka_traversable = True
def __init__(self, mixer):
self._mixer = mixer
self._volume = None
self._mute = False
def get_volume(self):
"""Get the volume.
Integer in range [0..100] or :class:`None` if unknown.
The volume scale is linear.
"""
if self._mixer is not None:
return self._mixer.get_volume().get()
def set_volume(self, volume):
"""Set the volume.
The volume is defined as an integer in range [0..100].
The volume scale is linear.
"""
if self._mixer is not None:
self._mixer.set_volume(volume)
def get_mute(self):
"""Get mute state.
:class:`True` if muted, :class:`False` unmuted, :class:`None` if
unknown.
"""
if self._mixer is not None:
return self._mixer.get_mute().get()
def set_mute(self, mute):
"""Set mute state.
:class:`True` to mute, :class:`False` to unmute.
"""
if self._mixer is not None:
self._mixer.set_mute(bool(mute))
|
Remove test-only code paths in MixerController
|
core: Remove test-only code paths in MixerController
|
Python
|
apache-2.0
|
jmarsik/mopidy,vrs01/mopidy,SuperStarPL/mopidy,diandiankan/mopidy,SuperStarPL/mopidy,mokieyue/mopidy,pacificIT/mopidy,vrs01/mopidy,diandiankan/mopidy,jcass77/mopidy,tkem/mopidy,glogiotatidis/mopidy,dbrgn/mopidy,bencevans/mopidy,bencevans/mopidy,SuperStarPL/mopidy,kingosticks/mopidy,swak/mopidy,glogiotatidis/mopidy,SuperStarPL/mopidy,vrs01/mopidy,mokieyue/mopidy,pacificIT/mopidy,glogiotatidis/mopidy,pacificIT/mopidy,hkariti/mopidy,ZenithDK/mopidy,rawdlite/mopidy,mopidy/mopidy,jmarsik/mopidy,mokieyue/mopidy,swak/mopidy,mopidy/mopidy,bencevans/mopidy,bacontext/mopidy,dbrgn/mopidy,hkariti/mopidy,bacontext/mopidy,quartz55/mopidy,dbrgn/mopidy,adamcik/mopidy,kingosticks/mopidy,hkariti/mopidy,jmarsik/mopidy,ali/mopidy,kingosticks/mopidy,jcass77/mopidy,mokieyue/mopidy,ali/mopidy,bacontext/mopidy,mopidy/mopidy,ZenithDK/mopidy,rawdlite/mopidy,jodal/mopidy,adamcik/mopidy,rawdlite/mopidy,bacontext/mopidy,ali/mopidy,ali/mopidy,adamcik/mopidy,dbrgn/mopidy,quartz55/mopidy,tkem/mopidy,rawdlite/mopidy,jcass77/mopidy,bencevans/mopidy,ZenithDK/mopidy,vrs01/mopidy,jodal/mopidy,swak/mopidy,tkem/mopidy,glogiotatidis/mopidy,swak/mopidy,jodal/mopidy,ZenithDK/mopidy,tkem/mopidy,quartz55/mopidy,diandiankan/mopidy,hkariti/mopidy,jmarsik/mopidy,diandiankan/mopidy,pacificIT/mopidy,quartz55/mopidy
|
from __future__ import absolute_import, unicode_literals
import logging
logger = logging.getLogger(__name__)
class MixerController(object):
pykka_traversable = True
def __init__(self, mixer):
self._mixer = mixer
self._volume = None
self._mute = False
def get_volume(self):
"""Get the volume.
Integer in range [0..100] or :class:`None` if unknown.
The volume scale is linear.
"""
if self._mixer:
return self._mixer.get_volume().get()
else:
# For testing
return self._volume
def set_volume(self, volume):
"""Set the volume.
The volume is defined as an integer in range [0..100].
The volume scale is linear.
"""
if self._mixer:
self._mixer.set_volume(volume)
else:
# For testing
self._volume = volume
def get_mute(self):
"""Get mute state.
:class:`True` if muted, :class:`False` otherwise.
"""
if self._mixer:
return self._mixer.get_mute().get()
else:
# For testing
return self._mute
def set_mute(self, mute):
"""Set mute state.
:class:`True` to mute, :class:`False` to unmute.
"""
mute = bool(mute)
if self._mixer:
self._mixer.set_mute(mute)
else:
# For testing
self._mute = mute
core: Remove test-only code paths in MixerController
|
from __future__ import absolute_import, unicode_literals
import logging
logger = logging.getLogger(__name__)
class MixerController(object):
pykka_traversable = True
def __init__(self, mixer):
self._mixer = mixer
self._volume = None
self._mute = False
def get_volume(self):
"""Get the volume.
Integer in range [0..100] or :class:`None` if unknown.
The volume scale is linear.
"""
if self._mixer is not None:
return self._mixer.get_volume().get()
def set_volume(self, volume):
"""Set the volume.
The volume is defined as an integer in range [0..100].
The volume scale is linear.
"""
if self._mixer is not None:
self._mixer.set_volume(volume)
def get_mute(self):
"""Get mute state.
:class:`True` if muted, :class:`False` unmuted, :class:`None` if
unknown.
"""
if self._mixer is not None:
return self._mixer.get_mute().get()
def set_mute(self, mute):
"""Set mute state.
:class:`True` to mute, :class:`False` to unmute.
"""
if self._mixer is not None:
self._mixer.set_mute(bool(mute))
|
<commit_before>from __future__ import absolute_import, unicode_literals
import logging
logger = logging.getLogger(__name__)
class MixerController(object):
pykka_traversable = True
def __init__(self, mixer):
self._mixer = mixer
self._volume = None
self._mute = False
def get_volume(self):
"""Get the volume.
Integer in range [0..100] or :class:`None` if unknown.
The volume scale is linear.
"""
if self._mixer:
return self._mixer.get_volume().get()
else:
# For testing
return self._volume
def set_volume(self, volume):
"""Set the volume.
The volume is defined as an integer in range [0..100].
The volume scale is linear.
"""
if self._mixer:
self._mixer.set_volume(volume)
else:
# For testing
self._volume = volume
def get_mute(self):
"""Get mute state.
:class:`True` if muted, :class:`False` otherwise.
"""
if self._mixer:
return self._mixer.get_mute().get()
else:
# For testing
return self._mute
def set_mute(self, mute):
"""Set mute state.
:class:`True` to mute, :class:`False` to unmute.
"""
mute = bool(mute)
if self._mixer:
self._mixer.set_mute(mute)
else:
# For testing
self._mute = mute
<commit_msg>core: Remove test-only code paths in MixerController<commit_after>
|
from __future__ import absolute_import, unicode_literals
import logging
logger = logging.getLogger(__name__)
class MixerController(object):
pykka_traversable = True
def __init__(self, mixer):
self._mixer = mixer
self._volume = None
self._mute = False
def get_volume(self):
"""Get the volume.
Integer in range [0..100] or :class:`None` if unknown.
The volume scale is linear.
"""
if self._mixer is not None:
return self._mixer.get_volume().get()
def set_volume(self, volume):
"""Set the volume.
The volume is defined as an integer in range [0..100].
The volume scale is linear.
"""
if self._mixer is not None:
self._mixer.set_volume(volume)
def get_mute(self):
"""Get mute state.
:class:`True` if muted, :class:`False` unmuted, :class:`None` if
unknown.
"""
if self._mixer is not None:
return self._mixer.get_mute().get()
def set_mute(self, mute):
"""Set mute state.
:class:`True` to mute, :class:`False` to unmute.
"""
if self._mixer is not None:
self._mixer.set_mute(bool(mute))
|
from __future__ import absolute_import, unicode_literals
import logging
logger = logging.getLogger(__name__)
class MixerController(object):
pykka_traversable = True
def __init__(self, mixer):
self._mixer = mixer
self._volume = None
self._mute = False
def get_volume(self):
"""Get the volume.
Integer in range [0..100] or :class:`None` if unknown.
The volume scale is linear.
"""
if self._mixer:
return self._mixer.get_volume().get()
else:
# For testing
return self._volume
def set_volume(self, volume):
"""Set the volume.
The volume is defined as an integer in range [0..100].
The volume scale is linear.
"""
if self._mixer:
self._mixer.set_volume(volume)
else:
# For testing
self._volume = volume
def get_mute(self):
"""Get mute state.
:class:`True` if muted, :class:`False` otherwise.
"""
if self._mixer:
return self._mixer.get_mute().get()
else:
# For testing
return self._mute
def set_mute(self, mute):
"""Set mute state.
:class:`True` to mute, :class:`False` to unmute.
"""
mute = bool(mute)
if self._mixer:
self._mixer.set_mute(mute)
else:
# For testing
self._mute = mute
core: Remove test-only code paths in MixerControllerfrom __future__ import absolute_import, unicode_literals
import logging
logger = logging.getLogger(__name__)
class MixerController(object):
pykka_traversable = True
def __init__(self, mixer):
self._mixer = mixer
self._volume = None
self._mute = False
def get_volume(self):
"""Get the volume.
Integer in range [0..100] or :class:`None` if unknown.
The volume scale is linear.
"""
if self._mixer is not None:
return self._mixer.get_volume().get()
def set_volume(self, volume):
"""Set the volume.
The volume is defined as an integer in range [0..100].
The volume scale is linear.
"""
if self._mixer is not None:
self._mixer.set_volume(volume)
def get_mute(self):
"""Get mute state.
:class:`True` if muted, :class:`False` unmuted, :class:`None` if
unknown.
"""
if self._mixer is not None:
return self._mixer.get_mute().get()
def set_mute(self, mute):
"""Set mute state.
:class:`True` to mute, :class:`False` to unmute.
"""
if self._mixer is not None:
self._mixer.set_mute(bool(mute))
|
<commit_before>from __future__ import absolute_import, unicode_literals
import logging
logger = logging.getLogger(__name__)
class MixerController(object):
pykka_traversable = True
def __init__(self, mixer):
self._mixer = mixer
self._volume = None
self._mute = False
def get_volume(self):
"""Get the volume.
Integer in range [0..100] or :class:`None` if unknown.
The volume scale is linear.
"""
if self._mixer:
return self._mixer.get_volume().get()
else:
# For testing
return self._volume
def set_volume(self, volume):
"""Set the volume.
The volume is defined as an integer in range [0..100].
The volume scale is linear.
"""
if self._mixer:
self._mixer.set_volume(volume)
else:
# For testing
self._volume = volume
def get_mute(self):
"""Get mute state.
:class:`True` if muted, :class:`False` otherwise.
"""
if self._mixer:
return self._mixer.get_mute().get()
else:
# For testing
return self._mute
def set_mute(self, mute):
"""Set mute state.
:class:`True` to mute, :class:`False` to unmute.
"""
mute = bool(mute)
if self._mixer:
self._mixer.set_mute(mute)
else:
# For testing
self._mute = mute
<commit_msg>core: Remove test-only code paths in MixerController<commit_after>from __future__ import absolute_import, unicode_literals
import logging
logger = logging.getLogger(__name__)
class MixerController(object):
pykka_traversable = True
def __init__(self, mixer):
self._mixer = mixer
self._volume = None
self._mute = False
def get_volume(self):
"""Get the volume.
Integer in range [0..100] or :class:`None` if unknown.
The volume scale is linear.
"""
if self._mixer is not None:
return self._mixer.get_volume().get()
def set_volume(self, volume):
"""Set the volume.
The volume is defined as an integer in range [0..100].
The volume scale is linear.
"""
if self._mixer is not None:
self._mixer.set_volume(volume)
def get_mute(self):
"""Get mute state.
:class:`True` if muted, :class:`False` unmuted, :class:`None` if
unknown.
"""
if self._mixer is not None:
return self._mixer.get_mute().get()
def set_mute(self, mute):
"""Set mute state.
:class:`True` to mute, :class:`False` to unmute.
"""
if self._mixer is not None:
self._mixer.set_mute(bool(mute))
|
e01c2049f9b8c9b576c889433db14d1b6ae7f796
|
frappy/services/pacman.py
|
frappy/services/pacman.py
|
"""
Wrapper for fictional test service
"""
from frappy.core.api import APICall, DefaultVersion
class Pacman(APICall):
"""
"""
def __init__(self, req_format="json", domain="127.0.0.1:8000",
secure=False, auth=None, api_version=DefaultVersion):
APICall.__init__(self, auth=auth, req_format=req_format, domain=domain,
secure=secure, debug=debug)
|
"""
Wrapper for fictional test service
"""
from frappy.core.api import APICall, DefaultVersion
class Pacman(APICall):
"""
"""
def __init__(self, req_format="json", domain="127.0.0.1:8000",
secure=False, auth=None, api_version=DefaultVersion,
debug=False):
APICall.__init__(self, auth=auth, req_format=req_format, domain=domain,
secure=secure, debug=debug)
|
Add debug argument to main Pacman __init__
|
Add debug argument to main Pacman __init__
|
Python
|
mit
|
durden/frappy
|
"""
Wrapper for fictional test service
"""
from frappy.core.api import APICall, DefaultVersion
class Pacman(APICall):
"""
"""
def __init__(self, req_format="json", domain="127.0.0.1:8000",
secure=False, auth=None, api_version=DefaultVersion):
APICall.__init__(self, auth=auth, req_format=req_format, domain=domain,
secure=secure, debug=debug)
Add debug argument to main Pacman __init__
|
"""
Wrapper for fictional test service
"""
from frappy.core.api import APICall, DefaultVersion
class Pacman(APICall):
"""
"""
def __init__(self, req_format="json", domain="127.0.0.1:8000",
secure=False, auth=None, api_version=DefaultVersion,
debug=False):
APICall.__init__(self, auth=auth, req_format=req_format, domain=domain,
secure=secure, debug=debug)
|
<commit_before>"""
Wrapper for fictional test service
"""
from frappy.core.api import APICall, DefaultVersion
class Pacman(APICall):
"""
"""
def __init__(self, req_format="json", domain="127.0.0.1:8000",
secure=False, auth=None, api_version=DefaultVersion):
APICall.__init__(self, auth=auth, req_format=req_format, domain=domain,
secure=secure, debug=debug)
<commit_msg>Add debug argument to main Pacman __init__<commit_after>
|
"""
Wrapper for fictional test service
"""
from frappy.core.api import APICall, DefaultVersion
class Pacman(APICall):
"""
"""
def __init__(self, req_format="json", domain="127.0.0.1:8000",
secure=False, auth=None, api_version=DefaultVersion,
debug=False):
APICall.__init__(self, auth=auth, req_format=req_format, domain=domain,
secure=secure, debug=debug)
|
"""
Wrapper for fictional test service
"""
from frappy.core.api import APICall, DefaultVersion
class Pacman(APICall):
"""
"""
def __init__(self, req_format="json", domain="127.0.0.1:8000",
secure=False, auth=None, api_version=DefaultVersion):
APICall.__init__(self, auth=auth, req_format=req_format, domain=domain,
secure=secure, debug=debug)
Add debug argument to main Pacman __init__"""
Wrapper for fictional test service
"""
from frappy.core.api import APICall, DefaultVersion
class Pacman(APICall):
"""
"""
def __init__(self, req_format="json", domain="127.0.0.1:8000",
secure=False, auth=None, api_version=DefaultVersion,
debug=False):
APICall.__init__(self, auth=auth, req_format=req_format, domain=domain,
secure=secure, debug=debug)
|
<commit_before>"""
Wrapper for fictional test service
"""
from frappy.core.api import APICall, DefaultVersion
class Pacman(APICall):
"""
"""
def __init__(self, req_format="json", domain="127.0.0.1:8000",
secure=False, auth=None, api_version=DefaultVersion):
APICall.__init__(self, auth=auth, req_format=req_format, domain=domain,
secure=secure, debug=debug)
<commit_msg>Add debug argument to main Pacman __init__<commit_after>"""
Wrapper for fictional test service
"""
from frappy.core.api import APICall, DefaultVersion
class Pacman(APICall):
"""
"""
def __init__(self, req_format="json", domain="127.0.0.1:8000",
secure=False, auth=None, api_version=DefaultVersion,
debug=False):
APICall.__init__(self, auth=auth, req_format=req_format, domain=domain,
secure=secure, debug=debug)
|
c2a07e3e53651f8679aacc23ad665822115c5928
|
ibutton.py
|
ibutton.py
|
import serial
class iButton(object):
def __init__(self, ibutton_address, rfid_address, debug=False):
# self.ibutton_serial = serial.Serial(ibutton_address)
self.rfid_serial = serial.Serial(rfid_address)
self.debug = debug
def read(self):
if self.debug:
with open("ibutton.txt") as ibutton_file:
return ibutton_file.readline().strip()
code = ''
while True:
byte = self.rfid_serial.read()
if byte == '\r':
return code
code += byte
print("read: %s" % byte)
|
import serial
class iButton(object):
def __init__(self, ibutton_address, rfid_address, debug=False):
# self.ibutton_serial = serial.Serial(ibutton_address)
self.rfid_serial = serial.Serial(rfid_address)
self.debug = debug
def read(self):
if self.debug:
with open("ibutton.txt") as ibutton_file:
return ibutton_file.readline().strip()
code = ''
while True:
byte = self.rfid_serial.read()
if len(code)==12
return code
code += byte
print("Reading ID: %s" % code)
|
Update for working rfid reader. Test code
|
Update for working rfid reader. Test code
|
Python
|
mit
|
harlanhaskins/DrinkTouchClient-2.0,stevenmirabito/DrinkTouchClient-2.0
|
import serial
class iButton(object):
def __init__(self, ibutton_address, rfid_address, debug=False):
# self.ibutton_serial = serial.Serial(ibutton_address)
self.rfid_serial = serial.Serial(rfid_address)
self.debug = debug
def read(self):
if self.debug:
with open("ibutton.txt") as ibutton_file:
return ibutton_file.readline().strip()
code = ''
while True:
byte = self.rfid_serial.read()
if byte == '\r':
return code
code += byte
print("read: %s" % byte)
Update for working rfid reader. Test code
|
import serial
class iButton(object):
def __init__(self, ibutton_address, rfid_address, debug=False):
# self.ibutton_serial = serial.Serial(ibutton_address)
self.rfid_serial = serial.Serial(rfid_address)
self.debug = debug
def read(self):
if self.debug:
with open("ibutton.txt") as ibutton_file:
return ibutton_file.readline().strip()
code = ''
while True:
byte = self.rfid_serial.read()
if len(code)==12
return code
code += byte
print("Reading ID: %s" % code)
|
<commit_before>import serial
class iButton(object):
def __init__(self, ibutton_address, rfid_address, debug=False):
# self.ibutton_serial = serial.Serial(ibutton_address)
self.rfid_serial = serial.Serial(rfid_address)
self.debug = debug
def read(self):
if self.debug:
with open("ibutton.txt") as ibutton_file:
return ibutton_file.readline().strip()
code = ''
while True:
byte = self.rfid_serial.read()
if byte == '\r':
return code
code += byte
print("read: %s" % byte)
<commit_msg>Update for working rfid reader. Test code<commit_after>
|
import serial
class iButton(object):
def __init__(self, ibutton_address, rfid_address, debug=False):
# self.ibutton_serial = serial.Serial(ibutton_address)
self.rfid_serial = serial.Serial(rfid_address)
self.debug = debug
def read(self):
if self.debug:
with open("ibutton.txt") as ibutton_file:
return ibutton_file.readline().strip()
code = ''
while True:
byte = self.rfid_serial.read()
if len(code)==12
return code
code += byte
print("Reading ID: %s" % code)
|
import serial
class iButton(object):
def __init__(self, ibutton_address, rfid_address, debug=False):
# self.ibutton_serial = serial.Serial(ibutton_address)
self.rfid_serial = serial.Serial(rfid_address)
self.debug = debug
def read(self):
if self.debug:
with open("ibutton.txt") as ibutton_file:
return ibutton_file.readline().strip()
code = ''
while True:
byte = self.rfid_serial.read()
if byte == '\r':
return code
code += byte
print("read: %s" % byte)
Update for working rfid reader. Test codeimport serial
class iButton(object):
def __init__(self, ibutton_address, rfid_address, debug=False):
# self.ibutton_serial = serial.Serial(ibutton_address)
self.rfid_serial = serial.Serial(rfid_address)
self.debug = debug
def read(self):
if self.debug:
with open("ibutton.txt") as ibutton_file:
return ibutton_file.readline().strip()
code = ''
while True:
byte = self.rfid_serial.read()
if len(code)==12
return code
code += byte
print("Reading ID: %s" % code)
|
<commit_before>import serial
class iButton(object):
def __init__(self, ibutton_address, rfid_address, debug=False):
# self.ibutton_serial = serial.Serial(ibutton_address)
self.rfid_serial = serial.Serial(rfid_address)
self.debug = debug
def read(self):
if self.debug:
with open("ibutton.txt") as ibutton_file:
return ibutton_file.readline().strip()
code = ''
while True:
byte = self.rfid_serial.read()
if byte == '\r':
return code
code += byte
print("read: %s" % byte)
<commit_msg>Update for working rfid reader. Test code<commit_after>import serial
class iButton(object):
def __init__(self, ibutton_address, rfid_address, debug=False):
# self.ibutton_serial = serial.Serial(ibutton_address)
self.rfid_serial = serial.Serial(rfid_address)
self.debug = debug
def read(self):
if self.debug:
with open("ibutton.txt") as ibutton_file:
return ibutton_file.readline().strip()
code = ''
while True:
byte = self.rfid_serial.read()
if len(code)==12
return code
code += byte
print("Reading ID: %s" % code)
|
47273357ac7bd646e8a9326c87688191eb8a1a89
|
airesources/Python/MyBot.py
|
airesources/Python/MyBot.py
|
from hlt import *
from networking import *
playerTag, gameMap = getInit()
sendInit("BasicBot"+str(playerTag))
while True:
moves = []
gameMap = getFrame()
for y in range(0, len(gameMap.contents)):
for x in range(0, len(gameMap.contents[y])):
site = gameMap.contents[y][x]
if site.owner == playerTag:
direction = random.randint(0, 5)
if site.strength < 5*site.production:
direction = STILL
else:
for d in CARDINALS:
if gameMap.getSite(Location(x, y), d).owner != playerTag:
direction = d
break
moves.append(Move(Location(x, y), direction))
sendFrame(moves)
|
from hlt import *
from networking import *
playerTag, gameMap = getInit()
sendInit("PythonBot"+str(playerTag))
while True:
moves = []
gameMap = getFrame()
for y in range(0, len(gameMap.contents)):
for x in range(0, len(gameMap.contents[y])):
site = gameMap.contents[y][x]
if site.owner == playerTag:
moves.append(Move(Location(x, y), int(random.random() * 5)))
sendFrame(moves)
|
Revert python mybot to random bot
|
Revert python mybot to random bot
Former-commit-id: b08897ea13c57ce3700439954b432a6453fcfb3f
Former-commit-id: 28471a6712bd57db5dc7fd6d42d614d2f7ae7069
Former-commit-id: 871c6ab61f365689493b0663b761317cfb786507
|
Python
|
mit
|
yangle/HaliteIO,yangle/HaliteIO,HaliteChallenge/Halite,HaliteChallenge/Halite-II,yangle/HaliteIO,lanyudhy/Halite-II,HaliteChallenge/Halite,HaliteChallenge/Halite-II,HaliteChallenge/Halite,HaliteChallenge/Halite-II,yangle/HaliteIO,HaliteChallenge/Halite,HaliteChallenge/Halite-II,HaliteChallenge/Halite-II,HaliteChallenge/Halite-II,lanyudhy/Halite-II,yangle/HaliteIO,HaliteChallenge/Halite-II,HaliteChallenge/Halite-II,lanyudhy/Halite-II,yangle/HaliteIO,yangle/HaliteIO,lanyudhy/Halite-II,lanyudhy/Halite-II,yangle/HaliteIO,lanyudhy/Halite-II,HaliteChallenge/Halite,HaliteChallenge/Halite-II,HaliteChallenge/Halite-II,yangle/HaliteIO,lanyudhy/Halite-II,yangle/HaliteIO,yangle/HaliteIO,HaliteChallenge/Halite-II,lanyudhy/Halite-II,HaliteChallenge/Halite,HaliteChallenge/Halite,HaliteChallenge/Halite,HaliteChallenge/Halite,HaliteChallenge/Halite-II,HaliteChallenge/Halite,lanyudhy/Halite-II,HaliteChallenge/Halite,HaliteChallenge/Halite-II,HaliteChallenge/Halite,yangle/HaliteIO,lanyudhy/Halite-II,HaliteChallenge/Halite-II,HaliteChallenge/Halite-II,HaliteChallenge/Halite-II,lanyudhy/Halite-II
|
from hlt import *
from networking import *
playerTag, gameMap = getInit()
sendInit("BasicBot"+str(playerTag))
while True:
moves = []
gameMap = getFrame()
for y in range(0, len(gameMap.contents)):
for x in range(0, len(gameMap.contents[y])):
site = gameMap.contents[y][x]
if site.owner == playerTag:
direction = random.randint(0, 5)
if site.strength < 5*site.production:
direction = STILL
else:
for d in CARDINALS:
if gameMap.getSite(Location(x, y), d).owner != playerTag:
direction = d
break
moves.append(Move(Location(x, y), direction))
sendFrame(moves)
Revert python mybot to random bot
Former-commit-id: b08897ea13c57ce3700439954b432a6453fcfb3f
Former-commit-id: 28471a6712bd57db5dc7fd6d42d614d2f7ae7069
Former-commit-id: 871c6ab61f365689493b0663b761317cfb786507
|
from hlt import *
from networking import *
playerTag, gameMap = getInit()
sendInit("PythonBot"+str(playerTag))
while True:
moves = []
gameMap = getFrame()
for y in range(0, len(gameMap.contents)):
for x in range(0, len(gameMap.contents[y])):
site = gameMap.contents[y][x]
if site.owner == playerTag:
moves.append(Move(Location(x, y), int(random.random() * 5)))
sendFrame(moves)
|
<commit_before>from hlt import *
from networking import *
playerTag, gameMap = getInit()
sendInit("BasicBot"+str(playerTag))
while True:
moves = []
gameMap = getFrame()
for y in range(0, len(gameMap.contents)):
for x in range(0, len(gameMap.contents[y])):
site = gameMap.contents[y][x]
if site.owner == playerTag:
direction = random.randint(0, 5)
if site.strength < 5*site.production:
direction = STILL
else:
for d in CARDINALS:
if gameMap.getSite(Location(x, y), d).owner != playerTag:
direction = d
break
moves.append(Move(Location(x, y), direction))
sendFrame(moves)
<commit_msg>Revert python mybot to random bot
Former-commit-id: b08897ea13c57ce3700439954b432a6453fcfb3f
Former-commit-id: 28471a6712bd57db5dc7fd6d42d614d2f7ae7069
Former-commit-id: 871c6ab61f365689493b0663b761317cfb786507<commit_after>
|
from hlt import *
from networking import *
playerTag, gameMap = getInit()
sendInit("PythonBot"+str(playerTag))
while True:
moves = []
gameMap = getFrame()
for y in range(0, len(gameMap.contents)):
for x in range(0, len(gameMap.contents[y])):
site = gameMap.contents[y][x]
if site.owner == playerTag:
moves.append(Move(Location(x, y), int(random.random() * 5)))
sendFrame(moves)
|
from hlt import *
from networking import *
playerTag, gameMap = getInit()
sendInit("BasicBot"+str(playerTag))
while True:
moves = []
gameMap = getFrame()
for y in range(0, len(gameMap.contents)):
for x in range(0, len(gameMap.contents[y])):
site = gameMap.contents[y][x]
if site.owner == playerTag:
direction = random.randint(0, 5)
if site.strength < 5*site.production:
direction = STILL
else:
for d in CARDINALS:
if gameMap.getSite(Location(x, y), d).owner != playerTag:
direction = d
break
moves.append(Move(Location(x, y), direction))
sendFrame(moves)
Revert python mybot to random bot
Former-commit-id: b08897ea13c57ce3700439954b432a6453fcfb3f
Former-commit-id: 28471a6712bd57db5dc7fd6d42d614d2f7ae7069
Former-commit-id: 871c6ab61f365689493b0663b761317cfb786507from hlt import *
from networking import *
playerTag, gameMap = getInit()
sendInit("PythonBot"+str(playerTag))
while True:
moves = []
gameMap = getFrame()
for y in range(0, len(gameMap.contents)):
for x in range(0, len(gameMap.contents[y])):
site = gameMap.contents[y][x]
if site.owner == playerTag:
moves.append(Move(Location(x, y), int(random.random() * 5)))
sendFrame(moves)
|
<commit_before>from hlt import *
from networking import *
playerTag, gameMap = getInit()
sendInit("BasicBot"+str(playerTag))
while True:
moves = []
gameMap = getFrame()
for y in range(0, len(gameMap.contents)):
for x in range(0, len(gameMap.contents[y])):
site = gameMap.contents[y][x]
if site.owner == playerTag:
direction = random.randint(0, 5)
if site.strength < 5*site.production:
direction = STILL
else:
for d in CARDINALS:
if gameMap.getSite(Location(x, y), d).owner != playerTag:
direction = d
break
moves.append(Move(Location(x, y), direction))
sendFrame(moves)
<commit_msg>Revert python mybot to random bot
Former-commit-id: b08897ea13c57ce3700439954b432a6453fcfb3f
Former-commit-id: 28471a6712bd57db5dc7fd6d42d614d2f7ae7069
Former-commit-id: 871c6ab61f365689493b0663b761317cfb786507<commit_after>from hlt import *
from networking import *
playerTag, gameMap = getInit()
sendInit("PythonBot"+str(playerTag))
while True:
moves = []
gameMap = getFrame()
for y in range(0, len(gameMap.contents)):
for x in range(0, len(gameMap.contents[y])):
site = gameMap.contents[y][x]
if site.owner == playerTag:
moves.append(Move(Location(x, y), int(random.random() * 5)))
sendFrame(moves)
|
35eceaf748ca415a58f7230fcb4d6a2aa9379f05
|
pdf_generator/medias.py
|
pdf_generator/medias.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Medias locator
==============
"""
from __future__ import absolute_import
import os.path
class PathMediasLocator(object):
"""
Returns medias relatively to the root directory *base*.
"""
def __init__(self, base):
self.base = base
def __call__(self, path):
path = path.lstrip('/')
return os.path.join(self.base, path)
class NoMediasLocator(object):
"""
Raises an error when a media is asked.
"""
def __call__(self, path):
raise RuntimeError('No media path')
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Medias locator
==============
Medias locator returns a path on the file system from the *src* of an img tag.
.. data:: PLACEHOLDER
A special object that indicates to the renderer to use a placeholder
instead of a media.
"""
from __future__ import absolute_import
import os.path
PLACEHOLDER = object()
class PathMediasLocator(object):
"""
Returns medias relatively to the root directory *base*.
"""
def __init__(self, base):
self.base = base
def __call__(self, path):
path = path.lstrip('/')
return os.path.join(self.base, path)
class NoMediasLocator(object):
"""
Raises an error when a media is asked.
"""
def __call__(self, path):
raise RuntimeError('No media path')
class DebugMediasLocator(object):
"""
Return :data:`PLACEHOLDER`
"""
def __call__(self, path):
return PLACEHOLDER
|
Add a special PLACEHOLDER value
|
Add a special PLACEHOLDER value
|
Python
|
mit
|
cecedille1/PDF_generator
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Medias locator
==============
"""
from __future__ import absolute_import
import os.path
class PathMediasLocator(object):
"""
Returns medias relatively to the root directory *base*.
"""
def __init__(self, base):
self.base = base
def __call__(self, path):
path = path.lstrip('/')
return os.path.join(self.base, path)
class NoMediasLocator(object):
"""
Raises an error when a media is asked.
"""
def __call__(self, path):
raise RuntimeError('No media path')
Add a special PLACEHOLDER value
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Medias locator
==============
Medias locator returns a path on the file system from the *src* of an img tag.
.. data:: PLACEHOLDER
A special object that indicates to the renderer to use a placeholder
instead of a media.
"""
from __future__ import absolute_import
import os.path
PLACEHOLDER = object()
class PathMediasLocator(object):
"""
Returns medias relatively to the root directory *base*.
"""
def __init__(self, base):
self.base = base
def __call__(self, path):
path = path.lstrip('/')
return os.path.join(self.base, path)
class NoMediasLocator(object):
"""
Raises an error when a media is asked.
"""
def __call__(self, path):
raise RuntimeError('No media path')
class DebugMediasLocator(object):
"""
Return :data:`PLACEHOLDER`
"""
def __call__(self, path):
return PLACEHOLDER
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Medias locator
==============
"""
from __future__ import absolute_import
import os.path
class PathMediasLocator(object):
"""
Returns medias relatively to the root directory *base*.
"""
def __init__(self, base):
self.base = base
def __call__(self, path):
path = path.lstrip('/')
return os.path.join(self.base, path)
class NoMediasLocator(object):
"""
Raises an error when a media is asked.
"""
def __call__(self, path):
raise RuntimeError('No media path')
<commit_msg>Add a special PLACEHOLDER value<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Medias locator
==============
Medias locator returns a path on the file system from the *src* of an img tag.
.. data:: PLACEHOLDER
A special object that indicates to the renderer to use a placeholder
instead of a media.
"""
from __future__ import absolute_import
import os.path
PLACEHOLDER = object()
class PathMediasLocator(object):
"""
Returns medias relatively to the root directory *base*.
"""
def __init__(self, base):
self.base = base
def __call__(self, path):
path = path.lstrip('/')
return os.path.join(self.base, path)
class NoMediasLocator(object):
"""
Raises an error when a media is asked.
"""
def __call__(self, path):
raise RuntimeError('No media path')
class DebugMediasLocator(object):
"""
Return :data:`PLACEHOLDER`
"""
def __call__(self, path):
return PLACEHOLDER
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Medias locator
==============
"""
from __future__ import absolute_import
import os.path
class PathMediasLocator(object):
"""
Returns medias relatively to the root directory *base*.
"""
def __init__(self, base):
self.base = base
def __call__(self, path):
path = path.lstrip('/')
return os.path.join(self.base, path)
class NoMediasLocator(object):
"""
Raises an error when a media is asked.
"""
def __call__(self, path):
raise RuntimeError('No media path')
Add a special PLACEHOLDER value#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Medias locator
==============
Medias locator returns a path on the file system from the *src* of an img tag.
.. data:: PLACEHOLDER
A special object that indicates to the renderer to use a placeholder
instead of a media.
"""
from __future__ import absolute_import
import os.path
PLACEHOLDER = object()
class PathMediasLocator(object):
"""
Returns medias relatively to the root directory *base*.
"""
def __init__(self, base):
self.base = base
def __call__(self, path):
path = path.lstrip('/')
return os.path.join(self.base, path)
class NoMediasLocator(object):
"""
Raises an error when a media is asked.
"""
def __call__(self, path):
raise RuntimeError('No media path')
class DebugMediasLocator(object):
"""
Return :data:`PLACEHOLDER`
"""
def __call__(self, path):
return PLACEHOLDER
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Medias locator
==============
"""
from __future__ import absolute_import
import os.path
class PathMediasLocator(object):
"""
Returns medias relatively to the root directory *base*.
"""
def __init__(self, base):
self.base = base
def __call__(self, path):
path = path.lstrip('/')
return os.path.join(self.base, path)
class NoMediasLocator(object):
"""
Raises an error when a media is asked.
"""
def __call__(self, path):
raise RuntimeError('No media path')
<commit_msg>Add a special PLACEHOLDER value<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Medias locator
==============
Medias locator returns a path on the file system from the *src* of an img tag.
.. data:: PLACEHOLDER
A special object that indicates to the renderer to use a placeholder
instead of a media.
"""
from __future__ import absolute_import
import os.path
PLACEHOLDER = object()
class PathMediasLocator(object):
"""
Returns medias relatively to the root directory *base*.
"""
def __init__(self, base):
self.base = base
def __call__(self, path):
path = path.lstrip('/')
return os.path.join(self.base, path)
class NoMediasLocator(object):
"""
Raises an error when a media is asked.
"""
def __call__(self, path):
raise RuntimeError('No media path')
class DebugMediasLocator(object):
"""
Return :data:`PLACEHOLDER`
"""
def __call__(self, path):
return PLACEHOLDER
|
a220aa08071b18edf314a1cfc200cc90c19b3ced
|
antxetamedia/blobs/views.py
|
antxetamedia/blobs/views.py
|
from django.http import JsonResponse
from django.views.generic import ListView
from .models import Blob
class PodcastBlobList(ListView):
model = Blob
def get_queryset(self):
qs = super(PodcastBlobList, self).get_queryset()
qs = qs.filter(content_type__app_label=self.kwargs['app_label'],
content_type__model=self.kwargs['model'],
object_id=self.kwargs['id'])
qs = qs.order_by('-position')
qs = qs.select_related('content_type').prefetch_related('content_object')
return qs
def get_blob_data(self, blob):
return {
'id': blob.pk,
'podcast': blob.content_object.get_blobs_url(),
'title': blob.content_object.title,
'image': blob.content_object.image.url if blob.content_object.image else None,
'url': blob.link,
}
def get_context_data(self, **kwargs):
kwargs['blob_list'] = [self.get_blob_data(blob) for blob in self.object_list]
return super(PodcastBlobList, self).get_context_data(**kwargs)
def render_to_response(self, context, **response_kwargs):
return JsonResponse(context['blob_list'], safe=False)
|
from django.http import JsonResponse
from django.views.generic import ListView
from .models import Blob
class PodcastBlobList(ListView):
model = Blob
def get_queryset(self):
qs = super(PodcastBlobList, self).get_queryset()
qs = qs.filter(content_type__app_label=self.kwargs['app_label'],
content_type__model=self.kwargs['model'],
object_id=self.kwargs['id'])
qs = qs.order_by('position')
qs = qs.select_related('content_type').prefetch_related('content_object')
return qs
def get_blob_data(self, blob):
return {
'id': blob.pk,
'podcast': blob.content_object.get_blobs_url(),
'title': blob.content_object.title,
'image': blob.content_object.image.url if blob.content_object.image else None,
'url': blob.link,
}
def get_context_data(self, **kwargs):
kwargs['blob_list'] = [self.get_blob_data(blob) for blob in self.object_list]
return super(PodcastBlobList, self).get_context_data(**kwargs)
def render_to_response(self, context, **response_kwargs):
return JsonResponse(context['blob_list'], safe=False)
|
Order blobs in the playlist API by lowest position first
|
Order blobs in the playlist API by lowest position first
|
Python
|
agpl-3.0
|
GISAElkartea/amv2,GISAElkartea/amv2,GISAElkartea/amv2
|
from django.http import JsonResponse
from django.views.generic import ListView
from .models import Blob
class PodcastBlobList(ListView):
model = Blob
def get_queryset(self):
qs = super(PodcastBlobList, self).get_queryset()
qs = qs.filter(content_type__app_label=self.kwargs['app_label'],
content_type__model=self.kwargs['model'],
object_id=self.kwargs['id'])
qs = qs.order_by('-position')
qs = qs.select_related('content_type').prefetch_related('content_object')
return qs
def get_blob_data(self, blob):
return {
'id': blob.pk,
'podcast': blob.content_object.get_blobs_url(),
'title': blob.content_object.title,
'image': blob.content_object.image.url if blob.content_object.image else None,
'url': blob.link,
}
def get_context_data(self, **kwargs):
kwargs['blob_list'] = [self.get_blob_data(blob) for blob in self.object_list]
return super(PodcastBlobList, self).get_context_data(**kwargs)
def render_to_response(self, context, **response_kwargs):
return JsonResponse(context['blob_list'], safe=False)
Order blobs in the playlist API by lowest position first
|
from django.http import JsonResponse
from django.views.generic import ListView
from .models import Blob
class PodcastBlobList(ListView):
model = Blob
def get_queryset(self):
qs = super(PodcastBlobList, self).get_queryset()
qs = qs.filter(content_type__app_label=self.kwargs['app_label'],
content_type__model=self.kwargs['model'],
object_id=self.kwargs['id'])
qs = qs.order_by('position')
qs = qs.select_related('content_type').prefetch_related('content_object')
return qs
def get_blob_data(self, blob):
return {
'id': blob.pk,
'podcast': blob.content_object.get_blobs_url(),
'title': blob.content_object.title,
'image': blob.content_object.image.url if blob.content_object.image else None,
'url': blob.link,
}
def get_context_data(self, **kwargs):
kwargs['blob_list'] = [self.get_blob_data(blob) for blob in self.object_list]
return super(PodcastBlobList, self).get_context_data(**kwargs)
def render_to_response(self, context, **response_kwargs):
return JsonResponse(context['blob_list'], safe=False)
|
<commit_before>from django.http import JsonResponse
from django.views.generic import ListView
from .models import Blob
class PodcastBlobList(ListView):
model = Blob
def get_queryset(self):
qs = super(PodcastBlobList, self).get_queryset()
qs = qs.filter(content_type__app_label=self.kwargs['app_label'],
content_type__model=self.kwargs['model'],
object_id=self.kwargs['id'])
qs = qs.order_by('-position')
qs = qs.select_related('content_type').prefetch_related('content_object')
return qs
def get_blob_data(self, blob):
return {
'id': blob.pk,
'podcast': blob.content_object.get_blobs_url(),
'title': blob.content_object.title,
'image': blob.content_object.image.url if blob.content_object.image else None,
'url': blob.link,
}
def get_context_data(self, **kwargs):
kwargs['blob_list'] = [self.get_blob_data(blob) for blob in self.object_list]
return super(PodcastBlobList, self).get_context_data(**kwargs)
def render_to_response(self, context, **response_kwargs):
return JsonResponse(context['blob_list'], safe=False)
<commit_msg>Order blobs in the playlist API by lowest position first<commit_after>
|
from django.http import JsonResponse
from django.views.generic import ListView
from .models import Blob
class PodcastBlobList(ListView):
model = Blob
def get_queryset(self):
qs = super(PodcastBlobList, self).get_queryset()
qs = qs.filter(content_type__app_label=self.kwargs['app_label'],
content_type__model=self.kwargs['model'],
object_id=self.kwargs['id'])
qs = qs.order_by('position')
qs = qs.select_related('content_type').prefetch_related('content_object')
return qs
def get_blob_data(self, blob):
return {
'id': blob.pk,
'podcast': blob.content_object.get_blobs_url(),
'title': blob.content_object.title,
'image': blob.content_object.image.url if blob.content_object.image else None,
'url': blob.link,
}
def get_context_data(self, **kwargs):
kwargs['blob_list'] = [self.get_blob_data(blob) for blob in self.object_list]
return super(PodcastBlobList, self).get_context_data(**kwargs)
def render_to_response(self, context, **response_kwargs):
return JsonResponse(context['blob_list'], safe=False)
|
from django.http import JsonResponse
from django.views.generic import ListView
from .models import Blob
class PodcastBlobList(ListView):
model = Blob
def get_queryset(self):
qs = super(PodcastBlobList, self).get_queryset()
qs = qs.filter(content_type__app_label=self.kwargs['app_label'],
content_type__model=self.kwargs['model'],
object_id=self.kwargs['id'])
qs = qs.order_by('-position')
qs = qs.select_related('content_type').prefetch_related('content_object')
return qs
def get_blob_data(self, blob):
return {
'id': blob.pk,
'podcast': blob.content_object.get_blobs_url(),
'title': blob.content_object.title,
'image': blob.content_object.image.url if blob.content_object.image else None,
'url': blob.link,
}
def get_context_data(self, **kwargs):
kwargs['blob_list'] = [self.get_blob_data(blob) for blob in self.object_list]
return super(PodcastBlobList, self).get_context_data(**kwargs)
def render_to_response(self, context, **response_kwargs):
return JsonResponse(context['blob_list'], safe=False)
Order blobs in the playlist API by lowest position firstfrom django.http import JsonResponse
from django.views.generic import ListView
from .models import Blob
class PodcastBlobList(ListView):
model = Blob
def get_queryset(self):
qs = super(PodcastBlobList, self).get_queryset()
qs = qs.filter(content_type__app_label=self.kwargs['app_label'],
content_type__model=self.kwargs['model'],
object_id=self.kwargs['id'])
qs = qs.order_by('position')
qs = qs.select_related('content_type').prefetch_related('content_object')
return qs
def get_blob_data(self, blob):
return {
'id': blob.pk,
'podcast': blob.content_object.get_blobs_url(),
'title': blob.content_object.title,
'image': blob.content_object.image.url if blob.content_object.image else None,
'url': blob.link,
}
def get_context_data(self, **kwargs):
kwargs['blob_list'] = [self.get_blob_data(blob) for blob in self.object_list]
return super(PodcastBlobList, self).get_context_data(**kwargs)
def render_to_response(self, context, **response_kwargs):
return JsonResponse(context['blob_list'], safe=False)
|
<commit_before>from django.http import JsonResponse
from django.views.generic import ListView
from .models import Blob
class PodcastBlobList(ListView):
model = Blob
def get_queryset(self):
qs = super(PodcastBlobList, self).get_queryset()
qs = qs.filter(content_type__app_label=self.kwargs['app_label'],
content_type__model=self.kwargs['model'],
object_id=self.kwargs['id'])
qs = qs.order_by('-position')
qs = qs.select_related('content_type').prefetch_related('content_object')
return qs
def get_blob_data(self, blob):
return {
'id': blob.pk,
'podcast': blob.content_object.get_blobs_url(),
'title': blob.content_object.title,
'image': blob.content_object.image.url if blob.content_object.image else None,
'url': blob.link,
}
def get_context_data(self, **kwargs):
kwargs['blob_list'] = [self.get_blob_data(blob) for blob in self.object_list]
return super(PodcastBlobList, self).get_context_data(**kwargs)
def render_to_response(self, context, **response_kwargs):
return JsonResponse(context['blob_list'], safe=False)
<commit_msg>Order blobs in the playlist API by lowest position first<commit_after>from django.http import JsonResponse
from django.views.generic import ListView
from .models import Blob
class PodcastBlobList(ListView):
model = Blob
def get_queryset(self):
qs = super(PodcastBlobList, self).get_queryset()
qs = qs.filter(content_type__app_label=self.kwargs['app_label'],
content_type__model=self.kwargs['model'],
object_id=self.kwargs['id'])
qs = qs.order_by('position')
qs = qs.select_related('content_type').prefetch_related('content_object')
return qs
def get_blob_data(self, blob):
return {
'id': blob.pk,
'podcast': blob.content_object.get_blobs_url(),
'title': blob.content_object.title,
'image': blob.content_object.image.url if blob.content_object.image else None,
'url': blob.link,
}
def get_context_data(self, **kwargs):
kwargs['blob_list'] = [self.get_blob_data(blob) for blob in self.object_list]
return super(PodcastBlobList, self).get_context_data(**kwargs)
def render_to_response(self, context, **response_kwargs):
return JsonResponse(context['blob_list'], safe=False)
|
6a9524502ebf3c04dede24fb937baec5c48342ef
|
widgy/contrib/widgy_mezzanine/search_indexes.py
|
widgy/contrib/widgy_mezzanine/search_indexes.py
|
from haystack import indexes
from widgy.contrib.widgy_mezzanine import get_widgypage_model
from widgy.templatetags.widgy_tags import render_root
from widgy.utils import html_to_plaintext
from .signals import widgypage_pre_index
WidgyPage = get_widgypage_model()
class PageIndex(indexes.SearchIndex, indexes.Indexable):
title = indexes.CharField(model_attr='title')
date = indexes.DateTimeField(model_attr='publish_date')
description = indexes.CharField(model_attr='description')
keywords = indexes.MultiValueField()
get_absolute_url = indexes.CharField(model_attr='get_absolute_url')
text = indexes.CharField(document=True)
def full_prepare(self, *args, **kwargs):
widgypage_pre_index.send(sender=self)
return super(PageIndex, self).full_prepare(*args, **kwargs)
def get_model(self):
return WidgyPage
def index_queryset(self, using=None):
return self.get_model().objects.published()
def prepare_text(self, obj):
html = render_root({}, obj, 'root_node')
content = html_to_plaintext(html)
keywords = ' '.join(self.prepare_keywords(obj))
return ' '.join([obj.title, keywords, obj.description,
content])
def prepare_keywords(self, obj):
return [unicode(k) for k in obj.keywords.all()]
|
from haystack import indexes
from widgy.contrib.widgy_mezzanine import get_widgypage_model
from widgy.templatetags.widgy_tags import render_root
from widgy.utils import html_to_plaintext
from .signals import widgypage_pre_index
WidgyPage = get_widgypage_model()
class PageIndex(indexes.SearchIndex, indexes.Indexable):
title = indexes.CharField(model_attr='title')
date = indexes.DateTimeField(model_attr='publish_date')
description = indexes.CharField(model_attr='description')
keywords = indexes.MultiValueField()
get_absolute_url = indexes.CharField(model_attr='get_absolute_url')
text = indexes.CharField(document=True)
def full_prepare(self, *args, **kwargs):
widgypage_pre_index.send(sender=self)
return super(PageIndex, self).full_prepare(*args, **kwargs)
def get_model(self):
return WidgyPage
def index_queryset(self, using=None):
return self.get_model().objects.published()
def prepare_text(self, obj):
context = {'_current_page': obj.page_ptr, 'page': obj.page_ptr}
html = render_root(context, obj, 'root_node')
content = html_to_plaintext(html)
keywords = ' '.join(self.prepare_keywords(obj))
return ' '.join([obj.title, keywords, obj.description,
content])
def prepare_keywords(self, obj):
return [unicode(k) for k in obj.keywords.all()]
|
Use a more realistic context to render pages for search
|
Use a more realistic context to render pages for search
The Mezzanine page middleware adds a page and _current_page to the
context for pages, so our search index should too.
|
Python
|
apache-2.0
|
j00bar/django-widgy,j00bar/django-widgy,j00bar/django-widgy
|
from haystack import indexes
from widgy.contrib.widgy_mezzanine import get_widgypage_model
from widgy.templatetags.widgy_tags import render_root
from widgy.utils import html_to_plaintext
from .signals import widgypage_pre_index
WidgyPage = get_widgypage_model()
class PageIndex(indexes.SearchIndex, indexes.Indexable):
title = indexes.CharField(model_attr='title')
date = indexes.DateTimeField(model_attr='publish_date')
description = indexes.CharField(model_attr='description')
keywords = indexes.MultiValueField()
get_absolute_url = indexes.CharField(model_attr='get_absolute_url')
text = indexes.CharField(document=True)
def full_prepare(self, *args, **kwargs):
widgypage_pre_index.send(sender=self)
return super(PageIndex, self).full_prepare(*args, **kwargs)
def get_model(self):
return WidgyPage
def index_queryset(self, using=None):
return self.get_model().objects.published()
def prepare_text(self, obj):
html = render_root({}, obj, 'root_node')
content = html_to_plaintext(html)
keywords = ' '.join(self.prepare_keywords(obj))
return ' '.join([obj.title, keywords, obj.description,
content])
def prepare_keywords(self, obj):
return [unicode(k) for k in obj.keywords.all()]
Use a more realistic context to render pages for search
The Mezzanine page middleware adds a page and _current_page to the
context for pages, so our search index should too.
|
from haystack import indexes
from widgy.contrib.widgy_mezzanine import get_widgypage_model
from widgy.templatetags.widgy_tags import render_root
from widgy.utils import html_to_plaintext
from .signals import widgypage_pre_index
WidgyPage = get_widgypage_model()
class PageIndex(indexes.SearchIndex, indexes.Indexable):
title = indexes.CharField(model_attr='title')
date = indexes.DateTimeField(model_attr='publish_date')
description = indexes.CharField(model_attr='description')
keywords = indexes.MultiValueField()
get_absolute_url = indexes.CharField(model_attr='get_absolute_url')
text = indexes.CharField(document=True)
def full_prepare(self, *args, **kwargs):
widgypage_pre_index.send(sender=self)
return super(PageIndex, self).full_prepare(*args, **kwargs)
def get_model(self):
return WidgyPage
def index_queryset(self, using=None):
return self.get_model().objects.published()
def prepare_text(self, obj):
context = {'_current_page': obj.page_ptr, 'page': obj.page_ptr}
html = render_root(context, obj, 'root_node')
content = html_to_plaintext(html)
keywords = ' '.join(self.prepare_keywords(obj))
return ' '.join([obj.title, keywords, obj.description,
content])
def prepare_keywords(self, obj):
return [unicode(k) for k in obj.keywords.all()]
|
<commit_before>from haystack import indexes
from widgy.contrib.widgy_mezzanine import get_widgypage_model
from widgy.templatetags.widgy_tags import render_root
from widgy.utils import html_to_plaintext
from .signals import widgypage_pre_index
WidgyPage = get_widgypage_model()
class PageIndex(indexes.SearchIndex, indexes.Indexable):
title = indexes.CharField(model_attr='title')
date = indexes.DateTimeField(model_attr='publish_date')
description = indexes.CharField(model_attr='description')
keywords = indexes.MultiValueField()
get_absolute_url = indexes.CharField(model_attr='get_absolute_url')
text = indexes.CharField(document=True)
def full_prepare(self, *args, **kwargs):
widgypage_pre_index.send(sender=self)
return super(PageIndex, self).full_prepare(*args, **kwargs)
def get_model(self):
return WidgyPage
def index_queryset(self, using=None):
return self.get_model().objects.published()
def prepare_text(self, obj):
html = render_root({}, obj, 'root_node')
content = html_to_plaintext(html)
keywords = ' '.join(self.prepare_keywords(obj))
return ' '.join([obj.title, keywords, obj.description,
content])
def prepare_keywords(self, obj):
return [unicode(k) for k in obj.keywords.all()]
<commit_msg>Use a more realistic context to render pages for search
The Mezzanine page middleware adds a page and _current_page to the
context for pages, so our search index should too.<commit_after>
|
from haystack import indexes
from widgy.contrib.widgy_mezzanine import get_widgypage_model
from widgy.templatetags.widgy_tags import render_root
from widgy.utils import html_to_plaintext
from .signals import widgypage_pre_index
WidgyPage = get_widgypage_model()
class PageIndex(indexes.SearchIndex, indexes.Indexable):
title = indexes.CharField(model_attr='title')
date = indexes.DateTimeField(model_attr='publish_date')
description = indexes.CharField(model_attr='description')
keywords = indexes.MultiValueField()
get_absolute_url = indexes.CharField(model_attr='get_absolute_url')
text = indexes.CharField(document=True)
def full_prepare(self, *args, **kwargs):
widgypage_pre_index.send(sender=self)
return super(PageIndex, self).full_prepare(*args, **kwargs)
def get_model(self):
return WidgyPage
def index_queryset(self, using=None):
return self.get_model().objects.published()
def prepare_text(self, obj):
context = {'_current_page': obj.page_ptr, 'page': obj.page_ptr}
html = render_root(context, obj, 'root_node')
content = html_to_plaintext(html)
keywords = ' '.join(self.prepare_keywords(obj))
return ' '.join([obj.title, keywords, obj.description,
content])
def prepare_keywords(self, obj):
return [unicode(k) for k in obj.keywords.all()]
|
from haystack import indexes
from widgy.contrib.widgy_mezzanine import get_widgypage_model
from widgy.templatetags.widgy_tags import render_root
from widgy.utils import html_to_plaintext
from .signals import widgypage_pre_index
WidgyPage = get_widgypage_model()
class PageIndex(indexes.SearchIndex, indexes.Indexable):
title = indexes.CharField(model_attr='title')
date = indexes.DateTimeField(model_attr='publish_date')
description = indexes.CharField(model_attr='description')
keywords = indexes.MultiValueField()
get_absolute_url = indexes.CharField(model_attr='get_absolute_url')
text = indexes.CharField(document=True)
def full_prepare(self, *args, **kwargs):
widgypage_pre_index.send(sender=self)
return super(PageIndex, self).full_prepare(*args, **kwargs)
def get_model(self):
return WidgyPage
def index_queryset(self, using=None):
return self.get_model().objects.published()
def prepare_text(self, obj):
html = render_root({}, obj, 'root_node')
content = html_to_plaintext(html)
keywords = ' '.join(self.prepare_keywords(obj))
return ' '.join([obj.title, keywords, obj.description,
content])
def prepare_keywords(self, obj):
return [unicode(k) for k in obj.keywords.all()]
Use a more realistic context to render pages for search
The Mezzanine page middleware adds a page and _current_page to the
context for pages, so our search index should too.from haystack import indexes
from widgy.contrib.widgy_mezzanine import get_widgypage_model
from widgy.templatetags.widgy_tags import render_root
from widgy.utils import html_to_plaintext
from .signals import widgypage_pre_index
WidgyPage = get_widgypage_model()
class PageIndex(indexes.SearchIndex, indexes.Indexable):
title = indexes.CharField(model_attr='title')
date = indexes.DateTimeField(model_attr='publish_date')
description = indexes.CharField(model_attr='description')
keywords = indexes.MultiValueField()
get_absolute_url = indexes.CharField(model_attr='get_absolute_url')
text = indexes.CharField(document=True)
def full_prepare(self, *args, **kwargs):
widgypage_pre_index.send(sender=self)
return super(PageIndex, self).full_prepare(*args, **kwargs)
def get_model(self):
return WidgyPage
def index_queryset(self, using=None):
return self.get_model().objects.published()
def prepare_text(self, obj):
context = {'_current_page': obj.page_ptr, 'page': obj.page_ptr}
html = render_root(context, obj, 'root_node')
content = html_to_plaintext(html)
keywords = ' '.join(self.prepare_keywords(obj))
return ' '.join([obj.title, keywords, obj.description,
content])
def prepare_keywords(self, obj):
return [unicode(k) for k in obj.keywords.all()]
|
<commit_before>from haystack import indexes
from widgy.contrib.widgy_mezzanine import get_widgypage_model
from widgy.templatetags.widgy_tags import render_root
from widgy.utils import html_to_plaintext
from .signals import widgypage_pre_index
WidgyPage = get_widgypage_model()
class PageIndex(indexes.SearchIndex, indexes.Indexable):
title = indexes.CharField(model_attr='title')
date = indexes.DateTimeField(model_attr='publish_date')
description = indexes.CharField(model_attr='description')
keywords = indexes.MultiValueField()
get_absolute_url = indexes.CharField(model_attr='get_absolute_url')
text = indexes.CharField(document=True)
def full_prepare(self, *args, **kwargs):
widgypage_pre_index.send(sender=self)
return super(PageIndex, self).full_prepare(*args, **kwargs)
def get_model(self):
return WidgyPage
def index_queryset(self, using=None):
return self.get_model().objects.published()
def prepare_text(self, obj):
html = render_root({}, obj, 'root_node')
content = html_to_plaintext(html)
keywords = ' '.join(self.prepare_keywords(obj))
return ' '.join([obj.title, keywords, obj.description,
content])
def prepare_keywords(self, obj):
return [unicode(k) for k in obj.keywords.all()]
<commit_msg>Use a more realistic context to render pages for search
The Mezzanine page middleware adds a page and _current_page to the
context for pages, so our search index should too.<commit_after>from haystack import indexes
from widgy.contrib.widgy_mezzanine import get_widgypage_model
from widgy.templatetags.widgy_tags import render_root
from widgy.utils import html_to_plaintext
from .signals import widgypage_pre_index
WidgyPage = get_widgypage_model()
class PageIndex(indexes.SearchIndex, indexes.Indexable):
title = indexes.CharField(model_attr='title')
date = indexes.DateTimeField(model_attr='publish_date')
description = indexes.CharField(model_attr='description')
keywords = indexes.MultiValueField()
get_absolute_url = indexes.CharField(model_attr='get_absolute_url')
text = indexes.CharField(document=True)
def full_prepare(self, *args, **kwargs):
widgypage_pre_index.send(sender=self)
return super(PageIndex, self).full_prepare(*args, **kwargs)
def get_model(self):
return WidgyPage
def index_queryset(self, using=None):
return self.get_model().objects.published()
def prepare_text(self, obj):
context = {'_current_page': obj.page_ptr, 'page': obj.page_ptr}
html = render_root(context, obj, 'root_node')
content = html_to_plaintext(html)
keywords = ' '.join(self.prepare_keywords(obj))
return ' '.join([obj.title, keywords, obj.description,
content])
def prepare_keywords(self, obj):
return [unicode(k) for k in obj.keywords.all()]
|
b12f869f169cd44c8dba633c4707d1a60b092893
|
mayavi/__init__.py
|
mayavi/__init__.py
|
# Author: Prabhu Ramachandran, Gael Varoquaux
# Copyright (c) 2004-2014, Enthought, Inc.
# License: BSD Style.
""" A tool for easy and interactive visualization of data.
Part of the Mayavi project of the Enthought Tool Suite.
"""
__version__ = '5.0.0'
__requires__ = [
'apptools',
'traits',
'traitsui',
]
__extras_require__ = {
'app': [
'envisage',
],
}
# Try forcing the use of wx 2.8 before any other import.
import sys
if not 'wx' in sys.modules:
try:
# Try forcing the use of wx 2.8
from traits.etsconfig.api import ETSConfig
if ETSConfig.toolkit in ('wx', ''):
import wxversion
wxversion.ensureMinimal('2.8')
except ImportError:
""" wxversion not installed """
|
# Author: Prabhu Ramachandran, Gael Varoquaux
# Copyright (c) 2004-2014, Enthought, Inc.
# License: BSD Style.
""" A tool for easy and interactive visualization of data.
Part of the Mayavi project of the Enthought Tool Suite.
"""
__version__ = '4.4.0'
__requires__ = [
'apptools',
'traits',
'traitsui',
]
__extras_require__ = {
'app': [
'envisage',
],
}
# Try forcing the use of wx 2.8 before any other import.
import sys
if not 'wx' in sys.modules:
try:
# Try forcing the use of wx 2.8
from traits.etsconfig.api import ETSConfig
if ETSConfig.toolkit in ('wx', ''):
import wxversion
wxversion.ensureMinimal('2.8')
except ImportError:
""" wxversion not installed """
|
Update the next version to 4.4.0
|
Update the next version to 4.4.0
|
Python
|
bsd-3-clause
|
dmsurti/mayavi,dmsurti/mayavi,alexandreleroux/mayavi,alexandreleroux/mayavi,liulion/mayavi,liulion/mayavi
|
# Author: Prabhu Ramachandran, Gael Varoquaux
# Copyright (c) 2004-2014, Enthought, Inc.
# License: BSD Style.
""" A tool for easy and interactive visualization of data.
Part of the Mayavi project of the Enthought Tool Suite.
"""
__version__ = '5.0.0'
__requires__ = [
'apptools',
'traits',
'traitsui',
]
__extras_require__ = {
'app': [
'envisage',
],
}
# Try forcing the use of wx 2.8 before any other import.
import sys
if not 'wx' in sys.modules:
try:
# Try forcing the use of wx 2.8
from traits.etsconfig.api import ETSConfig
if ETSConfig.toolkit in ('wx', ''):
import wxversion
wxversion.ensureMinimal('2.8')
except ImportError:
""" wxversion not installed """
Update the next version to 4.4.0
|
# Author: Prabhu Ramachandran, Gael Varoquaux
# Copyright (c) 2004-2014, Enthought, Inc.
# License: BSD Style.
""" A tool for easy and interactive visualization of data.
Part of the Mayavi project of the Enthought Tool Suite.
"""
__version__ = '4.4.0'
__requires__ = [
'apptools',
'traits',
'traitsui',
]
__extras_require__ = {
'app': [
'envisage',
],
}
# Try forcing the use of wx 2.8 before any other import.
import sys
if not 'wx' in sys.modules:
try:
# Try forcing the use of wx 2.8
from traits.etsconfig.api import ETSConfig
if ETSConfig.toolkit in ('wx', ''):
import wxversion
wxversion.ensureMinimal('2.8')
except ImportError:
""" wxversion not installed """
|
<commit_before># Author: Prabhu Ramachandran, Gael Varoquaux
# Copyright (c) 2004-2014, Enthought, Inc.
# License: BSD Style.
""" A tool for easy and interactive visualization of data.
Part of the Mayavi project of the Enthought Tool Suite.
"""
__version__ = '5.0.0'
__requires__ = [
'apptools',
'traits',
'traitsui',
]
__extras_require__ = {
'app': [
'envisage',
],
}
# Try forcing the use of wx 2.8 before any other import.
import sys
if not 'wx' in sys.modules:
try:
# Try forcing the use of wx 2.8
from traits.etsconfig.api import ETSConfig
if ETSConfig.toolkit in ('wx', ''):
import wxversion
wxversion.ensureMinimal('2.8')
except ImportError:
""" wxversion not installed """
<commit_msg>Update the next version to 4.4.0<commit_after>
|
# Author: Prabhu Ramachandran, Gael Varoquaux
# Copyright (c) 2004-2014, Enthought, Inc.
# License: BSD Style.
""" A tool for easy and interactive visualization of data.
Part of the Mayavi project of the Enthought Tool Suite.
"""
__version__ = '4.4.0'
__requires__ = [
'apptools',
'traits',
'traitsui',
]
__extras_require__ = {
'app': [
'envisage',
],
}
# Try forcing the use of wx 2.8 before any other import.
import sys
if not 'wx' in sys.modules:
try:
# Try forcing the use of wx 2.8
from traits.etsconfig.api import ETSConfig
if ETSConfig.toolkit in ('wx', ''):
import wxversion
wxversion.ensureMinimal('2.8')
except ImportError:
""" wxversion not installed """
|
# Author: Prabhu Ramachandran, Gael Varoquaux
# Copyright (c) 2004-2014, Enthought, Inc.
# License: BSD Style.
""" A tool for easy and interactive visualization of data.
Part of the Mayavi project of the Enthought Tool Suite.
"""
__version__ = '5.0.0'
__requires__ = [
'apptools',
'traits',
'traitsui',
]
__extras_require__ = {
'app': [
'envisage',
],
}
# Try forcing the use of wx 2.8 before any other import.
import sys
if not 'wx' in sys.modules:
try:
# Try forcing the use of wx 2.8
from traits.etsconfig.api import ETSConfig
if ETSConfig.toolkit in ('wx', ''):
import wxversion
wxversion.ensureMinimal('2.8')
except ImportError:
""" wxversion not installed """
Update the next version to 4.4.0# Author: Prabhu Ramachandran, Gael Varoquaux
# Copyright (c) 2004-2014, Enthought, Inc.
# License: BSD Style.
""" A tool for easy and interactive visualization of data.
Part of the Mayavi project of the Enthought Tool Suite.
"""
__version__ = '4.4.0'
__requires__ = [
'apptools',
'traits',
'traitsui',
]
__extras_require__ = {
'app': [
'envisage',
],
}
# Try forcing the use of wx 2.8 before any other import.
import sys
if not 'wx' in sys.modules:
try:
# Try forcing the use of wx 2.8
from traits.etsconfig.api import ETSConfig
if ETSConfig.toolkit in ('wx', ''):
import wxversion
wxversion.ensureMinimal('2.8')
except ImportError:
""" wxversion not installed """
|
<commit_before># Author: Prabhu Ramachandran, Gael Varoquaux
# Copyright (c) 2004-2014, Enthought, Inc.
# License: BSD Style.
""" A tool for easy and interactive visualization of data.
Part of the Mayavi project of the Enthought Tool Suite.
"""
__version__ = '5.0.0'
__requires__ = [
'apptools',
'traits',
'traitsui',
]
__extras_require__ = {
'app': [
'envisage',
],
}
# Try forcing the use of wx 2.8 before any other import.
import sys
if not 'wx' in sys.modules:
try:
# Try forcing the use of wx 2.8
from traits.etsconfig.api import ETSConfig
if ETSConfig.toolkit in ('wx', ''):
import wxversion
wxversion.ensureMinimal('2.8')
except ImportError:
""" wxversion not installed """
<commit_msg>Update the next version to 4.4.0<commit_after># Author: Prabhu Ramachandran, Gael Varoquaux
# Copyright (c) 2004-2014, Enthought, Inc.
# License: BSD Style.
""" A tool for easy and interactive visualization of data.
Part of the Mayavi project of the Enthought Tool Suite.
"""
__version__ = '4.4.0'
__requires__ = [
'apptools',
'traits',
'traitsui',
]
__extras_require__ = {
'app': [
'envisage',
],
}
# Try forcing the use of wx 2.8 before any other import.
import sys
if not 'wx' in sys.modules:
try:
# Try forcing the use of wx 2.8
from traits.etsconfig.api import ETSConfig
if ETSConfig.toolkit in ('wx', ''):
import wxversion
wxversion.ensureMinimal('2.8')
except ImportError:
""" wxversion not installed """
|
edf08b9928558688c2402d1c144f04777f4b4bc5
|
gb/helpers.py
|
gb/helpers.py
|
"""Helpers to facilitate API interaction."""
# Spoken strings come to us as words, not numbers.
NUM_WORD_INT = {
'zero': 0,
'one': 1,
'two': 2,
'three': 3,
'four': 4,
'five': 5,
'six': 6,
'seven': 7,
'eight': 8,
'nine': 9
}
# The same thing as NUM_WORD_INT, but already stringified.
NUM_WORD_STRING = {k: str(v) for k, v in NUM_WORD_INT.items()}
def word_to_int(phrase, mapping=NUM_WORD_STRING):
"""Take a phrase and replace the number words in it with their digits.
:param phrase: the phrase to mogrify
:param mapping: the mapping of number words to number digits
:returns: the phrase with replacements made
"""
tokens = phrase.split()
for token in tokens:
if token in mapping:
phrase = phrase.replace(token, mapping[token])
return phrase
|
"""Helpers to facilitate API interaction."""
from functools import wraps
from datetime import datetime
# Spoken strings come to us as words, not numbers.
NUM_WORD_INT = {
'zero': 0,
'one': 1,
'two': 2,
'three': 3,
'four': 4,
'five': 5,
'six': 6,
'seven': 7,
'eight': 8,
'nine': 9
}
# The same thing as NUM_WORD_INT, but already stringified.
NUM_WORD_STRING = {k: str(v) for k, v in NUM_WORD_INT.items()}
def word_to_int(phrase, mapping=NUM_WORD_STRING):
"""Take a phrase and replace the number words in it with their digits.
:param phrase: the phrase to mogrify
:param mapping: the mapping of number words to number digits
:returns: the phrase with replacements made
"""
tokens = phrase.split()
for token in tokens:
if token in mapping:
phrase = phrase.replace(token, mapping[token])
return phrase
CACHE = {}
MAX_AGE = 60 * 60 * 24 # a day
def memoize_class(func):
"""Decorator to assist with the memoization of class methods."""
@wraps(func)
def wrapper(*args):
expired = False
sig = (func, args)
cached, timestamp = CACHE.get(sig, (None, None,))
if timestamp:
age = datetime.utcnow() - timestamp
if age.total_seconds() > MAX_AGE:
expired = True
if cached and not expired:
return cached
value = func(*args)
CACHE[sig] = value, datetime.utcnow()
return value
return wrapper
|
Add caching feature to API lookup requests
|
Add caching feature to API lookup requests
|
Python
|
mit
|
jaykwon/giantanswers
|
"""Helpers to facilitate API interaction."""
# Spoken strings come to us as words, not numbers.
NUM_WORD_INT = {
'zero': 0,
'one': 1,
'two': 2,
'three': 3,
'four': 4,
'five': 5,
'six': 6,
'seven': 7,
'eight': 8,
'nine': 9
}
# The same thing as NUM_WORD_INT, but already stringified.
NUM_WORD_STRING = {k: str(v) for k, v in NUM_WORD_INT.items()}
def word_to_int(phrase, mapping=NUM_WORD_STRING):
"""Take a phrase and replace the number words in it with their digits.
:param phrase: the phrase to mogrify
:param mapping: the mapping of number words to number digits
:returns: the phrase with replacements made
"""
tokens = phrase.split()
for token in tokens:
if token in mapping:
phrase = phrase.replace(token, mapping[token])
return phraseAdd caching feature to API lookup requests
|
"""Helpers to facilitate API interaction."""
from functools import wraps
from datetime import datetime
# Spoken strings come to us as words, not numbers.
NUM_WORD_INT = {
'zero': 0,
'one': 1,
'two': 2,
'three': 3,
'four': 4,
'five': 5,
'six': 6,
'seven': 7,
'eight': 8,
'nine': 9
}
# The same thing as NUM_WORD_INT, but already stringified.
NUM_WORD_STRING = {k: str(v) for k, v in NUM_WORD_INT.items()}
def word_to_int(phrase, mapping=NUM_WORD_STRING):
"""Take a phrase and replace the number words in it with their digits.
:param phrase: the phrase to mogrify
:param mapping: the mapping of number words to number digits
:returns: the phrase with replacements made
"""
tokens = phrase.split()
for token in tokens:
if token in mapping:
phrase = phrase.replace(token, mapping[token])
return phrase
CACHE = {}
MAX_AGE = 60 * 60 * 24 # a day
def memoize_class(func):
"""Decorator to assist with the memoization of class methods."""
@wraps(func)
def wrapper(*args):
expired = False
sig = (func, args)
cached, timestamp = CACHE.get(sig, (None, None,))
if timestamp:
age = datetime.utcnow() - timestamp
if age.total_seconds() > MAX_AGE:
expired = True
if cached and not expired:
return cached
value = func(*args)
CACHE[sig] = value, datetime.utcnow()
return value
return wrapper
|
<commit_before>"""Helpers to facilitate API interaction."""
# Spoken strings come to us as words, not numbers.
NUM_WORD_INT = {
'zero': 0,
'one': 1,
'two': 2,
'three': 3,
'four': 4,
'five': 5,
'six': 6,
'seven': 7,
'eight': 8,
'nine': 9
}
# The same thing as NUM_WORD_INT, but already stringified.
NUM_WORD_STRING = {k: str(v) for k, v in NUM_WORD_INT.items()}
def word_to_int(phrase, mapping=NUM_WORD_STRING):
"""Take a phrase and replace the number words in it with their digits.
:param phrase: the phrase to mogrify
:param mapping: the mapping of number words to number digits
:returns: the phrase with replacements made
"""
tokens = phrase.split()
for token in tokens:
if token in mapping:
phrase = phrase.replace(token, mapping[token])
return phrase<commit_msg>Add caching feature to API lookup requests<commit_after>
|
"""Helpers to facilitate API interaction."""
from functools import wraps
from datetime import datetime
# Spoken strings come to us as words, not numbers.
NUM_WORD_INT = {
'zero': 0,
'one': 1,
'two': 2,
'three': 3,
'four': 4,
'five': 5,
'six': 6,
'seven': 7,
'eight': 8,
'nine': 9
}
# The same thing as NUM_WORD_INT, but already stringified.
NUM_WORD_STRING = {k: str(v) for k, v in NUM_WORD_INT.items()}
def word_to_int(phrase, mapping=NUM_WORD_STRING):
"""Take a phrase and replace the number words in it with their digits.
:param phrase: the phrase to mogrify
:param mapping: the mapping of number words to number digits
:returns: the phrase with replacements made
"""
tokens = phrase.split()
for token in tokens:
if token in mapping:
phrase = phrase.replace(token, mapping[token])
return phrase
CACHE = {}
MAX_AGE = 60 * 60 * 24 # a day
def memoize_class(func):
"""Decorator to assist with the memoization of class methods."""
@wraps(func)
def wrapper(*args):
expired = False
sig = (func, args)
cached, timestamp = CACHE.get(sig, (None, None,))
if timestamp:
age = datetime.utcnow() - timestamp
if age.total_seconds() > MAX_AGE:
expired = True
if cached and not expired:
return cached
value = func(*args)
CACHE[sig] = value, datetime.utcnow()
return value
return wrapper
|
"""Helpers to facilitate API interaction."""
# Spoken strings come to us as words, not numbers.
NUM_WORD_INT = {
'zero': 0,
'one': 1,
'two': 2,
'three': 3,
'four': 4,
'five': 5,
'six': 6,
'seven': 7,
'eight': 8,
'nine': 9
}
# The same thing as NUM_WORD_INT, but already stringified.
NUM_WORD_STRING = {k: str(v) for k, v in NUM_WORD_INT.items()}
def word_to_int(phrase, mapping=NUM_WORD_STRING):
"""Take a phrase and replace the number words in it with their digits.
:param phrase: the phrase to mogrify
:param mapping: the mapping of number words to number digits
:returns: the phrase with replacements made
"""
tokens = phrase.split()
for token in tokens:
if token in mapping:
phrase = phrase.replace(token, mapping[token])
return phraseAdd caching feature to API lookup requests"""Helpers to facilitate API interaction."""
from functools import wraps
from datetime import datetime
# Spoken strings come to us as words, not numbers.
NUM_WORD_INT = {
'zero': 0,
'one': 1,
'two': 2,
'three': 3,
'four': 4,
'five': 5,
'six': 6,
'seven': 7,
'eight': 8,
'nine': 9
}
# The same thing as NUM_WORD_INT, but already stringified.
NUM_WORD_STRING = {k: str(v) for k, v in NUM_WORD_INT.items()}
def word_to_int(phrase, mapping=NUM_WORD_STRING):
"""Take a phrase and replace the number words in it with their digits.
:param phrase: the phrase to mogrify
:param mapping: the mapping of number words to number digits
:returns: the phrase with replacements made
"""
tokens = phrase.split()
for token in tokens:
if token in mapping:
phrase = phrase.replace(token, mapping[token])
return phrase
CACHE = {}
MAX_AGE = 60 * 60 * 24 # a day
def memoize_class(func):
"""Decorator to assist with the memoization of class methods."""
@wraps(func)
def wrapper(*args):
expired = False
sig = (func, args)
cached, timestamp = CACHE.get(sig, (None, None,))
if timestamp:
age = datetime.utcnow() - timestamp
if age.total_seconds() > MAX_AGE:
expired = True
if cached and not expired:
return cached
value = func(*args)
CACHE[sig] = value, datetime.utcnow()
return value
return wrapper
|
<commit_before>"""Helpers to facilitate API interaction."""
# Spoken strings come to us as words, not numbers.
NUM_WORD_INT = {
'zero': 0,
'one': 1,
'two': 2,
'three': 3,
'four': 4,
'five': 5,
'six': 6,
'seven': 7,
'eight': 8,
'nine': 9
}
# The same thing as NUM_WORD_INT, but already stringified.
NUM_WORD_STRING = {k: str(v) for k, v in NUM_WORD_INT.items()}
def word_to_int(phrase, mapping=NUM_WORD_STRING):
"""Take a phrase and replace the number words in it with their digits.
:param phrase: the phrase to mogrify
:param mapping: the mapping of number words to number digits
:returns: the phrase with replacements made
"""
tokens = phrase.split()
for token in tokens:
if token in mapping:
phrase = phrase.replace(token, mapping[token])
return phrase<commit_msg>Add caching feature to API lookup requests<commit_after>"""Helpers to facilitate API interaction."""
from functools import wraps
from datetime import datetime
# Spoken strings come to us as words, not numbers.
NUM_WORD_INT = {
'zero': 0,
'one': 1,
'two': 2,
'three': 3,
'four': 4,
'five': 5,
'six': 6,
'seven': 7,
'eight': 8,
'nine': 9
}
# The same thing as NUM_WORD_INT, but already stringified.
NUM_WORD_STRING = {k: str(v) for k, v in NUM_WORD_INT.items()}
def word_to_int(phrase, mapping=NUM_WORD_STRING):
"""Take a phrase and replace the number words in it with their digits.
:param phrase: the phrase to mogrify
:param mapping: the mapping of number words to number digits
:returns: the phrase with replacements made
"""
tokens = phrase.split()
for token in tokens:
if token in mapping:
phrase = phrase.replace(token, mapping[token])
return phrase
CACHE = {}
MAX_AGE = 60 * 60 * 24 # a day
def memoize_class(func):
"""Decorator to assist with the memoization of class methods."""
@wraps(func)
def wrapper(*args):
expired = False
sig = (func, args)
cached, timestamp = CACHE.get(sig, (None, None,))
if timestamp:
age = datetime.utcnow() - timestamp
if age.total_seconds() > MAX_AGE:
expired = True
if cached and not expired:
return cached
value = func(*args)
CACHE[sig] = value, datetime.utcnow()
return value
return wrapper
|
750c7bef1483c914e195e26a179a3b362fa3f059
|
pmg/admin/validators.py
|
pmg/admin/validators.py
|
from wtforms.validators import AnyOf
class BillEventTitleAllowed(object):
"""
Checks that the bill event title is one of the allowed titles when the
event type is "bill-passed".
"""
ALLOWED_TITLES = [
'Bill passed by the National Assembly and transmitted to the NCOP for concurrence',
'Bill passed by both Houses and sent to President for assent',
'Bill passed by the NCOP and returned to the National Assembly for concurrence',
'Bill passed and amended by the NCOP and returned to the National Assembly for concurrence',
'Bill passed by the NCOP and sent to the President for assent',
'The NCOP rescinded its decision',
'Bill remitted',
'Bill revived on this date'
]
def __call__(self, form, field):
bill_type = form['type']
if bill_type.data == 'bill-passed':
message = 'When event type is "Bill passed", event title must be one of: %(values)s.'
any_of = AnyOf(self.ALLOWED_TITLES, message=message)
return any_of(form, field)
|
from wtforms.validators import AnyOf
from wtforms.compat import string_types, text_type
class BillEventTitleAllowed(object):
"""
Checks that the bill event title is one of the allowed titles when the
event type is "bill-passed".
"""
ALLOWED_TITLES = [
'Bill passed by the National Assembly and transmitted to the NCOP for concurrence',
'Bill passed by both Houses and sent to President for assent',
'Bill passed by the NCOP and returned to the National Assembly for concurrence',
'Bill passed and amended by the NCOP and returned to the National Assembly for concurrence',
'Bill passed by the NCOP and sent to the President for assent',
'The NCOP rescinded its decision',
'Bill remitted',
'Bill revived on this date'
]
def __call__(self, form, field):
bill_type = form['type']
if bill_type.data == 'bill-passed':
message = 'When event type is "Bill passed", event title must be one of: %(values)s.'
any_of = AnyOf(self.ALLOWED_TITLES, message=message,
values_formatter=self.values_formatter)
return any_of(form, field)
@classmethod
def values_formatter(cls, values):
return ', '.join(cls.quoted(text_type(x)) for x in values)
@classmethod
def quoted(cls, value):
return '"%s"' % value
|
Format event title error message titles in quotation marks
|
Format event title error message titles in quotation marks
|
Python
|
apache-2.0
|
Code4SA/pmg-cms-2,Code4SA/pmg-cms-2,Code4SA/pmg-cms-2
|
from wtforms.validators import AnyOf
class BillEventTitleAllowed(object):
"""
Checks that the bill event title is one of the allowed titles when the
event type is "bill-passed".
"""
ALLOWED_TITLES = [
'Bill passed by the National Assembly and transmitted to the NCOP for concurrence',
'Bill passed by both Houses and sent to President for assent',
'Bill passed by the NCOP and returned to the National Assembly for concurrence',
'Bill passed and amended by the NCOP and returned to the National Assembly for concurrence',
'Bill passed by the NCOP and sent to the President for assent',
'The NCOP rescinded its decision',
'Bill remitted',
'Bill revived on this date'
]
def __call__(self, form, field):
bill_type = form['type']
if bill_type.data == 'bill-passed':
message = 'When event type is "Bill passed", event title must be one of: %(values)s.'
any_of = AnyOf(self.ALLOWED_TITLES, message=message)
return any_of(form, field)
Format event title error message titles in quotation marks
|
from wtforms.validators import AnyOf
from wtforms.compat import string_types, text_type
class BillEventTitleAllowed(object):
"""
Checks that the bill event title is one of the allowed titles when the
event type is "bill-passed".
"""
ALLOWED_TITLES = [
'Bill passed by the National Assembly and transmitted to the NCOP for concurrence',
'Bill passed by both Houses and sent to President for assent',
'Bill passed by the NCOP and returned to the National Assembly for concurrence',
'Bill passed and amended by the NCOP and returned to the National Assembly for concurrence',
'Bill passed by the NCOP and sent to the President for assent',
'The NCOP rescinded its decision',
'Bill remitted',
'Bill revived on this date'
]
def __call__(self, form, field):
bill_type = form['type']
if bill_type.data == 'bill-passed':
message = 'When event type is "Bill passed", event title must be one of: %(values)s.'
any_of = AnyOf(self.ALLOWED_TITLES, message=message,
values_formatter=self.values_formatter)
return any_of(form, field)
@classmethod
def values_formatter(cls, values):
return ', '.join(cls.quoted(text_type(x)) for x in values)
@classmethod
def quoted(cls, value):
return '"%s"' % value
|
<commit_before>from wtforms.validators import AnyOf
class BillEventTitleAllowed(object):
"""
Checks that the bill event title is one of the allowed titles when the
event type is "bill-passed".
"""
ALLOWED_TITLES = [
'Bill passed by the National Assembly and transmitted to the NCOP for concurrence',
'Bill passed by both Houses and sent to President for assent',
'Bill passed by the NCOP and returned to the National Assembly for concurrence',
'Bill passed and amended by the NCOP and returned to the National Assembly for concurrence',
'Bill passed by the NCOP and sent to the President for assent',
'The NCOP rescinded its decision',
'Bill remitted',
'Bill revived on this date'
]
def __call__(self, form, field):
bill_type = form['type']
if bill_type.data == 'bill-passed':
message = 'When event type is "Bill passed", event title must be one of: %(values)s.'
any_of = AnyOf(self.ALLOWED_TITLES, message=message)
return any_of(form, field)
<commit_msg>Format event title error message titles in quotation marks<commit_after>
|
from wtforms.validators import AnyOf
from wtforms.compat import string_types, text_type
class BillEventTitleAllowed(object):
"""
Checks that the bill event title is one of the allowed titles when the
event type is "bill-passed".
"""
ALLOWED_TITLES = [
'Bill passed by the National Assembly and transmitted to the NCOP for concurrence',
'Bill passed by both Houses and sent to President for assent',
'Bill passed by the NCOP and returned to the National Assembly for concurrence',
'Bill passed and amended by the NCOP and returned to the National Assembly for concurrence',
'Bill passed by the NCOP and sent to the President for assent',
'The NCOP rescinded its decision',
'Bill remitted',
'Bill revived on this date'
]
def __call__(self, form, field):
bill_type = form['type']
if bill_type.data == 'bill-passed':
message = 'When event type is "Bill passed", event title must be one of: %(values)s.'
any_of = AnyOf(self.ALLOWED_TITLES, message=message,
values_formatter=self.values_formatter)
return any_of(form, field)
@classmethod
def values_formatter(cls, values):
return ', '.join(cls.quoted(text_type(x)) for x in values)
@classmethod
def quoted(cls, value):
return '"%s"' % value
|
from wtforms.validators import AnyOf
class BillEventTitleAllowed(object):
"""
Checks that the bill event title is one of the allowed titles when the
event type is "bill-passed".
"""
ALLOWED_TITLES = [
'Bill passed by the National Assembly and transmitted to the NCOP for concurrence',
'Bill passed by both Houses and sent to President for assent',
'Bill passed by the NCOP and returned to the National Assembly for concurrence',
'Bill passed and amended by the NCOP and returned to the National Assembly for concurrence',
'Bill passed by the NCOP and sent to the President for assent',
'The NCOP rescinded its decision',
'Bill remitted',
'Bill revived on this date'
]
def __call__(self, form, field):
bill_type = form['type']
if bill_type.data == 'bill-passed':
message = 'When event type is "Bill passed", event title must be one of: %(values)s.'
any_of = AnyOf(self.ALLOWED_TITLES, message=message)
return any_of(form, field)
Format event title error message titles in quotation marksfrom wtforms.validators import AnyOf
from wtforms.compat import string_types, text_type
class BillEventTitleAllowed(object):
"""
Checks that the bill event title is one of the allowed titles when the
event type is "bill-passed".
"""
ALLOWED_TITLES = [
'Bill passed by the National Assembly and transmitted to the NCOP for concurrence',
'Bill passed by both Houses and sent to President for assent',
'Bill passed by the NCOP and returned to the National Assembly for concurrence',
'Bill passed and amended by the NCOP and returned to the National Assembly for concurrence',
'Bill passed by the NCOP and sent to the President for assent',
'The NCOP rescinded its decision',
'Bill remitted',
'Bill revived on this date'
]
def __call__(self, form, field):
bill_type = form['type']
if bill_type.data == 'bill-passed':
message = 'When event type is "Bill passed", event title must be one of: %(values)s.'
any_of = AnyOf(self.ALLOWED_TITLES, message=message,
values_formatter=self.values_formatter)
return any_of(form, field)
@classmethod
def values_formatter(cls, values):
return ', '.join(cls.quoted(text_type(x)) for x in values)
@classmethod
def quoted(cls, value):
return '"%s"' % value
|
<commit_before>from wtforms.validators import AnyOf
class BillEventTitleAllowed(object):
"""
Checks that the bill event title is one of the allowed titles when the
event type is "bill-passed".
"""
ALLOWED_TITLES = [
'Bill passed by the National Assembly and transmitted to the NCOP for concurrence',
'Bill passed by both Houses and sent to President for assent',
'Bill passed by the NCOP and returned to the National Assembly for concurrence',
'Bill passed and amended by the NCOP and returned to the National Assembly for concurrence',
'Bill passed by the NCOP and sent to the President for assent',
'The NCOP rescinded its decision',
'Bill remitted',
'Bill revived on this date'
]
def __call__(self, form, field):
bill_type = form['type']
if bill_type.data == 'bill-passed':
message = 'When event type is "Bill passed", event title must be one of: %(values)s.'
any_of = AnyOf(self.ALLOWED_TITLES, message=message)
return any_of(form, field)
<commit_msg>Format event title error message titles in quotation marks<commit_after>from wtforms.validators import AnyOf
from wtforms.compat import string_types, text_type
class BillEventTitleAllowed(object):
"""
Checks that the bill event title is one of the allowed titles when the
event type is "bill-passed".
"""
ALLOWED_TITLES = [
'Bill passed by the National Assembly and transmitted to the NCOP for concurrence',
'Bill passed by both Houses and sent to President for assent',
'Bill passed by the NCOP and returned to the National Assembly for concurrence',
'Bill passed and amended by the NCOP and returned to the National Assembly for concurrence',
'Bill passed by the NCOP and sent to the President for assent',
'The NCOP rescinded its decision',
'Bill remitted',
'Bill revived on this date'
]
def __call__(self, form, field):
bill_type = form['type']
if bill_type.data == 'bill-passed':
message = 'When event type is "Bill passed", event title must be one of: %(values)s.'
any_of = AnyOf(self.ALLOWED_TITLES, message=message,
values_formatter=self.values_formatter)
return any_of(form, field)
@classmethod
def values_formatter(cls, values):
return ', '.join(cls.quoted(text_type(x)) for x in values)
@classmethod
def quoted(cls, value):
return '"%s"' % value
|
fd176b8eae33cac5fa7b2ba4f7a7586d9e6ebf14
|
mlat/connection.py
|
mlat/connection.py
|
# -*- mode: python; indent-tabs-mode: nil -*-
class Connection(object):
"""Interface for receiver connections.
A receiver connection is something that can send messages (filter requests,
multilateration results) to a particular receiver. A single connection
may handle only a single receiver, or may multiplex multiple receivers.
This is a duck-typed interface, implementations are not required to inherit
this class as long as they provide methods with equivalent signatures.
"""
def request_traffic(self, receiver, icao_set):
"""Request that a receiver starts sending traffic for the given
set of aircraft.
receiver: the handle of the concerned receiver
icao_set: a set of ICAO addresses (as ints) to start sending
"""
pass
def suppress_traffic(self, receiver, icao_set):
"""Request that a receiver stops sending traffic for the given
set of aircraft.
receiver: the handle of the concerned receiver
icao_set: a set of ICAO addresses (as ints) to stop sending
"""
pass
def report_mlat_position(self, receiver,
icao, utc, ecef, ecef_cov, nstations):
"""Report a multilaterated position result.
receiver: the handle of the concerned receiver
icao: the ICAO address of the aircraft (as an int)
utc: the approximate validity time of the position
ecef: an (x,y,z) tuple giving the position in ECEF coordinates
ecef_cov: a 3x3 matrix giving the covariance matrix of ecef
nstations: the number of stations that contributed to the result
"""
pass
|
# -*- mode: python; indent-tabs-mode: nil -*-
class Connection(object):
"""Interface for receiver connections.
A receiver connection is something that can send messages (filter requests,
multilateration results) to a particular receiver. A single connection
may handle only a single receiver, or may multiplex multiple receivers.
This is a duck-typed interface, implementations are not required to inherit
this class as long as they provide methods with equivalent signatures.
"""
def request_traffic(self, receiver, icao_set):
"""Request that a receiver starts sending traffic for the given
set of aircraft.
receiver: the handle of the concerned receiver
icao_set: a set of ICAO addresses (as ints) to start sending
"""
raise NotImplementedError
def suppress_traffic(self, receiver, icao_set):
"""Request that a receiver stops sending traffic for the given
set of aircraft.
receiver: the handle of the concerned receiver
icao_set: a set of ICAO addresses (as ints) to stop sending
"""
raise NotImplementedError
def report_mlat_position(self, receiver,
icao, utc, ecef, ecef_cov, nstations):
"""Report a multilaterated position result.
receiver: the handle of the concerned receiver
icao: the ICAO address of the aircraft (as an int)
utc: the approximate validity time of the position
ecef: an (x,y,z) tuple giving the position in ECEF coordinates
ecef_cov: a 3x3 matrix giving the covariance matrix of ecef
nstations: the number of stations that contributed to the result
"""
raise NotImplementedError
|
Raise NotImplemented if methods aren't overridden
|
Raise NotImplemented if methods aren't overridden
|
Python
|
agpl-3.0
|
tmuic/mlat-server,mutability/mlat-server,mutability/mlat-server,tmuic/mlat-server
|
# -*- mode: python; indent-tabs-mode: nil -*-
class Connection(object):
"""Interface for receiver connections.
A receiver connection is something that can send messages (filter requests,
multilateration results) to a particular receiver. A single connection
may handle only a single receiver, or may multiplex multiple receivers.
This is a duck-typed interface, implementations are not required to inherit
this class as long as they provide methods with equivalent signatures.
"""
def request_traffic(self, receiver, icao_set):
"""Request that a receiver starts sending traffic for the given
set of aircraft.
receiver: the handle of the concerned receiver
icao_set: a set of ICAO addresses (as ints) to start sending
"""
pass
def suppress_traffic(self, receiver, icao_set):
"""Request that a receiver stops sending traffic for the given
set of aircraft.
receiver: the handle of the concerned receiver
icao_set: a set of ICAO addresses (as ints) to stop sending
"""
pass
def report_mlat_position(self, receiver,
icao, utc, ecef, ecef_cov, nstations):
"""Report a multilaterated position result.
receiver: the handle of the concerned receiver
icao: the ICAO address of the aircraft (as an int)
utc: the approximate validity time of the position
ecef: an (x,y,z) tuple giving the position in ECEF coordinates
ecef_cov: a 3x3 matrix giving the covariance matrix of ecef
nstations: the number of stations that contributed to the result
"""
pass
Raise NotImplemented if methods aren't overridden
|
# -*- mode: python; indent-tabs-mode: nil -*-
class Connection(object):
"""Interface for receiver connections.
A receiver connection is something that can send messages (filter requests,
multilateration results) to a particular receiver. A single connection
may handle only a single receiver, or may multiplex multiple receivers.
This is a duck-typed interface, implementations are not required to inherit
this class as long as they provide methods with equivalent signatures.
"""
def request_traffic(self, receiver, icao_set):
"""Request that a receiver starts sending traffic for the given
set of aircraft.
receiver: the handle of the concerned receiver
icao_set: a set of ICAO addresses (as ints) to start sending
"""
raise NotImplementedError
def suppress_traffic(self, receiver, icao_set):
"""Request that a receiver stops sending traffic for the given
set of aircraft.
receiver: the handle of the concerned receiver
icao_set: a set of ICAO addresses (as ints) to stop sending
"""
raise NotImplementedError
def report_mlat_position(self, receiver,
icao, utc, ecef, ecef_cov, nstations):
"""Report a multilaterated position result.
receiver: the handle of the concerned receiver
icao: the ICAO address of the aircraft (as an int)
utc: the approximate validity time of the position
ecef: an (x,y,z) tuple giving the position in ECEF coordinates
ecef_cov: a 3x3 matrix giving the covariance matrix of ecef
nstations: the number of stations that contributed to the result
"""
raise NotImplementedError
|
<commit_before># -*- mode: python; indent-tabs-mode: nil -*-
class Connection(object):
"""Interface for receiver connections.
A receiver connection is something that can send messages (filter requests,
multilateration results) to a particular receiver. A single connection
may handle only a single receiver, or may multiplex multiple receivers.
This is a duck-typed interface, implementations are not required to inherit
this class as long as they provide methods with equivalent signatures.
"""
def request_traffic(self, receiver, icao_set):
"""Request that a receiver starts sending traffic for the given
set of aircraft.
receiver: the handle of the concerned receiver
icao_set: a set of ICAO addresses (as ints) to start sending
"""
pass
def suppress_traffic(self, receiver, icao_set):
"""Request that a receiver stops sending traffic for the given
set of aircraft.
receiver: the handle of the concerned receiver
icao_set: a set of ICAO addresses (as ints) to stop sending
"""
pass
def report_mlat_position(self, receiver,
icao, utc, ecef, ecef_cov, nstations):
"""Report a multilaterated position result.
receiver: the handle of the concerned receiver
icao: the ICAO address of the aircraft (as an int)
utc: the approximate validity time of the position
ecef: an (x,y,z) tuple giving the position in ECEF coordinates
ecef_cov: a 3x3 matrix giving the covariance matrix of ecef
nstations: the number of stations that contributed to the result
"""
pass
<commit_msg>Raise NotImplemented if methods aren't overridden<commit_after>
|
# -*- mode: python; indent-tabs-mode: nil -*-
class Connection(object):
"""Interface for receiver connections.
A receiver connection is something that can send messages (filter requests,
multilateration results) to a particular receiver. A single connection
may handle only a single receiver, or may multiplex multiple receivers.
This is a duck-typed interface, implementations are not required to inherit
this class as long as they provide methods with equivalent signatures.
"""
def request_traffic(self, receiver, icao_set):
"""Request that a receiver starts sending traffic for the given
set of aircraft.
receiver: the handle of the concerned receiver
icao_set: a set of ICAO addresses (as ints) to start sending
"""
raise NotImplementedError
def suppress_traffic(self, receiver, icao_set):
"""Request that a receiver stops sending traffic for the given
set of aircraft.
receiver: the handle of the concerned receiver
icao_set: a set of ICAO addresses (as ints) to stop sending
"""
raise NotImplementedError
def report_mlat_position(self, receiver,
icao, utc, ecef, ecef_cov, nstations):
"""Report a multilaterated position result.
receiver: the handle of the concerned receiver
icao: the ICAO address of the aircraft (as an int)
utc: the approximate validity time of the position
ecef: an (x,y,z) tuple giving the position in ECEF coordinates
ecef_cov: a 3x3 matrix giving the covariance matrix of ecef
nstations: the number of stations that contributed to the result
"""
raise NotImplementedError
|
# -*- mode: python; indent-tabs-mode: nil -*-
class Connection(object):
"""Interface for receiver connections.
A receiver connection is something that can send messages (filter requests,
multilateration results) to a particular receiver. A single connection
may handle only a single receiver, or may multiplex multiple receivers.
This is a duck-typed interface, implementations are not required to inherit
this class as long as they provide methods with equivalent signatures.
"""
def request_traffic(self, receiver, icao_set):
"""Request that a receiver starts sending traffic for the given
set of aircraft.
receiver: the handle of the concerned receiver
icao_set: a set of ICAO addresses (as ints) to start sending
"""
pass
def suppress_traffic(self, receiver, icao_set):
"""Request that a receiver stops sending traffic for the given
set of aircraft.
receiver: the handle of the concerned receiver
icao_set: a set of ICAO addresses (as ints) to stop sending
"""
pass
def report_mlat_position(self, receiver,
icao, utc, ecef, ecef_cov, nstations):
"""Report a multilaterated position result.
receiver: the handle of the concerned receiver
icao: the ICAO address of the aircraft (as an int)
utc: the approximate validity time of the position
ecef: an (x,y,z) tuple giving the position in ECEF coordinates
ecef_cov: a 3x3 matrix giving the covariance matrix of ecef
nstations: the number of stations that contributed to the result
"""
pass
Raise NotImplemented if methods aren't overridden# -*- mode: python; indent-tabs-mode: nil -*-
class Connection(object):
"""Interface for receiver connections.
A receiver connection is something that can send messages (filter requests,
multilateration results) to a particular receiver. A single connection
may handle only a single receiver, or may multiplex multiple receivers.
This is a duck-typed interface, implementations are not required to inherit
this class as long as they provide methods with equivalent signatures.
"""
def request_traffic(self, receiver, icao_set):
"""Request that a receiver starts sending traffic for the given
set of aircraft.
receiver: the handle of the concerned receiver
icao_set: a set of ICAO addresses (as ints) to start sending
"""
raise NotImplementedError
def suppress_traffic(self, receiver, icao_set):
"""Request that a receiver stops sending traffic for the given
set of aircraft.
receiver: the handle of the concerned receiver
icao_set: a set of ICAO addresses (as ints) to stop sending
"""
raise NotImplementedError
def report_mlat_position(self, receiver,
icao, utc, ecef, ecef_cov, nstations):
"""Report a multilaterated position result.
receiver: the handle of the concerned receiver
icao: the ICAO address of the aircraft (as an int)
utc: the approximate validity time of the position
ecef: an (x,y,z) tuple giving the position in ECEF coordinates
ecef_cov: a 3x3 matrix giving the covariance matrix of ecef
nstations: the number of stations that contributed to the result
"""
raise NotImplementedError
|
<commit_before># -*- mode: python; indent-tabs-mode: nil -*-
class Connection(object):
"""Interface for receiver connections.
A receiver connection is something that can send messages (filter requests,
multilateration results) to a particular receiver. A single connection
may handle only a single receiver, or may multiplex multiple receivers.
This is a duck-typed interface, implementations are not required to inherit
this class as long as they provide methods with equivalent signatures.
"""
def request_traffic(self, receiver, icao_set):
"""Request that a receiver starts sending traffic for the given
set of aircraft.
receiver: the handle of the concerned receiver
icao_set: a set of ICAO addresses (as ints) to start sending
"""
pass
def suppress_traffic(self, receiver, icao_set):
"""Request that a receiver stops sending traffic for the given
set of aircraft.
receiver: the handle of the concerned receiver
icao_set: a set of ICAO addresses (as ints) to stop sending
"""
pass
def report_mlat_position(self, receiver,
icao, utc, ecef, ecef_cov, nstations):
"""Report a multilaterated position result.
receiver: the handle of the concerned receiver
icao: the ICAO address of the aircraft (as an int)
utc: the approximate validity time of the position
ecef: an (x,y,z) tuple giving the position in ECEF coordinates
ecef_cov: a 3x3 matrix giving the covariance matrix of ecef
nstations: the number of stations that contributed to the result
"""
pass
<commit_msg>Raise NotImplemented if methods aren't overridden<commit_after># -*- mode: python; indent-tabs-mode: nil -*-
class Connection(object):
"""Interface for receiver connections.
A receiver connection is something that can send messages (filter requests,
multilateration results) to a particular receiver. A single connection
may handle only a single receiver, or may multiplex multiple receivers.
This is a duck-typed interface, implementations are not required to inherit
this class as long as they provide methods with equivalent signatures.
"""
def request_traffic(self, receiver, icao_set):
"""Request that a receiver starts sending traffic for the given
set of aircraft.
receiver: the handle of the concerned receiver
icao_set: a set of ICAO addresses (as ints) to start sending
"""
raise NotImplementedError
def suppress_traffic(self, receiver, icao_set):
"""Request that a receiver stops sending traffic for the given
set of aircraft.
receiver: the handle of the concerned receiver
icao_set: a set of ICAO addresses (as ints) to stop sending
"""
raise NotImplementedError
def report_mlat_position(self, receiver,
icao, utc, ecef, ecef_cov, nstations):
"""Report a multilaterated position result.
receiver: the handle of the concerned receiver
icao: the ICAO address of the aircraft (as an int)
utc: the approximate validity time of the position
ecef: an (x,y,z) tuple giving the position in ECEF coordinates
ecef_cov: a 3x3 matrix giving the covariance matrix of ecef
nstations: the number of stations that contributed to the result
"""
raise NotImplementedError
|
8f2d6d2714aa1b60950a2fc355d39297b7f2cdfb
|
keras/activations.py
|
keras/activations.py
|
from __future__ import absolute_import
from . import backend as K
def softmax(x):
return K.softmax(x)
def softplus(x):
return K.softplus(x)
def relu(x, alpha=0., max_value=None):
return K.relu(x, alpha=alpha, max_value=max_value)
def tanh(x):
return K.tanh(x)
def sigmoid(x):
return K.sigmoid(x)
def hard_sigmoid(x):
return K.hard_sigmoid(x)
def linear(x):
'''
The function returns the variable that is passed in, so all types work
'''
return x
from .utils.generic_utils import get_from_module
def get(identifier):
return get_from_module(identifier, globals(), 'activation function')
|
from __future__ import absolute_import
from . import backend as K
def softmax(x):
ndim = K.ndim(x)
if ndim == 2:
return K.softmax(x)
elif ndim == 3:
# apply softmax to each timestep
def step(x, states):
return K.softmax(x), []
last_output, outputs, states = K.rnn(step, x, [], masking=False)
return outputs
else:
raise Exception('Cannot apply softmax to a tensor that is not 2D or 3D. ' +
'Here, ndim=' + str(ndim))
def softplus(x):
return K.softplus(x)
def relu(x, alpha=0., max_value=None):
return K.relu(x, alpha=alpha, max_value=max_value)
def tanh(x):
return K.tanh(x)
def sigmoid(x):
return K.sigmoid(x)
def hard_sigmoid(x):
return K.hard_sigmoid(x)
def linear(x):
'''
The function returns the variable that is passed in, so all types work
'''
return x
from .utils.generic_utils import get_from_module
def get(identifier):
return get_from_module(identifier, globals(), 'activation function')
|
Add support for time-distributed softmax.
|
Add support for time-distributed softmax.
|
Python
|
mit
|
daviddiazvico/keras,DeepGnosis/keras,kemaswill/keras,keras-team/keras,relh/keras,keras-team/keras,dolaameng/keras,kuza55/keras,nebw/keras
|
from __future__ import absolute_import
from . import backend as K
def softmax(x):
return K.softmax(x)
def softplus(x):
return K.softplus(x)
def relu(x, alpha=0., max_value=None):
return K.relu(x, alpha=alpha, max_value=max_value)
def tanh(x):
return K.tanh(x)
def sigmoid(x):
return K.sigmoid(x)
def hard_sigmoid(x):
return K.hard_sigmoid(x)
def linear(x):
'''
The function returns the variable that is passed in, so all types work
'''
return x
from .utils.generic_utils import get_from_module
def get(identifier):
return get_from_module(identifier, globals(), 'activation function')
Add support for time-distributed softmax.
|
from __future__ import absolute_import
from . import backend as K
def softmax(x):
ndim = K.ndim(x)
if ndim == 2:
return K.softmax(x)
elif ndim == 3:
# apply softmax to each timestep
def step(x, states):
return K.softmax(x), []
last_output, outputs, states = K.rnn(step, x, [], masking=False)
return outputs
else:
raise Exception('Cannot apply softmax to a tensor that is not 2D or 3D. ' +
'Here, ndim=' + str(ndim))
def softplus(x):
return K.softplus(x)
def relu(x, alpha=0., max_value=None):
return K.relu(x, alpha=alpha, max_value=max_value)
def tanh(x):
return K.tanh(x)
def sigmoid(x):
return K.sigmoid(x)
def hard_sigmoid(x):
return K.hard_sigmoid(x)
def linear(x):
'''
The function returns the variable that is passed in, so all types work
'''
return x
from .utils.generic_utils import get_from_module
def get(identifier):
return get_from_module(identifier, globals(), 'activation function')
|
<commit_before>from __future__ import absolute_import
from . import backend as K
def softmax(x):
return K.softmax(x)
def softplus(x):
return K.softplus(x)
def relu(x, alpha=0., max_value=None):
return K.relu(x, alpha=alpha, max_value=max_value)
def tanh(x):
return K.tanh(x)
def sigmoid(x):
return K.sigmoid(x)
def hard_sigmoid(x):
return K.hard_sigmoid(x)
def linear(x):
'''
The function returns the variable that is passed in, so all types work
'''
return x
from .utils.generic_utils import get_from_module
def get(identifier):
return get_from_module(identifier, globals(), 'activation function')
<commit_msg>Add support for time-distributed softmax.<commit_after>
|
from __future__ import absolute_import
from . import backend as K
def softmax(x):
ndim = K.ndim(x)
if ndim == 2:
return K.softmax(x)
elif ndim == 3:
# apply softmax to each timestep
def step(x, states):
return K.softmax(x), []
last_output, outputs, states = K.rnn(step, x, [], masking=False)
return outputs
else:
raise Exception('Cannot apply softmax to a tensor that is not 2D or 3D. ' +
'Here, ndim=' + str(ndim))
def softplus(x):
return K.softplus(x)
def relu(x, alpha=0., max_value=None):
return K.relu(x, alpha=alpha, max_value=max_value)
def tanh(x):
return K.tanh(x)
def sigmoid(x):
return K.sigmoid(x)
def hard_sigmoid(x):
return K.hard_sigmoid(x)
def linear(x):
'''
The function returns the variable that is passed in, so all types work
'''
return x
from .utils.generic_utils import get_from_module
def get(identifier):
return get_from_module(identifier, globals(), 'activation function')
|
from __future__ import absolute_import
from . import backend as K
def softmax(x):
return K.softmax(x)
def softplus(x):
return K.softplus(x)
def relu(x, alpha=0., max_value=None):
return K.relu(x, alpha=alpha, max_value=max_value)
def tanh(x):
return K.tanh(x)
def sigmoid(x):
return K.sigmoid(x)
def hard_sigmoid(x):
return K.hard_sigmoid(x)
def linear(x):
'''
The function returns the variable that is passed in, so all types work
'''
return x
from .utils.generic_utils import get_from_module
def get(identifier):
return get_from_module(identifier, globals(), 'activation function')
Add support for time-distributed softmax.from __future__ import absolute_import
from . import backend as K
def softmax(x):
ndim = K.ndim(x)
if ndim == 2:
return K.softmax(x)
elif ndim == 3:
# apply softmax to each timestep
def step(x, states):
return K.softmax(x), []
last_output, outputs, states = K.rnn(step, x, [], masking=False)
return outputs
else:
raise Exception('Cannot apply softmax to a tensor that is not 2D or 3D. ' +
'Here, ndim=' + str(ndim))
def softplus(x):
return K.softplus(x)
def relu(x, alpha=0., max_value=None):
return K.relu(x, alpha=alpha, max_value=max_value)
def tanh(x):
return K.tanh(x)
def sigmoid(x):
return K.sigmoid(x)
def hard_sigmoid(x):
return K.hard_sigmoid(x)
def linear(x):
'''
The function returns the variable that is passed in, so all types work
'''
return x
from .utils.generic_utils import get_from_module
def get(identifier):
return get_from_module(identifier, globals(), 'activation function')
|
<commit_before>from __future__ import absolute_import
from . import backend as K
def softmax(x):
return K.softmax(x)
def softplus(x):
return K.softplus(x)
def relu(x, alpha=0., max_value=None):
return K.relu(x, alpha=alpha, max_value=max_value)
def tanh(x):
return K.tanh(x)
def sigmoid(x):
return K.sigmoid(x)
def hard_sigmoid(x):
return K.hard_sigmoid(x)
def linear(x):
'''
The function returns the variable that is passed in, so all types work
'''
return x
from .utils.generic_utils import get_from_module
def get(identifier):
return get_from_module(identifier, globals(), 'activation function')
<commit_msg>Add support for time-distributed softmax.<commit_after>from __future__ import absolute_import
from . import backend as K
def softmax(x):
ndim = K.ndim(x)
if ndim == 2:
return K.softmax(x)
elif ndim == 3:
# apply softmax to each timestep
def step(x, states):
return K.softmax(x), []
last_output, outputs, states = K.rnn(step, x, [], masking=False)
return outputs
else:
raise Exception('Cannot apply softmax to a tensor that is not 2D or 3D. ' +
'Here, ndim=' + str(ndim))
def softplus(x):
return K.softplus(x)
def relu(x, alpha=0., max_value=None):
return K.relu(x, alpha=alpha, max_value=max_value)
def tanh(x):
return K.tanh(x)
def sigmoid(x):
return K.sigmoid(x)
def hard_sigmoid(x):
return K.hard_sigmoid(x)
def linear(x):
'''
The function returns the variable that is passed in, so all types work
'''
return x
from .utils.generic_utils import get_from_module
def get(identifier):
return get_from_module(identifier, globals(), 'activation function')
|
0c833808e9c761a98e11ffb4834b8344221db1d5
|
matador/commands/deployment/deploy_sql_script.py
|
matador/commands/deployment/deploy_sql_script.py
|
#!/usr/bin/env python
import os
import shutil
import subprocess
from matador.session import Session
from .deployment_command import DeploymentCommand
from matador.commands.run_sql_script import run_sql_script
class DeploySqlScript(DeploymentCommand):
def _execute(self):
scriptPath = self.args[0]
if len(os.path.dirname(scriptPath)) == 0:
script = os.path.join(Session.ticket_folder, scriptPath)
else:
repo_folder = Session.matador_repository_folder
scriptPath = os.path.join(repo_folder, self.args[0])
commit = self.args[1]
subprocess.run(
['git', '-C', repo_folder, 'checkout', commit],
stderr=subprocess.STDOUT,
stdout=open(os.devnull, 'w'),
check=True)
os.remove(scriptPath)
subprocess.run(
['git', '-C', repo_folder, 'checkout', scriptPath],
stderr=subprocess.STDOUT,
stdout=open(os.devnull, 'w'),
check=True)
script = shutil.copy(scriptPath, Session.ticket_folder)
run_sql_script(self._logger, script)
|
#!/usr/bin/env python
import os
import shutil
import subprocess
from matador.session import Session
from .deployment_command import DeploymentCommand
from matador.commands.run_sql_script import run_sql_script
class DeploySqlScript(DeploymentCommand):
def _execute(self):
scriptPath = self.args[0]
if len(os.path.dirname(scriptPath)) == 0:
script = os.path.join(Session.ticket_folder, scriptPath)
else:
repo_folder = Session.matador_repository_folder
scriptPath = os.path.join(repo_folder, self.args[0])
commit = self.args[1]
subprocess.run(
['git', '-C', repo_folder, 'checkout', commit],
stderr=subprocess.STDOUT,
stdout=open(os.devnull, 'w'),
check=True)
script = shutil.copy(scriptPath, Session.ticket_folder)
run_sql_script(self._logger, script)
|
Remove lines which deleted and checked out file for substitution
|
Remove lines which deleted and checked out file for substitution
|
Python
|
mit
|
Empiria/matador
|
#!/usr/bin/env python
import os
import shutil
import subprocess
from matador.session import Session
from .deployment_command import DeploymentCommand
from matador.commands.run_sql_script import run_sql_script
class DeploySqlScript(DeploymentCommand):
def _execute(self):
scriptPath = self.args[0]
if len(os.path.dirname(scriptPath)) == 0:
script = os.path.join(Session.ticket_folder, scriptPath)
else:
repo_folder = Session.matador_repository_folder
scriptPath = os.path.join(repo_folder, self.args[0])
commit = self.args[1]
subprocess.run(
['git', '-C', repo_folder, 'checkout', commit],
stderr=subprocess.STDOUT,
stdout=open(os.devnull, 'w'),
check=True)
os.remove(scriptPath)
subprocess.run(
['git', '-C', repo_folder, 'checkout', scriptPath],
stderr=subprocess.STDOUT,
stdout=open(os.devnull, 'w'),
check=True)
script = shutil.copy(scriptPath, Session.ticket_folder)
run_sql_script(self._logger, script)
Remove lines which deleted and checked out file for substitution
|
#!/usr/bin/env python
import os
import shutil
import subprocess
from matador.session import Session
from .deployment_command import DeploymentCommand
from matador.commands.run_sql_script import run_sql_script
class DeploySqlScript(DeploymentCommand):
def _execute(self):
scriptPath = self.args[0]
if len(os.path.dirname(scriptPath)) == 0:
script = os.path.join(Session.ticket_folder, scriptPath)
else:
repo_folder = Session.matador_repository_folder
scriptPath = os.path.join(repo_folder, self.args[0])
commit = self.args[1]
subprocess.run(
['git', '-C', repo_folder, 'checkout', commit],
stderr=subprocess.STDOUT,
stdout=open(os.devnull, 'w'),
check=True)
script = shutil.copy(scriptPath, Session.ticket_folder)
run_sql_script(self._logger, script)
|
<commit_before>#!/usr/bin/env python
import os
import shutil
import subprocess
from matador.session import Session
from .deployment_command import DeploymentCommand
from matador.commands.run_sql_script import run_sql_script
class DeploySqlScript(DeploymentCommand):
def _execute(self):
scriptPath = self.args[0]
if len(os.path.dirname(scriptPath)) == 0:
script = os.path.join(Session.ticket_folder, scriptPath)
else:
repo_folder = Session.matador_repository_folder
scriptPath = os.path.join(repo_folder, self.args[0])
commit = self.args[1]
subprocess.run(
['git', '-C', repo_folder, 'checkout', commit],
stderr=subprocess.STDOUT,
stdout=open(os.devnull, 'w'),
check=True)
os.remove(scriptPath)
subprocess.run(
['git', '-C', repo_folder, 'checkout', scriptPath],
stderr=subprocess.STDOUT,
stdout=open(os.devnull, 'w'),
check=True)
script = shutil.copy(scriptPath, Session.ticket_folder)
run_sql_script(self._logger, script)
<commit_msg>Remove lines which deleted and checked out file for substitution<commit_after>
|
#!/usr/bin/env python
import os
import shutil
import subprocess
from matador.session import Session
from .deployment_command import DeploymentCommand
from matador.commands.run_sql_script import run_sql_script
class DeploySqlScript(DeploymentCommand):
def _execute(self):
scriptPath = self.args[0]
if len(os.path.dirname(scriptPath)) == 0:
script = os.path.join(Session.ticket_folder, scriptPath)
else:
repo_folder = Session.matador_repository_folder
scriptPath = os.path.join(repo_folder, self.args[0])
commit = self.args[1]
subprocess.run(
['git', '-C', repo_folder, 'checkout', commit],
stderr=subprocess.STDOUT,
stdout=open(os.devnull, 'w'),
check=True)
script = shutil.copy(scriptPath, Session.ticket_folder)
run_sql_script(self._logger, script)
|
#!/usr/bin/env python
import os
import shutil
import subprocess
from matador.session import Session
from .deployment_command import DeploymentCommand
from matador.commands.run_sql_script import run_sql_script
class DeploySqlScript(DeploymentCommand):
def _execute(self):
scriptPath = self.args[0]
if len(os.path.dirname(scriptPath)) == 0:
script = os.path.join(Session.ticket_folder, scriptPath)
else:
repo_folder = Session.matador_repository_folder
scriptPath = os.path.join(repo_folder, self.args[0])
commit = self.args[1]
subprocess.run(
['git', '-C', repo_folder, 'checkout', commit],
stderr=subprocess.STDOUT,
stdout=open(os.devnull, 'w'),
check=True)
os.remove(scriptPath)
subprocess.run(
['git', '-C', repo_folder, 'checkout', scriptPath],
stderr=subprocess.STDOUT,
stdout=open(os.devnull, 'w'),
check=True)
script = shutil.copy(scriptPath, Session.ticket_folder)
run_sql_script(self._logger, script)
Remove lines which deleted and checked out file for substitution#!/usr/bin/env python
import os
import shutil
import subprocess
from matador.session import Session
from .deployment_command import DeploymentCommand
from matador.commands.run_sql_script import run_sql_script
class DeploySqlScript(DeploymentCommand):
def _execute(self):
scriptPath = self.args[0]
if len(os.path.dirname(scriptPath)) == 0:
script = os.path.join(Session.ticket_folder, scriptPath)
else:
repo_folder = Session.matador_repository_folder
scriptPath = os.path.join(repo_folder, self.args[0])
commit = self.args[1]
subprocess.run(
['git', '-C', repo_folder, 'checkout', commit],
stderr=subprocess.STDOUT,
stdout=open(os.devnull, 'w'),
check=True)
script = shutil.copy(scriptPath, Session.ticket_folder)
run_sql_script(self._logger, script)
|
<commit_before>#!/usr/bin/env python
import os
import shutil
import subprocess
from matador.session import Session
from .deployment_command import DeploymentCommand
from matador.commands.run_sql_script import run_sql_script
class DeploySqlScript(DeploymentCommand):
def _execute(self):
scriptPath = self.args[0]
if len(os.path.dirname(scriptPath)) == 0:
script = os.path.join(Session.ticket_folder, scriptPath)
else:
repo_folder = Session.matador_repository_folder
scriptPath = os.path.join(repo_folder, self.args[0])
commit = self.args[1]
subprocess.run(
['git', '-C', repo_folder, 'checkout', commit],
stderr=subprocess.STDOUT,
stdout=open(os.devnull, 'w'),
check=True)
os.remove(scriptPath)
subprocess.run(
['git', '-C', repo_folder, 'checkout', scriptPath],
stderr=subprocess.STDOUT,
stdout=open(os.devnull, 'w'),
check=True)
script = shutil.copy(scriptPath, Session.ticket_folder)
run_sql_script(self._logger, script)
<commit_msg>Remove lines which deleted and checked out file for substitution<commit_after>#!/usr/bin/env python
import os
import shutil
import subprocess
from matador.session import Session
from .deployment_command import DeploymentCommand
from matador.commands.run_sql_script import run_sql_script
class DeploySqlScript(DeploymentCommand):
def _execute(self):
scriptPath = self.args[0]
if len(os.path.dirname(scriptPath)) == 0:
script = os.path.join(Session.ticket_folder, scriptPath)
else:
repo_folder = Session.matador_repository_folder
scriptPath = os.path.join(repo_folder, self.args[0])
commit = self.args[1]
subprocess.run(
['git', '-C', repo_folder, 'checkout', commit],
stderr=subprocess.STDOUT,
stdout=open(os.devnull, 'w'),
check=True)
script = shutil.copy(scriptPath, Session.ticket_folder)
run_sql_script(self._logger, script)
|
05b2874c54658e451841637d534156c2407f0b0a
|
streak-podium/render.py
|
streak-podium/render.py
|
import matplotlib.pyplot as plt; plt.rcdefaults()
import numpy as np
def horizontal_bar(sorted_streaks, sort):
"""
Render a horizontal bar chart of streaks.
Values have already been sorted by sort.
"""
# Only extract those users & streaks for streaks that are non-zero:
users, streaks = zip(*[(user, streak.get(sort)) for user, streak
in sorted_streaks if streak.get(sort) > 0][::-1])
title = 'Top Contributors by {} Streak'.format(sort.title())
figure = plt.figure()
y_pos = np.arange(len(users)) # y-location of bars
print('y_pos', y_pos)
plt.barh(y_pos, streaks, facecolor='#ff9999', edgecolor='grey', align='center')
plt.yticks(y_pos, users)
plt.xlim([0, max(streaks) + 0.5]) # x-limits a bit wider at right
plt.subplots_adjust(left=0.2) # Wider left margin
plt.title(title)
for format in ('png', 'svg'):
figure.savefig('temp/top_{}.{}'.format(sort, format), format=format)
|
import matplotlib.pyplot as plt
import numpy as np
def horizontal_bar(sorted_streaks, sort):
"""
Render a horizontal bar chart of streaks.
Values have already been sorted by sort.
"""
# Only extract those users & streaks for streaks that are non-zero:
users, streaks = zip(*[(user, streak.get(sort)) for user, streak
in sorted_streaks if streak.get(sort) > 0][::-1])
title = 'Top Contributors by {} Streak'.format(sort.title())
figure = plt.figure()
y_pos = np.arange(len(users)) # y-location of bars
print('y_pos', y_pos)
plt.barh(y_pos, streaks, facecolor='#ff9999', edgecolor='grey', align='center')
plt.yticks(y_pos, users)
plt.xlim([0, max(streaks) + 0.5]) # x-limits a bit wider at right
plt.subplots_adjust(left=0.2) # Wider left margin
plt.title(title)
for format in ('png', 'svg'):
figure.savefig('temp/top_{}.{}'.format(sort, format), format=format)
|
Remove weird matplot lib defaults thing that did nothing
|
Remove weird matplot lib defaults thing that did nothing
|
Python
|
mit
|
jollyra/hubot-streak-podium,supermitch/streak-podium,jollyra/hubot-commit-streak,jollyra/hubot-streak-podium,jollyra/hubot-commit-streak,supermitch/streak-podium
|
import matplotlib.pyplot as plt; plt.rcdefaults()
import numpy as np
def horizontal_bar(sorted_streaks, sort):
"""
Render a horizontal bar chart of streaks.
Values have already been sorted by sort.
"""
# Only extract those users & streaks for streaks that are non-zero:
users, streaks = zip(*[(user, streak.get(sort)) for user, streak
in sorted_streaks if streak.get(sort) > 0][::-1])
title = 'Top Contributors by {} Streak'.format(sort.title())
figure = plt.figure()
y_pos = np.arange(len(users)) # y-location of bars
print('y_pos', y_pos)
plt.barh(y_pos, streaks, facecolor='#ff9999', edgecolor='grey', align='center')
plt.yticks(y_pos, users)
plt.xlim([0, max(streaks) + 0.5]) # x-limits a bit wider at right
plt.subplots_adjust(left=0.2) # Wider left margin
plt.title(title)
for format in ('png', 'svg'):
figure.savefig('temp/top_{}.{}'.format(sort, format), format=format)
Remove weird matplot lib defaults thing that did nothing
|
import matplotlib.pyplot as plt
import numpy as np
def horizontal_bar(sorted_streaks, sort):
"""
Render a horizontal bar chart of streaks.
Values have already been sorted by sort.
"""
# Only extract those users & streaks for streaks that are non-zero:
users, streaks = zip(*[(user, streak.get(sort)) for user, streak
in sorted_streaks if streak.get(sort) > 0][::-1])
title = 'Top Contributors by {} Streak'.format(sort.title())
figure = plt.figure()
y_pos = np.arange(len(users)) # y-location of bars
print('y_pos', y_pos)
plt.barh(y_pos, streaks, facecolor='#ff9999', edgecolor='grey', align='center')
plt.yticks(y_pos, users)
plt.xlim([0, max(streaks) + 0.5]) # x-limits a bit wider at right
plt.subplots_adjust(left=0.2) # Wider left margin
plt.title(title)
for format in ('png', 'svg'):
figure.savefig('temp/top_{}.{}'.format(sort, format), format=format)
|
<commit_before>import matplotlib.pyplot as plt; plt.rcdefaults()
import numpy as np
def horizontal_bar(sorted_streaks, sort):
"""
Render a horizontal bar chart of streaks.
Values have already been sorted by sort.
"""
# Only extract those users & streaks for streaks that are non-zero:
users, streaks = zip(*[(user, streak.get(sort)) for user, streak
in sorted_streaks if streak.get(sort) > 0][::-1])
title = 'Top Contributors by {} Streak'.format(sort.title())
figure = plt.figure()
y_pos = np.arange(len(users)) # y-location of bars
print('y_pos', y_pos)
plt.barh(y_pos, streaks, facecolor='#ff9999', edgecolor='grey', align='center')
plt.yticks(y_pos, users)
plt.xlim([0, max(streaks) + 0.5]) # x-limits a bit wider at right
plt.subplots_adjust(left=0.2) # Wider left margin
plt.title(title)
for format in ('png', 'svg'):
figure.savefig('temp/top_{}.{}'.format(sort, format), format=format)
<commit_msg>Remove weird matplot lib defaults thing that did nothing<commit_after>
|
import matplotlib.pyplot as plt
import numpy as np
def horizontal_bar(sorted_streaks, sort):
"""
Render a horizontal bar chart of streaks.
Values have already been sorted by sort.
"""
# Only extract those users & streaks for streaks that are non-zero:
users, streaks = zip(*[(user, streak.get(sort)) for user, streak
in sorted_streaks if streak.get(sort) > 0][::-1])
title = 'Top Contributors by {} Streak'.format(sort.title())
figure = plt.figure()
y_pos = np.arange(len(users)) # y-location of bars
print('y_pos', y_pos)
plt.barh(y_pos, streaks, facecolor='#ff9999', edgecolor='grey', align='center')
plt.yticks(y_pos, users)
plt.xlim([0, max(streaks) + 0.5]) # x-limits a bit wider at right
plt.subplots_adjust(left=0.2) # Wider left margin
plt.title(title)
for format in ('png', 'svg'):
figure.savefig('temp/top_{}.{}'.format(sort, format), format=format)
|
import matplotlib.pyplot as plt; plt.rcdefaults()
import numpy as np
def horizontal_bar(sorted_streaks, sort):
"""
Render a horizontal bar chart of streaks.
Values have already been sorted by sort.
"""
# Only extract those users & streaks for streaks that are non-zero:
users, streaks = zip(*[(user, streak.get(sort)) for user, streak
in sorted_streaks if streak.get(sort) > 0][::-1])
title = 'Top Contributors by {} Streak'.format(sort.title())
figure = plt.figure()
y_pos = np.arange(len(users)) # y-location of bars
print('y_pos', y_pos)
plt.barh(y_pos, streaks, facecolor='#ff9999', edgecolor='grey', align='center')
plt.yticks(y_pos, users)
plt.xlim([0, max(streaks) + 0.5]) # x-limits a bit wider at right
plt.subplots_adjust(left=0.2) # Wider left margin
plt.title(title)
for format in ('png', 'svg'):
figure.savefig('temp/top_{}.{}'.format(sort, format), format=format)
Remove weird matplot lib defaults thing that did nothingimport matplotlib.pyplot as plt
import numpy as np
def horizontal_bar(sorted_streaks, sort):
"""
Render a horizontal bar chart of streaks.
Values have already been sorted by sort.
"""
# Only extract those users & streaks for streaks that are non-zero:
users, streaks = zip(*[(user, streak.get(sort)) for user, streak
in sorted_streaks if streak.get(sort) > 0][::-1])
title = 'Top Contributors by {} Streak'.format(sort.title())
figure = plt.figure()
y_pos = np.arange(len(users)) # y-location of bars
print('y_pos', y_pos)
plt.barh(y_pos, streaks, facecolor='#ff9999', edgecolor='grey', align='center')
plt.yticks(y_pos, users)
plt.xlim([0, max(streaks) + 0.5]) # x-limits a bit wider at right
plt.subplots_adjust(left=0.2) # Wider left margin
plt.title(title)
for format in ('png', 'svg'):
figure.savefig('temp/top_{}.{}'.format(sort, format), format=format)
|
<commit_before>import matplotlib.pyplot as plt; plt.rcdefaults()
import numpy as np
def horizontal_bar(sorted_streaks, sort):
"""
Render a horizontal bar chart of streaks.
Values have already been sorted by sort.
"""
# Only extract those users & streaks for streaks that are non-zero:
users, streaks = zip(*[(user, streak.get(sort)) for user, streak
in sorted_streaks if streak.get(sort) > 0][::-1])
title = 'Top Contributors by {} Streak'.format(sort.title())
figure = plt.figure()
y_pos = np.arange(len(users)) # y-location of bars
print('y_pos', y_pos)
plt.barh(y_pos, streaks, facecolor='#ff9999', edgecolor='grey', align='center')
plt.yticks(y_pos, users)
plt.xlim([0, max(streaks) + 0.5]) # x-limits a bit wider at right
plt.subplots_adjust(left=0.2) # Wider left margin
plt.title(title)
for format in ('png', 'svg'):
figure.savefig('temp/top_{}.{}'.format(sort, format), format=format)
<commit_msg>Remove weird matplot lib defaults thing that did nothing<commit_after>import matplotlib.pyplot as plt
import numpy as np
def horizontal_bar(sorted_streaks, sort):
"""
Render a horizontal bar chart of streaks.
Values have already been sorted by sort.
"""
# Only extract those users & streaks for streaks that are non-zero:
users, streaks = zip(*[(user, streak.get(sort)) for user, streak
in sorted_streaks if streak.get(sort) > 0][::-1])
title = 'Top Contributors by {} Streak'.format(sort.title())
figure = plt.figure()
y_pos = np.arange(len(users)) # y-location of bars
print('y_pos', y_pos)
plt.barh(y_pos, streaks, facecolor='#ff9999', edgecolor='grey', align='center')
plt.yticks(y_pos, users)
plt.xlim([0, max(streaks) + 0.5]) # x-limits a bit wider at right
plt.subplots_adjust(left=0.2) # Wider left margin
plt.title(title)
for format in ('png', 'svg'):
figure.savefig('temp/top_{}.{}'.format(sort, format), format=format)
|
242f27f943a107bf7dd2a472f08a71a8382f6467
|
mopidy/__init__.py
|
mopidy/__init__.py
|
import os
import sys
if not (2, 6) <= sys.version_info < (3,):
sys.exit(u'Mopidy requires Python >= 2.6, < 3')
VERSION = (0, 4, 0)
def is_in_git_repo():
git_dir = os.path.abspath(os.path.join(
os.path.dirname(__file__), '../.git'))
return os.path.exists(git_dir)
def get_git_version():
if not is_in_git_repo():
return None
git_version = os.popen('git describe').read().strip()
if git_version.startswith('v'):
git_version = git_version[1:]
return git_version
def get_plain_version():
return '.'.join(map(str, VERSION))
def get_version():
if is_in_git_repo():
return get_git_version()
else:
return get_plain_version()
class MopidyException(Exception):
def __init__(self, message, *args, **kwargs):
super(MopidyException, self).__init__(message, *args, **kwargs)
self._message = message
@property
def message(self):
"""Reimplement message field that was deprecated in Python 2.6"""
return self._message
@message.setter
def message(self, message):
self._message = message
class SettingsError(MopidyException):
pass
class OptionalDependencyError(MopidyException):
pass
from mopidy import settings as default_settings_module
from mopidy.utils.settings import SettingsProxy
settings = SettingsProxy(default_settings_module)
|
import sys
if not (2, 6) <= sys.version_info < (3,):
sys.exit(u'Mopidy requires Python >= 2.6, < 3')
from subprocess import PIPE, Popen
VERSION = (0, 4, 0)
def get_git_version():
process = Popen(['git', 'describe'], stdout=PIPE)
if process.wait() != 0:
raise Exception|('Execution of "git describe" failed')
version = process.stdout.read().strip()
if version.startswith('v'):
version = version[1:]
return version
def get_plain_version():
return '.'.join(map(str, VERSION))
def get_version():
try:
return get_git_version()
except Exception:
return get_plain_version()
class MopidyException(Exception):
def __init__(self, message, *args, **kwargs):
super(MopidyException, self).__init__(message, *args, **kwargs)
self._message = message
@property
def message(self):
"""Reimplement message field that was deprecated in Python 2.6"""
return self._message
@message.setter
def message(self, message):
self._message = message
class SettingsError(MopidyException):
pass
class OptionalDependencyError(MopidyException):
pass
from mopidy import settings as default_settings_module
from mopidy.utils.settings import SettingsProxy
settings = SettingsProxy(default_settings_module)
|
Use subprocess instead of os.popen
|
Use subprocess instead of os.popen
|
Python
|
apache-2.0
|
ZenithDK/mopidy,bacontext/mopidy,adamcik/mopidy,kingosticks/mopidy,jcass77/mopidy,jmarsik/mopidy,bacontext/mopidy,mopidy/mopidy,hkariti/mopidy,bencevans/mopidy,ZenithDK/mopidy,dbrgn/mopidy,hkariti/mopidy,SuperStarPL/mopidy,jodal/mopidy,adamcik/mopidy,jcass77/mopidy,jodal/mopidy,vrs01/mopidy,ali/mopidy,diandiankan/mopidy,swak/mopidy,rawdlite/mopidy,hkariti/mopidy,ali/mopidy,bencevans/mopidy,diandiankan/mopidy,liamw9534/mopidy,rawdlite/mopidy,tkem/mopidy,swak/mopidy,glogiotatidis/mopidy,quartz55/mopidy,bacontext/mopidy,bencevans/mopidy,dbrgn/mopidy,mokieyue/mopidy,jmarsik/mopidy,woutervanwijk/mopidy,mokieyue/mopidy,priestd09/mopidy,pacificIT/mopidy,mopidy/mopidy,jodal/mopidy,swak/mopidy,diandiankan/mopidy,SuperStarPL/mopidy,mokieyue/mopidy,ali/mopidy,tkem/mopidy,abarisain/mopidy,woutervanwijk/mopidy,adamcik/mopidy,ZenithDK/mopidy,vrs01/mopidy,tkem/mopidy,glogiotatidis/mopidy,ZenithDK/mopidy,jcass77/mopidy,abarisain/mopidy,glogiotatidis/mopidy,SuperStarPL/mopidy,pacificIT/mopidy,tkem/mopidy,dbrgn/mopidy,priestd09/mopidy,swak/mopidy,kingosticks/mopidy,bencevans/mopidy,diandiankan/mopidy,SuperStarPL/mopidy,liamw9534/mopidy,glogiotatidis/mopidy,pacificIT/mopidy,hkariti/mopidy,rawdlite/mopidy,quartz55/mopidy,ali/mopidy,rawdlite/mopidy,kingosticks/mopidy,bacontext/mopidy,mokieyue/mopidy,mopidy/mopidy,jmarsik/mopidy,vrs01/mopidy,pacificIT/mopidy,dbrgn/mopidy,jmarsik/mopidy,priestd09/mopidy,quartz55/mopidy,quartz55/mopidy,vrs01/mopidy
|
import os
import sys
if not (2, 6) <= sys.version_info < (3,):
sys.exit(u'Mopidy requires Python >= 2.6, < 3')
VERSION = (0, 4, 0)
def is_in_git_repo():
git_dir = os.path.abspath(os.path.join(
os.path.dirname(__file__), '../.git'))
return os.path.exists(git_dir)
def get_git_version():
if not is_in_git_repo():
return None
git_version = os.popen('git describe').read().strip()
if git_version.startswith('v'):
git_version = git_version[1:]
return git_version
def get_plain_version():
return '.'.join(map(str, VERSION))
def get_version():
if is_in_git_repo():
return get_git_version()
else:
return get_plain_version()
class MopidyException(Exception):
def __init__(self, message, *args, **kwargs):
super(MopidyException, self).__init__(message, *args, **kwargs)
self._message = message
@property
def message(self):
"""Reimplement message field that was deprecated in Python 2.6"""
return self._message
@message.setter
def message(self, message):
self._message = message
class SettingsError(MopidyException):
pass
class OptionalDependencyError(MopidyException):
pass
from mopidy import settings as default_settings_module
from mopidy.utils.settings import SettingsProxy
settings = SettingsProxy(default_settings_module)
Use subprocess instead of os.popen
|
import sys
if not (2, 6) <= sys.version_info < (3,):
sys.exit(u'Mopidy requires Python >= 2.6, < 3')
from subprocess import PIPE, Popen
VERSION = (0, 4, 0)
def get_git_version():
process = Popen(['git', 'describe'], stdout=PIPE)
if process.wait() != 0:
raise Exception|('Execution of "git describe" failed')
version = process.stdout.read().strip()
if version.startswith('v'):
version = version[1:]
return version
def get_plain_version():
return '.'.join(map(str, VERSION))
def get_version():
try:
return get_git_version()
except Exception:
return get_plain_version()
class MopidyException(Exception):
def __init__(self, message, *args, **kwargs):
super(MopidyException, self).__init__(message, *args, **kwargs)
self._message = message
@property
def message(self):
"""Reimplement message field that was deprecated in Python 2.6"""
return self._message
@message.setter
def message(self, message):
self._message = message
class SettingsError(MopidyException):
pass
class OptionalDependencyError(MopidyException):
pass
from mopidy import settings as default_settings_module
from mopidy.utils.settings import SettingsProxy
settings = SettingsProxy(default_settings_module)
|
<commit_before>import os
import sys
if not (2, 6) <= sys.version_info < (3,):
sys.exit(u'Mopidy requires Python >= 2.6, < 3')
VERSION = (0, 4, 0)
def is_in_git_repo():
git_dir = os.path.abspath(os.path.join(
os.path.dirname(__file__), '../.git'))
return os.path.exists(git_dir)
def get_git_version():
if not is_in_git_repo():
return None
git_version = os.popen('git describe').read().strip()
if git_version.startswith('v'):
git_version = git_version[1:]
return git_version
def get_plain_version():
return '.'.join(map(str, VERSION))
def get_version():
if is_in_git_repo():
return get_git_version()
else:
return get_plain_version()
class MopidyException(Exception):
def __init__(self, message, *args, **kwargs):
super(MopidyException, self).__init__(message, *args, **kwargs)
self._message = message
@property
def message(self):
"""Reimplement message field that was deprecated in Python 2.6"""
return self._message
@message.setter
def message(self, message):
self._message = message
class SettingsError(MopidyException):
pass
class OptionalDependencyError(MopidyException):
pass
from mopidy import settings as default_settings_module
from mopidy.utils.settings import SettingsProxy
settings = SettingsProxy(default_settings_module)
<commit_msg>Use subprocess instead of os.popen<commit_after>
|
import sys
if not (2, 6) <= sys.version_info < (3,):
sys.exit(u'Mopidy requires Python >= 2.6, < 3')
from subprocess import PIPE, Popen
VERSION = (0, 4, 0)
def get_git_version():
process = Popen(['git', 'describe'], stdout=PIPE)
if process.wait() != 0:
raise Exception|('Execution of "git describe" failed')
version = process.stdout.read().strip()
if version.startswith('v'):
version = version[1:]
return version
def get_plain_version():
return '.'.join(map(str, VERSION))
def get_version():
try:
return get_git_version()
except Exception:
return get_plain_version()
class MopidyException(Exception):
def __init__(self, message, *args, **kwargs):
super(MopidyException, self).__init__(message, *args, **kwargs)
self._message = message
@property
def message(self):
"""Reimplement message field that was deprecated in Python 2.6"""
return self._message
@message.setter
def message(self, message):
self._message = message
class SettingsError(MopidyException):
pass
class OptionalDependencyError(MopidyException):
pass
from mopidy import settings as default_settings_module
from mopidy.utils.settings import SettingsProxy
settings = SettingsProxy(default_settings_module)
|
import os
import sys
if not (2, 6) <= sys.version_info < (3,):
sys.exit(u'Mopidy requires Python >= 2.6, < 3')
VERSION = (0, 4, 0)
def is_in_git_repo():
git_dir = os.path.abspath(os.path.join(
os.path.dirname(__file__), '../.git'))
return os.path.exists(git_dir)
def get_git_version():
if not is_in_git_repo():
return None
git_version = os.popen('git describe').read().strip()
if git_version.startswith('v'):
git_version = git_version[1:]
return git_version
def get_plain_version():
return '.'.join(map(str, VERSION))
def get_version():
if is_in_git_repo():
return get_git_version()
else:
return get_plain_version()
class MopidyException(Exception):
def __init__(self, message, *args, **kwargs):
super(MopidyException, self).__init__(message, *args, **kwargs)
self._message = message
@property
def message(self):
"""Reimplement message field that was deprecated in Python 2.6"""
return self._message
@message.setter
def message(self, message):
self._message = message
class SettingsError(MopidyException):
pass
class OptionalDependencyError(MopidyException):
pass
from mopidy import settings as default_settings_module
from mopidy.utils.settings import SettingsProxy
settings = SettingsProxy(default_settings_module)
Use subprocess instead of os.popenimport sys
if not (2, 6) <= sys.version_info < (3,):
sys.exit(u'Mopidy requires Python >= 2.6, < 3')
from subprocess import PIPE, Popen
VERSION = (0, 4, 0)
def get_git_version():
process = Popen(['git', 'describe'], stdout=PIPE)
if process.wait() != 0:
raise Exception|('Execution of "git describe" failed')
version = process.stdout.read().strip()
if version.startswith('v'):
version = version[1:]
return version
def get_plain_version():
return '.'.join(map(str, VERSION))
def get_version():
try:
return get_git_version()
except Exception:
return get_plain_version()
class MopidyException(Exception):
def __init__(self, message, *args, **kwargs):
super(MopidyException, self).__init__(message, *args, **kwargs)
self._message = message
@property
def message(self):
"""Reimplement message field that was deprecated in Python 2.6"""
return self._message
@message.setter
def message(self, message):
self._message = message
class SettingsError(MopidyException):
pass
class OptionalDependencyError(MopidyException):
pass
from mopidy import settings as default_settings_module
from mopidy.utils.settings import SettingsProxy
settings = SettingsProxy(default_settings_module)
|
<commit_before>import os
import sys
if not (2, 6) <= sys.version_info < (3,):
sys.exit(u'Mopidy requires Python >= 2.6, < 3')
VERSION = (0, 4, 0)
def is_in_git_repo():
git_dir = os.path.abspath(os.path.join(
os.path.dirname(__file__), '../.git'))
return os.path.exists(git_dir)
def get_git_version():
if not is_in_git_repo():
return None
git_version = os.popen('git describe').read().strip()
if git_version.startswith('v'):
git_version = git_version[1:]
return git_version
def get_plain_version():
return '.'.join(map(str, VERSION))
def get_version():
if is_in_git_repo():
return get_git_version()
else:
return get_plain_version()
class MopidyException(Exception):
def __init__(self, message, *args, **kwargs):
super(MopidyException, self).__init__(message, *args, **kwargs)
self._message = message
@property
def message(self):
"""Reimplement message field that was deprecated in Python 2.6"""
return self._message
@message.setter
def message(self, message):
self._message = message
class SettingsError(MopidyException):
pass
class OptionalDependencyError(MopidyException):
pass
from mopidy import settings as default_settings_module
from mopidy.utils.settings import SettingsProxy
settings = SettingsProxy(default_settings_module)
<commit_msg>Use subprocess instead of os.popen<commit_after>import sys
if not (2, 6) <= sys.version_info < (3,):
sys.exit(u'Mopidy requires Python >= 2.6, < 3')
from subprocess import PIPE, Popen
VERSION = (0, 4, 0)
def get_git_version():
process = Popen(['git', 'describe'], stdout=PIPE)
if process.wait() != 0:
raise Exception|('Execution of "git describe" failed')
version = process.stdout.read().strip()
if version.startswith('v'):
version = version[1:]
return version
def get_plain_version():
return '.'.join(map(str, VERSION))
def get_version():
try:
return get_git_version()
except Exception:
return get_plain_version()
class MopidyException(Exception):
def __init__(self, message, *args, **kwargs):
super(MopidyException, self).__init__(message, *args, **kwargs)
self._message = message
@property
def message(self):
"""Reimplement message field that was deprecated in Python 2.6"""
return self._message
@message.setter
def message(self, message):
self._message = message
class SettingsError(MopidyException):
pass
class OptionalDependencyError(MopidyException):
pass
from mopidy import settings as default_settings_module
from mopidy.utils.settings import SettingsProxy
settings = SettingsProxy(default_settings_module)
|
da238e29efcf8ab4cf68d29d87869031d74d761a
|
hooks/pre_gen_project.py
|
hooks/pre_gen_project.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import logging
import re
import sys
logging.basicConfig(level=logging.DEBUG)
logger = logging.getLogger('pre_gen_project')
PLUGIN_REGEX = r'^[_a-zA-Z][_a-zA-Z0-9]+$'
plugin_name = '{{cookiecutter.plugin_name}}'
if not re.match(PLUGIN_REGEX, plugin_name):
logger.error('Invalid value for plugin_name "{}"'.format(plugin_name))
sys.exit(1)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import logging
import re
import sys
logging.basicConfig(level=logging.DEBUG)
logger = logging.getLogger('pre_gen_project')
PLUGIN_REGEX = r'^(?!pytest)[_a-zA-Z][_a-zA-Z0-9]+$'
plugin_name = '{{cookiecutter.plugin_name}}'
if not re.match(PLUGIN_REGEX, plugin_name):
logger.error('Invalid value for plugin_name "{}"'.format(plugin_name))
logger.debug('Please do not prepend plugin_name with "pytest"!')
sys.exit(1)
|
Make sure the user does not prepend plugin_name with pytest
|
Make sure the user does not prepend plugin_name with pytest
|
Python
|
mit
|
pytest-dev/cookiecutter-pytest-plugin
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import logging
import re
import sys
logging.basicConfig(level=logging.DEBUG)
logger = logging.getLogger('pre_gen_project')
PLUGIN_REGEX = r'^[_a-zA-Z][_a-zA-Z0-9]+$'
plugin_name = '{{cookiecutter.plugin_name}}'
if not re.match(PLUGIN_REGEX, plugin_name):
logger.error('Invalid value for plugin_name "{}"'.format(plugin_name))
sys.exit(1)
Make sure the user does not prepend plugin_name with pytest
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import logging
import re
import sys
logging.basicConfig(level=logging.DEBUG)
logger = logging.getLogger('pre_gen_project')
PLUGIN_REGEX = r'^(?!pytest)[_a-zA-Z][_a-zA-Z0-9]+$'
plugin_name = '{{cookiecutter.plugin_name}}'
if not re.match(PLUGIN_REGEX, plugin_name):
logger.error('Invalid value for plugin_name "{}"'.format(plugin_name))
logger.debug('Please do not prepend plugin_name with "pytest"!')
sys.exit(1)
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import logging
import re
import sys
logging.basicConfig(level=logging.DEBUG)
logger = logging.getLogger('pre_gen_project')
PLUGIN_REGEX = r'^[_a-zA-Z][_a-zA-Z0-9]+$'
plugin_name = '{{cookiecutter.plugin_name}}'
if not re.match(PLUGIN_REGEX, plugin_name):
logger.error('Invalid value for plugin_name "{}"'.format(plugin_name))
sys.exit(1)
<commit_msg>Make sure the user does not prepend plugin_name with pytest<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import logging
import re
import sys
logging.basicConfig(level=logging.DEBUG)
logger = logging.getLogger('pre_gen_project')
PLUGIN_REGEX = r'^(?!pytest)[_a-zA-Z][_a-zA-Z0-9]+$'
plugin_name = '{{cookiecutter.plugin_name}}'
if not re.match(PLUGIN_REGEX, plugin_name):
logger.error('Invalid value for plugin_name "{}"'.format(plugin_name))
logger.debug('Please do not prepend plugin_name with "pytest"!')
sys.exit(1)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import logging
import re
import sys
logging.basicConfig(level=logging.DEBUG)
logger = logging.getLogger('pre_gen_project')
PLUGIN_REGEX = r'^[_a-zA-Z][_a-zA-Z0-9]+$'
plugin_name = '{{cookiecutter.plugin_name}}'
if not re.match(PLUGIN_REGEX, plugin_name):
logger.error('Invalid value for plugin_name "{}"'.format(plugin_name))
sys.exit(1)
Make sure the user does not prepend plugin_name with pytest#!/usr/bin/env python
# -*- coding: utf-8 -*-
import logging
import re
import sys
logging.basicConfig(level=logging.DEBUG)
logger = logging.getLogger('pre_gen_project')
PLUGIN_REGEX = r'^(?!pytest)[_a-zA-Z][_a-zA-Z0-9]+$'
plugin_name = '{{cookiecutter.plugin_name}}'
if not re.match(PLUGIN_REGEX, plugin_name):
logger.error('Invalid value for plugin_name "{}"'.format(plugin_name))
logger.debug('Please do not prepend plugin_name with "pytest"!')
sys.exit(1)
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import logging
import re
import sys
logging.basicConfig(level=logging.DEBUG)
logger = logging.getLogger('pre_gen_project')
PLUGIN_REGEX = r'^[_a-zA-Z][_a-zA-Z0-9]+$'
plugin_name = '{{cookiecutter.plugin_name}}'
if not re.match(PLUGIN_REGEX, plugin_name):
logger.error('Invalid value for plugin_name "{}"'.format(plugin_name))
sys.exit(1)
<commit_msg>Make sure the user does not prepend plugin_name with pytest<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import logging
import re
import sys
logging.basicConfig(level=logging.DEBUG)
logger = logging.getLogger('pre_gen_project')
PLUGIN_REGEX = r'^(?!pytest)[_a-zA-Z][_a-zA-Z0-9]+$'
plugin_name = '{{cookiecutter.plugin_name}}'
if not re.match(PLUGIN_REGEX, plugin_name):
logger.error('Invalid value for plugin_name "{}"'.format(plugin_name))
logger.debug('Please do not prepend plugin_name with "pytest"!')
sys.exit(1)
|
8244c811d294f1f5b75d9ad1d9eec4217aed8882
|
src/python/dependencies.py
|
src/python/dependencies.py
|
# Lint as: python3
# Copyright 2020 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Setup configuration of TensorFlow Cloud client-side library."""
def make_required_install_packages():
return [
"absl-py",
"docker",
"google-api-python-client",
"google-auth",
"google-cloud-storage",
"keras-tuner",
"tensorboard>=2.3.0",
"tensorflow>=1.15.0,<3.0",
"tensorflow_datasets<3.1.0",
"tensorflow_transform",
]
def make_required_test_packages():
return [
"absl-py",
"flake8",
"mock",
"numpy",
"nbconvert",
]
|
# Lint as: python3
# Copyright 2020 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Setup configuration of TensorFlow Cloud client-side library."""
def make_required_install_packages():
return [
"absl-py",
"docker",
"google-api-python-client",
"google-auth",
"google-cloud-storage",
"keras-tuner",
"tensorboard>=2.3.0",
"tensorflow>=1.15.0,<3.0",
"tensorflow_datasets",
"tensorflow_transform",
]
def make_required_test_packages():
return [
"absl-py",
"flake8",
"mock",
"numpy",
"nbconvert",
]
|
Remove the upper bound constraint on tensorflow_datasets dependency.
|
Remove the upper bound constraint on tensorflow_datasets dependency.
PiperOrigin-RevId: 371241485
|
Python
|
apache-2.0
|
tensorflow/cloud,tensorflow/cloud
|
# Lint as: python3
# Copyright 2020 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Setup configuration of TensorFlow Cloud client-side library."""
def make_required_install_packages():
return [
"absl-py",
"docker",
"google-api-python-client",
"google-auth",
"google-cloud-storage",
"keras-tuner",
"tensorboard>=2.3.0",
"tensorflow>=1.15.0,<3.0",
"tensorflow_datasets<3.1.0",
"tensorflow_transform",
]
def make_required_test_packages():
return [
"absl-py",
"flake8",
"mock",
"numpy",
"nbconvert",
]
Remove the upper bound constraint on tensorflow_datasets dependency.
PiperOrigin-RevId: 371241485
|
# Lint as: python3
# Copyright 2020 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Setup configuration of TensorFlow Cloud client-side library."""
def make_required_install_packages():
return [
"absl-py",
"docker",
"google-api-python-client",
"google-auth",
"google-cloud-storage",
"keras-tuner",
"tensorboard>=2.3.0",
"tensorflow>=1.15.0,<3.0",
"tensorflow_datasets",
"tensorflow_transform",
]
def make_required_test_packages():
return [
"absl-py",
"flake8",
"mock",
"numpy",
"nbconvert",
]
|
<commit_before># Lint as: python3
# Copyright 2020 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Setup configuration of TensorFlow Cloud client-side library."""
def make_required_install_packages():
return [
"absl-py",
"docker",
"google-api-python-client",
"google-auth",
"google-cloud-storage",
"keras-tuner",
"tensorboard>=2.3.0",
"tensorflow>=1.15.0,<3.0",
"tensorflow_datasets<3.1.0",
"tensorflow_transform",
]
def make_required_test_packages():
return [
"absl-py",
"flake8",
"mock",
"numpy",
"nbconvert",
]
<commit_msg>Remove the upper bound constraint on tensorflow_datasets dependency.
PiperOrigin-RevId: 371241485<commit_after>
|
# Lint as: python3
# Copyright 2020 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Setup configuration of TensorFlow Cloud client-side library."""
def make_required_install_packages():
return [
"absl-py",
"docker",
"google-api-python-client",
"google-auth",
"google-cloud-storage",
"keras-tuner",
"tensorboard>=2.3.0",
"tensorflow>=1.15.0,<3.0",
"tensorflow_datasets",
"tensorflow_transform",
]
def make_required_test_packages():
return [
"absl-py",
"flake8",
"mock",
"numpy",
"nbconvert",
]
|
# Lint as: python3
# Copyright 2020 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Setup configuration of TensorFlow Cloud client-side library."""
def make_required_install_packages():
return [
"absl-py",
"docker",
"google-api-python-client",
"google-auth",
"google-cloud-storage",
"keras-tuner",
"tensorboard>=2.3.0",
"tensorflow>=1.15.0,<3.0",
"tensorflow_datasets<3.1.0",
"tensorflow_transform",
]
def make_required_test_packages():
return [
"absl-py",
"flake8",
"mock",
"numpy",
"nbconvert",
]
Remove the upper bound constraint on tensorflow_datasets dependency.
PiperOrigin-RevId: 371241485# Lint as: python3
# Copyright 2020 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Setup configuration of TensorFlow Cloud client-side library."""
def make_required_install_packages():
return [
"absl-py",
"docker",
"google-api-python-client",
"google-auth",
"google-cloud-storage",
"keras-tuner",
"tensorboard>=2.3.0",
"tensorflow>=1.15.0,<3.0",
"tensorflow_datasets",
"tensorflow_transform",
]
def make_required_test_packages():
return [
"absl-py",
"flake8",
"mock",
"numpy",
"nbconvert",
]
|
<commit_before># Lint as: python3
# Copyright 2020 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Setup configuration of TensorFlow Cloud client-side library."""
def make_required_install_packages():
return [
"absl-py",
"docker",
"google-api-python-client",
"google-auth",
"google-cloud-storage",
"keras-tuner",
"tensorboard>=2.3.0",
"tensorflow>=1.15.0,<3.0",
"tensorflow_datasets<3.1.0",
"tensorflow_transform",
]
def make_required_test_packages():
return [
"absl-py",
"flake8",
"mock",
"numpy",
"nbconvert",
]
<commit_msg>Remove the upper bound constraint on tensorflow_datasets dependency.
PiperOrigin-RevId: 371241485<commit_after># Lint as: python3
# Copyright 2020 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Setup configuration of TensorFlow Cloud client-side library."""
def make_required_install_packages():
return [
"absl-py",
"docker",
"google-api-python-client",
"google-auth",
"google-cloud-storage",
"keras-tuner",
"tensorboard>=2.3.0",
"tensorflow>=1.15.0,<3.0",
"tensorflow_datasets",
"tensorflow_transform",
]
def make_required_test_packages():
return [
"absl-py",
"flake8",
"mock",
"numpy",
"nbconvert",
]
|
5ac7c07277ef1c7e714336e1b96571cdfea15a13
|
ktbs_bench_manager/benchable_graph.py
|
ktbs_bench_manager/benchable_graph.py
|
import logging
from rdflib import Graph
class BenchableGraph(object):
"""
Provides a convenient way to use a graph for benchmarks.
"""
def __init__(self, store, graph_id, store_config, graph_create=False):
"""
:param str store: Type of store to use.
:param str graph_id: The graph identifier.
:param store_config: Configuration to open the store.
:type store_config: str or tuple
:param bool graph_create: True to create the graph upon connecting.
"""
self.graph = Graph(store=store, identifier=graph_id)
self._graph_id = graph_id
self._store_config = store_config
self._graph_create = graph_create
def connect(self):
"""Connect to the store.
For some configurations, the connection is postponed until needed
(e.g. when doing a graph.query() or graph.add()).
This behaviour comes from RDFLib implementation of graph.open().
"""
return self.graph.open(configuration=self._store_config, create=self._graph_create)
def close(self, commit_pending_transaction=True):
"""Close a connection to a store.
:param bool commit_pending_transaction: True if to commit pending transaction before closing, False otherwise.
.. note::
The graph.close() method is not implemented for SPARQL Store in RDFLib
"""
self.graph.close(commit_pending_transaction=commit_pending_transaction)
|
from rdflib import Graph
class BenchableGraph(object):
"""
Provides a convenient way to use a graph for benchmarks.
"""
def __init__(self, store, graph_id, store_config, graph_create=False):
"""
:param str store: Type of store to use.
:param str graph_id: The graph identifier.
:param store_config: Configuration to open the store.
:type store_config: str or tuple
:param bool graph_create: True to create the graph upon connecting.
"""
self.graph = Graph(store=store, identifier=graph_id)
self._graph_id = graph_id
self._store_config = store_config
self._graph_create = graph_create
def connect(self):
"""Connect to the store.
For some configurations, the connection is postponed until needed
(e.g. when doing a graph.query() or graph.add()).
This behaviour comes from RDFLib implementation of graph.open().
"""
return self.graph.open(configuration=self._store_config, create=self._graph_create)
def close(self, commit_pending_transaction=True):
"""Close a connection to a store.
:param bool commit_pending_transaction: True if to commit pending transaction before closing, False otherwise.
.. note::
The graph.close() method is not implemented for SPARQL Store in RDFLib
"""
self.graph.close(commit_pending_transaction=commit_pending_transaction)
|
Remove unnecessary import of logging
|
Remove unnecessary import of logging
|
Python
|
mit
|
vincent-octo/ktbs_bench_manager,vincent-octo/ktbs_bench_manager
|
import logging
from rdflib import Graph
class BenchableGraph(object):
"""
Provides a convenient way to use a graph for benchmarks.
"""
def __init__(self, store, graph_id, store_config, graph_create=False):
"""
:param str store: Type of store to use.
:param str graph_id: The graph identifier.
:param store_config: Configuration to open the store.
:type store_config: str or tuple
:param bool graph_create: True to create the graph upon connecting.
"""
self.graph = Graph(store=store, identifier=graph_id)
self._graph_id = graph_id
self._store_config = store_config
self._graph_create = graph_create
def connect(self):
"""Connect to the store.
For some configurations, the connection is postponed until needed
(e.g. when doing a graph.query() or graph.add()).
This behaviour comes from RDFLib implementation of graph.open().
"""
return self.graph.open(configuration=self._store_config, create=self._graph_create)
def close(self, commit_pending_transaction=True):
"""Close a connection to a store.
:param bool commit_pending_transaction: True if to commit pending transaction before closing, False otherwise.
.. note::
The graph.close() method is not implemented for SPARQL Store in RDFLib
"""
self.graph.close(commit_pending_transaction=commit_pending_transaction)
Remove unnecessary import of logging
|
from rdflib import Graph
class BenchableGraph(object):
"""
Provides a convenient way to use a graph for benchmarks.
"""
def __init__(self, store, graph_id, store_config, graph_create=False):
"""
:param str store: Type of store to use.
:param str graph_id: The graph identifier.
:param store_config: Configuration to open the store.
:type store_config: str or tuple
:param bool graph_create: True to create the graph upon connecting.
"""
self.graph = Graph(store=store, identifier=graph_id)
self._graph_id = graph_id
self._store_config = store_config
self._graph_create = graph_create
def connect(self):
"""Connect to the store.
For some configurations, the connection is postponed until needed
(e.g. when doing a graph.query() or graph.add()).
This behaviour comes from RDFLib implementation of graph.open().
"""
return self.graph.open(configuration=self._store_config, create=self._graph_create)
def close(self, commit_pending_transaction=True):
"""Close a connection to a store.
:param bool commit_pending_transaction: True if to commit pending transaction before closing, False otherwise.
.. note::
The graph.close() method is not implemented for SPARQL Store in RDFLib
"""
self.graph.close(commit_pending_transaction=commit_pending_transaction)
|
<commit_before>import logging
from rdflib import Graph
class BenchableGraph(object):
"""
Provides a convenient way to use a graph for benchmarks.
"""
def __init__(self, store, graph_id, store_config, graph_create=False):
"""
:param str store: Type of store to use.
:param str graph_id: The graph identifier.
:param store_config: Configuration to open the store.
:type store_config: str or tuple
:param bool graph_create: True to create the graph upon connecting.
"""
self.graph = Graph(store=store, identifier=graph_id)
self._graph_id = graph_id
self._store_config = store_config
self._graph_create = graph_create
def connect(self):
"""Connect to the store.
For some configurations, the connection is postponed until needed
(e.g. when doing a graph.query() or graph.add()).
This behaviour comes from RDFLib implementation of graph.open().
"""
return self.graph.open(configuration=self._store_config, create=self._graph_create)
def close(self, commit_pending_transaction=True):
"""Close a connection to a store.
:param bool commit_pending_transaction: True if to commit pending transaction before closing, False otherwise.
.. note::
The graph.close() method is not implemented for SPARQL Store in RDFLib
"""
self.graph.close(commit_pending_transaction=commit_pending_transaction)
<commit_msg>Remove unnecessary import of logging<commit_after>
|
from rdflib import Graph
class BenchableGraph(object):
"""
Provides a convenient way to use a graph for benchmarks.
"""
def __init__(self, store, graph_id, store_config, graph_create=False):
"""
:param str store: Type of store to use.
:param str graph_id: The graph identifier.
:param store_config: Configuration to open the store.
:type store_config: str or tuple
:param bool graph_create: True to create the graph upon connecting.
"""
self.graph = Graph(store=store, identifier=graph_id)
self._graph_id = graph_id
self._store_config = store_config
self._graph_create = graph_create
def connect(self):
"""Connect to the store.
For some configurations, the connection is postponed until needed
(e.g. when doing a graph.query() or graph.add()).
This behaviour comes from RDFLib implementation of graph.open().
"""
return self.graph.open(configuration=self._store_config, create=self._graph_create)
def close(self, commit_pending_transaction=True):
"""Close a connection to a store.
:param bool commit_pending_transaction: True if to commit pending transaction before closing, False otherwise.
.. note::
The graph.close() method is not implemented for SPARQL Store in RDFLib
"""
self.graph.close(commit_pending_transaction=commit_pending_transaction)
|
import logging
from rdflib import Graph
class BenchableGraph(object):
"""
Provides a convenient way to use a graph for benchmarks.
"""
def __init__(self, store, graph_id, store_config, graph_create=False):
"""
:param str store: Type of store to use.
:param str graph_id: The graph identifier.
:param store_config: Configuration to open the store.
:type store_config: str or tuple
:param bool graph_create: True to create the graph upon connecting.
"""
self.graph = Graph(store=store, identifier=graph_id)
self._graph_id = graph_id
self._store_config = store_config
self._graph_create = graph_create
def connect(self):
"""Connect to the store.
For some configurations, the connection is postponed until needed
(e.g. when doing a graph.query() or graph.add()).
This behaviour comes from RDFLib implementation of graph.open().
"""
return self.graph.open(configuration=self._store_config, create=self._graph_create)
def close(self, commit_pending_transaction=True):
"""Close a connection to a store.
:param bool commit_pending_transaction: True if to commit pending transaction before closing, False otherwise.
.. note::
The graph.close() method is not implemented for SPARQL Store in RDFLib
"""
self.graph.close(commit_pending_transaction=commit_pending_transaction)
Remove unnecessary import of loggingfrom rdflib import Graph
class BenchableGraph(object):
"""
Provides a convenient way to use a graph for benchmarks.
"""
def __init__(self, store, graph_id, store_config, graph_create=False):
"""
:param str store: Type of store to use.
:param str graph_id: The graph identifier.
:param store_config: Configuration to open the store.
:type store_config: str or tuple
:param bool graph_create: True to create the graph upon connecting.
"""
self.graph = Graph(store=store, identifier=graph_id)
self._graph_id = graph_id
self._store_config = store_config
self._graph_create = graph_create
def connect(self):
"""Connect to the store.
For some configurations, the connection is postponed until needed
(e.g. when doing a graph.query() or graph.add()).
This behaviour comes from RDFLib implementation of graph.open().
"""
return self.graph.open(configuration=self._store_config, create=self._graph_create)
def close(self, commit_pending_transaction=True):
"""Close a connection to a store.
:param bool commit_pending_transaction: True if to commit pending transaction before closing, False otherwise.
.. note::
The graph.close() method is not implemented for SPARQL Store in RDFLib
"""
self.graph.close(commit_pending_transaction=commit_pending_transaction)
|
<commit_before>import logging
from rdflib import Graph
class BenchableGraph(object):
"""
Provides a convenient way to use a graph for benchmarks.
"""
def __init__(self, store, graph_id, store_config, graph_create=False):
"""
:param str store: Type of store to use.
:param str graph_id: The graph identifier.
:param store_config: Configuration to open the store.
:type store_config: str or tuple
:param bool graph_create: True to create the graph upon connecting.
"""
self.graph = Graph(store=store, identifier=graph_id)
self._graph_id = graph_id
self._store_config = store_config
self._graph_create = graph_create
def connect(self):
"""Connect to the store.
For some configurations, the connection is postponed until needed
(e.g. when doing a graph.query() or graph.add()).
This behaviour comes from RDFLib implementation of graph.open().
"""
return self.graph.open(configuration=self._store_config, create=self._graph_create)
def close(self, commit_pending_transaction=True):
"""Close a connection to a store.
:param bool commit_pending_transaction: True if to commit pending transaction before closing, False otherwise.
.. note::
The graph.close() method is not implemented for SPARQL Store in RDFLib
"""
self.graph.close(commit_pending_transaction=commit_pending_transaction)
<commit_msg>Remove unnecessary import of logging<commit_after>from rdflib import Graph
class BenchableGraph(object):
"""
Provides a convenient way to use a graph for benchmarks.
"""
def __init__(self, store, graph_id, store_config, graph_create=False):
"""
:param str store: Type of store to use.
:param str graph_id: The graph identifier.
:param store_config: Configuration to open the store.
:type store_config: str or tuple
:param bool graph_create: True to create the graph upon connecting.
"""
self.graph = Graph(store=store, identifier=graph_id)
self._graph_id = graph_id
self._store_config = store_config
self._graph_create = graph_create
def connect(self):
"""Connect to the store.
For some configurations, the connection is postponed until needed
(e.g. when doing a graph.query() or graph.add()).
This behaviour comes from RDFLib implementation of graph.open().
"""
return self.graph.open(configuration=self._store_config, create=self._graph_create)
def close(self, commit_pending_transaction=True):
"""Close a connection to a store.
:param bool commit_pending_transaction: True if to commit pending transaction before closing, False otherwise.
.. note::
The graph.close() method is not implemented for SPARQL Store in RDFLib
"""
self.graph.close(commit_pending_transaction=commit_pending_transaction)
|
81f4f4b1318ff800e3febbc1bd7bbd9ff8e868b1
|
node/dictionary.py
|
node/dictionary.py
|
#!/usr/bin/env python
from nodes import Node
import json
class Dictionary(Node):
char = ".d"
args = 0
results = 1
def __init__(self, word_ids:Node.IntList):
if not hasattr(Dictionary, "word_list"):
Dictionary.word_list = init_words()
self.words = " ".join(Dictionary.word_list[i] for i in word_ids)
def func(self):
return self.words
def compress(inp):
words = init_words()
inp_words = [word.lower()for word in inp.split(" ")]
rtn = chr(len(inp_words))
for word in inp_words:
assert(word in words)
rtn += chr(words.index(word))
return rtn
def init_words(dict_file = "dictionary.json"):
words_f = open(dict_file)
words = json.load(words_f)
words_f.close()
return words
|
#!/usr/bin/env python
from nodes import Node
import json
class Dictionary(Node):
char = ".d"
args = 0
results = 1
def __init__(self, word_ids:Node.IntList):
if not hasattr(Dictionary, "word_list"):
Dictionary.word_list = init_words()
self.words = " ".join(Dictionary.word_list[i] for i in word_ids)
def func(self):
return self.words
def compress(inp):
words = init_words()
inp_words = [word.lower()for word in inp.split(" ")]
rtn = chr(len(inp_words))
for word in inp_words:
if word not in words:
rtn += "Word %s not in wordlist" % word
else:
rtn += chr(words.index(word))
return rtn
def init_words(dict_file = "dictionary.json"):
words_f = open(dict_file)
words = json.load(words_f)
words_f.close()
return words
|
Add some exception handling for dict
|
Add some exception handling for dict
|
Python
|
mit
|
muddyfish/PYKE,muddyfish/PYKE
|
#!/usr/bin/env python
from nodes import Node
import json
class Dictionary(Node):
char = ".d"
args = 0
results = 1
def __init__(self, word_ids:Node.IntList):
if not hasattr(Dictionary, "word_list"):
Dictionary.word_list = init_words()
self.words = " ".join(Dictionary.word_list[i] for i in word_ids)
def func(self):
return self.words
def compress(inp):
words = init_words()
inp_words = [word.lower()for word in inp.split(" ")]
rtn = chr(len(inp_words))
for word in inp_words:
assert(word in words)
rtn += chr(words.index(word))
return rtn
def init_words(dict_file = "dictionary.json"):
words_f = open(dict_file)
words = json.load(words_f)
words_f.close()
return words
Add some exception handling for dict
|
#!/usr/bin/env python
from nodes import Node
import json
class Dictionary(Node):
char = ".d"
args = 0
results = 1
def __init__(self, word_ids:Node.IntList):
if not hasattr(Dictionary, "word_list"):
Dictionary.word_list = init_words()
self.words = " ".join(Dictionary.word_list[i] for i in word_ids)
def func(self):
return self.words
def compress(inp):
words = init_words()
inp_words = [word.lower()for word in inp.split(" ")]
rtn = chr(len(inp_words))
for word in inp_words:
if word not in words:
rtn += "Word %s not in wordlist" % word
else:
rtn += chr(words.index(word))
return rtn
def init_words(dict_file = "dictionary.json"):
words_f = open(dict_file)
words = json.load(words_f)
words_f.close()
return words
|
<commit_before>#!/usr/bin/env python
from nodes import Node
import json
class Dictionary(Node):
char = ".d"
args = 0
results = 1
def __init__(self, word_ids:Node.IntList):
if not hasattr(Dictionary, "word_list"):
Dictionary.word_list = init_words()
self.words = " ".join(Dictionary.word_list[i] for i in word_ids)
def func(self):
return self.words
def compress(inp):
words = init_words()
inp_words = [word.lower()for word in inp.split(" ")]
rtn = chr(len(inp_words))
for word in inp_words:
assert(word in words)
rtn += chr(words.index(word))
return rtn
def init_words(dict_file = "dictionary.json"):
words_f = open(dict_file)
words = json.load(words_f)
words_f.close()
return words
<commit_msg>Add some exception handling for dict<commit_after>
|
#!/usr/bin/env python
from nodes import Node
import json
class Dictionary(Node):
char = ".d"
args = 0
results = 1
def __init__(self, word_ids:Node.IntList):
if not hasattr(Dictionary, "word_list"):
Dictionary.word_list = init_words()
self.words = " ".join(Dictionary.word_list[i] for i in word_ids)
def func(self):
return self.words
def compress(inp):
words = init_words()
inp_words = [word.lower()for word in inp.split(" ")]
rtn = chr(len(inp_words))
for word in inp_words:
if word not in words:
rtn += "Word %s not in wordlist" % word
else:
rtn += chr(words.index(word))
return rtn
def init_words(dict_file = "dictionary.json"):
words_f = open(dict_file)
words = json.load(words_f)
words_f.close()
return words
|
#!/usr/bin/env python
from nodes import Node
import json
class Dictionary(Node):
char = ".d"
args = 0
results = 1
def __init__(self, word_ids:Node.IntList):
if not hasattr(Dictionary, "word_list"):
Dictionary.word_list = init_words()
self.words = " ".join(Dictionary.word_list[i] for i in word_ids)
def func(self):
return self.words
def compress(inp):
words = init_words()
inp_words = [word.lower()for word in inp.split(" ")]
rtn = chr(len(inp_words))
for word in inp_words:
assert(word in words)
rtn += chr(words.index(word))
return rtn
def init_words(dict_file = "dictionary.json"):
words_f = open(dict_file)
words = json.load(words_f)
words_f.close()
return words
Add some exception handling for dict#!/usr/bin/env python
from nodes import Node
import json
class Dictionary(Node):
char = ".d"
args = 0
results = 1
def __init__(self, word_ids:Node.IntList):
if not hasattr(Dictionary, "word_list"):
Dictionary.word_list = init_words()
self.words = " ".join(Dictionary.word_list[i] for i in word_ids)
def func(self):
return self.words
def compress(inp):
words = init_words()
inp_words = [word.lower()for word in inp.split(" ")]
rtn = chr(len(inp_words))
for word in inp_words:
if word not in words:
rtn += "Word %s not in wordlist" % word
else:
rtn += chr(words.index(word))
return rtn
def init_words(dict_file = "dictionary.json"):
words_f = open(dict_file)
words = json.load(words_f)
words_f.close()
return words
|
<commit_before>#!/usr/bin/env python
from nodes import Node
import json
class Dictionary(Node):
char = ".d"
args = 0
results = 1
def __init__(self, word_ids:Node.IntList):
if not hasattr(Dictionary, "word_list"):
Dictionary.word_list = init_words()
self.words = " ".join(Dictionary.word_list[i] for i in word_ids)
def func(self):
return self.words
def compress(inp):
words = init_words()
inp_words = [word.lower()for word in inp.split(" ")]
rtn = chr(len(inp_words))
for word in inp_words:
assert(word in words)
rtn += chr(words.index(word))
return rtn
def init_words(dict_file = "dictionary.json"):
words_f = open(dict_file)
words = json.load(words_f)
words_f.close()
return words
<commit_msg>Add some exception handling for dict<commit_after>#!/usr/bin/env python
from nodes import Node
import json
class Dictionary(Node):
char = ".d"
args = 0
results = 1
def __init__(self, word_ids:Node.IntList):
if not hasattr(Dictionary, "word_list"):
Dictionary.word_list = init_words()
self.words = " ".join(Dictionary.word_list[i] for i in word_ids)
def func(self):
return self.words
def compress(inp):
words = init_words()
inp_words = [word.lower()for word in inp.split(" ")]
rtn = chr(len(inp_words))
for word in inp_words:
if word not in words:
rtn += "Word %s not in wordlist" % word
else:
rtn += chr(words.index(word))
return rtn
def init_words(dict_file = "dictionary.json"):
words_f = open(dict_file)
words = json.load(words_f)
words_f.close()
return words
|
bb3605bd99892bed37ecb2b6371d2bc88d599e1a
|
caso/__init__.py
|
caso/__init__.py
|
# -*- coding: utf-8 -*-
# Copyright 2014 Spanish National Research Council (CSIC)
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import pbr.version
__version__ = pbr.version.VersionInfo(
'caso').version_string()
user_agent = "caso/%s" % __version__
|
# -*- coding: utf-8 -*-
# Copyright 2014 Spanish National Research Council (CSIC)
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import pbr.version
__version__ = pbr.version.VersionInfo(
'caso').version_string()
user_agent = "caso/%s (OpenStack)" % __version__
|
Include "OpenStack" string in the user agent
|
Include "OpenStack" string in the user agent
EGI's accounting team requires that we put "OpenStack" in the UA string.
closes IFCA/caso#38
|
Python
|
apache-2.0
|
alvarolopez/caso,IFCA/caso,IFCA/caso
|
# -*- coding: utf-8 -*-
# Copyright 2014 Spanish National Research Council (CSIC)
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import pbr.version
__version__ = pbr.version.VersionInfo(
'caso').version_string()
user_agent = "caso/%s" % __version__
Include "OpenStack" string in the user agent
EGI's accounting team requires that we put "OpenStack" in the UA string.
closes IFCA/caso#38
|
# -*- coding: utf-8 -*-
# Copyright 2014 Spanish National Research Council (CSIC)
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import pbr.version
__version__ = pbr.version.VersionInfo(
'caso').version_string()
user_agent = "caso/%s (OpenStack)" % __version__
|
<commit_before># -*- coding: utf-8 -*-
# Copyright 2014 Spanish National Research Council (CSIC)
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import pbr.version
__version__ = pbr.version.VersionInfo(
'caso').version_string()
user_agent = "caso/%s" % __version__
<commit_msg>Include "OpenStack" string in the user agent
EGI's accounting team requires that we put "OpenStack" in the UA string.
closes IFCA/caso#38<commit_after>
|
# -*- coding: utf-8 -*-
# Copyright 2014 Spanish National Research Council (CSIC)
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import pbr.version
__version__ = pbr.version.VersionInfo(
'caso').version_string()
user_agent = "caso/%s (OpenStack)" % __version__
|
# -*- coding: utf-8 -*-
# Copyright 2014 Spanish National Research Council (CSIC)
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import pbr.version
__version__ = pbr.version.VersionInfo(
'caso').version_string()
user_agent = "caso/%s" % __version__
Include "OpenStack" string in the user agent
EGI's accounting team requires that we put "OpenStack" in the UA string.
closes IFCA/caso#38# -*- coding: utf-8 -*-
# Copyright 2014 Spanish National Research Council (CSIC)
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import pbr.version
__version__ = pbr.version.VersionInfo(
'caso').version_string()
user_agent = "caso/%s (OpenStack)" % __version__
|
<commit_before># -*- coding: utf-8 -*-
# Copyright 2014 Spanish National Research Council (CSIC)
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import pbr.version
__version__ = pbr.version.VersionInfo(
'caso').version_string()
user_agent = "caso/%s" % __version__
<commit_msg>Include "OpenStack" string in the user agent
EGI's accounting team requires that we put "OpenStack" in the UA string.
closes IFCA/caso#38<commit_after># -*- coding: utf-8 -*-
# Copyright 2014 Spanish National Research Council (CSIC)
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import pbr.version
__version__ = pbr.version.VersionInfo(
'caso').version_string()
user_agent = "caso/%s (OpenStack)" % __version__
|
ebf5e05acfb7f1edce0c0987576ee712f3fdea54
|
test/scripts/test_sequana_coverage.py
|
test/scripts/test_sequana_coverage.py
|
from sequana.scripts import coverage
from nose.plugins.attrib import attr
from sequana import sequana_data
#@attr("skip")
class TestPipeline(object):
@classmethod
def setup_class(klass):
"""This method is run once for each class before any tests are run"""
klass.prog = "sequana_coverage"
klass.params = {'prog': klass.prog}
@classmethod
def teardown_class(klass):
"""This method is run once for each class _after_ all tests are run"""
import os
# local nosetests execution
try:os.remove('README')
except:pass
try:os.remove('quality.rules')
except:pass
try:os.remove('config.yaml')
except:pass
def _test_version(self):
coverage.main([self.prog, '--version'])
def test_help(self):
try:
coverage.main([self.prog, '--help'])
assert False
except SystemExit:
pass
else:
raise Exception
def test_input(self):
filename = sequana_data('virus.bed', 'data')
reference = sequana_data('tofill.fa', 'data')
coverage.main([self.prog, '-i', filename, "-o"]) # "-r", reference])
|
from sequana.scripts import coverage
from sequana import sequana_data
import pytest
prog = "sequana_coverage"
@pytest.fixture
def coveragefix():
import os
# local nosetests execution
try:os.remove('README')
except:pass
try:os.remove('quality.rules')
except:pass
try:os.remove('config.yaml')
except:pass
def test_version():
try:
coverage.main([prog, '--version'])
assert False
except SystemExit:
pass
else:
raise Exception
def test_help():
try:
coverage.main([prog, '--help'])
assert False
except SystemExit:
pass
else:
raise Exception
def test_input(tmpdir):
import os
directory = tmpdir.mkdir("report")
name = directory.__str__()
filename = sequana_data('virus.bed', 'data')
reference = sequana_data('tofill.fa', 'data')
coverage.main([prog, '-i', filename, "-o", "--output-directory", name])
assert os.path.exists(name + os.sep + "coverage_mapping.chrom1.html")
|
Fix tests to use pytest
|
Fix tests to use pytest
|
Python
|
bsd-3-clause
|
sequana/sequana,sequana/sequana,sequana/sequana,sequana/sequana,sequana/sequana
|
from sequana.scripts import coverage
from nose.plugins.attrib import attr
from sequana import sequana_data
#@attr("skip")
class TestPipeline(object):
@classmethod
def setup_class(klass):
"""This method is run once for each class before any tests are run"""
klass.prog = "sequana_coverage"
klass.params = {'prog': klass.prog}
@classmethod
def teardown_class(klass):
"""This method is run once for each class _after_ all tests are run"""
import os
# local nosetests execution
try:os.remove('README')
except:pass
try:os.remove('quality.rules')
except:pass
try:os.remove('config.yaml')
except:pass
def _test_version(self):
coverage.main([self.prog, '--version'])
def test_help(self):
try:
coverage.main([self.prog, '--help'])
assert False
except SystemExit:
pass
else:
raise Exception
def test_input(self):
filename = sequana_data('virus.bed', 'data')
reference = sequana_data('tofill.fa', 'data')
coverage.main([self.prog, '-i', filename, "-o"]) # "-r", reference])
Fix tests to use pytest
|
from sequana.scripts import coverage
from sequana import sequana_data
import pytest
prog = "sequana_coverage"
@pytest.fixture
def coveragefix():
import os
# local nosetests execution
try:os.remove('README')
except:pass
try:os.remove('quality.rules')
except:pass
try:os.remove('config.yaml')
except:pass
def test_version():
try:
coverage.main([prog, '--version'])
assert False
except SystemExit:
pass
else:
raise Exception
def test_help():
try:
coverage.main([prog, '--help'])
assert False
except SystemExit:
pass
else:
raise Exception
def test_input(tmpdir):
import os
directory = tmpdir.mkdir("report")
name = directory.__str__()
filename = sequana_data('virus.bed', 'data')
reference = sequana_data('tofill.fa', 'data')
coverage.main([prog, '-i', filename, "-o", "--output-directory", name])
assert os.path.exists(name + os.sep + "coverage_mapping.chrom1.html")
|
<commit_before>from sequana.scripts import coverage
from nose.plugins.attrib import attr
from sequana import sequana_data
#@attr("skip")
class TestPipeline(object):
@classmethod
def setup_class(klass):
"""This method is run once for each class before any tests are run"""
klass.prog = "sequana_coverage"
klass.params = {'prog': klass.prog}
@classmethod
def teardown_class(klass):
"""This method is run once for each class _after_ all tests are run"""
import os
# local nosetests execution
try:os.remove('README')
except:pass
try:os.remove('quality.rules')
except:pass
try:os.remove('config.yaml')
except:pass
def _test_version(self):
coverage.main([self.prog, '--version'])
def test_help(self):
try:
coverage.main([self.prog, '--help'])
assert False
except SystemExit:
pass
else:
raise Exception
def test_input(self):
filename = sequana_data('virus.bed', 'data')
reference = sequana_data('tofill.fa', 'data')
coverage.main([self.prog, '-i', filename, "-o"]) # "-r", reference])
<commit_msg>Fix tests to use pytest<commit_after>
|
from sequana.scripts import coverage
from sequana import sequana_data
import pytest
prog = "sequana_coverage"
@pytest.fixture
def coveragefix():
import os
# local nosetests execution
try:os.remove('README')
except:pass
try:os.remove('quality.rules')
except:pass
try:os.remove('config.yaml')
except:pass
def test_version():
try:
coverage.main([prog, '--version'])
assert False
except SystemExit:
pass
else:
raise Exception
def test_help():
try:
coverage.main([prog, '--help'])
assert False
except SystemExit:
pass
else:
raise Exception
def test_input(tmpdir):
import os
directory = tmpdir.mkdir("report")
name = directory.__str__()
filename = sequana_data('virus.bed', 'data')
reference = sequana_data('tofill.fa', 'data')
coverage.main([prog, '-i', filename, "-o", "--output-directory", name])
assert os.path.exists(name + os.sep + "coverage_mapping.chrom1.html")
|
from sequana.scripts import coverage
from nose.plugins.attrib import attr
from sequana import sequana_data
#@attr("skip")
class TestPipeline(object):
@classmethod
def setup_class(klass):
"""This method is run once for each class before any tests are run"""
klass.prog = "sequana_coverage"
klass.params = {'prog': klass.prog}
@classmethod
def teardown_class(klass):
"""This method is run once for each class _after_ all tests are run"""
import os
# local nosetests execution
try:os.remove('README')
except:pass
try:os.remove('quality.rules')
except:pass
try:os.remove('config.yaml')
except:pass
def _test_version(self):
coverage.main([self.prog, '--version'])
def test_help(self):
try:
coverage.main([self.prog, '--help'])
assert False
except SystemExit:
pass
else:
raise Exception
def test_input(self):
filename = sequana_data('virus.bed', 'data')
reference = sequana_data('tofill.fa', 'data')
coverage.main([self.prog, '-i', filename, "-o"]) # "-r", reference])
Fix tests to use pytestfrom sequana.scripts import coverage
from sequana import sequana_data
import pytest
prog = "sequana_coverage"
@pytest.fixture
def coveragefix():
import os
# local nosetests execution
try:os.remove('README')
except:pass
try:os.remove('quality.rules')
except:pass
try:os.remove('config.yaml')
except:pass
def test_version():
try:
coverage.main([prog, '--version'])
assert False
except SystemExit:
pass
else:
raise Exception
def test_help():
try:
coverage.main([prog, '--help'])
assert False
except SystemExit:
pass
else:
raise Exception
def test_input(tmpdir):
import os
directory = tmpdir.mkdir("report")
name = directory.__str__()
filename = sequana_data('virus.bed', 'data')
reference = sequana_data('tofill.fa', 'data')
coverage.main([prog, '-i', filename, "-o", "--output-directory", name])
assert os.path.exists(name + os.sep + "coverage_mapping.chrom1.html")
|
<commit_before>from sequana.scripts import coverage
from nose.plugins.attrib import attr
from sequana import sequana_data
#@attr("skip")
class TestPipeline(object):
@classmethod
def setup_class(klass):
"""This method is run once for each class before any tests are run"""
klass.prog = "sequana_coverage"
klass.params = {'prog': klass.prog}
@classmethod
def teardown_class(klass):
"""This method is run once for each class _after_ all tests are run"""
import os
# local nosetests execution
try:os.remove('README')
except:pass
try:os.remove('quality.rules')
except:pass
try:os.remove('config.yaml')
except:pass
def _test_version(self):
coverage.main([self.prog, '--version'])
def test_help(self):
try:
coverage.main([self.prog, '--help'])
assert False
except SystemExit:
pass
else:
raise Exception
def test_input(self):
filename = sequana_data('virus.bed', 'data')
reference = sequana_data('tofill.fa', 'data')
coverage.main([self.prog, '-i', filename, "-o"]) # "-r", reference])
<commit_msg>Fix tests to use pytest<commit_after>from sequana.scripts import coverage
from sequana import sequana_data
import pytest
prog = "sequana_coverage"
@pytest.fixture
def coveragefix():
import os
# local nosetests execution
try:os.remove('README')
except:pass
try:os.remove('quality.rules')
except:pass
try:os.remove('config.yaml')
except:pass
def test_version():
try:
coverage.main([prog, '--version'])
assert False
except SystemExit:
pass
else:
raise Exception
def test_help():
try:
coverage.main([prog, '--help'])
assert False
except SystemExit:
pass
else:
raise Exception
def test_input(tmpdir):
import os
directory = tmpdir.mkdir("report")
name = directory.__str__()
filename = sequana_data('virus.bed', 'data')
reference = sequana_data('tofill.fa', 'data')
coverage.main([prog, '-i', filename, "-o", "--output-directory", name])
assert os.path.exists(name + os.sep + "coverage_mapping.chrom1.html")
|
2b5c186337bcb396f630c0b86938e43eb06d3e5b
|
tests/test_i10knobs.py
|
tests/test_i10knobs.py
|
from pkg_resources import require
require("cothread")
require("mock")
import unittest
import mock
import sys
# Mock out catools as it requires EPICS binaries at import
sys.modules['cothread.catools'] = mock.MagicMock()
import cothread
import sys
import os
from PyQt4 import QtGui
sys.path.append(os.path.join(os.path.dirname(__file__), "..", ".."))
import i10knobs
class I10KnobsTest(unittest.TestCase):
def test_init(self):
cothread.iqt()
window = QtGui.QMainWindow()
_ = i10knobs.KnobsUi(window)
|
from pkg_resources import require
require("cothread")
require("mock")
import unittest
import mock
import sys
# Mock out catools as it requires EPICS binaries at import
sys.modules['cothread.catools'] = mock.MagicMock()
import cothread
import sys
import os
from PyQt4 import QtGui
sys.path.append(os.path.join(os.path.dirname(__file__), "..", ".."))
import i10knobs
class I10KnobsTest(unittest.TestCase):
def test_import(self):
pass
def test_init(self):
cothread.iqt()
window = QtGui.QMainWindow()
_ = i10knobs.KnobsUi(window)
|
Add test checking only for imports
|
Add test checking only for imports
|
Python
|
apache-2.0
|
dls-controls/i10switching,dls-controls/i10switching
|
from pkg_resources import require
require("cothread")
require("mock")
import unittest
import mock
import sys
# Mock out catools as it requires EPICS binaries at import
sys.modules['cothread.catools'] = mock.MagicMock()
import cothread
import sys
import os
from PyQt4 import QtGui
sys.path.append(os.path.join(os.path.dirname(__file__), "..", ".."))
import i10knobs
class I10KnobsTest(unittest.TestCase):
def test_init(self):
cothread.iqt()
window = QtGui.QMainWindow()
_ = i10knobs.KnobsUi(window)
Add test checking only for imports
|
from pkg_resources import require
require("cothread")
require("mock")
import unittest
import mock
import sys
# Mock out catools as it requires EPICS binaries at import
sys.modules['cothread.catools'] = mock.MagicMock()
import cothread
import sys
import os
from PyQt4 import QtGui
sys.path.append(os.path.join(os.path.dirname(__file__), "..", ".."))
import i10knobs
class I10KnobsTest(unittest.TestCase):
def test_import(self):
pass
def test_init(self):
cothread.iqt()
window = QtGui.QMainWindow()
_ = i10knobs.KnobsUi(window)
|
<commit_before>from pkg_resources import require
require("cothread")
require("mock")
import unittest
import mock
import sys
# Mock out catools as it requires EPICS binaries at import
sys.modules['cothread.catools'] = mock.MagicMock()
import cothread
import sys
import os
from PyQt4 import QtGui
sys.path.append(os.path.join(os.path.dirname(__file__), "..", ".."))
import i10knobs
class I10KnobsTest(unittest.TestCase):
def test_init(self):
cothread.iqt()
window = QtGui.QMainWindow()
_ = i10knobs.KnobsUi(window)
<commit_msg>Add test checking only for imports<commit_after>
|
from pkg_resources import require
require("cothread")
require("mock")
import unittest
import mock
import sys
# Mock out catools as it requires EPICS binaries at import
sys.modules['cothread.catools'] = mock.MagicMock()
import cothread
import sys
import os
from PyQt4 import QtGui
sys.path.append(os.path.join(os.path.dirname(__file__), "..", ".."))
import i10knobs
class I10KnobsTest(unittest.TestCase):
def test_import(self):
pass
def test_init(self):
cothread.iqt()
window = QtGui.QMainWindow()
_ = i10knobs.KnobsUi(window)
|
from pkg_resources import require
require("cothread")
require("mock")
import unittest
import mock
import sys
# Mock out catools as it requires EPICS binaries at import
sys.modules['cothread.catools'] = mock.MagicMock()
import cothread
import sys
import os
from PyQt4 import QtGui
sys.path.append(os.path.join(os.path.dirname(__file__), "..", ".."))
import i10knobs
class I10KnobsTest(unittest.TestCase):
def test_init(self):
cothread.iqt()
window = QtGui.QMainWindow()
_ = i10knobs.KnobsUi(window)
Add test checking only for importsfrom pkg_resources import require
require("cothread")
require("mock")
import unittest
import mock
import sys
# Mock out catools as it requires EPICS binaries at import
sys.modules['cothread.catools'] = mock.MagicMock()
import cothread
import sys
import os
from PyQt4 import QtGui
sys.path.append(os.path.join(os.path.dirname(__file__), "..", ".."))
import i10knobs
class I10KnobsTest(unittest.TestCase):
def test_import(self):
pass
def test_init(self):
cothread.iqt()
window = QtGui.QMainWindow()
_ = i10knobs.KnobsUi(window)
|
<commit_before>from pkg_resources import require
require("cothread")
require("mock")
import unittest
import mock
import sys
# Mock out catools as it requires EPICS binaries at import
sys.modules['cothread.catools'] = mock.MagicMock()
import cothread
import sys
import os
from PyQt4 import QtGui
sys.path.append(os.path.join(os.path.dirname(__file__), "..", ".."))
import i10knobs
class I10KnobsTest(unittest.TestCase):
def test_init(self):
cothread.iqt()
window = QtGui.QMainWindow()
_ = i10knobs.KnobsUi(window)
<commit_msg>Add test checking only for imports<commit_after>from pkg_resources import require
require("cothread")
require("mock")
import unittest
import mock
import sys
# Mock out catools as it requires EPICS binaries at import
sys.modules['cothread.catools'] = mock.MagicMock()
import cothread
import sys
import os
from PyQt4 import QtGui
sys.path.append(os.path.join(os.path.dirname(__file__), "..", ".."))
import i10knobs
class I10KnobsTest(unittest.TestCase):
def test_import(self):
pass
def test_init(self):
cothread.iqt()
window = QtGui.QMainWindow()
_ = i10knobs.KnobsUi(window)
|
a55dd124d54955476411ee8ae830c9fd3c4f00dc
|
tests/test_pdfbuild.py
|
tests/test_pdfbuild.py
|
from latex import build_pdf
from latex.exc import LatexBuildError
import pytest
def test_generates_something():
min_latex = r"""
\documentclass{article}
\begin{document}
Hello, world!
\end{document}
"""
pdf = build_pdf(min_latex)
assert pdf
def test_raises_correct_exception_on_fail():
broken_latex = r"""foo"""
with pytest.raises(LatexBuildError):
build_pdf(broken_latex)
|
from latex import build_pdf, LatexBuildError
from latex.errors import parse_log
import pytest
def test_generates_something():
min_latex = r"""
\documentclass{article}
\begin{document}
Hello, world!
\end{document}
"""
pdf = build_pdf(min_latex)
assert pdf
def test_raises_correct_exception_on_fail():
broken_latex = r"""foo"""
with pytest.raises(LatexBuildError):
build_pdf(broken_latex)
def test_finds_errors_correctly():
broken_latex = r"""
\documentclass{article}
\begin{document}
All good
\undefinedcontrolsequencehere
\end{document}
"""
try:
build_pdf(broken_latex)
except LatexBuildError as e:
assert parse_log(e.log) == e.get_errors()
else:
assert False, 'no exception raised'
|
Test get_errors() method of LatexBuildError.
|
Test get_errors() method of LatexBuildError.
|
Python
|
bsd-3-clause
|
mbr/latex
|
from latex import build_pdf
from latex.exc import LatexBuildError
import pytest
def test_generates_something():
min_latex = r"""
\documentclass{article}
\begin{document}
Hello, world!
\end{document}
"""
pdf = build_pdf(min_latex)
assert pdf
def test_raises_correct_exception_on_fail():
broken_latex = r"""foo"""
with pytest.raises(LatexBuildError):
build_pdf(broken_latex)
Test get_errors() method of LatexBuildError.
|
from latex import build_pdf, LatexBuildError
from latex.errors import parse_log
import pytest
def test_generates_something():
min_latex = r"""
\documentclass{article}
\begin{document}
Hello, world!
\end{document}
"""
pdf = build_pdf(min_latex)
assert pdf
def test_raises_correct_exception_on_fail():
broken_latex = r"""foo"""
with pytest.raises(LatexBuildError):
build_pdf(broken_latex)
def test_finds_errors_correctly():
broken_latex = r"""
\documentclass{article}
\begin{document}
All good
\undefinedcontrolsequencehere
\end{document}
"""
try:
build_pdf(broken_latex)
except LatexBuildError as e:
assert parse_log(e.log) == e.get_errors()
else:
assert False, 'no exception raised'
|
<commit_before>from latex import build_pdf
from latex.exc import LatexBuildError
import pytest
def test_generates_something():
min_latex = r"""
\documentclass{article}
\begin{document}
Hello, world!
\end{document}
"""
pdf = build_pdf(min_latex)
assert pdf
def test_raises_correct_exception_on_fail():
broken_latex = r"""foo"""
with pytest.raises(LatexBuildError):
build_pdf(broken_latex)
<commit_msg>Test get_errors() method of LatexBuildError.<commit_after>
|
from latex import build_pdf, LatexBuildError
from latex.errors import parse_log
import pytest
def test_generates_something():
min_latex = r"""
\documentclass{article}
\begin{document}
Hello, world!
\end{document}
"""
pdf = build_pdf(min_latex)
assert pdf
def test_raises_correct_exception_on_fail():
broken_latex = r"""foo"""
with pytest.raises(LatexBuildError):
build_pdf(broken_latex)
def test_finds_errors_correctly():
broken_latex = r"""
\documentclass{article}
\begin{document}
All good
\undefinedcontrolsequencehere
\end{document}
"""
try:
build_pdf(broken_latex)
except LatexBuildError as e:
assert parse_log(e.log) == e.get_errors()
else:
assert False, 'no exception raised'
|
from latex import build_pdf
from latex.exc import LatexBuildError
import pytest
def test_generates_something():
min_latex = r"""
\documentclass{article}
\begin{document}
Hello, world!
\end{document}
"""
pdf = build_pdf(min_latex)
assert pdf
def test_raises_correct_exception_on_fail():
broken_latex = r"""foo"""
with pytest.raises(LatexBuildError):
build_pdf(broken_latex)
Test get_errors() method of LatexBuildError.from latex import build_pdf, LatexBuildError
from latex.errors import parse_log
import pytest
def test_generates_something():
min_latex = r"""
\documentclass{article}
\begin{document}
Hello, world!
\end{document}
"""
pdf = build_pdf(min_latex)
assert pdf
def test_raises_correct_exception_on_fail():
broken_latex = r"""foo"""
with pytest.raises(LatexBuildError):
build_pdf(broken_latex)
def test_finds_errors_correctly():
broken_latex = r"""
\documentclass{article}
\begin{document}
All good
\undefinedcontrolsequencehere
\end{document}
"""
try:
build_pdf(broken_latex)
except LatexBuildError as e:
assert parse_log(e.log) == e.get_errors()
else:
assert False, 'no exception raised'
|
<commit_before>from latex import build_pdf
from latex.exc import LatexBuildError
import pytest
def test_generates_something():
min_latex = r"""
\documentclass{article}
\begin{document}
Hello, world!
\end{document}
"""
pdf = build_pdf(min_latex)
assert pdf
def test_raises_correct_exception_on_fail():
broken_latex = r"""foo"""
with pytest.raises(LatexBuildError):
build_pdf(broken_latex)
<commit_msg>Test get_errors() method of LatexBuildError.<commit_after>from latex import build_pdf, LatexBuildError
from latex.errors import parse_log
import pytest
def test_generates_something():
min_latex = r"""
\documentclass{article}
\begin{document}
Hello, world!
\end{document}
"""
pdf = build_pdf(min_latex)
assert pdf
def test_raises_correct_exception_on_fail():
broken_latex = r"""foo"""
with pytest.raises(LatexBuildError):
build_pdf(broken_latex)
def test_finds_errors_correctly():
broken_latex = r"""
\documentclass{article}
\begin{document}
All good
\undefinedcontrolsequencehere
\end{document}
"""
try:
build_pdf(broken_latex)
except LatexBuildError as e:
assert parse_log(e.log) == e.get_errors()
else:
assert False, 'no exception raised'
|
8535c59c26e2c5badfd3637d41901f1bc987e200
|
tests/test_requests.py
|
tests/test_requests.py
|
"""Test the api_requests module."""
from pytest import mark
from gobble.api_requests import APIRequest
SIMPLE = ('foo.bar', dict(), ['https://foo.bar'])
LOCAL = ('0.0.0.0', dict(port=5000, schema='http'), ['http://0.0.0.0:5000'])
LONG = (
'foo.bar',
dict(
path=['spam', 'eggs'],
query={'foo': 'bar', 'spam': 'eggs'}
),
[
'https://foo.bar/spam/eggs?spam=eggs&foo=bar',
'https://foo.bar/spam/eggs?foo=bar&spam=eggs'
]
)
TEST_CASES = [SIMPLE, LONG, LOCAL]
# noinspection PyShadowingNames
@mark.parametrize('host, parameters, urls', TEST_CASES)
def test_url(host, parameters, urls):
assert APIRequest(host, **parameters).url in urls
|
"""Test the api_requests module."""
from pytest import mark
from gobble.api_requests import APIRequest
SIMPLE = ('foo.bar', dict(), ['https://foo.bar'])
LOCAL = ('0.0.0.0', dict(port=5000, schema='http'), ['http://0.0.0.0:5000'])
LONG = (
'foo.bar',
dict(
path=['spam', 'eggs'],
query={'foo': 'bar', 'spam': 'eggs'}
),
[
'https://foo.bar/spam/eggs?spam=eggs&foo=bar',
'https://foo.bar/spam/eggs?foo=bar&spam=eggs'
]
)
TEST_CASES = [SIMPLE, LONG, LOCAL]
# noinspection PyShadowingNames
@mark.parametrize('host, parameters, urls', TEST_CASES)
def test_url(host, parameters, urls):
assert APIRequest(host, **parameters).url in urls
def test_call():
request = APIRequest('google.com')
assert request().status_code == 200
|
Add a test for the __call__ method of the APIRequest class.
|
Add a test for the __call__ method of the APIRequest class.
|
Python
|
mit
|
openspending/gobble
|
"""Test the api_requests module."""
from pytest import mark
from gobble.api_requests import APIRequest
SIMPLE = ('foo.bar', dict(), ['https://foo.bar'])
LOCAL = ('0.0.0.0', dict(port=5000, schema='http'), ['http://0.0.0.0:5000'])
LONG = (
'foo.bar',
dict(
path=['spam', 'eggs'],
query={'foo': 'bar', 'spam': 'eggs'}
),
[
'https://foo.bar/spam/eggs?spam=eggs&foo=bar',
'https://foo.bar/spam/eggs?foo=bar&spam=eggs'
]
)
TEST_CASES = [SIMPLE, LONG, LOCAL]
# noinspection PyShadowingNames
@mark.parametrize('host, parameters, urls', TEST_CASES)
def test_url(host, parameters, urls):
assert APIRequest(host, **parameters).url in urls
Add a test for the __call__ method of the APIRequest class.
|
"""Test the api_requests module."""
from pytest import mark
from gobble.api_requests import APIRequest
SIMPLE = ('foo.bar', dict(), ['https://foo.bar'])
LOCAL = ('0.0.0.0', dict(port=5000, schema='http'), ['http://0.0.0.0:5000'])
LONG = (
'foo.bar',
dict(
path=['spam', 'eggs'],
query={'foo': 'bar', 'spam': 'eggs'}
),
[
'https://foo.bar/spam/eggs?spam=eggs&foo=bar',
'https://foo.bar/spam/eggs?foo=bar&spam=eggs'
]
)
TEST_CASES = [SIMPLE, LONG, LOCAL]
# noinspection PyShadowingNames
@mark.parametrize('host, parameters, urls', TEST_CASES)
def test_url(host, parameters, urls):
assert APIRequest(host, **parameters).url in urls
def test_call():
request = APIRequest('google.com')
assert request().status_code == 200
|
<commit_before>"""Test the api_requests module."""
from pytest import mark
from gobble.api_requests import APIRequest
SIMPLE = ('foo.bar', dict(), ['https://foo.bar'])
LOCAL = ('0.0.0.0', dict(port=5000, schema='http'), ['http://0.0.0.0:5000'])
LONG = (
'foo.bar',
dict(
path=['spam', 'eggs'],
query={'foo': 'bar', 'spam': 'eggs'}
),
[
'https://foo.bar/spam/eggs?spam=eggs&foo=bar',
'https://foo.bar/spam/eggs?foo=bar&spam=eggs'
]
)
TEST_CASES = [SIMPLE, LONG, LOCAL]
# noinspection PyShadowingNames
@mark.parametrize('host, parameters, urls', TEST_CASES)
def test_url(host, parameters, urls):
assert APIRequest(host, **parameters).url in urls
<commit_msg>Add a test for the __call__ method of the APIRequest class.<commit_after>
|
"""Test the api_requests module."""
from pytest import mark
from gobble.api_requests import APIRequest
SIMPLE = ('foo.bar', dict(), ['https://foo.bar'])
LOCAL = ('0.0.0.0', dict(port=5000, schema='http'), ['http://0.0.0.0:5000'])
LONG = (
'foo.bar',
dict(
path=['spam', 'eggs'],
query={'foo': 'bar', 'spam': 'eggs'}
),
[
'https://foo.bar/spam/eggs?spam=eggs&foo=bar',
'https://foo.bar/spam/eggs?foo=bar&spam=eggs'
]
)
TEST_CASES = [SIMPLE, LONG, LOCAL]
# noinspection PyShadowingNames
@mark.parametrize('host, parameters, urls', TEST_CASES)
def test_url(host, parameters, urls):
assert APIRequest(host, **parameters).url in urls
def test_call():
request = APIRequest('google.com')
assert request().status_code == 200
|
"""Test the api_requests module."""
from pytest import mark
from gobble.api_requests import APIRequest
SIMPLE = ('foo.bar', dict(), ['https://foo.bar'])
LOCAL = ('0.0.0.0', dict(port=5000, schema='http'), ['http://0.0.0.0:5000'])
LONG = (
'foo.bar',
dict(
path=['spam', 'eggs'],
query={'foo': 'bar', 'spam': 'eggs'}
),
[
'https://foo.bar/spam/eggs?spam=eggs&foo=bar',
'https://foo.bar/spam/eggs?foo=bar&spam=eggs'
]
)
TEST_CASES = [SIMPLE, LONG, LOCAL]
# noinspection PyShadowingNames
@mark.parametrize('host, parameters, urls', TEST_CASES)
def test_url(host, parameters, urls):
assert APIRequest(host, **parameters).url in urls
Add a test for the __call__ method of the APIRequest class."""Test the api_requests module."""
from pytest import mark
from gobble.api_requests import APIRequest
SIMPLE = ('foo.bar', dict(), ['https://foo.bar'])
LOCAL = ('0.0.0.0', dict(port=5000, schema='http'), ['http://0.0.0.0:5000'])
LONG = (
'foo.bar',
dict(
path=['spam', 'eggs'],
query={'foo': 'bar', 'spam': 'eggs'}
),
[
'https://foo.bar/spam/eggs?spam=eggs&foo=bar',
'https://foo.bar/spam/eggs?foo=bar&spam=eggs'
]
)
TEST_CASES = [SIMPLE, LONG, LOCAL]
# noinspection PyShadowingNames
@mark.parametrize('host, parameters, urls', TEST_CASES)
def test_url(host, parameters, urls):
assert APIRequest(host, **parameters).url in urls
def test_call():
request = APIRequest('google.com')
assert request().status_code == 200
|
<commit_before>"""Test the api_requests module."""
from pytest import mark
from gobble.api_requests import APIRequest
SIMPLE = ('foo.bar', dict(), ['https://foo.bar'])
LOCAL = ('0.0.0.0', dict(port=5000, schema='http'), ['http://0.0.0.0:5000'])
LONG = (
'foo.bar',
dict(
path=['spam', 'eggs'],
query={'foo': 'bar', 'spam': 'eggs'}
),
[
'https://foo.bar/spam/eggs?spam=eggs&foo=bar',
'https://foo.bar/spam/eggs?foo=bar&spam=eggs'
]
)
TEST_CASES = [SIMPLE, LONG, LOCAL]
# noinspection PyShadowingNames
@mark.parametrize('host, parameters, urls', TEST_CASES)
def test_url(host, parameters, urls):
assert APIRequest(host, **parameters).url in urls
<commit_msg>Add a test for the __call__ method of the APIRequest class.<commit_after>"""Test the api_requests module."""
from pytest import mark
from gobble.api_requests import APIRequest
SIMPLE = ('foo.bar', dict(), ['https://foo.bar'])
LOCAL = ('0.0.0.0', dict(port=5000, schema='http'), ['http://0.0.0.0:5000'])
LONG = (
'foo.bar',
dict(
path=['spam', 'eggs'],
query={'foo': 'bar', 'spam': 'eggs'}
),
[
'https://foo.bar/spam/eggs?spam=eggs&foo=bar',
'https://foo.bar/spam/eggs?foo=bar&spam=eggs'
]
)
TEST_CASES = [SIMPLE, LONG, LOCAL]
# noinspection PyShadowingNames
@mark.parametrize('host, parameters, urls', TEST_CASES)
def test_url(host, parameters, urls):
assert APIRequest(host, **parameters).url in urls
def test_call():
request = APIRequest('google.com')
assert request().status_code == 200
|
ca7403462588f374cf1af39d537765c02fc7726c
|
mctrl/rest.py
|
mctrl/rest.py
|
from flask import Flask, url_for, Response, json, request
class MonitorApp(object):
def __init__(self, monitor):
self.app = Flask(__name__)
self.app.monitor = monitor
self.setup()
def setup(self):
@self.app.route('/anomaly', methods = ['POST'])
def api_anomaly():
data = request.json
if request.headers['Content-Type'] == 'application/json':
success = self.app.monitor.process_anomaly_data(data)
return handle_response(success)
else:
return Response("Unsupported media type\n" + data, status=415)
@self.app.route('/monitor', methods = ['POST'])
def api_monitor():
data = request.json
if request.headers['Content-Type'] == 'application/json':
success = self.app.monitor.process_monitor_flows(data)
return handle_response(success)
else:
return Response("Unsupported media type\n" + data, status=415)
def handle_response(self, success):
if success:
return Response("OK\n" + data, status=status)
else:
return Response("BAD REQUEST\n" + data, status=status)
|
from flask import Flask, url_for, Response, json, request
class MonitorApp(object):
def __init__(self, monitor):
self.app = Flask(__name__)
self.app.monitor = monitor
self.setup()
def setup(self):
@self.app.route('/anomaly', methods = ['POST'])
def api_anomaly():
data = request.json
if request.headers['Content-Type'] == 'application/json':
success = self.app.monitor.process_anomaly_data(data)
return self.handle_response(success, data)
else:
return Response("Unsupported media type\n" + data, status=415)
@self.app.route('/monitor', methods = ['POST'])
def api_monitor():
data = request.json
if request.headers['Content-Type'] == 'application/json':
success = self.app.monitor.process_monitor_flows(data)
return self.handle_response(success, data)
else:
return Response("Unsupported media type\n" + data, status=415)
def handle_response(self, success, data):
json_data = json.dumps(data)
if success:
return Response("OK\n" + json_data, status=200)
else:
return Response("BAD REQUEST\n" + json_data, status=400)
|
Fix status codes of handled responses
|
Fix status codes of handled responses
|
Python
|
apache-2.0
|
h2020-endeavour/endeavour,h2020-endeavour/endeavour
|
from flask import Flask, url_for, Response, json, request
class MonitorApp(object):
def __init__(self, monitor):
self.app = Flask(__name__)
self.app.monitor = monitor
self.setup()
def setup(self):
@self.app.route('/anomaly', methods = ['POST'])
def api_anomaly():
data = request.json
if request.headers['Content-Type'] == 'application/json':
success = self.app.monitor.process_anomaly_data(data)
return handle_response(success)
else:
return Response("Unsupported media type\n" + data, status=415)
@self.app.route('/monitor', methods = ['POST'])
def api_monitor():
data = request.json
if request.headers['Content-Type'] == 'application/json':
success = self.app.monitor.process_monitor_flows(data)
return handle_response(success)
else:
return Response("Unsupported media type\n" + data, status=415)
def handle_response(self, success):
if success:
return Response("OK\n" + data, status=status)
else:
return Response("BAD REQUEST\n" + data, status=status)
Fix status codes of handled responses
|
from flask import Flask, url_for, Response, json, request
class MonitorApp(object):
def __init__(self, monitor):
self.app = Flask(__name__)
self.app.monitor = monitor
self.setup()
def setup(self):
@self.app.route('/anomaly', methods = ['POST'])
def api_anomaly():
data = request.json
if request.headers['Content-Type'] == 'application/json':
success = self.app.monitor.process_anomaly_data(data)
return self.handle_response(success, data)
else:
return Response("Unsupported media type\n" + data, status=415)
@self.app.route('/monitor', methods = ['POST'])
def api_monitor():
data = request.json
if request.headers['Content-Type'] == 'application/json':
success = self.app.monitor.process_monitor_flows(data)
return self.handle_response(success, data)
else:
return Response("Unsupported media type\n" + data, status=415)
def handle_response(self, success, data):
json_data = json.dumps(data)
if success:
return Response("OK\n" + json_data, status=200)
else:
return Response("BAD REQUEST\n" + json_data, status=400)
|
<commit_before>from flask import Flask, url_for, Response, json, request
class MonitorApp(object):
def __init__(self, monitor):
self.app = Flask(__name__)
self.app.monitor = monitor
self.setup()
def setup(self):
@self.app.route('/anomaly', methods = ['POST'])
def api_anomaly():
data = request.json
if request.headers['Content-Type'] == 'application/json':
success = self.app.monitor.process_anomaly_data(data)
return handle_response(success)
else:
return Response("Unsupported media type\n" + data, status=415)
@self.app.route('/monitor', methods = ['POST'])
def api_monitor():
data = request.json
if request.headers['Content-Type'] == 'application/json':
success = self.app.monitor.process_monitor_flows(data)
return handle_response(success)
else:
return Response("Unsupported media type\n" + data, status=415)
def handle_response(self, success):
if success:
return Response("OK\n" + data, status=status)
else:
return Response("BAD REQUEST\n" + data, status=status)
<commit_msg>Fix status codes of handled responses<commit_after>
|
from flask import Flask, url_for, Response, json, request
class MonitorApp(object):
def __init__(self, monitor):
self.app = Flask(__name__)
self.app.monitor = monitor
self.setup()
def setup(self):
@self.app.route('/anomaly', methods = ['POST'])
def api_anomaly():
data = request.json
if request.headers['Content-Type'] == 'application/json':
success = self.app.monitor.process_anomaly_data(data)
return self.handle_response(success, data)
else:
return Response("Unsupported media type\n" + data, status=415)
@self.app.route('/monitor', methods = ['POST'])
def api_monitor():
data = request.json
if request.headers['Content-Type'] == 'application/json':
success = self.app.monitor.process_monitor_flows(data)
return self.handle_response(success, data)
else:
return Response("Unsupported media type\n" + data, status=415)
def handle_response(self, success, data):
json_data = json.dumps(data)
if success:
return Response("OK\n" + json_data, status=200)
else:
return Response("BAD REQUEST\n" + json_data, status=400)
|
from flask import Flask, url_for, Response, json, request
class MonitorApp(object):
def __init__(self, monitor):
self.app = Flask(__name__)
self.app.monitor = monitor
self.setup()
def setup(self):
@self.app.route('/anomaly', methods = ['POST'])
def api_anomaly():
data = request.json
if request.headers['Content-Type'] == 'application/json':
success = self.app.monitor.process_anomaly_data(data)
return handle_response(success)
else:
return Response("Unsupported media type\n" + data, status=415)
@self.app.route('/monitor', methods = ['POST'])
def api_monitor():
data = request.json
if request.headers['Content-Type'] == 'application/json':
success = self.app.monitor.process_monitor_flows(data)
return handle_response(success)
else:
return Response("Unsupported media type\n" + data, status=415)
def handle_response(self, success):
if success:
return Response("OK\n" + data, status=status)
else:
return Response("BAD REQUEST\n" + data, status=status)
Fix status codes of handled responsesfrom flask import Flask, url_for, Response, json, request
class MonitorApp(object):
def __init__(self, monitor):
self.app = Flask(__name__)
self.app.monitor = monitor
self.setup()
def setup(self):
@self.app.route('/anomaly', methods = ['POST'])
def api_anomaly():
data = request.json
if request.headers['Content-Type'] == 'application/json':
success = self.app.monitor.process_anomaly_data(data)
return self.handle_response(success, data)
else:
return Response("Unsupported media type\n" + data, status=415)
@self.app.route('/monitor', methods = ['POST'])
def api_monitor():
data = request.json
if request.headers['Content-Type'] == 'application/json':
success = self.app.monitor.process_monitor_flows(data)
return self.handle_response(success, data)
else:
return Response("Unsupported media type\n" + data, status=415)
def handle_response(self, success, data):
json_data = json.dumps(data)
if success:
return Response("OK\n" + json_data, status=200)
else:
return Response("BAD REQUEST\n" + json_data, status=400)
|
<commit_before>from flask import Flask, url_for, Response, json, request
class MonitorApp(object):
def __init__(self, monitor):
self.app = Flask(__name__)
self.app.monitor = monitor
self.setup()
def setup(self):
@self.app.route('/anomaly', methods = ['POST'])
def api_anomaly():
data = request.json
if request.headers['Content-Type'] == 'application/json':
success = self.app.monitor.process_anomaly_data(data)
return handle_response(success)
else:
return Response("Unsupported media type\n" + data, status=415)
@self.app.route('/monitor', methods = ['POST'])
def api_monitor():
data = request.json
if request.headers['Content-Type'] == 'application/json':
success = self.app.monitor.process_monitor_flows(data)
return handle_response(success)
else:
return Response("Unsupported media type\n" + data, status=415)
def handle_response(self, success):
if success:
return Response("OK\n" + data, status=status)
else:
return Response("BAD REQUEST\n" + data, status=status)
<commit_msg>Fix status codes of handled responses<commit_after>from flask import Flask, url_for, Response, json, request
class MonitorApp(object):
def __init__(self, monitor):
self.app = Flask(__name__)
self.app.monitor = monitor
self.setup()
def setup(self):
@self.app.route('/anomaly', methods = ['POST'])
def api_anomaly():
data = request.json
if request.headers['Content-Type'] == 'application/json':
success = self.app.monitor.process_anomaly_data(data)
return self.handle_response(success, data)
else:
return Response("Unsupported media type\n" + data, status=415)
@self.app.route('/monitor', methods = ['POST'])
def api_monitor():
data = request.json
if request.headers['Content-Type'] == 'application/json':
success = self.app.monitor.process_monitor_flows(data)
return self.handle_response(success, data)
else:
return Response("Unsupported media type\n" + data, status=415)
def handle_response(self, success, data):
json_data = json.dumps(data)
if success:
return Response("OK\n" + json_data, status=200)
else:
return Response("BAD REQUEST\n" + json_data, status=400)
|
87bf261345919e90cb88853165fb1556046c80ef
|
tests/mpd/protocol/test_connection.py
|
tests/mpd/protocol/test_connection.py
|
from __future__ import absolute_import, unicode_literals
from mock import patch
from tests.mpd import protocol
class ConnectionHandlerTest(protocol.BaseTestCase):
def test_close_closes_the_client_connection(self):
with patch.object(self.session, 'close') as close_mock:
self.send_request('close')
close_mock.assertEqualResponsecalled_once_with()
self.assertEqualResponse('OK')
def test_empty_request(self):
self.send_request('')
self.assertEqualResponse('ACK [5@0] {} No command given')
self.send_request(' ')
self.assertEqualResponse('ACK [5@0] {} No command given')
def test_kill(self):
self.send_request('kill')
self.assertEqualResponse(
'ACK [4@0] {kill} you don\'t have permission for "kill"')
def test_ping(self):
self.send_request('ping')
self.assertEqualResponse('OK')
|
from __future__ import absolute_import, unicode_literals
from mock import patch
from tests.mpd import protocol
class ConnectionHandlerTest(protocol.BaseTestCase):
def test_close_closes_the_client_connection(self):
with patch.object(self.session, 'close') as close_mock:
self.send_request('close')
close_mock.assert_called_once_with()
self.assertEqualResponse('OK')
def test_empty_request(self):
self.send_request('')
self.assertEqualResponse('ACK [5@0] {} No command given')
self.send_request(' ')
self.assertEqualResponse('ACK [5@0] {} No command given')
def test_kill(self):
self.send_request('kill')
self.assertEqualResponse(
'ACK [4@0] {kill} you don\'t have permission for "kill"')
def test_ping(self):
self.send_request('ping')
self.assertEqualResponse('OK')
|
Fix typo in mock usage
|
tests: Fix typo in mock usage
The error was made evident by a newer mock version that no longer
swallowed the wrong assert as regular use of a spec-less mock.
|
Python
|
apache-2.0
|
hkariti/mopidy,bencevans/mopidy,diandiankan/mopidy,dbrgn/mopidy,kingosticks/mopidy,mopidy/mopidy,ali/mopidy,jmarsik/mopidy,quartz55/mopidy,mopidy/mopidy,vrs01/mopidy,diandiankan/mopidy,ali/mopidy,adamcik/mopidy,pacificIT/mopidy,tkem/mopidy,pacificIT/mopidy,dbrgn/mopidy,adamcik/mopidy,hkariti/mopidy,jmarsik/mopidy,vrs01/mopidy,ZenithDK/mopidy,jmarsik/mopidy,bacontext/mopidy,jcass77/mopidy,pacificIT/mopidy,jodal/mopidy,ZenithDK/mopidy,swak/mopidy,kingosticks/mopidy,bacontext/mopidy,jcass77/mopidy,diandiankan/mopidy,quartz55/mopidy,swak/mopidy,kingosticks/mopidy,mopidy/mopidy,quartz55/mopidy,jodal/mopidy,dbrgn/mopidy,dbrgn/mopidy,mokieyue/mopidy,ali/mopidy,tkem/mopidy,swak/mopidy,jcass77/mopidy,SuperStarPL/mopidy,bacontext/mopidy,adamcik/mopidy,vrs01/mopidy,SuperStarPL/mopidy,pacificIT/mopidy,ZenithDK/mopidy,jmarsik/mopidy,bencevans/mopidy,SuperStarPL/mopidy,bencevans/mopidy,ZenithDK/mopidy,tkem/mopidy,bencevans/mopidy,ali/mopidy,tkem/mopidy,diandiankan/mopidy,jodal/mopidy,mokieyue/mopidy,SuperStarPL/mopidy,vrs01/mopidy,hkariti/mopidy,swak/mopidy,bacontext/mopidy,mokieyue/mopidy,mokieyue/mopidy,quartz55/mopidy,hkariti/mopidy
|
from __future__ import absolute_import, unicode_literals
from mock import patch
from tests.mpd import protocol
class ConnectionHandlerTest(protocol.BaseTestCase):
def test_close_closes_the_client_connection(self):
with patch.object(self.session, 'close') as close_mock:
self.send_request('close')
close_mock.assertEqualResponsecalled_once_with()
self.assertEqualResponse('OK')
def test_empty_request(self):
self.send_request('')
self.assertEqualResponse('ACK [5@0] {} No command given')
self.send_request(' ')
self.assertEqualResponse('ACK [5@0] {} No command given')
def test_kill(self):
self.send_request('kill')
self.assertEqualResponse(
'ACK [4@0] {kill} you don\'t have permission for "kill"')
def test_ping(self):
self.send_request('ping')
self.assertEqualResponse('OK')
tests: Fix typo in mock usage
The error was made evident by a newer mock version that no longer
swallowed the wrong assert as regular use of a spec-less mock.
|
from __future__ import absolute_import, unicode_literals
from mock import patch
from tests.mpd import protocol
class ConnectionHandlerTest(protocol.BaseTestCase):
def test_close_closes_the_client_connection(self):
with patch.object(self.session, 'close') as close_mock:
self.send_request('close')
close_mock.assert_called_once_with()
self.assertEqualResponse('OK')
def test_empty_request(self):
self.send_request('')
self.assertEqualResponse('ACK [5@0] {} No command given')
self.send_request(' ')
self.assertEqualResponse('ACK [5@0] {} No command given')
def test_kill(self):
self.send_request('kill')
self.assertEqualResponse(
'ACK [4@0] {kill} you don\'t have permission for "kill"')
def test_ping(self):
self.send_request('ping')
self.assertEqualResponse('OK')
|
<commit_before>from __future__ import absolute_import, unicode_literals
from mock import patch
from tests.mpd import protocol
class ConnectionHandlerTest(protocol.BaseTestCase):
def test_close_closes_the_client_connection(self):
with patch.object(self.session, 'close') as close_mock:
self.send_request('close')
close_mock.assertEqualResponsecalled_once_with()
self.assertEqualResponse('OK')
def test_empty_request(self):
self.send_request('')
self.assertEqualResponse('ACK [5@0] {} No command given')
self.send_request(' ')
self.assertEqualResponse('ACK [5@0] {} No command given')
def test_kill(self):
self.send_request('kill')
self.assertEqualResponse(
'ACK [4@0] {kill} you don\'t have permission for "kill"')
def test_ping(self):
self.send_request('ping')
self.assertEqualResponse('OK')
<commit_msg>tests: Fix typo in mock usage
The error was made evident by a newer mock version that no longer
swallowed the wrong assert as regular use of a spec-less mock.<commit_after>
|
from __future__ import absolute_import, unicode_literals
from mock import patch
from tests.mpd import protocol
class ConnectionHandlerTest(protocol.BaseTestCase):
def test_close_closes_the_client_connection(self):
with patch.object(self.session, 'close') as close_mock:
self.send_request('close')
close_mock.assert_called_once_with()
self.assertEqualResponse('OK')
def test_empty_request(self):
self.send_request('')
self.assertEqualResponse('ACK [5@0] {} No command given')
self.send_request(' ')
self.assertEqualResponse('ACK [5@0] {} No command given')
def test_kill(self):
self.send_request('kill')
self.assertEqualResponse(
'ACK [4@0] {kill} you don\'t have permission for "kill"')
def test_ping(self):
self.send_request('ping')
self.assertEqualResponse('OK')
|
from __future__ import absolute_import, unicode_literals
from mock import patch
from tests.mpd import protocol
class ConnectionHandlerTest(protocol.BaseTestCase):
def test_close_closes_the_client_connection(self):
with patch.object(self.session, 'close') as close_mock:
self.send_request('close')
close_mock.assertEqualResponsecalled_once_with()
self.assertEqualResponse('OK')
def test_empty_request(self):
self.send_request('')
self.assertEqualResponse('ACK [5@0] {} No command given')
self.send_request(' ')
self.assertEqualResponse('ACK [5@0] {} No command given')
def test_kill(self):
self.send_request('kill')
self.assertEqualResponse(
'ACK [4@0] {kill} you don\'t have permission for "kill"')
def test_ping(self):
self.send_request('ping')
self.assertEqualResponse('OK')
tests: Fix typo in mock usage
The error was made evident by a newer mock version that no longer
swallowed the wrong assert as regular use of a spec-less mock.from __future__ import absolute_import, unicode_literals
from mock import patch
from tests.mpd import protocol
class ConnectionHandlerTest(protocol.BaseTestCase):
def test_close_closes_the_client_connection(self):
with patch.object(self.session, 'close') as close_mock:
self.send_request('close')
close_mock.assert_called_once_with()
self.assertEqualResponse('OK')
def test_empty_request(self):
self.send_request('')
self.assertEqualResponse('ACK [5@0] {} No command given')
self.send_request(' ')
self.assertEqualResponse('ACK [5@0] {} No command given')
def test_kill(self):
self.send_request('kill')
self.assertEqualResponse(
'ACK [4@0] {kill} you don\'t have permission for "kill"')
def test_ping(self):
self.send_request('ping')
self.assertEqualResponse('OK')
|
<commit_before>from __future__ import absolute_import, unicode_literals
from mock import patch
from tests.mpd import protocol
class ConnectionHandlerTest(protocol.BaseTestCase):
def test_close_closes_the_client_connection(self):
with patch.object(self.session, 'close') as close_mock:
self.send_request('close')
close_mock.assertEqualResponsecalled_once_with()
self.assertEqualResponse('OK')
def test_empty_request(self):
self.send_request('')
self.assertEqualResponse('ACK [5@0] {} No command given')
self.send_request(' ')
self.assertEqualResponse('ACK [5@0] {} No command given')
def test_kill(self):
self.send_request('kill')
self.assertEqualResponse(
'ACK [4@0] {kill} you don\'t have permission for "kill"')
def test_ping(self):
self.send_request('ping')
self.assertEqualResponse('OK')
<commit_msg>tests: Fix typo in mock usage
The error was made evident by a newer mock version that no longer
swallowed the wrong assert as regular use of a spec-less mock.<commit_after>from __future__ import absolute_import, unicode_literals
from mock import patch
from tests.mpd import protocol
class ConnectionHandlerTest(protocol.BaseTestCase):
def test_close_closes_the_client_connection(self):
with patch.object(self.session, 'close') as close_mock:
self.send_request('close')
close_mock.assert_called_once_with()
self.assertEqualResponse('OK')
def test_empty_request(self):
self.send_request('')
self.assertEqualResponse('ACK [5@0] {} No command given')
self.send_request(' ')
self.assertEqualResponse('ACK [5@0] {} No command given')
def test_kill(self):
self.send_request('kill')
self.assertEqualResponse(
'ACK [4@0] {kill} you don\'t have permission for "kill"')
def test_ping(self):
self.send_request('ping')
self.assertEqualResponse('OK')
|
e34bcec834bf4d84168d04a1ea0a98613ad0df4e
|
corehq/apps/locations/management/commands/migrate_new_location_fixture.py
|
corehq/apps/locations/management/commands/migrate_new_location_fixture.py
|
from django.core.management.base import BaseCommand
from toggle.models import Toggle
from corehq.apps.locations.models import LocationFixtureConfiguration, SQLLocation
from corehq.toggles import FLAT_LOCATION_FIXTURE
class Command(BaseCommand):
help = """
To migrate to new flat fixture for locations. Update apps with locations and not having
FLAT_LOCATION_FIXTURE enabled to have LocationFixtureConfiguration with
sync_hierarchical_fixture True and sync_flat_fixture False to have old fixtures enabled.
The Feature Flag should be removed after this
"""
def handle(self, *args, **options):
domains_having_locations = set(SQLLocation.objects.values_list('domain', flat=True))
toggle = Toggle.get(FLAT_LOCATION_FIXTURE.slug)
enabled_users = toggle.enabled_users
enabled_domains = [user.split('domain:')[1] for user in enabled_users]
for domain_name in domains_having_locations:
if domain_name not in enabled_domains:
domain_config = LocationFixtureConfiguration.for_domain(domain_name)
# update configs that had not been changed which means both values are at default True
if domain_config.sync_hierarchical_fixture and domain_config.sync_flat_fixture:
# update them to use hierarchical fixture
domain_config.sync_flat_fixture = False
domain_config.sync_hierarchical_fixture = True
domain_config.save()
|
import json
from django.core.management.base import BaseCommand
from toggle.models import Toggle
from corehq.apps.locations.models import SQLLocation
from corehq.apps.domain.models import Domain
from corehq.toggles import HIERARCHICAL_LOCATION_FIXTURE, NAMESPACE_DOMAIN
class Command(BaseCommand):
help = """
To migrate to new flat fixture for locations. Enable FF HIERARCHICAL_LOCATION_FIXTURE for
apps with locations and having commtrack:enabled in app files
The Feature Flag FLAT_LOCATION_FIXTURE should be removed after this
"""
def handle(self, *args, **options):
domains_having_locations = (
SQLLocation.objects.order_by('domain').distinct('domain')
.values_list('domain', flat=True)
)
domains_with_hierarchical_fixture = find_applications_with_hierarchical_fixture(
domains_having_locations
)
toggle = Toggle.get(HIERARCHICAL_LOCATION_FIXTURE.slug)
for domain in domains_with_hierarchical_fixture:
toggle.add(domain, True, NAMESPACE_DOMAIN)
def find_applications_with_hierarchical_fixture(domains):
search_string = 'commtrack:enabled'
domain_with_application = {}
for domain in domains:
domain_obj = Domain.get_by_name(domain)
for application in domain_obj.applications():
raw_doc = json.dumps(application.get_db().get(application.id))
if search_string in raw_doc:
search_string[domain] = application.id
continue
return domain_with_application
|
Update migration to fetch domains with applications using old location fixture
|
Update migration to fetch domains with applications using old location fixture
|
Python
|
bsd-3-clause
|
dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq
|
from django.core.management.base import BaseCommand
from toggle.models import Toggle
from corehq.apps.locations.models import LocationFixtureConfiguration, SQLLocation
from corehq.toggles import FLAT_LOCATION_FIXTURE
class Command(BaseCommand):
help = """
To migrate to new flat fixture for locations. Update apps with locations and not having
FLAT_LOCATION_FIXTURE enabled to have LocationFixtureConfiguration with
sync_hierarchical_fixture True and sync_flat_fixture False to have old fixtures enabled.
The Feature Flag should be removed after this
"""
def handle(self, *args, **options):
domains_having_locations = set(SQLLocation.objects.values_list('domain', flat=True))
toggle = Toggle.get(FLAT_LOCATION_FIXTURE.slug)
enabled_users = toggle.enabled_users
enabled_domains = [user.split('domain:')[1] for user in enabled_users]
for domain_name in domains_having_locations:
if domain_name not in enabled_domains:
domain_config = LocationFixtureConfiguration.for_domain(domain_name)
# update configs that had not been changed which means both values are at default True
if domain_config.sync_hierarchical_fixture and domain_config.sync_flat_fixture:
# update them to use hierarchical fixture
domain_config.sync_flat_fixture = False
domain_config.sync_hierarchical_fixture = True
domain_config.save()
Update migration to fetch domains with applications using old location fixture
|
import json
from django.core.management.base import BaseCommand
from toggle.models import Toggle
from corehq.apps.locations.models import SQLLocation
from corehq.apps.domain.models import Domain
from corehq.toggles import HIERARCHICAL_LOCATION_FIXTURE, NAMESPACE_DOMAIN
class Command(BaseCommand):
help = """
To migrate to new flat fixture for locations. Enable FF HIERARCHICAL_LOCATION_FIXTURE for
apps with locations and having commtrack:enabled in app files
The Feature Flag FLAT_LOCATION_FIXTURE should be removed after this
"""
def handle(self, *args, **options):
domains_having_locations = (
SQLLocation.objects.order_by('domain').distinct('domain')
.values_list('domain', flat=True)
)
domains_with_hierarchical_fixture = find_applications_with_hierarchical_fixture(
domains_having_locations
)
toggle = Toggle.get(HIERARCHICAL_LOCATION_FIXTURE.slug)
for domain in domains_with_hierarchical_fixture:
toggle.add(domain, True, NAMESPACE_DOMAIN)
def find_applications_with_hierarchical_fixture(domains):
search_string = 'commtrack:enabled'
domain_with_application = {}
for domain in domains:
domain_obj = Domain.get_by_name(domain)
for application in domain_obj.applications():
raw_doc = json.dumps(application.get_db().get(application.id))
if search_string in raw_doc:
search_string[domain] = application.id
continue
return domain_with_application
|
<commit_before>from django.core.management.base import BaseCommand
from toggle.models import Toggle
from corehq.apps.locations.models import LocationFixtureConfiguration, SQLLocation
from corehq.toggles import FLAT_LOCATION_FIXTURE
class Command(BaseCommand):
help = """
To migrate to new flat fixture for locations. Update apps with locations and not having
FLAT_LOCATION_FIXTURE enabled to have LocationFixtureConfiguration with
sync_hierarchical_fixture True and sync_flat_fixture False to have old fixtures enabled.
The Feature Flag should be removed after this
"""
def handle(self, *args, **options):
domains_having_locations = set(SQLLocation.objects.values_list('domain', flat=True))
toggle = Toggle.get(FLAT_LOCATION_FIXTURE.slug)
enabled_users = toggle.enabled_users
enabled_domains = [user.split('domain:')[1] for user in enabled_users]
for domain_name in domains_having_locations:
if domain_name not in enabled_domains:
domain_config = LocationFixtureConfiguration.for_domain(domain_name)
# update configs that had not been changed which means both values are at default True
if domain_config.sync_hierarchical_fixture and domain_config.sync_flat_fixture:
# update them to use hierarchical fixture
domain_config.sync_flat_fixture = False
domain_config.sync_hierarchical_fixture = True
domain_config.save()
<commit_msg>Update migration to fetch domains with applications using old location fixture<commit_after>
|
import json
from django.core.management.base import BaseCommand
from toggle.models import Toggle
from corehq.apps.locations.models import SQLLocation
from corehq.apps.domain.models import Domain
from corehq.toggles import HIERARCHICAL_LOCATION_FIXTURE, NAMESPACE_DOMAIN
class Command(BaseCommand):
help = """
To migrate to new flat fixture for locations. Enable FF HIERARCHICAL_LOCATION_FIXTURE for
apps with locations and having commtrack:enabled in app files
The Feature Flag FLAT_LOCATION_FIXTURE should be removed after this
"""
def handle(self, *args, **options):
domains_having_locations = (
SQLLocation.objects.order_by('domain').distinct('domain')
.values_list('domain', flat=True)
)
domains_with_hierarchical_fixture = find_applications_with_hierarchical_fixture(
domains_having_locations
)
toggle = Toggle.get(HIERARCHICAL_LOCATION_FIXTURE.slug)
for domain in domains_with_hierarchical_fixture:
toggle.add(domain, True, NAMESPACE_DOMAIN)
def find_applications_with_hierarchical_fixture(domains):
search_string = 'commtrack:enabled'
domain_with_application = {}
for domain in domains:
domain_obj = Domain.get_by_name(domain)
for application in domain_obj.applications():
raw_doc = json.dumps(application.get_db().get(application.id))
if search_string in raw_doc:
search_string[domain] = application.id
continue
return domain_with_application
|
from django.core.management.base import BaseCommand
from toggle.models import Toggle
from corehq.apps.locations.models import LocationFixtureConfiguration, SQLLocation
from corehq.toggles import FLAT_LOCATION_FIXTURE
class Command(BaseCommand):
help = """
To migrate to new flat fixture for locations. Update apps with locations and not having
FLAT_LOCATION_FIXTURE enabled to have LocationFixtureConfiguration with
sync_hierarchical_fixture True and sync_flat_fixture False to have old fixtures enabled.
The Feature Flag should be removed after this
"""
def handle(self, *args, **options):
domains_having_locations = set(SQLLocation.objects.values_list('domain', flat=True))
toggle = Toggle.get(FLAT_LOCATION_FIXTURE.slug)
enabled_users = toggle.enabled_users
enabled_domains = [user.split('domain:')[1] for user in enabled_users]
for domain_name in domains_having_locations:
if domain_name not in enabled_domains:
domain_config = LocationFixtureConfiguration.for_domain(domain_name)
# update configs that had not been changed which means both values are at default True
if domain_config.sync_hierarchical_fixture and domain_config.sync_flat_fixture:
# update them to use hierarchical fixture
domain_config.sync_flat_fixture = False
domain_config.sync_hierarchical_fixture = True
domain_config.save()
Update migration to fetch domains with applications using old location fixtureimport json
from django.core.management.base import BaseCommand
from toggle.models import Toggle
from corehq.apps.locations.models import SQLLocation
from corehq.apps.domain.models import Domain
from corehq.toggles import HIERARCHICAL_LOCATION_FIXTURE, NAMESPACE_DOMAIN
class Command(BaseCommand):
help = """
To migrate to new flat fixture for locations. Enable FF HIERARCHICAL_LOCATION_FIXTURE for
apps with locations and having commtrack:enabled in app files
The Feature Flag FLAT_LOCATION_FIXTURE should be removed after this
"""
def handle(self, *args, **options):
domains_having_locations = (
SQLLocation.objects.order_by('domain').distinct('domain')
.values_list('domain', flat=True)
)
domains_with_hierarchical_fixture = find_applications_with_hierarchical_fixture(
domains_having_locations
)
toggle = Toggle.get(HIERARCHICAL_LOCATION_FIXTURE.slug)
for domain in domains_with_hierarchical_fixture:
toggle.add(domain, True, NAMESPACE_DOMAIN)
def find_applications_with_hierarchical_fixture(domains):
search_string = 'commtrack:enabled'
domain_with_application = {}
for domain in domains:
domain_obj = Domain.get_by_name(domain)
for application in domain_obj.applications():
raw_doc = json.dumps(application.get_db().get(application.id))
if search_string in raw_doc:
search_string[domain] = application.id
continue
return domain_with_application
|
<commit_before>from django.core.management.base import BaseCommand
from toggle.models import Toggle
from corehq.apps.locations.models import LocationFixtureConfiguration, SQLLocation
from corehq.toggles import FLAT_LOCATION_FIXTURE
class Command(BaseCommand):
help = """
To migrate to new flat fixture for locations. Update apps with locations and not having
FLAT_LOCATION_FIXTURE enabled to have LocationFixtureConfiguration with
sync_hierarchical_fixture True and sync_flat_fixture False to have old fixtures enabled.
The Feature Flag should be removed after this
"""
def handle(self, *args, **options):
domains_having_locations = set(SQLLocation.objects.values_list('domain', flat=True))
toggle = Toggle.get(FLAT_LOCATION_FIXTURE.slug)
enabled_users = toggle.enabled_users
enabled_domains = [user.split('domain:')[1] for user in enabled_users]
for domain_name in domains_having_locations:
if domain_name not in enabled_domains:
domain_config = LocationFixtureConfiguration.for_domain(domain_name)
# update configs that had not been changed which means both values are at default True
if domain_config.sync_hierarchical_fixture and domain_config.sync_flat_fixture:
# update them to use hierarchical fixture
domain_config.sync_flat_fixture = False
domain_config.sync_hierarchical_fixture = True
domain_config.save()
<commit_msg>Update migration to fetch domains with applications using old location fixture<commit_after>import json
from django.core.management.base import BaseCommand
from toggle.models import Toggle
from corehq.apps.locations.models import SQLLocation
from corehq.apps.domain.models import Domain
from corehq.toggles import HIERARCHICAL_LOCATION_FIXTURE, NAMESPACE_DOMAIN
class Command(BaseCommand):
help = """
To migrate to new flat fixture for locations. Enable FF HIERARCHICAL_LOCATION_FIXTURE for
apps with locations and having commtrack:enabled in app files
The Feature Flag FLAT_LOCATION_FIXTURE should be removed after this
"""
def handle(self, *args, **options):
domains_having_locations = (
SQLLocation.objects.order_by('domain').distinct('domain')
.values_list('domain', flat=True)
)
domains_with_hierarchical_fixture = find_applications_with_hierarchical_fixture(
domains_having_locations
)
toggle = Toggle.get(HIERARCHICAL_LOCATION_FIXTURE.slug)
for domain in domains_with_hierarchical_fixture:
toggle.add(domain, True, NAMESPACE_DOMAIN)
def find_applications_with_hierarchical_fixture(domains):
search_string = 'commtrack:enabled'
domain_with_application = {}
for domain in domains:
domain_obj = Domain.get_by_name(domain)
for application in domain_obj.applications():
raw_doc = json.dumps(application.get_db().get(application.id))
if search_string in raw_doc:
search_string[domain] = application.id
continue
return domain_with_application
|
a17ed4f65b7fa5a035efb7c6ff19fcf477a65429
|
categories_i18n/managers.py
|
categories_i18n/managers.py
|
"""
The manager classes.
"""
import django
from django.db.models.query import QuerySet
from mptt.managers import TreeManager
from mptt.querysets import TreeQuerySet
from parler.managers import TranslatableManager, TranslatableQuerySet
class CategoryQuerySet(TranslatableQuerySet, TreeQuerySet):
"""
The Queryset methods for the Category model.
"""
def as_manager(cls):
# Make sure the Django way of creating managers works.
manager = CategoryManager.from_queryset(cls)()
manager._built_with_as_manager = True
return manager
as_manager.queryset_only = True
as_manager = classmethod(as_manager)
class CategoryManager(TreeManager, TranslatableManager):
"""
Base manager class for the categories.
"""
_queryset_class = CategoryQuerySet
def get_queryset(self):
# Nasty: In some django-mptt 0.7 versions, TreeManager.get_querset() no longer calls super()
# Hence, redefine get_queryset() here to have the logic from django-parler and django-mptt.
return self._queryset_class(self.model, using=self._db).order_by(
self.tree_id_attr, self.left_attr
)
|
"""
The manager classes.
"""
import django
from django.db.models.query import QuerySet
from mptt.managers import TreeManager
from mptt.querysets import TreeQuerySet
from parler.managers import TranslatableManager, TranslatableQuerySet
class CategoryQuerySet(TranslatableQuerySet, TreeQuerySet):
"""
The Queryset methods for the Category model.
"""
def as_manager(cls):
# Make sure the Django way of creating managers works.
manager = CategoryManager.from_queryset(cls)()
manager._built_with_as_manager = True
return manager
as_manager.queryset_only = True
as_manager = classmethod(as_manager)
class CategoryManager(TreeManager, TranslatableManager):
"""
Base manager class for the categories.
"""
_queryset_class = CategoryQuerySet
|
Remove remaining django-mptt 0.7 compatibility code
|
Remove remaining django-mptt 0.7 compatibility code
|
Python
|
apache-2.0
|
edoburu/django-categories-i18n,edoburu/django-categories-i18n
|
"""
The manager classes.
"""
import django
from django.db.models.query import QuerySet
from mptt.managers import TreeManager
from mptt.querysets import TreeQuerySet
from parler.managers import TranslatableManager, TranslatableQuerySet
class CategoryQuerySet(TranslatableQuerySet, TreeQuerySet):
"""
The Queryset methods for the Category model.
"""
def as_manager(cls):
# Make sure the Django way of creating managers works.
manager = CategoryManager.from_queryset(cls)()
manager._built_with_as_manager = True
return manager
as_manager.queryset_only = True
as_manager = classmethod(as_manager)
class CategoryManager(TreeManager, TranslatableManager):
"""
Base manager class for the categories.
"""
_queryset_class = CategoryQuerySet
def get_queryset(self):
# Nasty: In some django-mptt 0.7 versions, TreeManager.get_querset() no longer calls super()
# Hence, redefine get_queryset() here to have the logic from django-parler and django-mptt.
return self._queryset_class(self.model, using=self._db).order_by(
self.tree_id_attr, self.left_attr
)
Remove remaining django-mptt 0.7 compatibility code
|
"""
The manager classes.
"""
import django
from django.db.models.query import QuerySet
from mptt.managers import TreeManager
from mptt.querysets import TreeQuerySet
from parler.managers import TranslatableManager, TranslatableQuerySet
class CategoryQuerySet(TranslatableQuerySet, TreeQuerySet):
"""
The Queryset methods for the Category model.
"""
def as_manager(cls):
# Make sure the Django way of creating managers works.
manager = CategoryManager.from_queryset(cls)()
manager._built_with_as_manager = True
return manager
as_manager.queryset_only = True
as_manager = classmethod(as_manager)
class CategoryManager(TreeManager, TranslatableManager):
"""
Base manager class for the categories.
"""
_queryset_class = CategoryQuerySet
|
<commit_before>"""
The manager classes.
"""
import django
from django.db.models.query import QuerySet
from mptt.managers import TreeManager
from mptt.querysets import TreeQuerySet
from parler.managers import TranslatableManager, TranslatableQuerySet
class CategoryQuerySet(TranslatableQuerySet, TreeQuerySet):
"""
The Queryset methods for the Category model.
"""
def as_manager(cls):
# Make sure the Django way of creating managers works.
manager = CategoryManager.from_queryset(cls)()
manager._built_with_as_manager = True
return manager
as_manager.queryset_only = True
as_manager = classmethod(as_manager)
class CategoryManager(TreeManager, TranslatableManager):
"""
Base manager class for the categories.
"""
_queryset_class = CategoryQuerySet
def get_queryset(self):
# Nasty: In some django-mptt 0.7 versions, TreeManager.get_querset() no longer calls super()
# Hence, redefine get_queryset() here to have the logic from django-parler and django-mptt.
return self._queryset_class(self.model, using=self._db).order_by(
self.tree_id_attr, self.left_attr
)
<commit_msg>Remove remaining django-mptt 0.7 compatibility code<commit_after>
|
"""
The manager classes.
"""
import django
from django.db.models.query import QuerySet
from mptt.managers import TreeManager
from mptt.querysets import TreeQuerySet
from parler.managers import TranslatableManager, TranslatableQuerySet
class CategoryQuerySet(TranslatableQuerySet, TreeQuerySet):
"""
The Queryset methods for the Category model.
"""
def as_manager(cls):
# Make sure the Django way of creating managers works.
manager = CategoryManager.from_queryset(cls)()
manager._built_with_as_manager = True
return manager
as_manager.queryset_only = True
as_manager = classmethod(as_manager)
class CategoryManager(TreeManager, TranslatableManager):
"""
Base manager class for the categories.
"""
_queryset_class = CategoryQuerySet
|
"""
The manager classes.
"""
import django
from django.db.models.query import QuerySet
from mptt.managers import TreeManager
from mptt.querysets import TreeQuerySet
from parler.managers import TranslatableManager, TranslatableQuerySet
class CategoryQuerySet(TranslatableQuerySet, TreeQuerySet):
"""
The Queryset methods for the Category model.
"""
def as_manager(cls):
# Make sure the Django way of creating managers works.
manager = CategoryManager.from_queryset(cls)()
manager._built_with_as_manager = True
return manager
as_manager.queryset_only = True
as_manager = classmethod(as_manager)
class CategoryManager(TreeManager, TranslatableManager):
"""
Base manager class for the categories.
"""
_queryset_class = CategoryQuerySet
def get_queryset(self):
# Nasty: In some django-mptt 0.7 versions, TreeManager.get_querset() no longer calls super()
# Hence, redefine get_queryset() here to have the logic from django-parler and django-mptt.
return self._queryset_class(self.model, using=self._db).order_by(
self.tree_id_attr, self.left_attr
)
Remove remaining django-mptt 0.7 compatibility code"""
The manager classes.
"""
import django
from django.db.models.query import QuerySet
from mptt.managers import TreeManager
from mptt.querysets import TreeQuerySet
from parler.managers import TranslatableManager, TranslatableQuerySet
class CategoryQuerySet(TranslatableQuerySet, TreeQuerySet):
"""
The Queryset methods for the Category model.
"""
def as_manager(cls):
# Make sure the Django way of creating managers works.
manager = CategoryManager.from_queryset(cls)()
manager._built_with_as_manager = True
return manager
as_manager.queryset_only = True
as_manager = classmethod(as_manager)
class CategoryManager(TreeManager, TranslatableManager):
"""
Base manager class for the categories.
"""
_queryset_class = CategoryQuerySet
|
<commit_before>"""
The manager classes.
"""
import django
from django.db.models.query import QuerySet
from mptt.managers import TreeManager
from mptt.querysets import TreeQuerySet
from parler.managers import TranslatableManager, TranslatableQuerySet
class CategoryQuerySet(TranslatableQuerySet, TreeQuerySet):
"""
The Queryset methods for the Category model.
"""
def as_manager(cls):
# Make sure the Django way of creating managers works.
manager = CategoryManager.from_queryset(cls)()
manager._built_with_as_manager = True
return manager
as_manager.queryset_only = True
as_manager = classmethod(as_manager)
class CategoryManager(TreeManager, TranslatableManager):
"""
Base manager class for the categories.
"""
_queryset_class = CategoryQuerySet
def get_queryset(self):
# Nasty: In some django-mptt 0.7 versions, TreeManager.get_querset() no longer calls super()
# Hence, redefine get_queryset() here to have the logic from django-parler and django-mptt.
return self._queryset_class(self.model, using=self._db).order_by(
self.tree_id_attr, self.left_attr
)
<commit_msg>Remove remaining django-mptt 0.7 compatibility code<commit_after>"""
The manager classes.
"""
import django
from django.db.models.query import QuerySet
from mptt.managers import TreeManager
from mptt.querysets import TreeQuerySet
from parler.managers import TranslatableManager, TranslatableQuerySet
class CategoryQuerySet(TranslatableQuerySet, TreeQuerySet):
"""
The Queryset methods for the Category model.
"""
def as_manager(cls):
# Make sure the Django way of creating managers works.
manager = CategoryManager.from_queryset(cls)()
manager._built_with_as_manager = True
return manager
as_manager.queryset_only = True
as_manager = classmethod(as_manager)
class CategoryManager(TreeManager, TranslatableManager):
"""
Base manager class for the categories.
"""
_queryset_class = CategoryQuerySet
|
e775613d43dac702565cf266d9995c9cd706d7c8
|
pwndbg/commands/cpsr.py
|
pwndbg/commands/cpsr.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import print_function
import gdb
import pwndbg.arch
import pwndbg.color
import pwndbg.commands
import pwndbg.regs
@pwndbg.commands.Command
@pwndbg.commands.OnlyWhenRunning
def cpsr():
if pwndbg.arch.current != 'arm':
print("This is only available on ARM")
return
cpsr = pwndbg.regs.cpsr
N = cpsr & (1<<31)
Z = cpsr & (1<<30)
C = cpsr & (1<<29)
V = cpsr & (1<<28)
T = cpsr & (1<<5)
bold = pwndbg.color.bold
result = [
bold('N') if N else 'n',
bold('Z') if Z else 'z',
bold('C') if C else 'c',
bold('V') if V else 'v',
bold('T') if T else 't'
]
print('cpsr %#x [ %s ]' % (cpsr, ' '.join(result)))
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import print_function
import gdb
import pwndbg.arch
import pwndbg.color
import pwndbg.commands
import pwndbg.regs
@pwndbg.commands.Command
@pwndbg.commands.OnlyWhenRunning
def cpsr():
'Print out the ARM CPSR register'
if pwndbg.arch.current != 'arm':
print("This is only available on ARM")
return
cpsr = pwndbg.regs.cpsr
N = cpsr & (1<<31)
Z = cpsr & (1<<30)
C = cpsr & (1<<29)
V = cpsr & (1<<28)
T = cpsr & (1<<5)
bold = pwndbg.color.bold
result = [
bold('N') if N else 'n',
bold('Z') if Z else 'z',
bold('C') if C else 'c',
bold('V') if V else 'v',
bold('T') if T else 't'
]
print('cpsr %#x [ %s ]' % (cpsr, ' '.join(result)))
|
Add documentation for the CPSR command
|
Add documentation for the CPSR command
|
Python
|
mit
|
0xddaa/pwndbg,pwndbg/pwndbg,anthraxx/pwndbg,pwndbg/pwndbg,disconnect3d/pwndbg,zachriggle/pwndbg,anthraxx/pwndbg,pwndbg/pwndbg,disconnect3d/pwndbg,anthraxx/pwndbg,cebrusfs/217gdb,pwndbg/pwndbg,chubbymaggie/pwndbg,chubbymaggie/pwndbg,disconnect3d/pwndbg,zachriggle/pwndbg,0xddaa/pwndbg,cebrusfs/217gdb,cebrusfs/217gdb,0xddaa/pwndbg,cebrusfs/217gdb,anthraxx/pwndbg
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import print_function
import gdb
import pwndbg.arch
import pwndbg.color
import pwndbg.commands
import pwndbg.regs
@pwndbg.commands.Command
@pwndbg.commands.OnlyWhenRunning
def cpsr():
if pwndbg.arch.current != 'arm':
print("This is only available on ARM")
return
cpsr = pwndbg.regs.cpsr
N = cpsr & (1<<31)
Z = cpsr & (1<<30)
C = cpsr & (1<<29)
V = cpsr & (1<<28)
T = cpsr & (1<<5)
bold = pwndbg.color.bold
result = [
bold('N') if N else 'n',
bold('Z') if Z else 'z',
bold('C') if C else 'c',
bold('V') if V else 'v',
bold('T') if T else 't'
]
print('cpsr %#x [ %s ]' % (cpsr, ' '.join(result)))
Add documentation for the CPSR command
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import print_function
import gdb
import pwndbg.arch
import pwndbg.color
import pwndbg.commands
import pwndbg.regs
@pwndbg.commands.Command
@pwndbg.commands.OnlyWhenRunning
def cpsr():
'Print out the ARM CPSR register'
if pwndbg.arch.current != 'arm':
print("This is only available on ARM")
return
cpsr = pwndbg.regs.cpsr
N = cpsr & (1<<31)
Z = cpsr & (1<<30)
C = cpsr & (1<<29)
V = cpsr & (1<<28)
T = cpsr & (1<<5)
bold = pwndbg.color.bold
result = [
bold('N') if N else 'n',
bold('Z') if Z else 'z',
bold('C') if C else 'c',
bold('V') if V else 'v',
bold('T') if T else 't'
]
print('cpsr %#x [ %s ]' % (cpsr, ' '.join(result)))
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import print_function
import gdb
import pwndbg.arch
import pwndbg.color
import pwndbg.commands
import pwndbg.regs
@pwndbg.commands.Command
@pwndbg.commands.OnlyWhenRunning
def cpsr():
if pwndbg.arch.current != 'arm':
print("This is only available on ARM")
return
cpsr = pwndbg.regs.cpsr
N = cpsr & (1<<31)
Z = cpsr & (1<<30)
C = cpsr & (1<<29)
V = cpsr & (1<<28)
T = cpsr & (1<<5)
bold = pwndbg.color.bold
result = [
bold('N') if N else 'n',
bold('Z') if Z else 'z',
bold('C') if C else 'c',
bold('V') if V else 'v',
bold('T') if T else 't'
]
print('cpsr %#x [ %s ]' % (cpsr, ' '.join(result)))
<commit_msg>Add documentation for the CPSR command<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import print_function
import gdb
import pwndbg.arch
import pwndbg.color
import pwndbg.commands
import pwndbg.regs
@pwndbg.commands.Command
@pwndbg.commands.OnlyWhenRunning
def cpsr():
'Print out the ARM CPSR register'
if pwndbg.arch.current != 'arm':
print("This is only available on ARM")
return
cpsr = pwndbg.regs.cpsr
N = cpsr & (1<<31)
Z = cpsr & (1<<30)
C = cpsr & (1<<29)
V = cpsr & (1<<28)
T = cpsr & (1<<5)
bold = pwndbg.color.bold
result = [
bold('N') if N else 'n',
bold('Z') if Z else 'z',
bold('C') if C else 'c',
bold('V') if V else 'v',
bold('T') if T else 't'
]
print('cpsr %#x [ %s ]' % (cpsr, ' '.join(result)))
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import print_function
import gdb
import pwndbg.arch
import pwndbg.color
import pwndbg.commands
import pwndbg.regs
@pwndbg.commands.Command
@pwndbg.commands.OnlyWhenRunning
def cpsr():
if pwndbg.arch.current != 'arm':
print("This is only available on ARM")
return
cpsr = pwndbg.regs.cpsr
N = cpsr & (1<<31)
Z = cpsr & (1<<30)
C = cpsr & (1<<29)
V = cpsr & (1<<28)
T = cpsr & (1<<5)
bold = pwndbg.color.bold
result = [
bold('N') if N else 'n',
bold('Z') if Z else 'z',
bold('C') if C else 'c',
bold('V') if V else 'v',
bold('T') if T else 't'
]
print('cpsr %#x [ %s ]' % (cpsr, ' '.join(result)))
Add documentation for the CPSR command#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import print_function
import gdb
import pwndbg.arch
import pwndbg.color
import pwndbg.commands
import pwndbg.regs
@pwndbg.commands.Command
@pwndbg.commands.OnlyWhenRunning
def cpsr():
'Print out the ARM CPSR register'
if pwndbg.arch.current != 'arm':
print("This is only available on ARM")
return
cpsr = pwndbg.regs.cpsr
N = cpsr & (1<<31)
Z = cpsr & (1<<30)
C = cpsr & (1<<29)
V = cpsr & (1<<28)
T = cpsr & (1<<5)
bold = pwndbg.color.bold
result = [
bold('N') if N else 'n',
bold('Z') if Z else 'z',
bold('C') if C else 'c',
bold('V') if V else 'v',
bold('T') if T else 't'
]
print('cpsr %#x [ %s ]' % (cpsr, ' '.join(result)))
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import print_function
import gdb
import pwndbg.arch
import pwndbg.color
import pwndbg.commands
import pwndbg.regs
@pwndbg.commands.Command
@pwndbg.commands.OnlyWhenRunning
def cpsr():
if pwndbg.arch.current != 'arm':
print("This is only available on ARM")
return
cpsr = pwndbg.regs.cpsr
N = cpsr & (1<<31)
Z = cpsr & (1<<30)
C = cpsr & (1<<29)
V = cpsr & (1<<28)
T = cpsr & (1<<5)
bold = pwndbg.color.bold
result = [
bold('N') if N else 'n',
bold('Z') if Z else 'z',
bold('C') if C else 'c',
bold('V') if V else 'v',
bold('T') if T else 't'
]
print('cpsr %#x [ %s ]' % (cpsr, ' '.join(result)))
<commit_msg>Add documentation for the CPSR command<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import print_function
import gdb
import pwndbg.arch
import pwndbg.color
import pwndbg.commands
import pwndbg.regs
@pwndbg.commands.Command
@pwndbg.commands.OnlyWhenRunning
def cpsr():
'Print out the ARM CPSR register'
if pwndbg.arch.current != 'arm':
print("This is only available on ARM")
return
cpsr = pwndbg.regs.cpsr
N = cpsr & (1<<31)
Z = cpsr & (1<<30)
C = cpsr & (1<<29)
V = cpsr & (1<<28)
T = cpsr & (1<<5)
bold = pwndbg.color.bold
result = [
bold('N') if N else 'n',
bold('Z') if Z else 'z',
bold('C') if C else 'c',
bold('V') if V else 'v',
bold('T') if T else 't'
]
print('cpsr %#x [ %s ]' % (cpsr, ' '.join(result)))
|
91eca37144d0c378761e47c143e66a79af37c226
|
repo_manage/forms.py
|
repo_manage/forms.py
|
from django import forms
from django.forms import ModelForm
from django.forms.models import inlineformset_factory
from common.models import Repository, Collaboration, User
slug_errors = {
'invalid' : "Use only letters, numbers, underscores, and hyphens",
}
class NewRepositoryForm(forms.Form):
repo_name = forms.SlugField(max_length=100, error_messages=slug_errors)
class RepositoryForm(ModelForm):
class Meta:
model = Repository
fields = ['description', 'long_description', 'is_public']
class CollaborationForm(ModelForm):
user = forms.CharField()
class Meta:
model = Collaboration
exclude = ('repository', 'user')
def __init__(self, **kwargs):
super(CollaborationForm, self).__init__(**kwargs)
if 'instance' in kwargs:
print kwargs['instance']
self.fields['user'] = forms.CharField(initial=kwargs['instance'].user.username)
def save(self, **kwargs):
username = self.cleaned_data['user']
user = User.objects.get(username=username)
self.instance.user = user
return super(CollaborationForm, self).save(**kwargs)
CollaborationFormSet = inlineformset_factory(Repository, Repository.collaborators.through, form=CollaborationForm)
|
from django import forms
from django.forms import ModelForm
from django.forms.models import inlineformset_factory
from common.models import Repository, Collaboration, User
slug_errors = {
'invalid' : "Use only letters, numbers, underscores, and hyphens",
}
class NewRepositoryForm(forms.Form):
repo_name = forms.SlugField(max_length=100, error_messages=slug_errors)
class RepositoryForm(ModelForm):
class Meta:
model = Repository
fields = ['description', 'long_description', 'is_public']
class CollaborationForm(ModelForm):
user = forms.CharField()
class Meta:
model = Collaboration
exclude = ('repository', 'user')
def __init__(self, **kwargs):
super(CollaborationForm, self).__init__(**kwargs)
if 'instance' in kwargs:
self.fields['user'] = forms.CharField(initial=kwargs['instance'].user.username)
def clean(self):
cleaned_data = super(CollaborationForm, self).clean()
self.instance.full_clean()
return cleaned_data
def clean_user(self):
username = self.cleaned_data['user']
user = None
try:
user = User.objects.get(username=username)
self.instance.user = user
except User.DoesNotExist:
raise forms.ValidationError("User %(username_s does not exist",
params={'username':username})
CollaborationFormSet = inlineformset_factory(Repository, Repository.collaborators.through, form=CollaborationForm)
|
Fix IntegrityError and DoesNotExist 500s
|
Fix IntegrityError and DoesNotExist 500s
|
Python
|
mit
|
vault/bugit,vault/bugit,vault/bugit
|
from django import forms
from django.forms import ModelForm
from django.forms.models import inlineformset_factory
from common.models import Repository, Collaboration, User
slug_errors = {
'invalid' : "Use only letters, numbers, underscores, and hyphens",
}
class NewRepositoryForm(forms.Form):
repo_name = forms.SlugField(max_length=100, error_messages=slug_errors)
class RepositoryForm(ModelForm):
class Meta:
model = Repository
fields = ['description', 'long_description', 'is_public']
class CollaborationForm(ModelForm):
user = forms.CharField()
class Meta:
model = Collaboration
exclude = ('repository', 'user')
def __init__(self, **kwargs):
super(CollaborationForm, self).__init__(**kwargs)
if 'instance' in kwargs:
print kwargs['instance']
self.fields['user'] = forms.CharField(initial=kwargs['instance'].user.username)
def save(self, **kwargs):
username = self.cleaned_data['user']
user = User.objects.get(username=username)
self.instance.user = user
return super(CollaborationForm, self).save(**kwargs)
CollaborationFormSet = inlineformset_factory(Repository, Repository.collaborators.through, form=CollaborationForm)
Fix IntegrityError and DoesNotExist 500s
|
from django import forms
from django.forms import ModelForm
from django.forms.models import inlineformset_factory
from common.models import Repository, Collaboration, User
slug_errors = {
'invalid' : "Use only letters, numbers, underscores, and hyphens",
}
class NewRepositoryForm(forms.Form):
repo_name = forms.SlugField(max_length=100, error_messages=slug_errors)
class RepositoryForm(ModelForm):
class Meta:
model = Repository
fields = ['description', 'long_description', 'is_public']
class CollaborationForm(ModelForm):
user = forms.CharField()
class Meta:
model = Collaboration
exclude = ('repository', 'user')
def __init__(self, **kwargs):
super(CollaborationForm, self).__init__(**kwargs)
if 'instance' in kwargs:
self.fields['user'] = forms.CharField(initial=kwargs['instance'].user.username)
def clean(self):
cleaned_data = super(CollaborationForm, self).clean()
self.instance.full_clean()
return cleaned_data
def clean_user(self):
username = self.cleaned_data['user']
user = None
try:
user = User.objects.get(username=username)
self.instance.user = user
except User.DoesNotExist:
raise forms.ValidationError("User %(username_s does not exist",
params={'username':username})
CollaborationFormSet = inlineformset_factory(Repository, Repository.collaborators.through, form=CollaborationForm)
|
<commit_before>
from django import forms
from django.forms import ModelForm
from django.forms.models import inlineformset_factory
from common.models import Repository, Collaboration, User
slug_errors = {
'invalid' : "Use only letters, numbers, underscores, and hyphens",
}
class NewRepositoryForm(forms.Form):
repo_name = forms.SlugField(max_length=100, error_messages=slug_errors)
class RepositoryForm(ModelForm):
class Meta:
model = Repository
fields = ['description', 'long_description', 'is_public']
class CollaborationForm(ModelForm):
user = forms.CharField()
class Meta:
model = Collaboration
exclude = ('repository', 'user')
def __init__(self, **kwargs):
super(CollaborationForm, self).__init__(**kwargs)
if 'instance' in kwargs:
print kwargs['instance']
self.fields['user'] = forms.CharField(initial=kwargs['instance'].user.username)
def save(self, **kwargs):
username = self.cleaned_data['user']
user = User.objects.get(username=username)
self.instance.user = user
return super(CollaborationForm, self).save(**kwargs)
CollaborationFormSet = inlineformset_factory(Repository, Repository.collaborators.through, form=CollaborationForm)
<commit_msg>Fix IntegrityError and DoesNotExist 500s<commit_after>
|
from django import forms
from django.forms import ModelForm
from django.forms.models import inlineformset_factory
from common.models import Repository, Collaboration, User
slug_errors = {
'invalid' : "Use only letters, numbers, underscores, and hyphens",
}
class NewRepositoryForm(forms.Form):
repo_name = forms.SlugField(max_length=100, error_messages=slug_errors)
class RepositoryForm(ModelForm):
class Meta:
model = Repository
fields = ['description', 'long_description', 'is_public']
class CollaborationForm(ModelForm):
user = forms.CharField()
class Meta:
model = Collaboration
exclude = ('repository', 'user')
def __init__(self, **kwargs):
super(CollaborationForm, self).__init__(**kwargs)
if 'instance' in kwargs:
self.fields['user'] = forms.CharField(initial=kwargs['instance'].user.username)
def clean(self):
cleaned_data = super(CollaborationForm, self).clean()
self.instance.full_clean()
return cleaned_data
def clean_user(self):
username = self.cleaned_data['user']
user = None
try:
user = User.objects.get(username=username)
self.instance.user = user
except User.DoesNotExist:
raise forms.ValidationError("User %(username_s does not exist",
params={'username':username})
CollaborationFormSet = inlineformset_factory(Repository, Repository.collaborators.through, form=CollaborationForm)
|
from django import forms
from django.forms import ModelForm
from django.forms.models import inlineformset_factory
from common.models import Repository, Collaboration, User
slug_errors = {
'invalid' : "Use only letters, numbers, underscores, and hyphens",
}
class NewRepositoryForm(forms.Form):
repo_name = forms.SlugField(max_length=100, error_messages=slug_errors)
class RepositoryForm(ModelForm):
class Meta:
model = Repository
fields = ['description', 'long_description', 'is_public']
class CollaborationForm(ModelForm):
user = forms.CharField()
class Meta:
model = Collaboration
exclude = ('repository', 'user')
def __init__(self, **kwargs):
super(CollaborationForm, self).__init__(**kwargs)
if 'instance' in kwargs:
print kwargs['instance']
self.fields['user'] = forms.CharField(initial=kwargs['instance'].user.username)
def save(self, **kwargs):
username = self.cleaned_data['user']
user = User.objects.get(username=username)
self.instance.user = user
return super(CollaborationForm, self).save(**kwargs)
CollaborationFormSet = inlineformset_factory(Repository, Repository.collaborators.through, form=CollaborationForm)
Fix IntegrityError and DoesNotExist 500s
from django import forms
from django.forms import ModelForm
from django.forms.models import inlineformset_factory
from common.models import Repository, Collaboration, User
slug_errors = {
'invalid' : "Use only letters, numbers, underscores, and hyphens",
}
class NewRepositoryForm(forms.Form):
repo_name = forms.SlugField(max_length=100, error_messages=slug_errors)
class RepositoryForm(ModelForm):
class Meta:
model = Repository
fields = ['description', 'long_description', 'is_public']
class CollaborationForm(ModelForm):
user = forms.CharField()
class Meta:
model = Collaboration
exclude = ('repository', 'user')
def __init__(self, **kwargs):
super(CollaborationForm, self).__init__(**kwargs)
if 'instance' in kwargs:
self.fields['user'] = forms.CharField(initial=kwargs['instance'].user.username)
def clean(self):
cleaned_data = super(CollaborationForm, self).clean()
self.instance.full_clean()
return cleaned_data
def clean_user(self):
username = self.cleaned_data['user']
user = None
try:
user = User.objects.get(username=username)
self.instance.user = user
except User.DoesNotExist:
raise forms.ValidationError("User %(username_s does not exist",
params={'username':username})
CollaborationFormSet = inlineformset_factory(Repository, Repository.collaborators.through, form=CollaborationForm)
|
<commit_before>
from django import forms
from django.forms import ModelForm
from django.forms.models import inlineformset_factory
from common.models import Repository, Collaboration, User
slug_errors = {
'invalid' : "Use only letters, numbers, underscores, and hyphens",
}
class NewRepositoryForm(forms.Form):
repo_name = forms.SlugField(max_length=100, error_messages=slug_errors)
class RepositoryForm(ModelForm):
class Meta:
model = Repository
fields = ['description', 'long_description', 'is_public']
class CollaborationForm(ModelForm):
user = forms.CharField()
class Meta:
model = Collaboration
exclude = ('repository', 'user')
def __init__(self, **kwargs):
super(CollaborationForm, self).__init__(**kwargs)
if 'instance' in kwargs:
print kwargs['instance']
self.fields['user'] = forms.CharField(initial=kwargs['instance'].user.username)
def save(self, **kwargs):
username = self.cleaned_data['user']
user = User.objects.get(username=username)
self.instance.user = user
return super(CollaborationForm, self).save(**kwargs)
CollaborationFormSet = inlineformset_factory(Repository, Repository.collaborators.through, form=CollaborationForm)
<commit_msg>Fix IntegrityError and DoesNotExist 500s<commit_after>
from django import forms
from django.forms import ModelForm
from django.forms.models import inlineformset_factory
from common.models import Repository, Collaboration, User
slug_errors = {
'invalid' : "Use only letters, numbers, underscores, and hyphens",
}
class NewRepositoryForm(forms.Form):
repo_name = forms.SlugField(max_length=100, error_messages=slug_errors)
class RepositoryForm(ModelForm):
class Meta:
model = Repository
fields = ['description', 'long_description', 'is_public']
class CollaborationForm(ModelForm):
user = forms.CharField()
class Meta:
model = Collaboration
exclude = ('repository', 'user')
def __init__(self, **kwargs):
super(CollaborationForm, self).__init__(**kwargs)
if 'instance' in kwargs:
self.fields['user'] = forms.CharField(initial=kwargs['instance'].user.username)
def clean(self):
cleaned_data = super(CollaborationForm, self).clean()
self.instance.full_clean()
return cleaned_data
def clean_user(self):
username = self.cleaned_data['user']
user = None
try:
user = User.objects.get(username=username)
self.instance.user = user
except User.DoesNotExist:
raise forms.ValidationError("User %(username_s does not exist",
params={'username':username})
CollaborationFormSet = inlineformset_factory(Repository, Repository.collaborators.through, form=CollaborationForm)
|
f6ddd5c4d79ada59d9db4b467849d9b52c5fef75
|
landlab/field/__init__.py
|
landlab/field/__init__.py
|
from landlab.field.scalar_data_fields import ScalarDataFields, FieldError
from landlab.field.grouped import ModelDataFields, GroupError, GroupSizeError
from landlab.field.field_mixin import ModelDataFieldsMixIn
__all__ = ['ScalarDataFields', 'ModelDataFields', 'ModelDataFieldsMixIn',
'FieldError', 'GroupError', 'GroupSizeError']
|
from landlab.field.scalar_data_fields import ScalarDataFields, FieldError
from landlab.field.grouped import ModelDataFields, GroupError, GroupSizeError
from landlab.field.field_mixin import ModelDataFieldsMixIn
from .graph_field import GraphFields
__all__ = ['ScalarDataFields', 'ModelDataFields', 'ModelDataFieldsMixIn',
'FieldError', 'GroupError', 'GroupSizeError', 'GraphFields', ]
|
Add GraphFields to package import.
|
Add GraphFields to package import.
|
Python
|
mit
|
cmshobe/landlab,cmshobe/landlab,cmshobe/landlab,RondaStrauch/landlab,amandersillinois/landlab,RondaStrauch/landlab,landlab/landlab,Carralex/landlab,RondaStrauch/landlab,landlab/landlab,amandersillinois/landlab,csherwood-usgs/landlab,Carralex/landlab,Carralex/landlab,csherwood-usgs/landlab,landlab/landlab
|
from landlab.field.scalar_data_fields import ScalarDataFields, FieldError
from landlab.field.grouped import ModelDataFields, GroupError, GroupSizeError
from landlab.field.field_mixin import ModelDataFieldsMixIn
__all__ = ['ScalarDataFields', 'ModelDataFields', 'ModelDataFieldsMixIn',
'FieldError', 'GroupError', 'GroupSizeError']
Add GraphFields to package import.
|
from landlab.field.scalar_data_fields import ScalarDataFields, FieldError
from landlab.field.grouped import ModelDataFields, GroupError, GroupSizeError
from landlab.field.field_mixin import ModelDataFieldsMixIn
from .graph_field import GraphFields
__all__ = ['ScalarDataFields', 'ModelDataFields', 'ModelDataFieldsMixIn',
'FieldError', 'GroupError', 'GroupSizeError', 'GraphFields', ]
|
<commit_before>from landlab.field.scalar_data_fields import ScalarDataFields, FieldError
from landlab.field.grouped import ModelDataFields, GroupError, GroupSizeError
from landlab.field.field_mixin import ModelDataFieldsMixIn
__all__ = ['ScalarDataFields', 'ModelDataFields', 'ModelDataFieldsMixIn',
'FieldError', 'GroupError', 'GroupSizeError']
<commit_msg>Add GraphFields to package import.<commit_after>
|
from landlab.field.scalar_data_fields import ScalarDataFields, FieldError
from landlab.field.grouped import ModelDataFields, GroupError, GroupSizeError
from landlab.field.field_mixin import ModelDataFieldsMixIn
from .graph_field import GraphFields
__all__ = ['ScalarDataFields', 'ModelDataFields', 'ModelDataFieldsMixIn',
'FieldError', 'GroupError', 'GroupSizeError', 'GraphFields', ]
|
from landlab.field.scalar_data_fields import ScalarDataFields, FieldError
from landlab.field.grouped import ModelDataFields, GroupError, GroupSizeError
from landlab.field.field_mixin import ModelDataFieldsMixIn
__all__ = ['ScalarDataFields', 'ModelDataFields', 'ModelDataFieldsMixIn',
'FieldError', 'GroupError', 'GroupSizeError']
Add GraphFields to package import.from landlab.field.scalar_data_fields import ScalarDataFields, FieldError
from landlab.field.grouped import ModelDataFields, GroupError, GroupSizeError
from landlab.field.field_mixin import ModelDataFieldsMixIn
from .graph_field import GraphFields
__all__ = ['ScalarDataFields', 'ModelDataFields', 'ModelDataFieldsMixIn',
'FieldError', 'GroupError', 'GroupSizeError', 'GraphFields', ]
|
<commit_before>from landlab.field.scalar_data_fields import ScalarDataFields, FieldError
from landlab.field.grouped import ModelDataFields, GroupError, GroupSizeError
from landlab.field.field_mixin import ModelDataFieldsMixIn
__all__ = ['ScalarDataFields', 'ModelDataFields', 'ModelDataFieldsMixIn',
'FieldError', 'GroupError', 'GroupSizeError']
<commit_msg>Add GraphFields to package import.<commit_after>from landlab.field.scalar_data_fields import ScalarDataFields, FieldError
from landlab.field.grouped import ModelDataFields, GroupError, GroupSizeError
from landlab.field.field_mixin import ModelDataFieldsMixIn
from .graph_field import GraphFields
__all__ = ['ScalarDataFields', 'ModelDataFields', 'ModelDataFieldsMixIn',
'FieldError', 'GroupError', 'GroupSizeError', 'GraphFields', ]
|
e360b4e2a19a526e1541a7833648619bb5fac8e2
|
stock_orderpoint_move_link/models/procurement_rule.py
|
stock_orderpoint_move_link/models/procurement_rule.py
|
# Copyright 2017 Eficent Business and IT Consulting Services, S.L.
# License LGPL-3.0 or later (https://www.gnu.org/licenses/lgpl.html).
from odoo import models
class ProcurementRule(models.Model):
_inherit = 'procurement.rule'
def _get_stock_move_values(self, product_id, product_qty, product_uom,
location_id, name, origin, values, group_id):
vals = super(ProcurementRule, self)._get_stock_move_values(
product_id, product_qty, product_uom,
location_id, name, origin, values, group_id)
if 'orderpoint_id' in values:
vals['orderpoint_ids'] = [(4, values['orderpoint_id'].id)]
elif 'orderpoint_ids' in values:
vals['orderpoint_ids'] = [(4, o.id)
for o in vals['orderpoint_ids']]
return vals
|
# Copyright 2017 Eficent Business and IT Consulting Services, S.L.
# License LGPL-3.0 or later (https://www.gnu.org/licenses/lgpl.html).
from odoo import models
class ProcurementRule(models.Model):
_inherit = 'procurement.rule'
def _get_stock_move_values(self, product_id, product_qty, product_uom,
location_id, name, origin, values, group_id):
vals = super(ProcurementRule, self)._get_stock_move_values(
product_id, product_qty, product_uom,
location_id, name, origin, values, group_id)
if 'orderpoint_id' in values:
vals['orderpoint_ids'] = [(4, values['orderpoint_id'].id)]
elif 'orderpoint_ids' in values:
vals['orderpoint_ids'] = [(4, o.id)
for o in values['orderpoint_ids']]
return vals
|
Fix read of wrong dictionnary
|
Fix read of wrong dictionnary
|
Python
|
agpl-3.0
|
Vauxoo/stock-logistics-warehouse,Vauxoo/stock-logistics-warehouse,Vauxoo/stock-logistics-warehouse
|
# Copyright 2017 Eficent Business and IT Consulting Services, S.L.
# License LGPL-3.0 or later (https://www.gnu.org/licenses/lgpl.html).
from odoo import models
class ProcurementRule(models.Model):
_inherit = 'procurement.rule'
def _get_stock_move_values(self, product_id, product_qty, product_uom,
location_id, name, origin, values, group_id):
vals = super(ProcurementRule, self)._get_stock_move_values(
product_id, product_qty, product_uom,
location_id, name, origin, values, group_id)
if 'orderpoint_id' in values:
vals['orderpoint_ids'] = [(4, values['orderpoint_id'].id)]
elif 'orderpoint_ids' in values:
vals['orderpoint_ids'] = [(4, o.id)
for o in vals['orderpoint_ids']]
return vals
Fix read of wrong dictionnary
|
# Copyright 2017 Eficent Business and IT Consulting Services, S.L.
# License LGPL-3.0 or later (https://www.gnu.org/licenses/lgpl.html).
from odoo import models
class ProcurementRule(models.Model):
_inherit = 'procurement.rule'
def _get_stock_move_values(self, product_id, product_qty, product_uom,
location_id, name, origin, values, group_id):
vals = super(ProcurementRule, self)._get_stock_move_values(
product_id, product_qty, product_uom,
location_id, name, origin, values, group_id)
if 'orderpoint_id' in values:
vals['orderpoint_ids'] = [(4, values['orderpoint_id'].id)]
elif 'orderpoint_ids' in values:
vals['orderpoint_ids'] = [(4, o.id)
for o in values['orderpoint_ids']]
return vals
|
<commit_before># Copyright 2017 Eficent Business and IT Consulting Services, S.L.
# License LGPL-3.0 or later (https://www.gnu.org/licenses/lgpl.html).
from odoo import models
class ProcurementRule(models.Model):
_inherit = 'procurement.rule'
def _get_stock_move_values(self, product_id, product_qty, product_uom,
location_id, name, origin, values, group_id):
vals = super(ProcurementRule, self)._get_stock_move_values(
product_id, product_qty, product_uom,
location_id, name, origin, values, group_id)
if 'orderpoint_id' in values:
vals['orderpoint_ids'] = [(4, values['orderpoint_id'].id)]
elif 'orderpoint_ids' in values:
vals['orderpoint_ids'] = [(4, o.id)
for o in vals['orderpoint_ids']]
return vals
<commit_msg>Fix read of wrong dictionnary<commit_after>
|
# Copyright 2017 Eficent Business and IT Consulting Services, S.L.
# License LGPL-3.0 or later (https://www.gnu.org/licenses/lgpl.html).
from odoo import models
class ProcurementRule(models.Model):
_inherit = 'procurement.rule'
def _get_stock_move_values(self, product_id, product_qty, product_uom,
location_id, name, origin, values, group_id):
vals = super(ProcurementRule, self)._get_stock_move_values(
product_id, product_qty, product_uom,
location_id, name, origin, values, group_id)
if 'orderpoint_id' in values:
vals['orderpoint_ids'] = [(4, values['orderpoint_id'].id)]
elif 'orderpoint_ids' in values:
vals['orderpoint_ids'] = [(4, o.id)
for o in values['orderpoint_ids']]
return vals
|
# Copyright 2017 Eficent Business and IT Consulting Services, S.L.
# License LGPL-3.0 or later (https://www.gnu.org/licenses/lgpl.html).
from odoo import models
class ProcurementRule(models.Model):
_inherit = 'procurement.rule'
def _get_stock_move_values(self, product_id, product_qty, product_uom,
location_id, name, origin, values, group_id):
vals = super(ProcurementRule, self)._get_stock_move_values(
product_id, product_qty, product_uom,
location_id, name, origin, values, group_id)
if 'orderpoint_id' in values:
vals['orderpoint_ids'] = [(4, values['orderpoint_id'].id)]
elif 'orderpoint_ids' in values:
vals['orderpoint_ids'] = [(4, o.id)
for o in vals['orderpoint_ids']]
return vals
Fix read of wrong dictionnary# Copyright 2017 Eficent Business and IT Consulting Services, S.L.
# License LGPL-3.0 or later (https://www.gnu.org/licenses/lgpl.html).
from odoo import models
class ProcurementRule(models.Model):
_inherit = 'procurement.rule'
def _get_stock_move_values(self, product_id, product_qty, product_uom,
location_id, name, origin, values, group_id):
vals = super(ProcurementRule, self)._get_stock_move_values(
product_id, product_qty, product_uom,
location_id, name, origin, values, group_id)
if 'orderpoint_id' in values:
vals['orderpoint_ids'] = [(4, values['orderpoint_id'].id)]
elif 'orderpoint_ids' in values:
vals['orderpoint_ids'] = [(4, o.id)
for o in values['orderpoint_ids']]
return vals
|
<commit_before># Copyright 2017 Eficent Business and IT Consulting Services, S.L.
# License LGPL-3.0 or later (https://www.gnu.org/licenses/lgpl.html).
from odoo import models
class ProcurementRule(models.Model):
_inherit = 'procurement.rule'
def _get_stock_move_values(self, product_id, product_qty, product_uom,
location_id, name, origin, values, group_id):
vals = super(ProcurementRule, self)._get_stock_move_values(
product_id, product_qty, product_uom,
location_id, name, origin, values, group_id)
if 'orderpoint_id' in values:
vals['orderpoint_ids'] = [(4, values['orderpoint_id'].id)]
elif 'orderpoint_ids' in values:
vals['orderpoint_ids'] = [(4, o.id)
for o in vals['orderpoint_ids']]
return vals
<commit_msg>Fix read of wrong dictionnary<commit_after># Copyright 2017 Eficent Business and IT Consulting Services, S.L.
# License LGPL-3.0 or later (https://www.gnu.org/licenses/lgpl.html).
from odoo import models
class ProcurementRule(models.Model):
_inherit = 'procurement.rule'
def _get_stock_move_values(self, product_id, product_qty, product_uom,
location_id, name, origin, values, group_id):
vals = super(ProcurementRule, self)._get_stock_move_values(
product_id, product_qty, product_uom,
location_id, name, origin, values, group_id)
if 'orderpoint_id' in values:
vals['orderpoint_ids'] = [(4, values['orderpoint_id'].id)]
elif 'orderpoint_ids' in values:
vals['orderpoint_ids'] = [(4, o.id)
for o in values['orderpoint_ids']]
return vals
|
75f236f8fd0ba368197da3070002b60233a01d49
|
tests/test_track_bed.py
|
tests/test_track_bed.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (C) 2015 by Gaik Tamazian
# gaik (dot) tamazian (at) gmail (dot) com
import os
import logging
import unittest
from chromosomer.track.bed import BedRecord
from chromosomer.track.bed import Reader
path = os.path.dirname(__file__)
os.chdir(path)
class TestBedReader(unittest.TestCase):
def setUp(self):
self.__correct_file = os.path.join(
'data', 'bed', 'correct.bed'
)
# silence the logging messages
logging.disable(logging.ERROR)
def test_records(self):
"""
Check if the parser reads a file in the BED format in the
correct way.
"""
# test against the correct input file
parser = Reader(self.__correct_file)
for record in parser.records():
self.assertIsInstance(record, BedRecord)
suite = unittest.TestLoader().loadTestsFromTestCase(TestBedReader)
unittest.TextTestRunner(verbosity=2).run(suite)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (C) 2015 by Gaik Tamazian
# gaik (dot) tamazian (at) gmail (dot) com
import os
import logging
import unittest
from chromosomer.track.bed import BedRecord
from chromosomer.track.bed import Reader
from chromosomer.track.bed import Writer
from itertools import izip
path = os.path.dirname(__file__)
os.chdir(path)
class TestBedReader(unittest.TestCase):
def setUp(self):
self.__correct_file = os.path.join(
'data', 'bed', 'correct.bed'
)
# silence the logging messages
logging.disable(logging.ERROR)
def test_records(self):
"""
Check if the parser reads a file in the BED format in the
correct way.
"""
# test against the correct input file
parser = Reader(self.__correct_file)
for record in parser.records():
self.assertIsInstance(record, BedRecord)
class TestBedWriter(unittest.TestCase):
def setUp(self):
self.__input_file = os.path.join(
'data', 'bed', 'correct.bed'
)
self.__output_file = os.path.join(
'data', 'bed', 'test.bed'
)
# silence the logging messages
logging.disable(logging.ERROR)
def tearDown(self):
os.unlink(self.__output_file)
def test_write(self):
"""
Check if BED records are written in the correct way.
"""
bed_input = Reader(self.__input_file)
with Writer(self.__output_file) as bed_output:
for record in bed_input.records():
bed_output.write(record)
# check if the lines are identical
with open(self.__input_file) as original_file, \
open(self.__output_file) as written_file:
for x, y in izip(original_file, written_file):
self.assertEqual(x, y)
|
Test routines to the BED writer added
|
Test routines to the BED writer added
|
Python
|
mit
|
gtamazian/Chromosomer
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (C) 2015 by Gaik Tamazian
# gaik (dot) tamazian (at) gmail (dot) com
import os
import logging
import unittest
from chromosomer.track.bed import BedRecord
from chromosomer.track.bed import Reader
path = os.path.dirname(__file__)
os.chdir(path)
class TestBedReader(unittest.TestCase):
def setUp(self):
self.__correct_file = os.path.join(
'data', 'bed', 'correct.bed'
)
# silence the logging messages
logging.disable(logging.ERROR)
def test_records(self):
"""
Check if the parser reads a file in the BED format in the
correct way.
"""
# test against the correct input file
parser = Reader(self.__correct_file)
for record in parser.records():
self.assertIsInstance(record, BedRecord)
suite = unittest.TestLoader().loadTestsFromTestCase(TestBedReader)
unittest.TextTestRunner(verbosity=2).run(suite)
Test routines to the BED writer added
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (C) 2015 by Gaik Tamazian
# gaik (dot) tamazian (at) gmail (dot) com
import os
import logging
import unittest
from chromosomer.track.bed import BedRecord
from chromosomer.track.bed import Reader
from chromosomer.track.bed import Writer
from itertools import izip
path = os.path.dirname(__file__)
os.chdir(path)
class TestBedReader(unittest.TestCase):
def setUp(self):
self.__correct_file = os.path.join(
'data', 'bed', 'correct.bed'
)
# silence the logging messages
logging.disable(logging.ERROR)
def test_records(self):
"""
Check if the parser reads a file in the BED format in the
correct way.
"""
# test against the correct input file
parser = Reader(self.__correct_file)
for record in parser.records():
self.assertIsInstance(record, BedRecord)
class TestBedWriter(unittest.TestCase):
def setUp(self):
self.__input_file = os.path.join(
'data', 'bed', 'correct.bed'
)
self.__output_file = os.path.join(
'data', 'bed', 'test.bed'
)
# silence the logging messages
logging.disable(logging.ERROR)
def tearDown(self):
os.unlink(self.__output_file)
def test_write(self):
"""
Check if BED records are written in the correct way.
"""
bed_input = Reader(self.__input_file)
with Writer(self.__output_file) as bed_output:
for record in bed_input.records():
bed_output.write(record)
# check if the lines are identical
with open(self.__input_file) as original_file, \
open(self.__output_file) as written_file:
for x, y in izip(original_file, written_file):
self.assertEqual(x, y)
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (C) 2015 by Gaik Tamazian
# gaik (dot) tamazian (at) gmail (dot) com
import os
import logging
import unittest
from chromosomer.track.bed import BedRecord
from chromosomer.track.bed import Reader
path = os.path.dirname(__file__)
os.chdir(path)
class TestBedReader(unittest.TestCase):
def setUp(self):
self.__correct_file = os.path.join(
'data', 'bed', 'correct.bed'
)
# silence the logging messages
logging.disable(logging.ERROR)
def test_records(self):
"""
Check if the parser reads a file in the BED format in the
correct way.
"""
# test against the correct input file
parser = Reader(self.__correct_file)
for record in parser.records():
self.assertIsInstance(record, BedRecord)
suite = unittest.TestLoader().loadTestsFromTestCase(TestBedReader)
unittest.TextTestRunner(verbosity=2).run(suite)
<commit_msg>Test routines to the BED writer added<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (C) 2015 by Gaik Tamazian
# gaik (dot) tamazian (at) gmail (dot) com
import os
import logging
import unittest
from chromosomer.track.bed import BedRecord
from chromosomer.track.bed import Reader
from chromosomer.track.bed import Writer
from itertools import izip
path = os.path.dirname(__file__)
os.chdir(path)
class TestBedReader(unittest.TestCase):
def setUp(self):
self.__correct_file = os.path.join(
'data', 'bed', 'correct.bed'
)
# silence the logging messages
logging.disable(logging.ERROR)
def test_records(self):
"""
Check if the parser reads a file in the BED format in the
correct way.
"""
# test against the correct input file
parser = Reader(self.__correct_file)
for record in parser.records():
self.assertIsInstance(record, BedRecord)
class TestBedWriter(unittest.TestCase):
def setUp(self):
self.__input_file = os.path.join(
'data', 'bed', 'correct.bed'
)
self.__output_file = os.path.join(
'data', 'bed', 'test.bed'
)
# silence the logging messages
logging.disable(logging.ERROR)
def tearDown(self):
os.unlink(self.__output_file)
def test_write(self):
"""
Check if BED records are written in the correct way.
"""
bed_input = Reader(self.__input_file)
with Writer(self.__output_file) as bed_output:
for record in bed_input.records():
bed_output.write(record)
# check if the lines are identical
with open(self.__input_file) as original_file, \
open(self.__output_file) as written_file:
for x, y in izip(original_file, written_file):
self.assertEqual(x, y)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (C) 2015 by Gaik Tamazian
# gaik (dot) tamazian (at) gmail (dot) com
import os
import logging
import unittest
from chromosomer.track.bed import BedRecord
from chromosomer.track.bed import Reader
path = os.path.dirname(__file__)
os.chdir(path)
class TestBedReader(unittest.TestCase):
def setUp(self):
self.__correct_file = os.path.join(
'data', 'bed', 'correct.bed'
)
# silence the logging messages
logging.disable(logging.ERROR)
def test_records(self):
"""
Check if the parser reads a file in the BED format in the
correct way.
"""
# test against the correct input file
parser = Reader(self.__correct_file)
for record in parser.records():
self.assertIsInstance(record, BedRecord)
suite = unittest.TestLoader().loadTestsFromTestCase(TestBedReader)
unittest.TextTestRunner(verbosity=2).run(suite)
Test routines to the BED writer added#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (C) 2015 by Gaik Tamazian
# gaik (dot) tamazian (at) gmail (dot) com
import os
import logging
import unittest
from chromosomer.track.bed import BedRecord
from chromosomer.track.bed import Reader
from chromosomer.track.bed import Writer
from itertools import izip
path = os.path.dirname(__file__)
os.chdir(path)
class TestBedReader(unittest.TestCase):
def setUp(self):
self.__correct_file = os.path.join(
'data', 'bed', 'correct.bed'
)
# silence the logging messages
logging.disable(logging.ERROR)
def test_records(self):
"""
Check if the parser reads a file in the BED format in the
correct way.
"""
# test against the correct input file
parser = Reader(self.__correct_file)
for record in parser.records():
self.assertIsInstance(record, BedRecord)
class TestBedWriter(unittest.TestCase):
def setUp(self):
self.__input_file = os.path.join(
'data', 'bed', 'correct.bed'
)
self.__output_file = os.path.join(
'data', 'bed', 'test.bed'
)
# silence the logging messages
logging.disable(logging.ERROR)
def tearDown(self):
os.unlink(self.__output_file)
def test_write(self):
"""
Check if BED records are written in the correct way.
"""
bed_input = Reader(self.__input_file)
with Writer(self.__output_file) as bed_output:
for record in bed_input.records():
bed_output.write(record)
# check if the lines are identical
with open(self.__input_file) as original_file, \
open(self.__output_file) as written_file:
for x, y in izip(original_file, written_file):
self.assertEqual(x, y)
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (C) 2015 by Gaik Tamazian
# gaik (dot) tamazian (at) gmail (dot) com
import os
import logging
import unittest
from chromosomer.track.bed import BedRecord
from chromosomer.track.bed import Reader
path = os.path.dirname(__file__)
os.chdir(path)
class TestBedReader(unittest.TestCase):
def setUp(self):
self.__correct_file = os.path.join(
'data', 'bed', 'correct.bed'
)
# silence the logging messages
logging.disable(logging.ERROR)
def test_records(self):
"""
Check if the parser reads a file in the BED format in the
correct way.
"""
# test against the correct input file
parser = Reader(self.__correct_file)
for record in parser.records():
self.assertIsInstance(record, BedRecord)
suite = unittest.TestLoader().loadTestsFromTestCase(TestBedReader)
unittest.TextTestRunner(verbosity=2).run(suite)
<commit_msg>Test routines to the BED writer added<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (C) 2015 by Gaik Tamazian
# gaik (dot) tamazian (at) gmail (dot) com
import os
import logging
import unittest
from chromosomer.track.bed import BedRecord
from chromosomer.track.bed import Reader
from chromosomer.track.bed import Writer
from itertools import izip
path = os.path.dirname(__file__)
os.chdir(path)
class TestBedReader(unittest.TestCase):
def setUp(self):
self.__correct_file = os.path.join(
'data', 'bed', 'correct.bed'
)
# silence the logging messages
logging.disable(logging.ERROR)
def test_records(self):
"""
Check if the parser reads a file in the BED format in the
correct way.
"""
# test against the correct input file
parser = Reader(self.__correct_file)
for record in parser.records():
self.assertIsInstance(record, BedRecord)
class TestBedWriter(unittest.TestCase):
def setUp(self):
self.__input_file = os.path.join(
'data', 'bed', 'correct.bed'
)
self.__output_file = os.path.join(
'data', 'bed', 'test.bed'
)
# silence the logging messages
logging.disable(logging.ERROR)
def tearDown(self):
os.unlink(self.__output_file)
def test_write(self):
"""
Check if BED records are written in the correct way.
"""
bed_input = Reader(self.__input_file)
with Writer(self.__output_file) as bed_output:
for record in bed_input.records():
bed_output.write(record)
# check if the lines are identical
with open(self.__input_file) as original_file, \
open(self.__output_file) as written_file:
for x, y in izip(original_file, written_file):
self.assertEqual(x, y)
|
9e666e97b07d7c08e434791a061086010da6e6eb
|
main.py
|
main.py
|
# -*- utf-8 -*-
import config
import requests
from base64 import b64encode
def get_access_token():
token = config.twitter_key + ':' + config.twitter_secret
h = {'Content-Type': 'application/x-www-form-urlencoded;charset=UTF-8',
'Authorization': b'Basic ' + b64encode(bytes(token, 'utf8'))}
print()
r = requests.post('https://api.twitter.com/oauth2/token',
data=b'grant_type=client_credentials', headers=h)
assert r.json()['token_type'] == 'bearer'
return r.json()['access_token']
def main():
bearer_token = get_access_token()
if __name__ == '__main__':
main()
|
# -*- coding: utf-8 -*-
import config
import requests
from base64 import b64encode
def get_access_token():
token = config.twitter_key + ':' + config.twitter_secret
h = {'Content-Type': 'application/x-www-form-urlencoded;charset=UTF-8',
'Authorization': b'Basic ' + b64encode(bytes(token, 'utf8'))}
print()
r = requests.post('https://api.twitter.com/oauth2/token',
data=b'grant_type=client_credentials', headers=h)
assert r.json()['token_type'] == 'bearer'
return r.json()['access_token']
def get_latest_tweet(token):
parameters = {'screen_name': 'TwoHeadlines',
'count': 1,
'trim_user': True}
headers = {'Authorization': 'Bearer ' + token}
r = requests.get('https://api.twitter.com/1.1/statuses/user_timeline.json',
params=parameters, headers=headers)
return r.json(encoding='utf8')[0]['text']
def main():
bearer_token = get_access_token()
latest_tweet = get_latest_tweet(bearer_token)
if __name__ == '__main__':
main()
|
Add ability to get the latest TwoHeadlines tweet
|
Add ability to get the latest TwoHeadlines tweet
|
Python
|
mit
|
underyx/TheMajorNews
|
# -*- utf-8 -*-
import config
import requests
from base64 import b64encode
def get_access_token():
token = config.twitter_key + ':' + config.twitter_secret
h = {'Content-Type': 'application/x-www-form-urlencoded;charset=UTF-8',
'Authorization': b'Basic ' + b64encode(bytes(token, 'utf8'))}
print()
r = requests.post('https://api.twitter.com/oauth2/token',
data=b'grant_type=client_credentials', headers=h)
assert r.json()['token_type'] == 'bearer'
return r.json()['access_token']
def main():
bearer_token = get_access_token()
if __name__ == '__main__':
main()
Add ability to get the latest TwoHeadlines tweet
|
# -*- coding: utf-8 -*-
import config
import requests
from base64 import b64encode
def get_access_token():
token = config.twitter_key + ':' + config.twitter_secret
h = {'Content-Type': 'application/x-www-form-urlencoded;charset=UTF-8',
'Authorization': b'Basic ' + b64encode(bytes(token, 'utf8'))}
print()
r = requests.post('https://api.twitter.com/oauth2/token',
data=b'grant_type=client_credentials', headers=h)
assert r.json()['token_type'] == 'bearer'
return r.json()['access_token']
def get_latest_tweet(token):
parameters = {'screen_name': 'TwoHeadlines',
'count': 1,
'trim_user': True}
headers = {'Authorization': 'Bearer ' + token}
r = requests.get('https://api.twitter.com/1.1/statuses/user_timeline.json',
params=parameters, headers=headers)
return r.json(encoding='utf8')[0]['text']
def main():
bearer_token = get_access_token()
latest_tweet = get_latest_tweet(bearer_token)
if __name__ == '__main__':
main()
|
<commit_before># -*- utf-8 -*-
import config
import requests
from base64 import b64encode
def get_access_token():
token = config.twitter_key + ':' + config.twitter_secret
h = {'Content-Type': 'application/x-www-form-urlencoded;charset=UTF-8',
'Authorization': b'Basic ' + b64encode(bytes(token, 'utf8'))}
print()
r = requests.post('https://api.twitter.com/oauth2/token',
data=b'grant_type=client_credentials', headers=h)
assert r.json()['token_type'] == 'bearer'
return r.json()['access_token']
def main():
bearer_token = get_access_token()
if __name__ == '__main__':
main()
<commit_msg>Add ability to get the latest TwoHeadlines tweet<commit_after>
|
# -*- coding: utf-8 -*-
import config
import requests
from base64 import b64encode
def get_access_token():
token = config.twitter_key + ':' + config.twitter_secret
h = {'Content-Type': 'application/x-www-form-urlencoded;charset=UTF-8',
'Authorization': b'Basic ' + b64encode(bytes(token, 'utf8'))}
print()
r = requests.post('https://api.twitter.com/oauth2/token',
data=b'grant_type=client_credentials', headers=h)
assert r.json()['token_type'] == 'bearer'
return r.json()['access_token']
def get_latest_tweet(token):
parameters = {'screen_name': 'TwoHeadlines',
'count': 1,
'trim_user': True}
headers = {'Authorization': 'Bearer ' + token}
r = requests.get('https://api.twitter.com/1.1/statuses/user_timeline.json',
params=parameters, headers=headers)
return r.json(encoding='utf8')[0]['text']
def main():
bearer_token = get_access_token()
latest_tweet = get_latest_tweet(bearer_token)
if __name__ == '__main__':
main()
|
# -*- utf-8 -*-
import config
import requests
from base64 import b64encode
def get_access_token():
token = config.twitter_key + ':' + config.twitter_secret
h = {'Content-Type': 'application/x-www-form-urlencoded;charset=UTF-8',
'Authorization': b'Basic ' + b64encode(bytes(token, 'utf8'))}
print()
r = requests.post('https://api.twitter.com/oauth2/token',
data=b'grant_type=client_credentials', headers=h)
assert r.json()['token_type'] == 'bearer'
return r.json()['access_token']
def main():
bearer_token = get_access_token()
if __name__ == '__main__':
main()
Add ability to get the latest TwoHeadlines tweet# -*- coding: utf-8 -*-
import config
import requests
from base64 import b64encode
def get_access_token():
token = config.twitter_key + ':' + config.twitter_secret
h = {'Content-Type': 'application/x-www-form-urlencoded;charset=UTF-8',
'Authorization': b'Basic ' + b64encode(bytes(token, 'utf8'))}
print()
r = requests.post('https://api.twitter.com/oauth2/token',
data=b'grant_type=client_credentials', headers=h)
assert r.json()['token_type'] == 'bearer'
return r.json()['access_token']
def get_latest_tweet(token):
parameters = {'screen_name': 'TwoHeadlines',
'count': 1,
'trim_user': True}
headers = {'Authorization': 'Bearer ' + token}
r = requests.get('https://api.twitter.com/1.1/statuses/user_timeline.json',
params=parameters, headers=headers)
return r.json(encoding='utf8')[0]['text']
def main():
bearer_token = get_access_token()
latest_tweet = get_latest_tweet(bearer_token)
if __name__ == '__main__':
main()
|
<commit_before># -*- utf-8 -*-
import config
import requests
from base64 import b64encode
def get_access_token():
token = config.twitter_key + ':' + config.twitter_secret
h = {'Content-Type': 'application/x-www-form-urlencoded;charset=UTF-8',
'Authorization': b'Basic ' + b64encode(bytes(token, 'utf8'))}
print()
r = requests.post('https://api.twitter.com/oauth2/token',
data=b'grant_type=client_credentials', headers=h)
assert r.json()['token_type'] == 'bearer'
return r.json()['access_token']
def main():
bearer_token = get_access_token()
if __name__ == '__main__':
main()
<commit_msg>Add ability to get the latest TwoHeadlines tweet<commit_after># -*- coding: utf-8 -*-
import config
import requests
from base64 import b64encode
def get_access_token():
token = config.twitter_key + ':' + config.twitter_secret
h = {'Content-Type': 'application/x-www-form-urlencoded;charset=UTF-8',
'Authorization': b'Basic ' + b64encode(bytes(token, 'utf8'))}
print()
r = requests.post('https://api.twitter.com/oauth2/token',
data=b'grant_type=client_credentials', headers=h)
assert r.json()['token_type'] == 'bearer'
return r.json()['access_token']
def get_latest_tweet(token):
parameters = {'screen_name': 'TwoHeadlines',
'count': 1,
'trim_user': True}
headers = {'Authorization': 'Bearer ' + token}
r = requests.get('https://api.twitter.com/1.1/statuses/user_timeline.json',
params=parameters, headers=headers)
return r.json(encoding='utf8')[0]['text']
def main():
bearer_token = get_access_token()
latest_tweet = get_latest_tweet(bearer_token)
if __name__ == '__main__':
main()
|
789ac1de1e94eda1224fb314ccad14c061c58ad4
|
pact/group.py
|
pact/group.py
|
from .base import PactBase
from .utils import GroupWaitPredicate
class PactGroup(PactBase):
    """A pact that finishes only when every pact in the group has finished."""

    def __init__(self, pacts=None):
        # Accept no arguments so an empty group can be created and filled
        # incrementally via `group += pact`; copy the input to avoid
        # aliasing the caller's list.
        self._pacts = [] if pacts is None else list(pacts)
        super(PactGroup, self).__init__()

    def __iadd__(self, other):
        """Append another pact to the group (`group += pact`)."""
        self._pacts.append(other)
        return self

    def _is_finished(self):
        # The group is finished only when all member pacts report finished.
        return all(p.finished() for p in self._pacts)

    def _build_wait_predicate(self):
        return GroupWaitPredicate(self._pacts)

    def __str__(self):
        return ", ".join(map(str, self._pacts))
|
from .base import PactBase
from .utils import GroupWaitPredicate
class PactGroup(PactBase):
    """Aggregate several pacts and treat them as one composite pact."""

    def __init__(self, pacts=None):
        # An omitted argument yields an empty group; otherwise snapshot
        # the iterable into our own list.
        if pacts is None:
            self._pacts = []
        else:
            self._pacts = list(pacts)
        super(PactGroup, self).__init__()

    def __iadd__(self, other):
        # Enables incremental construction: `group += pact`.
        self._pacts.append(other)
        return self

    def _is_finished(self):
        # Finished exactly when every member has finished.
        return all(member.finished() for member in self._pacts)

    def _build_wait_predicate(self):
        return GroupWaitPredicate(self._pacts)

    def __str__(self):
        return ", ".join(str(member) for member in self._pacts)
|
Create empty PactGroup if no arguments given
|
Create empty PactGroup if no arguments given
|
Python
|
bsd-3-clause
|
vmalloc/pact
|
from .base import PactBase
from .utils import GroupWaitPredicate
class PactGroup(PactBase):
def __init__(self, pacts):
self._pacts = list(pacts)
super(PactGroup, self).__init__()
def __iadd__(self, other):
self._pacts.append(other)
return self
def _is_finished(self):
return all(p.finished() for p in self._pacts)
def _build_wait_predicate(self):
return GroupWaitPredicate(self._pacts)
def __str__(self):
return ", ".join(map(str, self._pacts))
Create empty PactGroup if no arguments given
|
from .base import PactBase
from .utils import GroupWaitPredicate
class PactGroup(PactBase):
def __init__(self, pacts=None):
self._pacts = [] if pacts is None else list(pacts)
super(PactGroup, self).__init__()
def __iadd__(self, other):
self._pacts.append(other)
return self
def _is_finished(self):
return all(p.finished() for p in self._pacts)
def _build_wait_predicate(self):
return GroupWaitPredicate(self._pacts)
def __str__(self):
return ", ".join(map(str, self._pacts))
|
<commit_before>from .base import PactBase
from .utils import GroupWaitPredicate
class PactGroup(PactBase):
def __init__(self, pacts):
self._pacts = list(pacts)
super(PactGroup, self).__init__()
def __iadd__(self, other):
self._pacts.append(other)
return self
def _is_finished(self):
return all(p.finished() for p in self._pacts)
def _build_wait_predicate(self):
return GroupWaitPredicate(self._pacts)
def __str__(self):
return ", ".join(map(str, self._pacts))
<commit_msg>Create empty PactGroup if no arguments given<commit_after>
|
from .base import PactBase
from .utils import GroupWaitPredicate
class PactGroup(PactBase):
def __init__(self, pacts=None):
self._pacts = [] if pacts is None else list(pacts)
super(PactGroup, self).__init__()
def __iadd__(self, other):
self._pacts.append(other)
return self
def _is_finished(self):
return all(p.finished() for p in self._pacts)
def _build_wait_predicate(self):
return GroupWaitPredicate(self._pacts)
def __str__(self):
return ", ".join(map(str, self._pacts))
|
from .base import PactBase
from .utils import GroupWaitPredicate
class PactGroup(PactBase):
def __init__(self, pacts):
self._pacts = list(pacts)
super(PactGroup, self).__init__()
def __iadd__(self, other):
self._pacts.append(other)
return self
def _is_finished(self):
return all(p.finished() for p in self._pacts)
def _build_wait_predicate(self):
return GroupWaitPredicate(self._pacts)
def __str__(self):
return ", ".join(map(str, self._pacts))
Create empty PactGroup if no arguments givenfrom .base import PactBase
from .utils import GroupWaitPredicate
class PactGroup(PactBase):
def __init__(self, pacts=None):
self._pacts = [] if pacts is None else list(pacts)
super(PactGroup, self).__init__()
def __iadd__(self, other):
self._pacts.append(other)
return self
def _is_finished(self):
return all(p.finished() for p in self._pacts)
def _build_wait_predicate(self):
return GroupWaitPredicate(self._pacts)
def __str__(self):
return ", ".join(map(str, self._pacts))
|
<commit_before>from .base import PactBase
from .utils import GroupWaitPredicate
class PactGroup(PactBase):
def __init__(self, pacts):
self._pacts = list(pacts)
super(PactGroup, self).__init__()
def __iadd__(self, other):
self._pacts.append(other)
return self
def _is_finished(self):
return all(p.finished() for p in self._pacts)
def _build_wait_predicate(self):
return GroupWaitPredicate(self._pacts)
def __str__(self):
return ", ".join(map(str, self._pacts))
<commit_msg>Create empty PactGroup if no arguments given<commit_after>from .base import PactBase
from .utils import GroupWaitPredicate
class PactGroup(PactBase):
def __init__(self, pacts=None):
self._pacts = [] if pacts is None else list(pacts)
super(PactGroup, self).__init__()
def __iadd__(self, other):
self._pacts.append(other)
return self
def _is_finished(self):
return all(p.finished() for p in self._pacts)
def _build_wait_predicate(self):
return GroupWaitPredicate(self._pacts)
def __str__(self):
return ", ".join(map(str, self._pacts))
|
d1ec190f1a4dc84db0540481f2489f1db8421799
|
oemof_pg/db.py
|
oemof_pg/db.py
|
from sqlalchemy import create_engine
import keyring
from . import config as cfg
def connection():
    """Open a SQLAlchemy connection to the configured postGIS database.

    The password is looked up in the system keyring under the database
    name / username from the "postGIS" config section; if the keyring has
    no entry, fall back to the ``pw`` option of the config file.

    Returns:
        An open SQLAlchemy connection.

    Raises:
        RuntimeError: If no password can be found in either place.
    """
    passwd = keyring.get_password(cfg.get("postGIS", "database"),
                                  cfg.get("postGIS", "username"))
    if passwd is None:
        # Keyring miss: allow the password to live in the config file
        # instead of silently building a URL containing the literal "None".
        from configparser import NoOptionError, NoSectionError
        try:
            passwd = cfg.get("postGIS", "pw")
        except (NoOptionError, NoSectionError):
            raise RuntimeError(
                "Unable to find the database password in the config "
                "('postGIS' section, 'pw' option) or the keyring.")
    engine = create_engine(
        "postgresql+psycopg2://{user}:{passwd}@{host}:{port}/{db}".format(
            user=cfg.get("postGIS", "username"),
            passwd=passwd,
            host=cfg.get("postGIS", "host"),
            db=cfg.get("postGIS", "database"),
            port=int(cfg.get("postGIS", "port"))))
    return engine.connect()
|
from configparser import NoOptionError as option, NoSectionError as section
from sqlalchemy import create_engine
import keyring
from . import config as cfg
def connection():
    """Open a SQLAlchemy connection to the configured postGIS database.

    The password is first looked up in the system keyring under the
    database name / username taken from the "postGIS" config section; if
    the keyring has no entry, the ``pw`` option of the config file is
    used instead.  If neither source provides a password, the process
    prints a message and exits.
    """
    # Keyring lookup keyed on (database, username) from the config.
    pw = keyring.get_password(cfg.get("postGIS", "database"),
                              cfg.get("postGIS", "username"))
    if pw is None:
        # Keyring miss: fall back to the password stored in config.ini.
        # `option` / `section` are NoOptionError / NoSectionError aliases
        # imported at module level.
        try: pw = cfg.get("postGIS", "pw")
        except option:
            print("Unable to find the database password in " +
                  "the oemof config or keyring." +
                  "\nExiting.")
            exit(-1)
        except section:
            print("Unable to find the 'postGIS' section in oemof's config." +
                  "\nExiting.")
            exit(-1)
    engine = create_engine(
        "postgresql+psycopg2://{user}:{passwd}@{host}:{port}/{db}".format(
            user=cfg.get("postGIS", "username"),
            passwd=pw,
            host=cfg.get("postGIS", "host"),
            db=cfg.get("postGIS", "database"),
            port=int(cfg.get("postGIS", "port"))))
    return engine.connect()
|
Enable specifying the password in `config.ini`
|
Enable specifying the password in `config.ini`
|
Python
|
mit
|
oemof/oemof.db
|
from sqlalchemy import create_engine
import keyring
from . import config as cfg
def connection():
engine = create_engine(
"postgresql+psycopg2://{user}:{passwd}@{host}:{port}/{db}".format(
user=cfg.get("postGIS", "username"),
passwd=keyring.get_password(
cfg.get("postGIS", "database"),
cfg.get("postGIS", "username")),
host=cfg.get("postGIS", "host"),
db=cfg.get("postGIS", "database"),
port=int(cfg.get("postGIS", "port"))))
return engine.connect()
Enable specifying the password in `config.ini`
|
from configparser import NoOptionError as option, NoSectionError as section
from sqlalchemy import create_engine
import keyring
from . import config as cfg
def connection():
pw = keyring.get_password(cfg.get("postGIS", "database"),
cfg.get("postGIS", "username"))
if pw is None:
try: pw = cfg.get("postGIS", "pw")
except option:
print("Unable to find the database password in " +
"the oemof config or keyring." +
"\nExiting.")
exit(-1)
except section:
print("Unable to find the 'postGIS' section in oemof's config." +
"\nExiting.")
exit(-1)
engine = create_engine(
"postgresql+psycopg2://{user}:{passwd}@{host}:{port}/{db}".format(
user=cfg.get("postGIS", "username"),
passwd=pw,
host=cfg.get("postGIS", "host"),
db=cfg.get("postGIS", "database"),
port=int(cfg.get("postGIS", "port"))))
return engine.connect()
|
<commit_before>from sqlalchemy import create_engine
import keyring
from . import config as cfg
def connection():
engine = create_engine(
"postgresql+psycopg2://{user}:{passwd}@{host}:{port}/{db}".format(
user=cfg.get("postGIS", "username"),
passwd=keyring.get_password(
cfg.get("postGIS", "database"),
cfg.get("postGIS", "username")),
host=cfg.get("postGIS", "host"),
db=cfg.get("postGIS", "database"),
port=int(cfg.get("postGIS", "port"))))
return engine.connect()
<commit_msg>Enable specifying the password in `config.ini`<commit_after>
|
from configparser import NoOptionError as option, NoSectionError as section
from sqlalchemy import create_engine
import keyring
from . import config as cfg
def connection():
pw = keyring.get_password(cfg.get("postGIS", "database"),
cfg.get("postGIS", "username"))
if pw is None:
try: pw = cfg.get("postGIS", "pw")
except option:
print("Unable to find the database password in " +
"the oemof config or keyring." +
"\nExiting.")
exit(-1)
except section:
print("Unable to find the 'postGIS' section in oemof's config." +
"\nExiting.")
exit(-1)
engine = create_engine(
"postgresql+psycopg2://{user}:{passwd}@{host}:{port}/{db}".format(
user=cfg.get("postGIS", "username"),
passwd=pw,
host=cfg.get("postGIS", "host"),
db=cfg.get("postGIS", "database"),
port=int(cfg.get("postGIS", "port"))))
return engine.connect()
|
from sqlalchemy import create_engine
import keyring
from . import config as cfg
def connection():
engine = create_engine(
"postgresql+psycopg2://{user}:{passwd}@{host}:{port}/{db}".format(
user=cfg.get("postGIS", "username"),
passwd=keyring.get_password(
cfg.get("postGIS", "database"),
cfg.get("postGIS", "username")),
host=cfg.get("postGIS", "host"),
db=cfg.get("postGIS", "database"),
port=int(cfg.get("postGIS", "port"))))
return engine.connect()
Enable specifying the password in `config.ini`from configparser import NoOptionError as option, NoSectionError as section
from sqlalchemy import create_engine
import keyring
from . import config as cfg
def connection():
pw = keyring.get_password(cfg.get("postGIS", "database"),
cfg.get("postGIS", "username"))
if pw is None:
try: pw = cfg.get("postGIS", "pw")
except option:
print("Unable to find the database password in " +
"the oemof config or keyring." +
"\nExiting.")
exit(-1)
except section:
print("Unable to find the 'postGIS' section in oemof's config." +
"\nExiting.")
exit(-1)
engine = create_engine(
"postgresql+psycopg2://{user}:{passwd}@{host}:{port}/{db}".format(
user=cfg.get("postGIS", "username"),
passwd=pw,
host=cfg.get("postGIS", "host"),
db=cfg.get("postGIS", "database"),
port=int(cfg.get("postGIS", "port"))))
return engine.connect()
|
<commit_before>from sqlalchemy import create_engine
import keyring
from . import config as cfg
def connection():
engine = create_engine(
"postgresql+psycopg2://{user}:{passwd}@{host}:{port}/{db}".format(
user=cfg.get("postGIS", "username"),
passwd=keyring.get_password(
cfg.get("postGIS", "database"),
cfg.get("postGIS", "username")),
host=cfg.get("postGIS", "host"),
db=cfg.get("postGIS", "database"),
port=int(cfg.get("postGIS", "port"))))
return engine.connect()
<commit_msg>Enable specifying the password in `config.ini`<commit_after>from configparser import NoOptionError as option, NoSectionError as section
from sqlalchemy import create_engine
import keyring
from . import config as cfg
def connection():
pw = keyring.get_password(cfg.get("postGIS", "database"),
cfg.get("postGIS", "username"))
if pw is None:
try: pw = cfg.get("postGIS", "pw")
except option:
print("Unable to find the database password in " +
"the oemof config or keyring." +
"\nExiting.")
exit(-1)
except section:
print("Unable to find the 'postGIS' section in oemof's config." +
"\nExiting.")
exit(-1)
engine = create_engine(
"postgresql+psycopg2://{user}:{passwd}@{host}:{port}/{db}".format(
user=cfg.get("postGIS", "username"),
passwd=pw,
host=cfg.get("postGIS", "host"),
db=cfg.get("postGIS", "database"),
port=int(cfg.get("postGIS", "port"))))
return engine.connect()
|
901a47adf6726d50c01ac743e9661c0caac2b555
|
test_openfolder.py
|
test_openfolder.py
|
import pytest
from mock import patch, MagicMock
from open_folder import *
def test_folder_exists():
with patch('subprocess.check_call', MagicMock(return_value="NOOP")):
result = open_folder(".")
assert result == None
def test_folder_does_not_exists():
with patch('subprocess.check_call', MagicMock(return_value="NOOP")):
with pytest.raises(Exception):
open_folder("it_is_very_unlikely_that_this_file_exists_20150718")
def test_unsupported_os():
    """open_folder() must raise when the platform is not recognized."""
    # Stub out the real subprocess call and report a bogus OS name.
    with patch('subprocess.check_call', MagicMock(return_value="NOOP")):
        with patch('platform.system', MagicMock(return_value="NotDarwinWindowsLinux")):
            with pytest.raises(Exception):
                # No binding needed: pytest.raises only cares that the
                # call raises (the unused `result` local was dead code).
                open_folder("/")
def test_supported_os():
    """open_folder() succeeds (returns None) on every supported platform."""
    # Stub out the real subprocess call, then exercise each supported
    # platform name in the same order as the original stanzas.
    with patch('subprocess.check_call', MagicMock(return_value="NOOP")):
        for os_name in ("Linux", "Darwin", "Windows"):
            with patch('platform.system', MagicMock(return_value=os_name)):
                assert open_folder("/") is None
|
import pytest
from mock import patch, MagicMock
from open_folder import *
def test_folder_exists():
with patch('subprocess.check_call', MagicMock(return_value="NOOP")):
result = open_folder(".")
assert result == None
def test_folder_does_not_exists():
with patch('subprocess.check_call', MagicMock(return_value="NOOP")):
with pytest.raises(Exception) as excinfo:
open_folder("it_is_very_unlikely_that_this_file_exists_20150718")
assert str(excinfo.value) == ('Folder does not exist.')
def test_unsupported_os():
with patch('subprocess.check_call', MagicMock(return_value="NOOP")):
with patch('platform.system', MagicMock(return_value="NotDarwinWindowsLinux")):
with pytest.raises(Exception) as excinfo:
open_folder("/")
assert str(excinfo.value).startswith('Your operating system was not recognized.')
def test_supported_os():
with patch('subprocess.check_call', MagicMock(return_value="NOOP")):
with patch('platform.system', MagicMock(return_value="Linux")):
result = open_folder("/")
assert result == None
with patch('platform.system', MagicMock(return_value="Darwin")):
result = open_folder("/")
assert result == None
with patch('platform.system', MagicMock(return_value="Windows")):
result = open_folder("/")
assert result == None
|
Check to ensure the excpetions return the text we expect.
|
Check to ensure the excpetions return the text we expect.
|
Python
|
mit
|
golliher/dg-tickler-file
|
import pytest
from mock import patch, MagicMock
from open_folder import *
def test_folder_exists():
with patch('subprocess.check_call', MagicMock(return_value="NOOP")):
result = open_folder(".")
assert result == None
def test_folder_does_not_exists():
with patch('subprocess.check_call', MagicMock(return_value="NOOP")):
with pytest.raises(Exception):
open_folder("it_is_very_unlikely_that_this_file_exists_20150718")
def test_unsupported_os():
with patch('subprocess.check_call', MagicMock(return_value="NOOP")):
with patch('platform.system', MagicMock(return_value="NotDarwinWindowsLinux")):
with pytest.raises(Exception):
result = open_folder("/")
def test_supported_os():
with patch('subprocess.check_call', MagicMock(return_value="NOOP")):
with patch('platform.system', MagicMock(return_value="Linux")):
result = open_folder("/")
assert result == None
with patch('platform.system', MagicMock(return_value="Darwin")):
result = open_folder("/")
assert result == None
with patch('platform.system', MagicMock(return_value="Windows")):
result = open_folder("/")
assert result == None
Check to ensure the excpetions return the text we expect.
|
import pytest
from mock import patch, MagicMock
from open_folder import *
def test_folder_exists():
with patch('subprocess.check_call', MagicMock(return_value="NOOP")):
result = open_folder(".")
assert result == None
def test_folder_does_not_exists():
with patch('subprocess.check_call', MagicMock(return_value="NOOP")):
with pytest.raises(Exception) as excinfo:
open_folder("it_is_very_unlikely_that_this_file_exists_20150718")
assert str(excinfo.value) == ('Folder does not exist.')
def test_unsupported_os():
with patch('subprocess.check_call', MagicMock(return_value="NOOP")):
with patch('platform.system', MagicMock(return_value="NotDarwinWindowsLinux")):
with pytest.raises(Exception) as excinfo:
open_folder("/")
assert str(excinfo.value).startswith('Your operating system was not recognized.')
def test_supported_os():
with patch('subprocess.check_call', MagicMock(return_value="NOOP")):
with patch('platform.system', MagicMock(return_value="Linux")):
result = open_folder("/")
assert result == None
with patch('platform.system', MagicMock(return_value="Darwin")):
result = open_folder("/")
assert result == None
with patch('platform.system', MagicMock(return_value="Windows")):
result = open_folder("/")
assert result == None
|
<commit_before>import pytest
from mock import patch, MagicMock
from open_folder import *
def test_folder_exists():
with patch('subprocess.check_call', MagicMock(return_value="NOOP")):
result = open_folder(".")
assert result == None
def test_folder_does_not_exists():
with patch('subprocess.check_call', MagicMock(return_value="NOOP")):
with pytest.raises(Exception):
open_folder("it_is_very_unlikely_that_this_file_exists_20150718")
def test_unsupported_os():
with patch('subprocess.check_call', MagicMock(return_value="NOOP")):
with patch('platform.system', MagicMock(return_value="NotDarwinWindowsLinux")):
with pytest.raises(Exception):
result = open_folder("/")
def test_supported_os():
with patch('subprocess.check_call', MagicMock(return_value="NOOP")):
with patch('platform.system', MagicMock(return_value="Linux")):
result = open_folder("/")
assert result == None
with patch('platform.system', MagicMock(return_value="Darwin")):
result = open_folder("/")
assert result == None
with patch('platform.system', MagicMock(return_value="Windows")):
result = open_folder("/")
assert result == None
<commit_msg>Check to ensure the excpetions return the text we expect.<commit_after>
|
import pytest
from mock import patch, MagicMock
from open_folder import *
def test_folder_exists():
with patch('subprocess.check_call', MagicMock(return_value="NOOP")):
result = open_folder(".")
assert result == None
def test_folder_does_not_exists():
with patch('subprocess.check_call', MagicMock(return_value="NOOP")):
with pytest.raises(Exception) as excinfo:
open_folder("it_is_very_unlikely_that_this_file_exists_20150718")
assert str(excinfo.value) == ('Folder does not exist.')
def test_unsupported_os():
with patch('subprocess.check_call', MagicMock(return_value="NOOP")):
with patch('platform.system', MagicMock(return_value="NotDarwinWindowsLinux")):
with pytest.raises(Exception) as excinfo:
open_folder("/")
assert str(excinfo.value).startswith('Your operating system was not recognized.')
def test_supported_os():
with patch('subprocess.check_call', MagicMock(return_value="NOOP")):
with patch('platform.system', MagicMock(return_value="Linux")):
result = open_folder("/")
assert result == None
with patch('platform.system', MagicMock(return_value="Darwin")):
result = open_folder("/")
assert result == None
with patch('platform.system', MagicMock(return_value="Windows")):
result = open_folder("/")
assert result == None
|
import pytest
from mock import patch, MagicMock
from open_folder import *
def test_folder_exists():
with patch('subprocess.check_call', MagicMock(return_value="NOOP")):
result = open_folder(".")
assert result == None
def test_folder_does_not_exists():
with patch('subprocess.check_call', MagicMock(return_value="NOOP")):
with pytest.raises(Exception):
open_folder("it_is_very_unlikely_that_this_file_exists_20150718")
def test_unsupported_os():
with patch('subprocess.check_call', MagicMock(return_value="NOOP")):
with patch('platform.system', MagicMock(return_value="NotDarwinWindowsLinux")):
with pytest.raises(Exception):
result = open_folder("/")
def test_supported_os():
with patch('subprocess.check_call', MagicMock(return_value="NOOP")):
with patch('platform.system', MagicMock(return_value="Linux")):
result = open_folder("/")
assert result == None
with patch('platform.system', MagicMock(return_value="Darwin")):
result = open_folder("/")
assert result == None
with patch('platform.system', MagicMock(return_value="Windows")):
result = open_folder("/")
assert result == None
Check to ensure the excpetions return the text we expect.import pytest
from mock import patch, MagicMock
from open_folder import *
def test_folder_exists():
with patch('subprocess.check_call', MagicMock(return_value="NOOP")):
result = open_folder(".")
assert result == None
def test_folder_does_not_exists():
with patch('subprocess.check_call', MagicMock(return_value="NOOP")):
with pytest.raises(Exception) as excinfo:
open_folder("it_is_very_unlikely_that_this_file_exists_20150718")
assert str(excinfo.value) == ('Folder does not exist.')
def test_unsupported_os():
with patch('subprocess.check_call', MagicMock(return_value="NOOP")):
with patch('platform.system', MagicMock(return_value="NotDarwinWindowsLinux")):
with pytest.raises(Exception) as excinfo:
open_folder("/")
assert str(excinfo.value).startswith('Your operating system was not recognized.')
def test_supported_os():
with patch('subprocess.check_call', MagicMock(return_value="NOOP")):
with patch('platform.system', MagicMock(return_value="Linux")):
result = open_folder("/")
assert result == None
with patch('platform.system', MagicMock(return_value="Darwin")):
result = open_folder("/")
assert result == None
with patch('platform.system', MagicMock(return_value="Windows")):
result = open_folder("/")
assert result == None
|
<commit_before>import pytest
from mock import patch, MagicMock
from open_folder import *
def test_folder_exists():
with patch('subprocess.check_call', MagicMock(return_value="NOOP")):
result = open_folder(".")
assert result == None
def test_folder_does_not_exists():
with patch('subprocess.check_call', MagicMock(return_value="NOOP")):
with pytest.raises(Exception):
open_folder("it_is_very_unlikely_that_this_file_exists_20150718")
def test_unsupported_os():
with patch('subprocess.check_call', MagicMock(return_value="NOOP")):
with patch('platform.system', MagicMock(return_value="NotDarwinWindowsLinux")):
with pytest.raises(Exception):
result = open_folder("/")
def test_supported_os():
with patch('subprocess.check_call', MagicMock(return_value="NOOP")):
with patch('platform.system', MagicMock(return_value="Linux")):
result = open_folder("/")
assert result == None
with patch('platform.system', MagicMock(return_value="Darwin")):
result = open_folder("/")
assert result == None
with patch('platform.system', MagicMock(return_value="Windows")):
result = open_folder("/")
assert result == None
<commit_msg>Check to ensure the excpetions return the text we expect.<commit_after>import pytest
from mock import patch, MagicMock
from open_folder import *
def test_folder_exists():
with patch('subprocess.check_call', MagicMock(return_value="NOOP")):
result = open_folder(".")
assert result == None
def test_folder_does_not_exists():
with patch('subprocess.check_call', MagicMock(return_value="NOOP")):
with pytest.raises(Exception) as excinfo:
open_folder("it_is_very_unlikely_that_this_file_exists_20150718")
assert str(excinfo.value) == ('Folder does not exist.')
def test_unsupported_os():
with patch('subprocess.check_call', MagicMock(return_value="NOOP")):
with patch('platform.system', MagicMock(return_value="NotDarwinWindowsLinux")):
with pytest.raises(Exception) as excinfo:
open_folder("/")
assert str(excinfo.value).startswith('Your operating system was not recognized.')
def test_supported_os():
with patch('subprocess.check_call', MagicMock(return_value="NOOP")):
with patch('platform.system', MagicMock(return_value="Linux")):
result = open_folder("/")
assert result == None
with patch('platform.system', MagicMock(return_value="Darwin")):
result = open_folder("/")
assert result == None
with patch('platform.system', MagicMock(return_value="Windows")):
result = open_folder("/")
assert result == None
|
b8638ab2befa55029f2aeb8a907acb1a94aba3a9
|
app/rules.py
|
app/rules.py
|
class Rule(object):
    # Compiles a trigger name plus a list of action names into a timed
    # program (`self.code`): alternating [time, (command, speed), ...]
    # entries spaced 0.5 s apart, suitable for a rover control program.
    # NOTE: Python 2 source (print statements).
    def __init__(self, trigger, actions):
        self.trigger = trigger
        print "trigger: %s" % trigger
        self.code = []
        time = 0
        for a in actions:
            print "action: %s" % a
            if a == 'back':
                # 'back' maps to the motor command 'reverse' at speed 40.
                action = ('reverse', 40)
            elif a == 'stop':
                # 'stop' is a null command with zero speed.
                action = (None, 0)
            else: # forward, left, right, speak, light-*
                # All other action names pass through verbatim at speed 40.
                action = (a, 40)
            self.code.append(time)
            self.code.append(action)
            time += 0.5
        print "code: %s" % self.code
class RuleEngine(object):
    # Holds the active rules and fires a rule's compiled program on the
    # controller whenever its trigger condition matches the sensors.
    def __init__(self, control):
        self.control = control
        self.rules = []
    def check(self, color, touch, direction):
        # `color` is a ground-sensor reading (threshold 10 splits
        # dark vs. light); `touch` is truthy on collision; `direction`
        # is currently unused here.
        for rule in self.rules:
            if (rule.trigger == 'collision' and touch) \
            or (rule.trigger == 'dark ground' and color < 10) \
            or (rule.trigger == 'light ground' and color > 10):
                self.control.program(*rule.code)
    def activate(self, rules):
        # Replace the active rule set wholesale.
        self.rules = rules
|
class Rule(object):
def __init__(self, trigger, actions):
self.trigger = trigger
print "trigger: %s" % trigger
self.code = []
time = 0
for a in actions:
print "action: %s" % a
if a == 'back':
action = ('reverse', 40)
elif a == 'stop':
action = (None, 0)
else: # forward, left, right, speak, light-*
action = (a, 40)
self.code.append(time)
self.code.append(action)
time += 0.5
print "code: %s" % self.code
class RuleEngine(object):
def __init__(self, control):
self.control = control
self.rules = []
def check(self, color, touch, direction):
for rule in self.rules:
if (rule.trigger == 'collision' and touch) \
or (rule.trigger == 'dark ground' and color < 40) \
or (rule.trigger == 'light ground' and color >= 40):
self.control.program(*rule.code)
def activate(self, rules):
self.rules = rules
|
Decrease sensitivity of dark ground checking.
|
Decrease sensitivity of dark ground checking.
|
Python
|
bsd-2-clause
|
legorovers/legoflask,legorovers/legoflask,legorovers/legoflask
|
class Rule(object):
def __init__(self, trigger, actions):
self.trigger = trigger
print "trigger: %s" % trigger
self.code = []
time = 0
for a in actions:
print "action: %s" % a
if a == 'back':
action = ('reverse', 40)
elif a == 'stop':
action = (None, 0)
else: # forward, left, right, speak, light-*
action = (a, 40)
self.code.append(time)
self.code.append(action)
time += 0.5
print "code: %s" % self.code
class RuleEngine(object):
def __init__(self, control):
self.control = control
self.rules = []
def check(self, color, touch, direction):
for rule in self.rules:
if (rule.trigger == 'collision' and touch) \
or (rule.trigger == 'dark ground' and color < 10) \
or (rule.trigger == 'light ground' and color > 10):
self.control.program(*rule.code)
def activate(self, rules):
self.rules = rules
Decrease sensitivity of dark ground checking.
|
class Rule(object):
def __init__(self, trigger, actions):
self.trigger = trigger
print "trigger: %s" % trigger
self.code = []
time = 0
for a in actions:
print "action: %s" % a
if a == 'back':
action = ('reverse', 40)
elif a == 'stop':
action = (None, 0)
else: # forward, left, right, speak, light-*
action = (a, 40)
self.code.append(time)
self.code.append(action)
time += 0.5
print "code: %s" % self.code
class RuleEngine(object):
def __init__(self, control):
self.control = control
self.rules = []
def check(self, color, touch, direction):
for rule in self.rules:
if (rule.trigger == 'collision' and touch) \
or (rule.trigger == 'dark ground' and color < 40) \
or (rule.trigger == 'light ground' and color >= 40):
self.control.program(*rule.code)
def activate(self, rules):
self.rules = rules
|
<commit_before>
class Rule(object):
def __init__(self, trigger, actions):
self.trigger = trigger
print "trigger: %s" % trigger
self.code = []
time = 0
for a in actions:
print "action: %s" % a
if a == 'back':
action = ('reverse', 40)
elif a == 'stop':
action = (None, 0)
else: # forward, left, right, speak, light-*
action = (a, 40)
self.code.append(time)
self.code.append(action)
time += 0.5
print "code: %s" % self.code
class RuleEngine(object):
def __init__(self, control):
self.control = control
self.rules = []
def check(self, color, touch, direction):
for rule in self.rules:
if (rule.trigger == 'collision' and touch) \
or (rule.trigger == 'dark ground' and color < 10) \
or (rule.trigger == 'light ground' and color > 10):
self.control.program(*rule.code)
def activate(self, rules):
self.rules = rules
<commit_msg>Decrease sensitivity of dark ground checking.<commit_after>
|
class Rule(object):
def __init__(self, trigger, actions):
self.trigger = trigger
print "trigger: %s" % trigger
self.code = []
time = 0
for a in actions:
print "action: %s" % a
if a == 'back':
action = ('reverse', 40)
elif a == 'stop':
action = (None, 0)
else: # forward, left, right, speak, light-*
action = (a, 40)
self.code.append(time)
self.code.append(action)
time += 0.5
print "code: %s" % self.code
class RuleEngine(object):
def __init__(self, control):
self.control = control
self.rules = []
def check(self, color, touch, direction):
for rule in self.rules:
if (rule.trigger == 'collision' and touch) \
or (rule.trigger == 'dark ground' and color < 40) \
or (rule.trigger == 'light ground' and color >= 40):
self.control.program(*rule.code)
def activate(self, rules):
self.rules = rules
|
class Rule(object):
def __init__(self, trigger, actions):
self.trigger = trigger
print "trigger: %s" % trigger
self.code = []
time = 0
for a in actions:
print "action: %s" % a
if a == 'back':
action = ('reverse', 40)
elif a == 'stop':
action = (None, 0)
else: # forward, left, right, speak, light-*
action = (a, 40)
self.code.append(time)
self.code.append(action)
time += 0.5
print "code: %s" % self.code
class RuleEngine(object):
def __init__(self, control):
self.control = control
self.rules = []
def check(self, color, touch, direction):
for rule in self.rules:
if (rule.trigger == 'collision' and touch) \
or (rule.trigger == 'dark ground' and color < 10) \
or (rule.trigger == 'light ground' and color > 10):
self.control.program(*rule.code)
def activate(self, rules):
self.rules = rules
Decrease sensitivity of dark ground checking.
class Rule(object):
def __init__(self, trigger, actions):
self.trigger = trigger
print "trigger: %s" % trigger
self.code = []
time = 0
for a in actions:
print "action: %s" % a
if a == 'back':
action = ('reverse', 40)
elif a == 'stop':
action = (None, 0)
else: # forward, left, right, speak, light-*
action = (a, 40)
self.code.append(time)
self.code.append(action)
time += 0.5
print "code: %s" % self.code
class RuleEngine(object):
def __init__(self, control):
self.control = control
self.rules = []
def check(self, color, touch, direction):
for rule in self.rules:
if (rule.trigger == 'collision' and touch) \
or (rule.trigger == 'dark ground' and color < 40) \
or (rule.trigger == 'light ground' and color >= 40):
self.control.program(*rule.code)
def activate(self, rules):
self.rules = rules
|
<commit_before>
class Rule(object):
def __init__(self, trigger, actions):
self.trigger = trigger
print "trigger: %s" % trigger
self.code = []
time = 0
for a in actions:
print "action: %s" % a
if a == 'back':
action = ('reverse', 40)
elif a == 'stop':
action = (None, 0)
else: # forward, left, right, speak, light-*
action = (a, 40)
self.code.append(time)
self.code.append(action)
time += 0.5
print "code: %s" % self.code
class RuleEngine(object):
def __init__(self, control):
self.control = control
self.rules = []
def check(self, color, touch, direction):
for rule in self.rules:
if (rule.trigger == 'collision' and touch) \
or (rule.trigger == 'dark ground' and color < 10) \
or (rule.trigger == 'light ground' and color > 10):
self.control.program(*rule.code)
def activate(self, rules):
self.rules = rules
<commit_msg>Decrease sensitivity of dark ground checking.<commit_after>
class Rule(object):
def __init__(self, trigger, actions):
self.trigger = trigger
print "trigger: %s" % trigger
self.code = []
time = 0
for a in actions:
print "action: %s" % a
if a == 'back':
action = ('reverse', 40)
elif a == 'stop':
action = (None, 0)
else: # forward, left, right, speak, light-*
action = (a, 40)
self.code.append(time)
self.code.append(action)
time += 0.5
print "code: %s" % self.code
class RuleEngine(object):
def __init__(self, control):
self.control = control
self.rules = []
def check(self, color, touch, direction):
for rule in self.rules:
if (rule.trigger == 'collision' and touch) \
or (rule.trigger == 'dark ground' and color < 40) \
or (rule.trigger == 'light ground' and color >= 40):
self.control.program(*rule.code)
def activate(self, rules):
self.rules = rules
|
ea972c89cd7abe4fdb772ce359dd9acd83817242
|
tests/test.py
|
tests/test.py
|
from devicehive import Handler
from devicehive import DeviceHive
class TestHandler(Handler):
"""Test handler class."""
def handle_connect(self):
if not self.options['handle_connect'](self):
self.api.disconnect()
def handle_event(self, event):
pass
class Test(object):
"""Test class."""
def __init__(self, transport_url, refresh_token):
self._transport_url = transport_url
self._refresh_token = refresh_token
self._transport_name = DeviceHive.transport_name(self._transport_url)
def transport_name(self):
return self._transport_name
def run(self, handle_connect, handle_event=None):
handler_options = {'handle_connect': handle_connect,
'handle_event': handle_event}
device_hive = DeviceHive(self._transport_url, TestHandler,
handler_options)
device_hive.connect(refresh_token=self._refresh_token)
exception_info = device_hive.join(print_exception=False)
if not exception_info:
return
raise exception_info[1]
|
from devicehive import Handler
from devicehive import DeviceHive
class TestHandler(Handler):
"""Test handler class."""
def handle_connect(self):
if not self.options['handle_connect'](self):
self.api.disconnect()
def handle_event(self, event):
pass
class Test(object):
"""Test class."""
def __init__(self, transport_url, refresh_token):
self._transport_url = transport_url
self._refresh_token = refresh_token
self._transport_name = DeviceHive.transport_name(self._transport_url)
def transport_name(self):
return self._transport_name
def http_transport(self):
return self._transport_name == 'http'
def websocket_transport(self):
return self._transport_name == 'websocket'
def run(self, handle_connect, handle_event=None):
handler_options = {'handle_connect': handle_connect,
'handle_event': handle_event}
device_hive = DeviceHive(self._transport_url, TestHandler,
handler_options)
device_hive.connect(refresh_token=self._refresh_token)
exception_info = device_hive.join(print_exception=False)
if not exception_info:
return
raise exception_info[1]
|
Add http_transport and websocket_transport methods
|
Add http_transport and websocket_transport methods
|
Python
|
apache-2.0
|
devicehive/devicehive-python
|
from devicehive import Handler
from devicehive import DeviceHive
class TestHandler(Handler):
"""Test handler class."""
def handle_connect(self):
if not self.options['handle_connect'](self):
self.api.disconnect()
def handle_event(self, event):
pass
class Test(object):
"""Test class."""
def __init__(self, transport_url, refresh_token):
self._transport_url = transport_url
self._refresh_token = refresh_token
self._transport_name = DeviceHive.transport_name(self._transport_url)
def transport_name(self):
return self._transport_name
def run(self, handle_connect, handle_event=None):
handler_options = {'handle_connect': handle_connect,
'handle_event': handle_event}
device_hive = DeviceHive(self._transport_url, TestHandler,
handler_options)
device_hive.connect(refresh_token=self._refresh_token)
exception_info = device_hive.join(print_exception=False)
if not exception_info:
return
raise exception_info[1]
Add http_transport and websocket_transport methods
|
from devicehive import Handler
from devicehive import DeviceHive
class TestHandler(Handler):
"""Test handler class."""
def handle_connect(self):
if not self.options['handle_connect'](self):
self.api.disconnect()
def handle_event(self, event):
pass
class Test(object):
"""Test class."""
def __init__(self, transport_url, refresh_token):
self._transport_url = transport_url
self._refresh_token = refresh_token
self._transport_name = DeviceHive.transport_name(self._transport_url)
def transport_name(self):
return self._transport_name
def http_transport(self):
return self._transport_name == 'http'
def websocket_transport(self):
return self._transport_name == 'websocket'
def run(self, handle_connect, handle_event=None):
handler_options = {'handle_connect': handle_connect,
'handle_event': handle_event}
device_hive = DeviceHive(self._transport_url, TestHandler,
handler_options)
device_hive.connect(refresh_token=self._refresh_token)
exception_info = device_hive.join(print_exception=False)
if not exception_info:
return
raise exception_info[1]
|
<commit_before>from devicehive import Handler
from devicehive import DeviceHive
class TestHandler(Handler):
"""Test handler class."""
def handle_connect(self):
if not self.options['handle_connect'](self):
self.api.disconnect()
def handle_event(self, event):
pass
class Test(object):
"""Test class."""
def __init__(self, transport_url, refresh_token):
self._transport_url = transport_url
self._refresh_token = refresh_token
self._transport_name = DeviceHive.transport_name(self._transport_url)
def transport_name(self):
return self._transport_name
def run(self, handle_connect, handle_event=None):
handler_options = {'handle_connect': handle_connect,
'handle_event': handle_event}
device_hive = DeviceHive(self._transport_url, TestHandler,
handler_options)
device_hive.connect(refresh_token=self._refresh_token)
exception_info = device_hive.join(print_exception=False)
if not exception_info:
return
raise exception_info[1]
<commit_msg>Add http_transport and websocket_transport methods<commit_after>
|
from devicehive import Handler
from devicehive import DeviceHive
class TestHandler(Handler):
"""Test handler class."""
def handle_connect(self):
if not self.options['handle_connect'](self):
self.api.disconnect()
def handle_event(self, event):
pass
class Test(object):
"""Test class."""
def __init__(self, transport_url, refresh_token):
self._transport_url = transport_url
self._refresh_token = refresh_token
self._transport_name = DeviceHive.transport_name(self._transport_url)
def transport_name(self):
return self._transport_name
def http_transport(self):
return self._transport_name == 'http'
def websocket_transport(self):
return self._transport_name == 'websocket'
def run(self, handle_connect, handle_event=None):
handler_options = {'handle_connect': handle_connect,
'handle_event': handle_event}
device_hive = DeviceHive(self._transport_url, TestHandler,
handler_options)
device_hive.connect(refresh_token=self._refresh_token)
exception_info = device_hive.join(print_exception=False)
if not exception_info:
return
raise exception_info[1]
|
from devicehive import Handler
from devicehive import DeviceHive
class TestHandler(Handler):
"""Test handler class."""
def handle_connect(self):
if not self.options['handle_connect'](self):
self.api.disconnect()
def handle_event(self, event):
pass
class Test(object):
"""Test class."""
def __init__(self, transport_url, refresh_token):
self._transport_url = transport_url
self._refresh_token = refresh_token
self._transport_name = DeviceHive.transport_name(self._transport_url)
def transport_name(self):
return self._transport_name
def run(self, handle_connect, handle_event=None):
handler_options = {'handle_connect': handle_connect,
'handle_event': handle_event}
device_hive = DeviceHive(self._transport_url, TestHandler,
handler_options)
device_hive.connect(refresh_token=self._refresh_token)
exception_info = device_hive.join(print_exception=False)
if not exception_info:
return
raise exception_info[1]
Add http_transport and websocket_transport methodsfrom devicehive import Handler
from devicehive import DeviceHive
class TestHandler(Handler):
"""Test handler class."""
def handle_connect(self):
if not self.options['handle_connect'](self):
self.api.disconnect()
def handle_event(self, event):
pass
class Test(object):
"""Test class."""
def __init__(self, transport_url, refresh_token):
self._transport_url = transport_url
self._refresh_token = refresh_token
self._transport_name = DeviceHive.transport_name(self._transport_url)
def transport_name(self):
return self._transport_name
def http_transport(self):
return self._transport_name == 'http'
def websocket_transport(self):
return self._transport_name == 'websocket'
def run(self, handle_connect, handle_event=None):
handler_options = {'handle_connect': handle_connect,
'handle_event': handle_event}
device_hive = DeviceHive(self._transport_url, TestHandler,
handler_options)
device_hive.connect(refresh_token=self._refresh_token)
exception_info = device_hive.join(print_exception=False)
if not exception_info:
return
raise exception_info[1]
|
<commit_before>from devicehive import Handler
from devicehive import DeviceHive
class TestHandler(Handler):
"""Test handler class."""
def handle_connect(self):
if not self.options['handle_connect'](self):
self.api.disconnect()
def handle_event(self, event):
pass
class Test(object):
"""Test class."""
def __init__(self, transport_url, refresh_token):
self._transport_url = transport_url
self._refresh_token = refresh_token
self._transport_name = DeviceHive.transport_name(self._transport_url)
def transport_name(self):
return self._transport_name
def run(self, handle_connect, handle_event=None):
handler_options = {'handle_connect': handle_connect,
'handle_event': handle_event}
device_hive = DeviceHive(self._transport_url, TestHandler,
handler_options)
device_hive.connect(refresh_token=self._refresh_token)
exception_info = device_hive.join(print_exception=False)
if not exception_info:
return
raise exception_info[1]
<commit_msg>Add http_transport and websocket_transport methods<commit_after>from devicehive import Handler
from devicehive import DeviceHive
class TestHandler(Handler):
"""Test handler class."""
def handle_connect(self):
if not self.options['handle_connect'](self):
self.api.disconnect()
def handle_event(self, event):
pass
class Test(object):
"""Test class."""
def __init__(self, transport_url, refresh_token):
self._transport_url = transport_url
self._refresh_token = refresh_token
self._transport_name = DeviceHive.transport_name(self._transport_url)
def transport_name(self):
return self._transport_name
def http_transport(self):
return self._transport_name == 'http'
def websocket_transport(self):
return self._transport_name == 'websocket'
def run(self, handle_connect, handle_event=None):
handler_options = {'handle_connect': handle_connect,
'handle_event': handle_event}
device_hive = DeviceHive(self._transport_url, TestHandler,
handler_options)
device_hive.connect(refresh_token=self._refresh_token)
exception_info = device_hive.join(print_exception=False)
if not exception_info:
return
raise exception_info[1]
|
42a4a8b4480bc481e0467ae7ee46c60400d63f77
|
theme-installer.py
|
theme-installer.py
|
#!/usr/bin/env python
import sys
from inc.functions import *
from PySide.QtGui import QApplication, QPixmap, QSplashScreen
from ui.mainwindow import MainWindow
# The app
if __name__ == '__main__':
# Create app
app = QApplication(sys.argv)
app.setApplicationName('LMMS Theme Installer')
# Show window
window = MainWindow()
window.show()
# Closed connection
app.lastWindowClosed.connect(app.quit)
# Run it
sys.exit(app.exec_())
|
#!/usr/bin/env python
import sys
from inc.functions import *
from PySide.QtGui import QApplication, QPixmap, QSplashScreen
from ui.mainwindow import MainWindow
# Create tmp directory if it doesn't exist
if not os.path.exists(os.path.join(os.getcwd(), 'tmp')):
os.mkdir(os.path.join(os.getcwd(), 'tmp'))
# The app
if __name__ == '__main__':
# Create app
app = QApplication(sys.argv)
app.setApplicationName('LMMS Theme Installer')
# Show window
window = MainWindow()
window.show()
# Closed connection
app.lastWindowClosed.connect(app.quit)
# Run it
sys.exit(app.exec_())
|
Create tmp directory if it doesn't exist
|
Create tmp directory if it doesn't exist
|
Python
|
lgpl-2.1
|
kmklr72/LMMS-Theme-Installer
|
#!/usr/bin/env python
import sys
from inc.functions import *
from PySide.QtGui import QApplication, QPixmap, QSplashScreen
from ui.mainwindow import MainWindow
# The app
if __name__ == '__main__':
# Create app
app = QApplication(sys.argv)
app.setApplicationName('LMMS Theme Installer')
# Show window
window = MainWindow()
window.show()
# Closed connection
app.lastWindowClosed.connect(app.quit)
# Run it
sys.exit(app.exec_())Create tmp directory if it doesn't exist
|
#!/usr/bin/env python
import sys
from inc.functions import *
from PySide.QtGui import QApplication, QPixmap, QSplashScreen
from ui.mainwindow import MainWindow
# Create tmp directory if it doesn't exist
if not os.path.exists(os.path.join(os.getcwd(), 'tmp')):
os.mkdir(os.path.join(os.getcwd(), 'tmp'))
# The app
if __name__ == '__main__':
# Create app
app = QApplication(sys.argv)
app.setApplicationName('LMMS Theme Installer')
# Show window
window = MainWindow()
window.show()
# Closed connection
app.lastWindowClosed.connect(app.quit)
# Run it
sys.exit(app.exec_())
|
<commit_before>#!/usr/bin/env python
import sys
from inc.functions import *
from PySide.QtGui import QApplication, QPixmap, QSplashScreen
from ui.mainwindow import MainWindow
# The app
if __name__ == '__main__':
# Create app
app = QApplication(sys.argv)
app.setApplicationName('LMMS Theme Installer')
# Show window
window = MainWindow()
window.show()
# Closed connection
app.lastWindowClosed.connect(app.quit)
# Run it
sys.exit(app.exec_())<commit_msg>Create tmp directory if it doesn't exist<commit_after>
|
#!/usr/bin/env python
import sys
from inc.functions import *
from PySide.QtGui import QApplication, QPixmap, QSplashScreen
from ui.mainwindow import MainWindow
# Create tmp directory if it doesn't exist
if not os.path.exists(os.path.join(os.getcwd(), 'tmp')):
os.mkdir(os.path.join(os.getcwd(), 'tmp'))
# The app
if __name__ == '__main__':
# Create app
app = QApplication(sys.argv)
app.setApplicationName('LMMS Theme Installer')
# Show window
window = MainWindow()
window.show()
# Closed connection
app.lastWindowClosed.connect(app.quit)
# Run it
sys.exit(app.exec_())
|
#!/usr/bin/env python
import sys
from inc.functions import *
from PySide.QtGui import QApplication, QPixmap, QSplashScreen
from ui.mainwindow import MainWindow
# The app
if __name__ == '__main__':
# Create app
app = QApplication(sys.argv)
app.setApplicationName('LMMS Theme Installer')
# Show window
window = MainWindow()
window.show()
# Closed connection
app.lastWindowClosed.connect(app.quit)
# Run it
sys.exit(app.exec_())Create tmp directory if it doesn't exist#!/usr/bin/env python
import sys
from inc.functions import *
from PySide.QtGui import QApplication, QPixmap, QSplashScreen
from ui.mainwindow import MainWindow
# Create tmp directory if it doesn't exist
if not os.path.exists(os.path.join(os.getcwd(), 'tmp')):
os.mkdir(os.path.join(os.getcwd(), 'tmp'))
# The app
if __name__ == '__main__':
# Create app
app = QApplication(sys.argv)
app.setApplicationName('LMMS Theme Installer')
# Show window
window = MainWindow()
window.show()
# Closed connection
app.lastWindowClosed.connect(app.quit)
# Run it
sys.exit(app.exec_())
|
<commit_before>#!/usr/bin/env python
import sys
from inc.functions import *
from PySide.QtGui import QApplication, QPixmap, QSplashScreen
from ui.mainwindow import MainWindow
# The app
if __name__ == '__main__':
# Create app
app = QApplication(sys.argv)
app.setApplicationName('LMMS Theme Installer')
# Show window
window = MainWindow()
window.show()
# Closed connection
app.lastWindowClosed.connect(app.quit)
# Run it
sys.exit(app.exec_())<commit_msg>Create tmp directory if it doesn't exist<commit_after>#!/usr/bin/env python
import sys
from inc.functions import *
from PySide.QtGui import QApplication, QPixmap, QSplashScreen
from ui.mainwindow import MainWindow
# Create tmp directory if it doesn't exist
if not os.path.exists(os.path.join(os.getcwd(), 'tmp')):
os.mkdir(os.path.join(os.getcwd(), 'tmp'))
# The app
if __name__ == '__main__':
# Create app
app = QApplication(sys.argv)
app.setApplicationName('LMMS Theme Installer')
# Show window
window = MainWindow()
window.show()
# Closed connection
app.lastWindowClosed.connect(app.quit)
# Run it
sys.exit(app.exec_())
|
dd40b392b73ddc1bcf88d932418b4f891bcc6a89
|
twine/__init__.py
|
twine/__init__.py
|
# Copyright 2013 Donald Stufft
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function
from __future__ import unicode_literals
__all__ = (
"__title__", "__summary__", "__uri__", "__version__", "__author__",
"__email__", "__license__", "__copyright__",
)
__title__ = "twine"
__summary__ = "Collection of utilities for interacting with PyPI"
__uri__ = "https://github.com/pypa/twine"
__version__ = "1.8.1"
__author__ = "Donald Stufft and individual contributors"
__email__ = "donald@stufft.io"
__license__ = "Apache License, Version 2.0"
__copyright__ = "Copyright 2013 Donald Stufft"
|
# Copyright 2013 Donald Stufft
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function
__all__ = (
"__title__", "__summary__", "__uri__", "__version__", "__author__",
"__email__", "__license__", "__copyright__",
)
__title__ = "twine"
__summary__ = "Collection of utilities for interacting with PyPI"
__uri__ = "https://github.com/pypa/twine"
__version__ = "1.8.1"
__author__ = "Donald Stufft and individual contributors"
__email__ = "donald@stufft.io"
__license__ = "Apache License, Version 2.0"
__copyright__ = "Copyright 2013 Donald Stufft"
|
Allow star imports from twine
|
Allow star imports from twine
Unicode literals on Python 2 prevent people from being able to use
from twine import *
Closes gh-209
(cherry picked from commit c2cd72d0f4ff4d380845333fbfaaf2c92d6a5674)
|
Python
|
apache-2.0
|
pypa/twine
|
# Copyright 2013 Donald Stufft
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function
from __future__ import unicode_literals
__all__ = (
"__title__", "__summary__", "__uri__", "__version__", "__author__",
"__email__", "__license__", "__copyright__",
)
__title__ = "twine"
__summary__ = "Collection of utilities for interacting with PyPI"
__uri__ = "https://github.com/pypa/twine"
__version__ = "1.8.1"
__author__ = "Donald Stufft and individual contributors"
__email__ = "donald@stufft.io"
__license__ = "Apache License, Version 2.0"
__copyright__ = "Copyright 2013 Donald Stufft"
Allow star imports from twine
Unicode literals on Python 2 prevent people from being able to use
from twine import *
Closes gh-209
(cherry picked from commit c2cd72d0f4ff4d380845333fbfaaf2c92d6a5674)
|
# Copyright 2013 Donald Stufft
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function
__all__ = (
"__title__", "__summary__", "__uri__", "__version__", "__author__",
"__email__", "__license__", "__copyright__",
)
__title__ = "twine"
__summary__ = "Collection of utilities for interacting with PyPI"
__uri__ = "https://github.com/pypa/twine"
__version__ = "1.8.1"
__author__ = "Donald Stufft and individual contributors"
__email__ = "donald@stufft.io"
__license__ = "Apache License, Version 2.0"
__copyright__ = "Copyright 2013 Donald Stufft"
|
<commit_before># Copyright 2013 Donald Stufft
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function
from __future__ import unicode_literals
__all__ = (
"__title__", "__summary__", "__uri__", "__version__", "__author__",
"__email__", "__license__", "__copyright__",
)
__title__ = "twine"
__summary__ = "Collection of utilities for interacting with PyPI"
__uri__ = "https://github.com/pypa/twine"
__version__ = "1.8.1"
__author__ = "Donald Stufft and individual contributors"
__email__ = "donald@stufft.io"
__license__ = "Apache License, Version 2.0"
__copyright__ = "Copyright 2013 Donald Stufft"
<commit_msg>Allow star imports from twine
Unicode literals on Python 2 prevent people from being able to use
from twine import *
Closes gh-209
(cherry picked from commit c2cd72d0f4ff4d380845333fbfaaf2c92d6a5674)<commit_after>
|
# Copyright 2013 Donald Stufft
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function
__all__ = (
"__title__", "__summary__", "__uri__", "__version__", "__author__",
"__email__", "__license__", "__copyright__",
)
__title__ = "twine"
__summary__ = "Collection of utilities for interacting with PyPI"
__uri__ = "https://github.com/pypa/twine"
__version__ = "1.8.1"
__author__ = "Donald Stufft and individual contributors"
__email__ = "donald@stufft.io"
__license__ = "Apache License, Version 2.0"
__copyright__ = "Copyright 2013 Donald Stufft"
|
# Copyright 2013 Donald Stufft
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function
from __future__ import unicode_literals
__all__ = (
"__title__", "__summary__", "__uri__", "__version__", "__author__",
"__email__", "__license__", "__copyright__",
)
__title__ = "twine"
__summary__ = "Collection of utilities for interacting with PyPI"
__uri__ = "https://github.com/pypa/twine"
__version__ = "1.8.1"
__author__ = "Donald Stufft and individual contributors"
__email__ = "donald@stufft.io"
__license__ = "Apache License, Version 2.0"
__copyright__ = "Copyright 2013 Donald Stufft"
Allow star imports from twine
Unicode literals on Python 2 prevent people from being able to use
from twine import *
Closes gh-209
(cherry picked from commit c2cd72d0f4ff4d380845333fbfaaf2c92d6a5674)# Copyright 2013 Donald Stufft
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function
__all__ = (
"__title__", "__summary__", "__uri__", "__version__", "__author__",
"__email__", "__license__", "__copyright__",
)
__title__ = "twine"
__summary__ = "Collection of utilities for interacting with PyPI"
__uri__ = "https://github.com/pypa/twine"
__version__ = "1.8.1"
__author__ = "Donald Stufft and individual contributors"
__email__ = "donald@stufft.io"
__license__ = "Apache License, Version 2.0"
__copyright__ = "Copyright 2013 Donald Stufft"
|
<commit_before># Copyright 2013 Donald Stufft
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function
from __future__ import unicode_literals
__all__ = (
"__title__", "__summary__", "__uri__", "__version__", "__author__",
"__email__", "__license__", "__copyright__",
)
__title__ = "twine"
__summary__ = "Collection of utilities for interacting with PyPI"
__uri__ = "https://github.com/pypa/twine"
__version__ = "1.8.1"
__author__ = "Donald Stufft and individual contributors"
__email__ = "donald@stufft.io"
__license__ = "Apache License, Version 2.0"
__copyright__ = "Copyright 2013 Donald Stufft"
<commit_msg>Allow star imports from twine
Unicode literals on Python 2 prevent people from being able to use
from twine import *
Closes gh-209
(cherry picked from commit c2cd72d0f4ff4d380845333fbfaaf2c92d6a5674)<commit_after># Copyright 2013 Donald Stufft
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function
__all__ = (
"__title__", "__summary__", "__uri__", "__version__", "__author__",
"__email__", "__license__", "__copyright__",
)
__title__ = "twine"
__summary__ = "Collection of utilities for interacting with PyPI"
__uri__ = "https://github.com/pypa/twine"
__version__ = "1.8.1"
__author__ = "Donald Stufft and individual contributors"
__email__ = "donald@stufft.io"
__license__ = "Apache License, Version 2.0"
__copyright__ = "Copyright 2013 Donald Stufft"
|
0c84f6dd314ea62019356b09363f98118a4da776
|
txircd/factory.py
|
txircd/factory.py
|
from twisted.internet.protocol import ClientFactory, Factory
from txircd.server import IRCServer
from txircd.user import IRCUser
from ipaddress import ip_address
import re
ipv4MappedAddr = re.compile("::ffff:(\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})")
def unmapIPv4(ip: str) -> str:
"""
Converts an IPv6-mapped IPv4 address to a bare IPv4 address.
"""
mapped = ipv4MappedAddr.match(ip)
if mapped:
return mapped.group(1)
return ip
class UserFactory(Factory):
protocol = IRCUser
def __init__(self, ircd):
self.ircd = ircd
def buildProtocol(self, addr):
return self.protocol(self.ircd, ip_address(unmapIPv4(addr.host)))
class ServerListenFactory(Factory):
protocol = IRCServer
def __init__(self, ircd):
self.ircd = ircd
def buildProtocol(self, addr):
return self.protocol(self.ircd, ip_address(unmapIPv4(addr.host)), True)
class ServerConnectFactory(ClientFactory):
protocol = IRCServer
def __init__(self, ircd):
self.ircd = ircd
def buildProtocol(self, addr):
return self.protocol(self.ircd, ip_address(unmapIPv4(addr.host)), False)
|
from twisted.internet.protocol import ClientFactory, Factory
from txircd.server import IRCServer
from txircd.user import IRCUser
from ipaddress import ip_address
from typing import Union
def unmapIPv4(ip: str) -> Union["IPv4Address", "IPv6Address"]:
"""
Converts an IPv6-mapped IPv4 address to a bare IPv4 address.
"""
addr = ip_address(ip)
if addr.ipv4_mapped is None:
return addr
return addr.ipv4_mapped
class UserFactory(Factory):
protocol = IRCUser
def __init__(self, ircd):
self.ircd = ircd
def buildProtocol(self, addr):
return self.protocol(self.ircd, ip_address(unmapIPv4(addr.host)))
class ServerListenFactory(Factory):
protocol = IRCServer
def __init__(self, ircd):
self.ircd = ircd
def buildProtocol(self, addr):
return self.protocol(self.ircd, ip_address(unmapIPv4(addr.host)), True)
class ServerConnectFactory(ClientFactory):
protocol = IRCServer
def __init__(self, ircd):
self.ircd = ircd
def buildProtocol(self, addr):
return self.protocol(self.ircd, ip_address(unmapIPv4(addr.host)), False)
|
Use built-in IP address functionality to unmap IPv4 addresses
|
Use built-in IP address functionality to unmap IPv4 addresses
|
Python
|
bsd-3-clause
|
Heufneutje/txircd
|
from twisted.internet.protocol import ClientFactory, Factory
from txircd.server import IRCServer
from txircd.user import IRCUser
from ipaddress import ip_address
import re
ipv4MappedAddr = re.compile("::ffff:(\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})")
def unmapIPv4(ip: str) -> str:
"""
Converts an IPv6-mapped IPv4 address to a bare IPv4 address.
"""
mapped = ipv4MappedAddr.match(ip)
if mapped:
return mapped.group(1)
return ip
class UserFactory(Factory):
protocol = IRCUser
def __init__(self, ircd):
self.ircd = ircd
def buildProtocol(self, addr):
return self.protocol(self.ircd, ip_address(unmapIPv4(addr.host)))
class ServerListenFactory(Factory):
protocol = IRCServer
def __init__(self, ircd):
self.ircd = ircd
def buildProtocol(self, addr):
return self.protocol(self.ircd, ip_address(unmapIPv4(addr.host)), True)
class ServerConnectFactory(ClientFactory):
protocol = IRCServer
def __init__(self, ircd):
self.ircd = ircd
def buildProtocol(self, addr):
return self.protocol(self.ircd, ip_address(unmapIPv4(addr.host)), False)Use built-in IP address functionality to unmap IPv4 addresses
|
from twisted.internet.protocol import ClientFactory, Factory
from txircd.server import IRCServer
from txircd.user import IRCUser
from ipaddress import ip_address
from typing import Union
def unmapIPv4(ip: str) -> Union["IPv4Address", "IPv6Address"]:
"""
Converts an IPv6-mapped IPv4 address to a bare IPv4 address.
"""
addr = ip_address(ip)
if addr.ipv4_mapped is None:
return addr
return addr.ipv4_mapped
class UserFactory(Factory):
protocol = IRCUser
def __init__(self, ircd):
self.ircd = ircd
def buildProtocol(self, addr):
return self.protocol(self.ircd, ip_address(unmapIPv4(addr.host)))
class ServerListenFactory(Factory):
protocol = IRCServer
def __init__(self, ircd):
self.ircd = ircd
def buildProtocol(self, addr):
return self.protocol(self.ircd, ip_address(unmapIPv4(addr.host)), True)
class ServerConnectFactory(ClientFactory):
protocol = IRCServer
def __init__(self, ircd):
self.ircd = ircd
def buildProtocol(self, addr):
return self.protocol(self.ircd, ip_address(unmapIPv4(addr.host)), False)
|
<commit_before>from twisted.internet.protocol import ClientFactory, Factory
from txircd.server import IRCServer
from txircd.user import IRCUser
from ipaddress import ip_address
import re
ipv4MappedAddr = re.compile("::ffff:(\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})")
def unmapIPv4(ip: str) -> str:
"""
Converts an IPv6-mapped IPv4 address to a bare IPv4 address.
"""
mapped = ipv4MappedAddr.match(ip)
if mapped:
return mapped.group(1)
return ip
class UserFactory(Factory):
protocol = IRCUser
def __init__(self, ircd):
self.ircd = ircd
def buildProtocol(self, addr):
return self.protocol(self.ircd, ip_address(unmapIPv4(addr.host)))
class ServerListenFactory(Factory):
protocol = IRCServer
def __init__(self, ircd):
self.ircd = ircd
def buildProtocol(self, addr):
return self.protocol(self.ircd, ip_address(unmapIPv4(addr.host)), True)
class ServerConnectFactory(ClientFactory):
protocol = IRCServer
def __init__(self, ircd):
self.ircd = ircd
def buildProtocol(self, addr):
return self.protocol(self.ircd, ip_address(unmapIPv4(addr.host)), False)<commit_msg>Use built-in IP address functionality to unmap IPv4 addresses<commit_after>
|
from twisted.internet.protocol import ClientFactory, Factory
from txircd.server import IRCServer
from txircd.user import IRCUser
from ipaddress import ip_address
from typing import Union
def unmapIPv4(ip: str) -> Union["IPv4Address", "IPv6Address"]:
"""
Converts an IPv6-mapped IPv4 address to a bare IPv4 address.
"""
addr = ip_address(ip)
if addr.ipv4_mapped is None:
return addr
return addr.ipv4_mapped
class UserFactory(Factory):
protocol = IRCUser
def __init__(self, ircd):
self.ircd = ircd
def buildProtocol(self, addr):
return self.protocol(self.ircd, ip_address(unmapIPv4(addr.host)))
class ServerListenFactory(Factory):
protocol = IRCServer
def __init__(self, ircd):
self.ircd = ircd
def buildProtocol(self, addr):
return self.protocol(self.ircd, ip_address(unmapIPv4(addr.host)), True)
class ServerConnectFactory(ClientFactory):
protocol = IRCServer
def __init__(self, ircd):
self.ircd = ircd
def buildProtocol(self, addr):
return self.protocol(self.ircd, ip_address(unmapIPv4(addr.host)), False)
|
from twisted.internet.protocol import ClientFactory, Factory
from txircd.server import IRCServer
from txircd.user import IRCUser
from ipaddress import ip_address
import re
ipv4MappedAddr = re.compile("::ffff:(\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})")
def unmapIPv4(ip: str) -> str:
"""
Converts an IPv6-mapped IPv4 address to a bare IPv4 address.
"""
mapped = ipv4MappedAddr.match(ip)
if mapped:
return mapped.group(1)
return ip
class UserFactory(Factory):
protocol = IRCUser
def __init__(self, ircd):
self.ircd = ircd
def buildProtocol(self, addr):
return self.protocol(self.ircd, ip_address(unmapIPv4(addr.host)))
class ServerListenFactory(Factory):
protocol = IRCServer
def __init__(self, ircd):
self.ircd = ircd
def buildProtocol(self, addr):
return self.protocol(self.ircd, ip_address(unmapIPv4(addr.host)), True)
class ServerConnectFactory(ClientFactory):
protocol = IRCServer
def __init__(self, ircd):
self.ircd = ircd
def buildProtocol(self, addr):
return self.protocol(self.ircd, ip_address(unmapIPv4(addr.host)), False)Use built-in IP address functionality to unmap IPv4 addressesfrom twisted.internet.protocol import ClientFactory, Factory
from txircd.server import IRCServer
from txircd.user import IRCUser
from ipaddress import ip_address
from typing import Union
def unmapIPv4(ip: str) -> Union["IPv4Address", "IPv6Address"]:
"""
Converts an IPv6-mapped IPv4 address to a bare IPv4 address.
"""
addr = ip_address(ip)
if addr.ipv4_mapped is None:
return addr
return addr.ipv4_mapped
class UserFactory(Factory):
protocol = IRCUser
def __init__(self, ircd):
self.ircd = ircd
def buildProtocol(self, addr):
return self.protocol(self.ircd, ip_address(unmapIPv4(addr.host)))
class ServerListenFactory(Factory):
protocol = IRCServer
def __init__(self, ircd):
self.ircd = ircd
def buildProtocol(self, addr):
return self.protocol(self.ircd, ip_address(unmapIPv4(addr.host)), True)
class ServerConnectFactory(ClientFactory):
protocol = IRCServer
def __init__(self, ircd):
self.ircd = ircd
def buildProtocol(self, addr):
return self.protocol(self.ircd, ip_address(unmapIPv4(addr.host)), False)
|
<commit_before>from twisted.internet.protocol import ClientFactory, Factory
from txircd.server import IRCServer
from txircd.user import IRCUser
from ipaddress import ip_address
import re
ipv4MappedAddr = re.compile("::ffff:(\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})")
def unmapIPv4(ip: str) -> str:
"""
Converts an IPv6-mapped IPv4 address to a bare IPv4 address.
"""
mapped = ipv4MappedAddr.match(ip)
if mapped:
return mapped.group(1)
return ip
class UserFactory(Factory):
protocol = IRCUser
def __init__(self, ircd):
self.ircd = ircd
def buildProtocol(self, addr):
return self.protocol(self.ircd, ip_address(unmapIPv4(addr.host)))
class ServerListenFactory(Factory):
protocol = IRCServer
def __init__(self, ircd):
self.ircd = ircd
def buildProtocol(self, addr):
return self.protocol(self.ircd, ip_address(unmapIPv4(addr.host)), True)
class ServerConnectFactory(ClientFactory):
protocol = IRCServer
def __init__(self, ircd):
self.ircd = ircd
def buildProtocol(self, addr):
return self.protocol(self.ircd, ip_address(unmapIPv4(addr.host)), False)<commit_msg>Use built-in IP address functionality to unmap IPv4 addresses<commit_after>from twisted.internet.protocol import ClientFactory, Factory
from txircd.server import IRCServer
from txircd.user import IRCUser
from ipaddress import ip_address
from typing import Union
def unmapIPv4(ip: str) -> Union["IPv4Address", "IPv6Address"]:
"""
Converts an IPv6-mapped IPv4 address to a bare IPv4 address.
"""
addr = ip_address(ip)
if addr.ipv4_mapped is None:
return addr
return addr.ipv4_mapped
class UserFactory(Factory):
protocol = IRCUser
def __init__(self, ircd):
self.ircd = ircd
def buildProtocol(self, addr):
return self.protocol(self.ircd, ip_address(unmapIPv4(addr.host)))
class ServerListenFactory(Factory):
protocol = IRCServer
def __init__(self, ircd):
self.ircd = ircd
def buildProtocol(self, addr):
return self.protocol(self.ircd, ip_address(unmapIPv4(addr.host)), True)
class ServerConnectFactory(ClientFactory):
protocol = IRCServer
def __init__(self, ircd):
self.ircd = ircd
def buildProtocol(self, addr):
return self.protocol(self.ircd, ip_address(unmapIPv4(addr.host)), False)
|
bee5ed1d9815a4c4291179d0de3ec54fe467b219
|
project.py
|
project.py
|
import os, cPickle as pickle
import fileutil
class Project(object):
def __init__(self, name, rootdir, filename, session=None):
self.name = name
self.rootdir = rootdir
self.filename = filename
self.session = session
def read_project(filename, rootdir):
with open(filename, "rb") as f:
session = pickle.loads(f.read())
return Project("", rootdir, filename, session)
def write_project(project):
fileutil.mkpath(os.path.dirname(project.filename))
data = pickle.dumps(project.session, pickle.HIGHEST_PROTOCOL)
fileutil.atomic_write_file(project.filename, data)
|
import os
import json
import fileutil
class Project(object):
def __init__(self, name, rootdir, filename, session=None):
self.name = name
self.rootdir = rootdir
self.filename = filename
self.session = session
def read_project(filename, rootdir):
with open(filename, "rb") as f:
session = json.loads(f.read())
return Project("", rootdir, filename, session)
def write_project(project):
fileutil.mkpath(os.path.dirname(project.filename))
data = json.dumps(project.session, indent=2)
fileutil.atomic_write_file(project.filename, data)
|
Save sessions in JSON format instead of pickle.
|
Save sessions in JSON format instead of pickle.
|
Python
|
mit
|
shaurz/devo
|
import os, cPickle as pickle
import fileutil
class Project(object):
def __init__(self, name, rootdir, filename, session=None):
self.name = name
self.rootdir = rootdir
self.filename = filename
self.session = session
def read_project(filename, rootdir):
with open(filename, "rb") as f:
session = pickle.loads(f.read())
return Project("", rootdir, filename, session)
def write_project(project):
fileutil.mkpath(os.path.dirname(project.filename))
data = pickle.dumps(project.session, pickle.HIGHEST_PROTOCOL)
fileutil.atomic_write_file(project.filename, data)
Save sessions in JSON format instead of pickle.
|
import os
import json
import fileutil
class Project(object):
def __init__(self, name, rootdir, filename, session=None):
self.name = name
self.rootdir = rootdir
self.filename = filename
self.session = session
def read_project(filename, rootdir):
with open(filename, "rb") as f:
session = json.loads(f.read())
return Project("", rootdir, filename, session)
def write_project(project):
fileutil.mkpath(os.path.dirname(project.filename))
data = json.dumps(project.session, indent=2)
fileutil.atomic_write_file(project.filename, data)
|
<commit_before>import os, cPickle as pickle
import fileutil
class Project(object):
def __init__(self, name, rootdir, filename, session=None):
self.name = name
self.rootdir = rootdir
self.filename = filename
self.session = session
def read_project(filename, rootdir):
with open(filename, "rb") as f:
session = pickle.loads(f.read())
return Project("", rootdir, filename, session)
def write_project(project):
fileutil.mkpath(os.path.dirname(project.filename))
data = pickle.dumps(project.session, pickle.HIGHEST_PROTOCOL)
fileutil.atomic_write_file(project.filename, data)
<commit_msg>Save sessions in JSON format instead of pickle.<commit_after>
|
import os
import json
import fileutil
class Project(object):
def __init__(self, name, rootdir, filename, session=None):
self.name = name
self.rootdir = rootdir
self.filename = filename
self.session = session
def read_project(filename, rootdir):
with open(filename, "rb") as f:
session = json.loads(f.read())
return Project("", rootdir, filename, session)
def write_project(project):
fileutil.mkpath(os.path.dirname(project.filename))
data = json.dumps(project.session, indent=2)
fileutil.atomic_write_file(project.filename, data)
|
import os, cPickle as pickle
import fileutil
class Project(object):
def __init__(self, name, rootdir, filename, session=None):
self.name = name
self.rootdir = rootdir
self.filename = filename
self.session = session
def read_project(filename, rootdir):
with open(filename, "rb") as f:
session = pickle.loads(f.read())
return Project("", rootdir, filename, session)
def write_project(project):
fileutil.mkpath(os.path.dirname(project.filename))
data = pickle.dumps(project.session, pickle.HIGHEST_PROTOCOL)
fileutil.atomic_write_file(project.filename, data)
Save sessions in JSON format instead of pickle.import os
import json
import fileutil
class Project(object):
def __init__(self, name, rootdir, filename, session=None):
self.name = name
self.rootdir = rootdir
self.filename = filename
self.session = session
def read_project(filename, rootdir):
with open(filename, "rb") as f:
session = json.loads(f.read())
return Project("", rootdir, filename, session)
def write_project(project):
fileutil.mkpath(os.path.dirname(project.filename))
data = json.dumps(project.session, indent=2)
fileutil.atomic_write_file(project.filename, data)
|
<commit_before>import os, cPickle as pickle
import fileutil
class Project(object):
def __init__(self, name, rootdir, filename, session=None):
self.name = name
self.rootdir = rootdir
self.filename = filename
self.session = session
def read_project(filename, rootdir):
with open(filename, "rb") as f:
session = pickle.loads(f.read())
return Project("", rootdir, filename, session)
def write_project(project):
fileutil.mkpath(os.path.dirname(project.filename))
data = pickle.dumps(project.session, pickle.HIGHEST_PROTOCOL)
fileutil.atomic_write_file(project.filename, data)
<commit_msg>Save sessions in JSON format instead of pickle.<commit_after>import os
import json
import fileutil
class Project(object):
def __init__(self, name, rootdir, filename, session=None):
self.name = name
self.rootdir = rootdir
self.filename = filename
self.session = session
def read_project(filename, rootdir):
with open(filename, "rb") as f:
session = json.loads(f.read())
return Project("", rootdir, filename, session)
def write_project(project):
fileutil.mkpath(os.path.dirname(project.filename))
data = json.dumps(project.session, indent=2)
fileutil.atomic_write_file(project.filename, data)
|
a587d48694690957934a159bad98cacd3f012a6a
|
cms/tests/test_externals.py
|
cms/tests/test_externals.py
|
from django.test import TestCase
from ..externals import External
from contextlib import GeneratorContextManager
from types import FunctionType
class TestExternals(TestCase):
def test_load(self):
external = External('foo')
with self.assertRaises(ImportError):
external._load('')
def test_load_class(self):
external = External('foo')
self.assertIsInstance(external.load_class(''), object)
self.assertTrue(external.load_class('', fallback=True))
def test_load_method(self):
external = External('foo')
self.assertIsNone(external.load_method('')())
self.assertTrue(external.load_method('', fallback=True))
def test_context_manager(self):
external = External('foo')
self.assertIs(type(external.context_manager('')), FunctionType)
self.assertIsInstance(external.context_manager('')(), GeneratorContextManager)
self.assertTrue(external.context_manager('', fallback=True))
|
from django.test import TestCase
from ..externals import External
try:
from contextlib import GeneratorContextManager
except ImportError:
from contextlib import _GeneratorContextManager as GeneratorContextManager
from types import FunctionType
class TestExternals(TestCase):
def test_load(self):
external = External('foo')
with self.assertRaises(ImportError):
external._load('')
def test_load_class(self):
external = External('foo')
self.assertIsInstance(external.load_class(''), object)
self.assertTrue(external.load_class('', fallback=True))
def test_load_method(self):
external = External('foo')
self.assertIsNone(external.load_method('')())
self.assertTrue(external.load_method('', fallback=True))
def test_context_manager(self):
external = External('foo')
self.assertIs(type(external.context_manager('')), FunctionType)
self.assertIsInstance(external.context_manager('')(), GeneratorContextManager)
self.assertTrue(external.context_manager('', fallback=True))
|
Change contextlib import to handle the new location in Python 3.
|
Change contextlib import to handle the new location in Python 3.
|
Python
|
bsd-3-clause
|
danielsamuels/cms,jamesfoley/cms,jamesfoley/cms,jamesfoley/cms,dan-gamble/cms,danielsamuels/cms,dan-gamble/cms,lewiscollard/cms,jamesfoley/cms,dan-gamble/cms,lewiscollard/cms,danielsamuels/cms,lewiscollard/cms
|
from django.test import TestCase
from ..externals import External
from contextlib import GeneratorContextManager
from types import FunctionType
class TestExternals(TestCase):
def test_load(self):
external = External('foo')
with self.assertRaises(ImportError):
external._load('')
def test_load_class(self):
external = External('foo')
self.assertIsInstance(external.load_class(''), object)
self.assertTrue(external.load_class('', fallback=True))
def test_load_method(self):
external = External('foo')
self.assertIsNone(external.load_method('')())
self.assertTrue(external.load_method('', fallback=True))
def test_context_manager(self):
external = External('foo')
self.assertIs(type(external.context_manager('')), FunctionType)
self.assertIsInstance(external.context_manager('')(), GeneratorContextManager)
self.assertTrue(external.context_manager('', fallback=True))
Change contextlib import to handle the new location in Python 3.
|
from django.test import TestCase
from ..externals import External
try:
from contextlib import GeneratorContextManager
except ImportError:
from contextlib import _GeneratorContextManager as GeneratorContextManager
from types import FunctionType
class TestExternals(TestCase):
def test_load(self):
external = External('foo')
with self.assertRaises(ImportError):
external._load('')
def test_load_class(self):
external = External('foo')
self.assertIsInstance(external.load_class(''), object)
self.assertTrue(external.load_class('', fallback=True))
def test_load_method(self):
external = External('foo')
self.assertIsNone(external.load_method('')())
self.assertTrue(external.load_method('', fallback=True))
def test_context_manager(self):
external = External('foo')
self.assertIs(type(external.context_manager('')), FunctionType)
self.assertIsInstance(external.context_manager('')(), GeneratorContextManager)
self.assertTrue(external.context_manager('', fallback=True))
|
<commit_before>from django.test import TestCase
from ..externals import External
from contextlib import GeneratorContextManager
from types import FunctionType
class TestExternals(TestCase):
def test_load(self):
external = External('foo')
with self.assertRaises(ImportError):
external._load('')
def test_load_class(self):
external = External('foo')
self.assertIsInstance(external.load_class(''), object)
self.assertTrue(external.load_class('', fallback=True))
def test_load_method(self):
external = External('foo')
self.assertIsNone(external.load_method('')())
self.assertTrue(external.load_method('', fallback=True))
def test_context_manager(self):
external = External('foo')
self.assertIs(type(external.context_manager('')), FunctionType)
self.assertIsInstance(external.context_manager('')(), GeneratorContextManager)
self.assertTrue(external.context_manager('', fallback=True))
<commit_msg>Change contextlib import to handle the new location in Python 3.<commit_after>
|
from django.test import TestCase
from ..externals import External
try:
from contextlib import GeneratorContextManager
except ImportError:
from contextlib import _GeneratorContextManager as GeneratorContextManager
from types import FunctionType
class TestExternals(TestCase):
def test_load(self):
external = External('foo')
with self.assertRaises(ImportError):
external._load('')
def test_load_class(self):
external = External('foo')
self.assertIsInstance(external.load_class(''), object)
self.assertTrue(external.load_class('', fallback=True))
def test_load_method(self):
external = External('foo')
self.assertIsNone(external.load_method('')())
self.assertTrue(external.load_method('', fallback=True))
def test_context_manager(self):
external = External('foo')
self.assertIs(type(external.context_manager('')), FunctionType)
self.assertIsInstance(external.context_manager('')(), GeneratorContextManager)
self.assertTrue(external.context_manager('', fallback=True))
|
from django.test import TestCase
from ..externals import External
from contextlib import GeneratorContextManager
from types import FunctionType
class TestExternals(TestCase):
def test_load(self):
external = External('foo')
with self.assertRaises(ImportError):
external._load('')
def test_load_class(self):
external = External('foo')
self.assertIsInstance(external.load_class(''), object)
self.assertTrue(external.load_class('', fallback=True))
def test_load_method(self):
external = External('foo')
self.assertIsNone(external.load_method('')())
self.assertTrue(external.load_method('', fallback=True))
def test_context_manager(self):
external = External('foo')
self.assertIs(type(external.context_manager('')), FunctionType)
self.assertIsInstance(external.context_manager('')(), GeneratorContextManager)
self.assertTrue(external.context_manager('', fallback=True))
Change contextlib import to handle the new location in Python 3.from django.test import TestCase
from ..externals import External
try:
from contextlib import GeneratorContextManager
except ImportError:
from contextlib import _GeneratorContextManager as GeneratorContextManager
from types import FunctionType
class TestExternals(TestCase):
def test_load(self):
external = External('foo')
with self.assertRaises(ImportError):
external._load('')
def test_load_class(self):
external = External('foo')
self.assertIsInstance(external.load_class(''), object)
self.assertTrue(external.load_class('', fallback=True))
def test_load_method(self):
external = External('foo')
self.assertIsNone(external.load_method('')())
self.assertTrue(external.load_method('', fallback=True))
def test_context_manager(self):
external = External('foo')
self.assertIs(type(external.context_manager('')), FunctionType)
self.assertIsInstance(external.context_manager('')(), GeneratorContextManager)
self.assertTrue(external.context_manager('', fallback=True))
|
<commit_before>from django.test import TestCase
from ..externals import External
from contextlib import GeneratorContextManager
from types import FunctionType
class TestExternals(TestCase):
def test_load(self):
external = External('foo')
with self.assertRaises(ImportError):
external._load('')
def test_load_class(self):
external = External('foo')
self.assertIsInstance(external.load_class(''), object)
self.assertTrue(external.load_class('', fallback=True))
def test_load_method(self):
external = External('foo')
self.assertIsNone(external.load_method('')())
self.assertTrue(external.load_method('', fallback=True))
def test_context_manager(self):
external = External('foo')
self.assertIs(type(external.context_manager('')), FunctionType)
self.assertIsInstance(external.context_manager('')(), GeneratorContextManager)
self.assertTrue(external.context_manager('', fallback=True))
<commit_msg>Change contextlib import to handle the new location in Python 3.<commit_after>from django.test import TestCase
from ..externals import External
try:
from contextlib import GeneratorContextManager
except ImportError:
from contextlib import _GeneratorContextManager as GeneratorContextManager
from types import FunctionType
class TestExternals(TestCase):
def test_load(self):
external = External('foo')
with self.assertRaises(ImportError):
external._load('')
def test_load_class(self):
external = External('foo')
self.assertIsInstance(external.load_class(''), object)
self.assertTrue(external.load_class('', fallback=True))
def test_load_method(self):
external = External('foo')
self.assertIsNone(external.load_method('')())
self.assertTrue(external.load_method('', fallback=True))
def test_context_manager(self):
external = External('foo')
self.assertIs(type(external.context_manager('')), FunctionType)
self.assertIsInstance(external.context_manager('')(), GeneratorContextManager)
self.assertTrue(external.context_manager('', fallback=True))
|
278069a0637f7f329ceaff0975e3b95d609a7b9f
|
cosmoscope/cli.py
|
cosmoscope/cli.py
|
# -*- coding: utf-8 -*-
"""Console script for cosmoscope."""
import sys
import click
@click.command()
def main(args=None):
"""Console script for cosmoscope."""
click.echo("Replace this message by putting your code into "
"cosmoscope.cli.main")
click.echo("See click documentation at http://click.pocoo.org/")
return 0
if __name__ == "__main__":
sys.exit(main()) # pragma: no cover
|
# -*- coding: utf-8 -*-
"""Console script for cosmoscope."""
import sys
import click
from .core.server import launch
@click.command()
@click.option('--server-address', default="tcp://127.0.0.1:4242", help="Server IP address.")
@click.option('--publisher-address', default="tcp://127.0.0.1:4243", help="Publisher IP address.")
def main(server_address=None, publisher_address=None):
"""Console interface for the cosmoscope server."""
launch(server_address, publisher_address)
if __name__ == "__main__":
sys.exit(main()) # pragma: no cover
|
Improve the command line interface
|
Improve the command line interface
|
Python
|
mit
|
cosmoscope/cosmoscope
|
# -*- coding: utf-8 -*-
"""Console script for cosmoscope."""
import sys
import click
@click.command()
def main(args=None):
"""Console script for cosmoscope."""
click.echo("Replace this message by putting your code into "
"cosmoscope.cli.main")
click.echo("See click documentation at http://click.pocoo.org/")
return 0
if __name__ == "__main__":
sys.exit(main()) # pragma: no cover
Improve the command line interface
|
# -*- coding: utf-8 -*-
"""Console script for cosmoscope."""
import sys
import click
from .core.server import launch
@click.command()
@click.option('--server-address', default="tcp://127.0.0.1:4242", help="Server IP address.")
@click.option('--publisher-address', default="tcp://127.0.0.1:4243", help="Publisher IP address.")
def main(server_address=None, publisher_address=None):
"""Console interface for the cosmoscope server."""
launch(server_address, publisher_address)
if __name__ == "__main__":
sys.exit(main()) # pragma: no cover
|
<commit_before># -*- coding: utf-8 -*-
"""Console script for cosmoscope."""
import sys
import click
@click.command()
def main(args=None):
"""Console script for cosmoscope."""
click.echo("Replace this message by putting your code into "
"cosmoscope.cli.main")
click.echo("See click documentation at http://click.pocoo.org/")
return 0
if __name__ == "__main__":
sys.exit(main()) # pragma: no cover
<commit_msg>Improve the command line interface<commit_after>
|
# -*- coding: utf-8 -*-
"""Console script for cosmoscope."""
import sys
import click
from .core.server import launch
@click.command()
@click.option('--server-address', default="tcp://127.0.0.1:4242", help="Server IP address.")
@click.option('--publisher-address', default="tcp://127.0.0.1:4243", help="Publisher IP address.")
def main(server_address=None, publisher_address=None):
"""Console interface for the cosmoscope server."""
launch(server_address, publisher_address)
if __name__ == "__main__":
sys.exit(main()) # pragma: no cover
|
# -*- coding: utf-8 -*-
"""Console script for cosmoscope."""
import sys
import click
@click.command()
def main(args=None):
"""Console script for cosmoscope."""
click.echo("Replace this message by putting your code into "
"cosmoscope.cli.main")
click.echo("See click documentation at http://click.pocoo.org/")
return 0
if __name__ == "__main__":
sys.exit(main()) # pragma: no cover
Improve the command line interface# -*- coding: utf-8 -*-
"""Console script for cosmoscope."""
import sys
import click
from .core.server import launch
@click.command()
@click.option('--server-address', default="tcp://127.0.0.1:4242", help="Server IP address.")
@click.option('--publisher-address', default="tcp://127.0.0.1:4243", help="Publisher IP address.")
def main(server_address=None, publisher_address=None):
"""Console interface for the cosmoscope server."""
launch(server_address, publisher_address)
if __name__ == "__main__":
sys.exit(main()) # pragma: no cover
|
<commit_before># -*- coding: utf-8 -*-
"""Console script for cosmoscope."""
import sys
import click
@click.command()
def main(args=None):
"""Console script for cosmoscope."""
click.echo("Replace this message by putting your code into "
"cosmoscope.cli.main")
click.echo("See click documentation at http://click.pocoo.org/")
return 0
if __name__ == "__main__":
sys.exit(main()) # pragma: no cover
<commit_msg>Improve the command line interface<commit_after># -*- coding: utf-8 -*-
"""Console script for cosmoscope."""
import sys
import click
from .core.server import launch
@click.command()
@click.option('--server-address', default="tcp://127.0.0.1:4242", help="Server IP address.")
@click.option('--publisher-address', default="tcp://127.0.0.1:4243", help="Publisher IP address.")
def main(server_address=None, publisher_address=None):
"""Console interface for the cosmoscope server."""
launch(server_address, publisher_address)
if __name__ == "__main__":
sys.exit(main()) # pragma: no cover
|
780002f5df7278776a26bb65b4b0f3dce84537a1
|
pentai/db/test_db.py
|
pentai/db/test_db.py
|
import os
import logging
def init():
""" TODO: Call this setUp """
global initialised
try:
if initialised:
return
except:
init_logging()
import zodb_dict as z_m
z_m.set_db("test.db")
initialised = True
def init_logging():
logger = logging.getLogger('ZODB.FileStorage')
logger.setLevel(logging.DEBUG)
# create file handler which logs even debug messages
fh = logging.FileHandler('test.log')
fh.setLevel(logging.DEBUG)
# create console handler with a higher log level
ch = logging.StreamHandler()
ch.setLevel(logging.ERROR)
# create formatter and add it to the handlers
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
fh.setFormatter(formatter)
ch.setFormatter(formatter)
# add the handlers to the logger
logger.addHandler(fh)
logger.addHandler(ch)
init()
def clear_all():
""" TODO: Call this tearDown """
import zodb_dict as z_m
z_m.sync()
z_m.close()
z_m.delete_all_dangerous()
global initialised
initialised = False
import misc_db
misc_db.reset()
import openings_book as ob_m
ob_m.instance = None
|
import os
import logging
def init():
""" TODO: Call this setUp """
global initialised
try:
if initialised:
return
except:
init_logging()
import zodb_dict as z_m
# Use kivy's user_data_dir so we're guaranteed write access
os.environ['KIVY_NO_CONSOLELOG'] = ''
from kivy.app import App
a = App()
d = a.user_data_dir
z_m.set_db(os.path.join(d, "test.db"))
initialised = True
def init_logging():
logger = logging.getLogger('ZODB.FileStorage')
logger.setLevel(logging.DEBUG)
# create console handler with a higher log level
ch = logging.StreamHandler()
ch.setLevel(logging.ERROR)
# create formatter and add it to the handlers
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
ch.setFormatter(formatter)
# add the handlers to the logger
logger.addHandler(ch)
init()
def clear_all():
""" TODO: Call this tearDown """
import zodb_dict as z_m
z_m.sync()
z_m.close()
z_m.delete_all_dangerous()
global initialised
initialised = False
import misc_db
misc_db.reset()
import openings_book as ob_m
ob_m.instance = None
|
Allow unit tests to run on iOS. (filesystem restriction)
|
Allow unit tests to run on iOS. (filesystem restriction)
|
Python
|
mit
|
cropleyb/pentai,cropleyb/pentai,cropleyb/pentai
|
import os
import logging
def init():
""" TODO: Call this setUp """
global initialised
try:
if initialised:
return
except:
init_logging()
import zodb_dict as z_m
z_m.set_db("test.db")
initialised = True
def init_logging():
logger = logging.getLogger('ZODB.FileStorage')
logger.setLevel(logging.DEBUG)
# create file handler which logs even debug messages
fh = logging.FileHandler('test.log')
fh.setLevel(logging.DEBUG)
# create console handler with a higher log level
ch = logging.StreamHandler()
ch.setLevel(logging.ERROR)
# create formatter and add it to the handlers
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
fh.setFormatter(formatter)
ch.setFormatter(formatter)
# add the handlers to the logger
logger.addHandler(fh)
logger.addHandler(ch)
init()
def clear_all():
""" TODO: Call this tearDown """
import zodb_dict as z_m
z_m.sync()
z_m.close()
z_m.delete_all_dangerous()
global initialised
initialised = False
import misc_db
misc_db.reset()
import openings_book as ob_m
ob_m.instance = None
Allow unit tests to run on iOS. (filesystem restriction)
|
import os
import logging
def init():
""" TODO: Call this setUp """
global initialised
try:
if initialised:
return
except:
init_logging()
import zodb_dict as z_m
# Use kivy's user_data_dir so we're guaranteed write access
os.environ['KIVY_NO_CONSOLELOG'] = ''
from kivy.app import App
a = App()
d = a.user_data_dir
z_m.set_db(os.path.join(d, "test.db"))
initialised = True
def init_logging():
logger = logging.getLogger('ZODB.FileStorage')
logger.setLevel(logging.DEBUG)
# create console handler with a higher log level
ch = logging.StreamHandler()
ch.setLevel(logging.ERROR)
# create formatter and add it to the handlers
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
ch.setFormatter(formatter)
# add the handlers to the logger
logger.addHandler(ch)
init()
def clear_all():
""" TODO: Call this tearDown """
import zodb_dict as z_m
z_m.sync()
z_m.close()
z_m.delete_all_dangerous()
global initialised
initialised = False
import misc_db
misc_db.reset()
import openings_book as ob_m
ob_m.instance = None
|
<commit_before>import os
import logging
def init():
""" TODO: Call this setUp """
global initialised
try:
if initialised:
return
except:
init_logging()
import zodb_dict as z_m
z_m.set_db("test.db")
initialised = True
def init_logging():
logger = logging.getLogger('ZODB.FileStorage')
logger.setLevel(logging.DEBUG)
# create file handler which logs even debug messages
fh = logging.FileHandler('test.log')
fh.setLevel(logging.DEBUG)
# create console handler with a higher log level
ch = logging.StreamHandler()
ch.setLevel(logging.ERROR)
# create formatter and add it to the handlers
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
fh.setFormatter(formatter)
ch.setFormatter(formatter)
# add the handlers to the logger
logger.addHandler(fh)
logger.addHandler(ch)
init()
def clear_all():
""" TODO: Call this tearDown """
import zodb_dict as z_m
z_m.sync()
z_m.close()
z_m.delete_all_dangerous()
global initialised
initialised = False
import misc_db
misc_db.reset()
import openings_book as ob_m
ob_m.instance = None
<commit_msg>Allow unit tests to run on iOS. (filesystem restriction)<commit_after>
|
import os
import logging
def init():
""" TODO: Call this setUp """
global initialised
try:
if initialised:
return
except:
init_logging()
import zodb_dict as z_m
# Use kivy's user_data_dir so we're guaranteed write access
os.environ['KIVY_NO_CONSOLELOG'] = ''
from kivy.app import App
a = App()
d = a.user_data_dir
z_m.set_db(os.path.join(d, "test.db"))
initialised = True
def init_logging():
logger = logging.getLogger('ZODB.FileStorage')
logger.setLevel(logging.DEBUG)
# create console handler with a higher log level
ch = logging.StreamHandler()
ch.setLevel(logging.ERROR)
# create formatter and add it to the handlers
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
ch.setFormatter(formatter)
# add the handlers to the logger
logger.addHandler(ch)
init()
def clear_all():
""" TODO: Call this tearDown """
import zodb_dict as z_m
z_m.sync()
z_m.close()
z_m.delete_all_dangerous()
global initialised
initialised = False
import misc_db
misc_db.reset()
import openings_book as ob_m
ob_m.instance = None
|
import os
import logging
def init():
""" TODO: Call this setUp """
global initialised
try:
if initialised:
return
except:
init_logging()
import zodb_dict as z_m
z_m.set_db("test.db")
initialised = True
def init_logging():
logger = logging.getLogger('ZODB.FileStorage')
logger.setLevel(logging.DEBUG)
# create file handler which logs even debug messages
fh = logging.FileHandler('test.log')
fh.setLevel(logging.DEBUG)
# create console handler with a higher log level
ch = logging.StreamHandler()
ch.setLevel(logging.ERROR)
# create formatter and add it to the handlers
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
fh.setFormatter(formatter)
ch.setFormatter(formatter)
# add the handlers to the logger
logger.addHandler(fh)
logger.addHandler(ch)
init()
def clear_all():
""" TODO: Call this tearDown """
import zodb_dict as z_m
z_m.sync()
z_m.close()
z_m.delete_all_dangerous()
global initialised
initialised = False
import misc_db
misc_db.reset()
import openings_book as ob_m
ob_m.instance = None
Allow unit tests to run on iOS. (filesystem restriction)import os
import logging
def init():
""" TODO: Call this setUp """
global initialised
try:
if initialised:
return
except:
init_logging()
import zodb_dict as z_m
# Use kivy's user_data_dir so we're guaranteed write access
os.environ['KIVY_NO_CONSOLELOG'] = ''
from kivy.app import App
a = App()
d = a.user_data_dir
z_m.set_db(os.path.join(d, "test.db"))
initialised = True
def init_logging():
logger = logging.getLogger('ZODB.FileStorage')
logger.setLevel(logging.DEBUG)
# create console handler with a higher log level
ch = logging.StreamHandler()
ch.setLevel(logging.ERROR)
# create formatter and add it to the handlers
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
ch.setFormatter(formatter)
# add the handlers to the logger
logger.addHandler(ch)
init()
def clear_all():
""" TODO: Call this tearDown """
import zodb_dict as z_m
z_m.sync()
z_m.close()
z_m.delete_all_dangerous()
global initialised
initialised = False
import misc_db
misc_db.reset()
import openings_book as ob_m
ob_m.instance = None
|
<commit_before>import os
import logging
def init():
""" TODO: Call this setUp """
global initialised
try:
if initialised:
return
except:
init_logging()
import zodb_dict as z_m
z_m.set_db("test.db")
initialised = True
def init_logging():
logger = logging.getLogger('ZODB.FileStorage')
logger.setLevel(logging.DEBUG)
# create file handler which logs even debug messages
fh = logging.FileHandler('test.log')
fh.setLevel(logging.DEBUG)
# create console handler with a higher log level
ch = logging.StreamHandler()
ch.setLevel(logging.ERROR)
# create formatter and add it to the handlers
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
fh.setFormatter(formatter)
ch.setFormatter(formatter)
# add the handlers to the logger
logger.addHandler(fh)
logger.addHandler(ch)
init()
def clear_all():
""" TODO: Call this tearDown """
import zodb_dict as z_m
z_m.sync()
z_m.close()
z_m.delete_all_dangerous()
global initialised
initialised = False
import misc_db
misc_db.reset()
import openings_book as ob_m
ob_m.instance = None
<commit_msg>Allow unit tests to run on iOS. (filesystem restriction)<commit_after>import os
import logging
def init():
""" TODO: Call this setUp """
global initialised
try:
if initialised:
return
except:
init_logging()
import zodb_dict as z_m
# Use kivy's user_data_dir so we're guaranteed write access
os.environ['KIVY_NO_CONSOLELOG'] = ''
from kivy.app import App
a = App()
d = a.user_data_dir
z_m.set_db(os.path.join(d, "test.db"))
initialised = True
def init_logging():
logger = logging.getLogger('ZODB.FileStorage')
logger.setLevel(logging.DEBUG)
# create console handler with a higher log level
ch = logging.StreamHandler()
ch.setLevel(logging.ERROR)
# create formatter and add it to the handlers
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
ch.setFormatter(formatter)
# add the handlers to the logger
logger.addHandler(ch)
init()
def clear_all():
""" TODO: Call this tearDown """
import zodb_dict as z_m
z_m.sync()
z_m.close()
z_m.delete_all_dangerous()
global initialised
initialised = False
import misc_db
misc_db.reset()
import openings_book as ob_m
ob_m.instance = None
|
4efa9c87264eabb6712f4fb787ab0de42be18de6
|
places/urls.py
|
places/urls.py
|
from django.conf.urls import url
from . import views
app_name = 'places'
urlpatterns = [
url(r'^$', views.IndexView.as_view(), name='index'),
url(r'^(?P<slug>[-\w]+)/$', views.PlaceView.as_view(), name='place'),
]
|
from django.urls import include, path
from . import views
app_name = 'places'
urlpatterns = [
path('', views.IndexView.as_view(), name='index'),
path('<slug:slug>/', views.PlaceView.as_view(), name='place'),
]
|
Move places urlpatterns to Django 2.0 preferred method
|
Move places urlpatterns to Django 2.0 preferred method
|
Python
|
mit
|
evanepio/dotmanca,evanepio/dotmanca,evanepio/dotmanca
|
from django.conf.urls import url
from . import views
app_name = 'places'
urlpatterns = [
url(r'^$', views.IndexView.as_view(), name='index'),
url(r'^(?P<slug>[-\w]+)/$', views.PlaceView.as_view(), name='place'),
]
Move places urlpatterns to Django 2.0 preferred method
|
from django.urls import include, path
from . import views
app_name = 'places'
urlpatterns = [
path('', views.IndexView.as_view(), name='index'),
path('<slug:slug>/', views.PlaceView.as_view(), name='place'),
]
|
<commit_before>from django.conf.urls import url
from . import views
app_name = 'places'
urlpatterns = [
url(r'^$', views.IndexView.as_view(), name='index'),
url(r'^(?P<slug>[-\w]+)/$', views.PlaceView.as_view(), name='place'),
]
<commit_msg>Move places urlpatterns to Django 2.0 preferred method<commit_after>
|
from django.urls import include, path
from . import views
app_name = 'places'
urlpatterns = [
path('', views.IndexView.as_view(), name='index'),
path('<slug:slug>/', views.PlaceView.as_view(), name='place'),
]
|
from django.conf.urls import url
from . import views
app_name = 'places'
urlpatterns = [
url(r'^$', views.IndexView.as_view(), name='index'),
url(r'^(?P<slug>[-\w]+)/$', views.PlaceView.as_view(), name='place'),
]
Move places urlpatterns to Django 2.0 preferred methodfrom django.urls import include, path
from . import views
app_name = 'places'
urlpatterns = [
path('', views.IndexView.as_view(), name='index'),
path('<slug:slug>/', views.PlaceView.as_view(), name='place'),
]
|
<commit_before>from django.conf.urls import url
from . import views
app_name = 'places'
urlpatterns = [
url(r'^$', views.IndexView.as_view(), name='index'),
url(r'^(?P<slug>[-\w]+)/$', views.PlaceView.as_view(), name='place'),
]
<commit_msg>Move places urlpatterns to Django 2.0 preferred method<commit_after>from django.urls import include, path
from . import views
app_name = 'places'
urlpatterns = [
path('', views.IndexView.as_view(), name='index'),
path('<slug:slug>/', views.PlaceView.as_view(), name='place'),
]
|
a9dc245f99e5c29f3b11cadc77dcfa0f44274b74
|
ctfbackend/backend/urls.py
|
ctfbackend/backend/urls.py
|
from django.conf.urls import url, include
from django.http import HttpResponseRedirect
from django.conf import settings
from django.contrib.auth import views as auth_views
from . import views
urlpatterns = [
# Authentication
## Override logout next_page
url(r'^accounts/logout/$', auth_views.logout, {'next_page': '/'}, name='auth_logout'),
url(r'^accounts/', include('registration.backends.hmac.urls')),
# Backend urls
url(r'^$', views.HomeView.as_view(), name='home'),
url(r'^submit$', views.SubmitView.as_view(), name='submit'),
url(r'^scores$', views.ScoreboardView.as_view(), name='scores'),
url(r'^chals$', views.ChallengesView.as_view(), name='chals'),
url(r'^chals/hint/(?P<buy_hint>[0-9]+)/buy', views.ChallengesView.as_view(), name='buy_hint'),
url(r'^stats$', views.StatisticsView.as_view(), name='stats'),
]
|
from django.conf.urls import url, include
from django.contrib.auth import views as auth_views
from . import views
from django.contrib.auth.decorators import login_required
urlpatterns = [
# Authentication
## Override logout next_page
url(r'^accounts/logout/$',
auth_views.logout,
{'next_page': '/'},
name='auth_logout'),
url(r'^accounts/',
include('registration.backends.hmac.urls')),
# Backend urls
url(r'^$',
views.HomeView.as_view(),
name='home'),
url(r'^submit$',
login_required(views.SubmitView.as_view()),
name='submit'),
url(r'^scores$',
views.ScoreboardView.as_view(),
name='scores'),
url(r'^chals$',
login_required(views.ChallengesView.as_view()),
name='chals'),
url(r'^chals/hint/(?P<buy_hint>[0-9]+)/buy',
login_required(views.ChallengesView.as_view()),
name='buy_hint'),
url(r'^stats$',
views.StatisticsView.as_view(),
name='stats'),
]
|
Add login_required decorator to protected sites
|
Add login_required decorator to protected sites
|
Python
|
agpl-3.0
|
c0d3z3r0/ctf-backend,c0d3z3r0/ctf-backend,c0d3z3r0/ctf-backend,c0d3z3r0/ctf-backend
|
from django.conf.urls import url, include
from django.http import HttpResponseRedirect
from django.conf import settings
from django.contrib.auth import views as auth_views
from . import views
urlpatterns = [
# Authentication
## Override logout next_page
url(r'^accounts/logout/$', auth_views.logout, {'next_page': '/'}, name='auth_logout'),
url(r'^accounts/', include('registration.backends.hmac.urls')),
# Backend urls
url(r'^$', views.HomeView.as_view(), name='home'),
url(r'^submit$', views.SubmitView.as_view(), name='submit'),
url(r'^scores$', views.ScoreboardView.as_view(), name='scores'),
url(r'^chals$', views.ChallengesView.as_view(), name='chals'),
url(r'^chals/hint/(?P<buy_hint>[0-9]+)/buy', views.ChallengesView.as_view(), name='buy_hint'),
url(r'^stats$', views.StatisticsView.as_view(), name='stats'),
]
Add login_required decorator to protected sites
|
from django.conf.urls import url, include
from django.contrib.auth import views as auth_views
from . import views
from django.contrib.auth.decorators import login_required
urlpatterns = [
# Authentication
## Override logout next_page
url(r'^accounts/logout/$',
auth_views.logout,
{'next_page': '/'},
name='auth_logout'),
url(r'^accounts/',
include('registration.backends.hmac.urls')),
# Backend urls
url(r'^$',
views.HomeView.as_view(),
name='home'),
url(r'^submit$',
login_required(views.SubmitView.as_view()),
name='submit'),
url(r'^scores$',
views.ScoreboardView.as_view(),
name='scores'),
url(r'^chals$',
login_required(views.ChallengesView.as_view()),
name='chals'),
url(r'^chals/hint/(?P<buy_hint>[0-9]+)/buy',
login_required(views.ChallengesView.as_view()),
name='buy_hint'),
url(r'^stats$',
views.StatisticsView.as_view(),
name='stats'),
]
|
<commit_before>from django.conf.urls import url, include
from django.http import HttpResponseRedirect
from django.conf import settings
from django.contrib.auth import views as auth_views
from . import views
urlpatterns = [
# Authentication
## Override logout next_page
url(r'^accounts/logout/$', auth_views.logout, {'next_page': '/'}, name='auth_logout'),
url(r'^accounts/', include('registration.backends.hmac.urls')),
# Backend urls
url(r'^$', views.HomeView.as_view(), name='home'),
url(r'^submit$', views.SubmitView.as_view(), name='submit'),
url(r'^scores$', views.ScoreboardView.as_view(), name='scores'),
url(r'^chals$', views.ChallengesView.as_view(), name='chals'),
url(r'^chals/hint/(?P<buy_hint>[0-9]+)/buy', views.ChallengesView.as_view(), name='buy_hint'),
url(r'^stats$', views.StatisticsView.as_view(), name='stats'),
]
<commit_msg>Add login_required decorator to protected sites<commit_after>
|
from django.conf.urls import url, include
from django.contrib.auth import views as auth_views
from . import views
from django.contrib.auth.decorators import login_required
urlpatterns = [
# Authentication
## Override logout next_page
url(r'^accounts/logout/$',
auth_views.logout,
{'next_page': '/'},
name='auth_logout'),
url(r'^accounts/',
include('registration.backends.hmac.urls')),
# Backend urls
url(r'^$',
views.HomeView.as_view(),
name='home'),
url(r'^submit$',
login_required(views.SubmitView.as_view()),
name='submit'),
url(r'^scores$',
views.ScoreboardView.as_view(),
name='scores'),
url(r'^chals$',
login_required(views.ChallengesView.as_view()),
name='chals'),
url(r'^chals/hint/(?P<buy_hint>[0-9]+)/buy',
login_required(views.ChallengesView.as_view()),
name='buy_hint'),
url(r'^stats$',
views.StatisticsView.as_view(),
name='stats'),
]
|
from django.conf.urls import url, include
from django.http import HttpResponseRedirect
from django.conf import settings
from django.contrib.auth import views as auth_views
from . import views
urlpatterns = [
# Authentication
## Override logout next_page
url(r'^accounts/logout/$', auth_views.logout, {'next_page': '/'}, name='auth_logout'),
url(r'^accounts/', include('registration.backends.hmac.urls')),
# Backend urls
url(r'^$', views.HomeView.as_view(), name='home'),
url(r'^submit$', views.SubmitView.as_view(), name='submit'),
url(r'^scores$', views.ScoreboardView.as_view(), name='scores'),
url(r'^chals$', views.ChallengesView.as_view(), name='chals'),
url(r'^chals/hint/(?P<buy_hint>[0-9]+)/buy', views.ChallengesView.as_view(), name='buy_hint'),
url(r'^stats$', views.StatisticsView.as_view(), name='stats'),
]
Add login_required decorator to protected sitesfrom django.conf.urls import url, include
from django.contrib.auth import views as auth_views
from . import views
from django.contrib.auth.decorators import login_required
urlpatterns = [
# Authentication
## Override logout next_page
url(r'^accounts/logout/$',
auth_views.logout,
{'next_page': '/'},
name='auth_logout'),
url(r'^accounts/',
include('registration.backends.hmac.urls')),
# Backend urls
url(r'^$',
views.HomeView.as_view(),
name='home'),
url(r'^submit$',
login_required(views.SubmitView.as_view()),
name='submit'),
url(r'^scores$',
views.ScoreboardView.as_view(),
name='scores'),
url(r'^chals$',
login_required(views.ChallengesView.as_view()),
name='chals'),
url(r'^chals/hint/(?P<buy_hint>[0-9]+)/buy',
login_required(views.ChallengesView.as_view()),
name='buy_hint'),
url(r'^stats$',
views.StatisticsView.as_view(),
name='stats'),
]
|
<commit_before>from django.conf.urls import url, include
from django.http import HttpResponseRedirect
from django.conf import settings
from django.contrib.auth import views as auth_views
from . import views
urlpatterns = [
# Authentication
## Override logout next_page
url(r'^accounts/logout/$', auth_views.logout, {'next_page': '/'}, name='auth_logout'),
url(r'^accounts/', include('registration.backends.hmac.urls')),
# Backend urls
url(r'^$', views.HomeView.as_view(), name='home'),
url(r'^submit$', views.SubmitView.as_view(), name='submit'),
url(r'^scores$', views.ScoreboardView.as_view(), name='scores'),
url(r'^chals$', views.ChallengesView.as_view(), name='chals'),
url(r'^chals/hint/(?P<buy_hint>[0-9]+)/buy', views.ChallengesView.as_view(), name='buy_hint'),
url(r'^stats$', views.StatisticsView.as_view(), name='stats'),
]
<commit_msg>Add login_required decorator to protected sites<commit_after>from django.conf.urls import url, include
from django.contrib.auth import views as auth_views
from . import views
from django.contrib.auth.decorators import login_required
urlpatterns = [
# Authentication
## Override logout next_page
url(r'^accounts/logout/$',
auth_views.logout,
{'next_page': '/'},
name='auth_logout'),
url(r'^accounts/',
include('registration.backends.hmac.urls')),
# Backend urls
url(r'^$',
views.HomeView.as_view(),
name='home'),
url(r'^submit$',
login_required(views.SubmitView.as_view()),
name='submit'),
url(r'^scores$',
views.ScoreboardView.as_view(),
name='scores'),
url(r'^chals$',
login_required(views.ChallengesView.as_view()),
name='chals'),
url(r'^chals/hint/(?P<buy_hint>[0-9]+)/buy',
login_required(views.ChallengesView.as_view()),
name='buy_hint'),
url(r'^stats$',
views.StatisticsView.as_view(),
name='stats'),
]
|
defdf2220804ca492ec889c9f4b6eff9ff56eefc
|
tests/create_references.py
|
tests/create_references.py
|
#! /usr/bin/env python
import os
from cyclus_tools import run_cyclus
from tests_list import sim_files
def main():
"""Creates reference databases. Assumes that cyclus is included into PATH.
"""
cwd = os.getcwd()
# Run cyclus
run_cyclus("cyclus", cwd, sim_files)
if __name__ == "__main__": main()
|
#! /usr/bin/env python
import os
from cyclus_tools import run_cyclus
from testcases import sim_files
def main():
"""Creates reference databases. Assumes that cyclus is included into PATH.
"""
cwd = os.getcwd()
# Run cyclus
run_cyclus("cyclus", cwd, sim_files)
if __name__ == "__main__": main()
|
Correct import statement after renaming test_lists.py to testcases
|
Correct import statement after renaming test_lists.py to testcases
|
Python
|
bsd-3-clause
|
Baaaaam/cyBaM,Baaaaam/cyBaM,jlittell/cycamore,cyclus/cycaless,gonuke/cycamore,Baaaaam/cyBaM,Baaaaam/cycamore,rwcarlsen/cycamore,rwcarlsen/cycamore,Baaaaam/cycamore,rwcarlsen/cycamore,Baaaaam/cyCLASS,gonuke/cycamore,rwcarlsen/cycamore,Baaaaam/cyCLASS,cyclus/cycaless,jlittell/cycamore,Baaaaam/cyBaM,jlittell/cycamore,gonuke/cycamore,jlittell/cycamore,gonuke/cycamore,Baaaaam/cycamore
|
#! /usr/bin/env python
import os
from cyclus_tools import run_cyclus
from tests_list import sim_files
def main():
"""Creates reference databases. Assumes that cyclus is included into PATH.
"""
cwd = os.getcwd()
# Run cyclus
run_cyclus("cyclus", cwd, sim_files)
if __name__ == "__main__": main()
Correct import statement after renaming test_lists.py to testcases
|
#! /usr/bin/env python
import os
from cyclus_tools import run_cyclus
from testcases import sim_files
def main():
"""Creates reference databases. Assumes that cyclus is included into PATH.
"""
cwd = os.getcwd()
# Run cyclus
run_cyclus("cyclus", cwd, sim_files)
if __name__ == "__main__": main()
|
<commit_before>#! /usr/bin/env python
import os
from cyclus_tools import run_cyclus
from tests_list import sim_files
def main():
"""Creates reference databases. Assumes that cyclus is included into PATH.
"""
cwd = os.getcwd()
# Run cyclus
run_cyclus("cyclus", cwd, sim_files)
if __name__ == "__main__": main()
<commit_msg>Correct import statement after renaming test_lists.py to testcases<commit_after>
|
#! /usr/bin/env python
import os
from cyclus_tools import run_cyclus
from testcases import sim_files
def main():
"""Creates reference databases. Assumes that cyclus is included into PATH.
"""
cwd = os.getcwd()
# Run cyclus
run_cyclus("cyclus", cwd, sim_files)
if __name__ == "__main__": main()
|
#! /usr/bin/env python
import os
from cyclus_tools import run_cyclus
from tests_list import sim_files
def main():
"""Creates reference databases. Assumes that cyclus is included into PATH.
"""
cwd = os.getcwd()
# Run cyclus
run_cyclus("cyclus", cwd, sim_files)
if __name__ == "__main__": main()
Correct import statement after renaming test_lists.py to testcases#! /usr/bin/env python
import os
from cyclus_tools import run_cyclus
from testcases import sim_files
def main():
"""Creates reference databases. Assumes that cyclus is included into PATH.
"""
cwd = os.getcwd()
# Run cyclus
run_cyclus("cyclus", cwd, sim_files)
if __name__ == "__main__": main()
|
<commit_before>#! /usr/bin/env python
import os
from cyclus_tools import run_cyclus
from tests_list import sim_files
def main():
"""Creates reference databases. Assumes that cyclus is included into PATH.
"""
cwd = os.getcwd()
# Run cyclus
run_cyclus("cyclus", cwd, sim_files)
if __name__ == "__main__": main()
<commit_msg>Correct import statement after renaming test_lists.py to testcases<commit_after>#! /usr/bin/env python
import os
from cyclus_tools import run_cyclus
from testcases import sim_files
def main():
"""Creates reference databases. Assumes that cyclus is included into PATH.
"""
cwd = os.getcwd()
# Run cyclus
run_cyclus("cyclus", cwd, sim_files)
if __name__ == "__main__": main()
|
6ff6f7ecf75551dc49685c4bb0501e6f4b2de854
|
packages/Python/lldbsuite/test/expression_command/vector_of_enums/TestVectorOfEnums.py
|
packages/Python/lldbsuite/test/expression_command/vector_of_enums/TestVectorOfEnums.py
|
"""
Test Expression Parser regression test to ensure that we handle enums
correctly, in this case specifically std::vector of enums.
"""
import lldb
from lldbsuite.test.decorators import *
from lldbsuite.test.lldbtest import *
from lldbsuite.test import lldbutil
class TestVectorOfEnums(TestBase):
mydir = TestBase.compute_mydir(__file__)
def test_vector_of_enums(self):
self.build()
lldbutil.run_to_source_breakpoint(self, '// break here',
lldb.SBFileSpec("main.cpp", False))
self.expect("expr v", substrs=[
'size=3',
'[0] = a',
'[1] = b',
'[2] = c',
'}'
])
|
"""
Test Expression Parser regression test to ensure that we handle enums
correctly, in this case specifically std::vector of enums.
"""
import lldb
from lldbsuite.test.decorators import *
from lldbsuite.test.lldbtest import *
from lldbsuite.test import lldbutil
class TestVectorOfEnums(TestBase):
mydir = TestBase.compute_mydir(__file__)
@add_test_categories(["libc++"])
def test_vector_of_enums(self):
self.build()
lldbutil.run_to_source_breakpoint(self, '// break here',
lldb.SBFileSpec("main.cpp", False))
self.expect("expr v", substrs=[
'size=3',
'[0] = a',
'[1] = b',
'[2] = c',
'}'
])
|
Fix for regression test, since we rely on the formatter for std::vector in the test we need a libc++ category.
|
Fix for regression test, since we rely on the formatter for std::vector in the test we need a libc++ category.
See differential https://reviews.llvm.org/D59847 for initial change that this fixes
git-svn-id: 4c4cc70b1ef44ba2b7963015e681894188cea27e@357210 91177308-0d34-0410-b5e6-96231b3b80d8
|
Python
|
apache-2.0
|
llvm-mirror/lldb,apple/swift-lldb,llvm-mirror/lldb,llvm-mirror/lldb,apple/swift-lldb,apple/swift-lldb,llvm-mirror/lldb,apple/swift-lldb,apple/swift-lldb,llvm-mirror/lldb,apple/swift-lldb
|
"""
Test Expression Parser regression test to ensure that we handle enums
correctly, in this case specifically std::vector of enums.
"""
import lldb
from lldbsuite.test.decorators import *
from lldbsuite.test.lldbtest import *
from lldbsuite.test import lldbutil
class TestVectorOfEnums(TestBase):
mydir = TestBase.compute_mydir(__file__)
def test_vector_of_enums(self):
self.build()
lldbutil.run_to_source_breakpoint(self, '// break here',
lldb.SBFileSpec("main.cpp", False))
self.expect("expr v", substrs=[
'size=3',
'[0] = a',
'[1] = b',
'[2] = c',
'}'
])
Fix for regression test, since we rely on the formatter for std::vector in the test we need a libc++ category.
See differential https://reviews.llvm.org/D59847 for initial change that this fixes
git-svn-id: 4c4cc70b1ef44ba2b7963015e681894188cea27e@357210 91177308-0d34-0410-b5e6-96231b3b80d8
|
"""
Test Expression Parser regression test to ensure that we handle enums
correctly, in this case specifically std::vector of enums.
"""
import lldb
from lldbsuite.test.decorators import *
from lldbsuite.test.lldbtest import *
from lldbsuite.test import lldbutil
class TestVectorOfEnums(TestBase):
mydir = TestBase.compute_mydir(__file__)
@add_test_categories(["libc++"])
def test_vector_of_enums(self):
self.build()
lldbutil.run_to_source_breakpoint(self, '// break here',
lldb.SBFileSpec("main.cpp", False))
self.expect("expr v", substrs=[
'size=3',
'[0] = a',
'[1] = b',
'[2] = c',
'}'
])
|
<commit_before>"""
Test Expression Parser regression test to ensure that we handle enums
correctly, in this case specifically std::vector of enums.
"""
import lldb
from lldbsuite.test.decorators import *
from lldbsuite.test.lldbtest import *
from lldbsuite.test import lldbutil
class TestVectorOfEnums(TestBase):
mydir = TestBase.compute_mydir(__file__)
def test_vector_of_enums(self):
self.build()
lldbutil.run_to_source_breakpoint(self, '// break here',
lldb.SBFileSpec("main.cpp", False))
self.expect("expr v", substrs=[
'size=3',
'[0] = a',
'[1] = b',
'[2] = c',
'}'
])
<commit_msg>Fix for regression test, since we rely on the formatter for std::vector in the test we need a libc++ category.
See differential https://reviews.llvm.org/D59847 for initial change that this fixes
git-svn-id: 4c4cc70b1ef44ba2b7963015e681894188cea27e@357210 91177308-0d34-0410-b5e6-96231b3b80d8<commit_after>
|
"""
Test Expression Parser regression test to ensure that we handle enums
correctly, in this case specifically std::vector of enums.
"""
import lldb
from lldbsuite.test.decorators import *
from lldbsuite.test.lldbtest import *
from lldbsuite.test import lldbutil
class TestVectorOfEnums(TestBase):
mydir = TestBase.compute_mydir(__file__)
@add_test_categories(["libc++"])
def test_vector_of_enums(self):
self.build()
lldbutil.run_to_source_breakpoint(self, '// break here',
lldb.SBFileSpec("main.cpp", False))
self.expect("expr v", substrs=[
'size=3',
'[0] = a',
'[1] = b',
'[2] = c',
'}'
])
|
"""
Test Expression Parser regression test to ensure that we handle enums
correctly, in this case specifically std::vector of enums.
"""
import lldb
from lldbsuite.test.decorators import *
from lldbsuite.test.lldbtest import *
from lldbsuite.test import lldbutil
class TestVectorOfEnums(TestBase):
mydir = TestBase.compute_mydir(__file__)
def test_vector_of_enums(self):
self.build()
lldbutil.run_to_source_breakpoint(self, '// break here',
lldb.SBFileSpec("main.cpp", False))
self.expect("expr v", substrs=[
'size=3',
'[0] = a',
'[1] = b',
'[2] = c',
'}'
])
Fix for regression test, since we rely on the formatter for std::vector in the test we need a libc++ category.
See differential https://reviews.llvm.org/D59847 for initial change that this fixes
git-svn-id: 4c4cc70b1ef44ba2b7963015e681894188cea27e@357210 91177308-0d34-0410-b5e6-96231b3b80d8"""
Test Expression Parser regression test to ensure that we handle enums
correctly, in this case specifically std::vector of enums.
"""
import lldb
from lldbsuite.test.decorators import *
from lldbsuite.test.lldbtest import *
from lldbsuite.test import lldbutil
class TestVectorOfEnums(TestBase):
mydir = TestBase.compute_mydir(__file__)
@add_test_categories(["libc++"])
def test_vector_of_enums(self):
self.build()
lldbutil.run_to_source_breakpoint(self, '// break here',
lldb.SBFileSpec("main.cpp", False))
self.expect("expr v", substrs=[
'size=3',
'[0] = a',
'[1] = b',
'[2] = c',
'}'
])
|
<commit_before>"""
Test Expression Parser regression test to ensure that we handle enums
correctly, in this case specifically std::vector of enums.
"""
import lldb
from lldbsuite.test.decorators import *
from lldbsuite.test.lldbtest import *
from lldbsuite.test import lldbutil
class TestVectorOfEnums(TestBase):
mydir = TestBase.compute_mydir(__file__)
def test_vector_of_enums(self):
self.build()
lldbutil.run_to_source_breakpoint(self, '// break here',
lldb.SBFileSpec("main.cpp", False))
self.expect("expr v", substrs=[
'size=3',
'[0] = a',
'[1] = b',
'[2] = c',
'}'
])
<commit_msg>Fix for regression test, since we rely on the formatter for std::vector in the test we need a libc++ category.
See differential https://reviews.llvm.org/D59847 for initial change that this fixes
git-svn-id: 4c4cc70b1ef44ba2b7963015e681894188cea27e@357210 91177308-0d34-0410-b5e6-96231b3b80d8<commit_after>"""
Test Expression Parser regression test to ensure that we handle enums
correctly, in this case specifically std::vector of enums.
"""
import lldb
from lldbsuite.test.decorators import *
from lldbsuite.test.lldbtest import *
from lldbsuite.test import lldbutil
class TestVectorOfEnums(TestBase):
mydir = TestBase.compute_mydir(__file__)
@add_test_categories(["libc++"])
def test_vector_of_enums(self):
self.build()
lldbutil.run_to_source_breakpoint(self, '// break here',
lldb.SBFileSpec("main.cpp", False))
self.expect("expr v", substrs=[
'size=3',
'[0] = a',
'[1] = b',
'[2] = c',
'}'
])
|
df7e834b8418aeeeaee7fb90b953468c2490b93d
|
pypiup/cli.py
|
pypiup/cli.py
|
import os
import click
from pypiup.requirements import Requirements
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
@click.command()
@click.option('--requirement', '-r', default='requirements.txt', type=click.STRING, help='Specify the path of the requirements file. Defaults to "requirements.txt".')
@click.option('--demo', '-d', is_flag=True, help='Load the demo requirements.txt file that comes with the package.')
def cli(requirement, demo):
"""
PyPIup\n
Check whether your PyPI requirements are up to date.
"""
if demo:
demo_path = os.path.join(BASE_DIR, 'requirements/requirements-demo.txt')
return Requirements(demo_path)
Requirements(requirement)
if __name__ == '__main__':
cli()
|
import __init__
import os
import click
from pypiup.requirements import Requirements
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
@click.command()
@click.option('--requirement', '-r', default='requirements.txt', type=click.STRING, help='Specify the path of the requirements file. Defaults to "requirements.txt".')
@click.option('--demo', '-d', is_flag=True, help='Load the demo requirements.txt file that comes with the package.')
def cli(requirement, demo):
"""
PyPIup\n
Check whether your PyPI requirements are up to date.
"""
print("\n ______ __ __ ______ __ __ __ ______ ")
print("/\ == \ /\ \_\ \ /\ == \ /\ \ /\ \/\ \ /\ == \ ")
print("\ \ _-/ \ \____ \ \ \ _-/ \ \ \ \ \ \_\ \ \ \ _-/ ")
print(" \ \_\ \/\_____\ \ \_\ \ \_\ \ \_____\ \ \_\ ")
print(" \/_/ \/_____/ \/_/ \/_/ \/_____/ \/_/ ")
print("\nhttps://github.com/ekonstantinidis/pypiup")
print("Version %s" % __init__.__version__)
if demo:
demo_path = os.path.join(BASE_DIR, 'requirements/requirements-demo.txt')
return Requirements(demo_path)
Requirements(requirement)
if __name__ == '__main__':
cli()
|
Add Ascii Art & Version Number
|
Add Ascii Art & Version Number
|
Python
|
bsd-2-clause
|
ekonstantinidis/pypiup
|
import os
import click
from pypiup.requirements import Requirements
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
@click.command()
@click.option('--requirement', '-r', default='requirements.txt', type=click.STRING, help='Specify the path of the requirements file. Defaults to "requirements.txt".')
@click.option('--demo', '-d', is_flag=True, help='Load the demo requirements.txt file that comes with the package.')
def cli(requirement, demo):
"""
PyPIup\n
Check whether your PyPI requirements are up to date.
"""
if demo:
demo_path = os.path.join(BASE_DIR, 'requirements/requirements-demo.txt')
return Requirements(demo_path)
Requirements(requirement)
if __name__ == '__main__':
cli()
Add Ascii Art & Version Number
|
import __init__
import os
import click
from pypiup.requirements import Requirements
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
@click.command()
@click.option('--requirement', '-r', default='requirements.txt', type=click.STRING, help='Specify the path of the requirements file. Defaults to "requirements.txt".')
@click.option('--demo', '-d', is_flag=True, help='Load the demo requirements.txt file that comes with the package.')
def cli(requirement, demo):
"""
PyPIup\n
Check whether your PyPI requirements are up to date.
"""
print("\n ______ __ __ ______ __ __ __ ______ ")
print("/\ == \ /\ \_\ \ /\ == \ /\ \ /\ \/\ \ /\ == \ ")
print("\ \ _-/ \ \____ \ \ \ _-/ \ \ \ \ \ \_\ \ \ \ _-/ ")
print(" \ \_\ \/\_____\ \ \_\ \ \_\ \ \_____\ \ \_\ ")
print(" \/_/ \/_____/ \/_/ \/_/ \/_____/ \/_/ ")
print("\nhttps://github.com/ekonstantinidis/pypiup")
print("Version %s" % __init__.__version__)
if demo:
demo_path = os.path.join(BASE_DIR, 'requirements/requirements-demo.txt')
return Requirements(demo_path)
Requirements(requirement)
if __name__ == '__main__':
cli()
|
<commit_before>import os
import click
from pypiup.requirements import Requirements
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
@click.command()
@click.option('--requirement', '-r', default='requirements.txt', type=click.STRING, help='Specify the path of the requirements file. Defaults to "requirements.txt".')
@click.option('--demo', '-d', is_flag=True, help='Load the demo requirements.txt file that comes with the package.')
def cli(requirement, demo):
"""
PyPIup\n
Check whether your PyPI requirements are up to date.
"""
if demo:
demo_path = os.path.join(BASE_DIR, 'requirements/requirements-demo.txt')
return Requirements(demo_path)
Requirements(requirement)
if __name__ == '__main__':
cli()
<commit_msg>Add Ascii Art & Version Number<commit_after>
|
import __init__
import os
import click
from pypiup.requirements import Requirements
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
@click.command()
@click.option('--requirement', '-r', default='requirements.txt', type=click.STRING, help='Specify the path of the requirements file. Defaults to "requirements.txt".')
@click.option('--demo', '-d', is_flag=True, help='Load the demo requirements.txt file that comes with the package.')
def cli(requirement, demo):
"""
PyPIup\n
Check whether your PyPI requirements are up to date.
"""
print("\n ______ __ __ ______ __ __ __ ______ ")
print("/\ == \ /\ \_\ \ /\ == \ /\ \ /\ \/\ \ /\ == \ ")
print("\ \ _-/ \ \____ \ \ \ _-/ \ \ \ \ \ \_\ \ \ \ _-/ ")
print(" \ \_\ \/\_____\ \ \_\ \ \_\ \ \_____\ \ \_\ ")
print(" \/_/ \/_____/ \/_/ \/_/ \/_____/ \/_/ ")
print("\nhttps://github.com/ekonstantinidis/pypiup")
print("Version %s" % __init__.__version__)
if demo:
demo_path = os.path.join(BASE_DIR, 'requirements/requirements-demo.txt')
return Requirements(demo_path)
Requirements(requirement)
if __name__ == '__main__':
cli()
|
import os
import click
from pypiup.requirements import Requirements
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
@click.command()
@click.option('--requirement', '-r', default='requirements.txt', type=click.STRING, help='Specify the path of the requirements file. Defaults to "requirements.txt".')
@click.option('--demo', '-d', is_flag=True, help='Load the demo requirements.txt file that comes with the package.')
def cli(requirement, demo):
"""
PyPIup\n
Check whether your PyPI requirements are up to date.
"""
if demo:
demo_path = os.path.join(BASE_DIR, 'requirements/requirements-demo.txt')
return Requirements(demo_path)
Requirements(requirement)
if __name__ == '__main__':
cli()
Add Ascii Art & Version Numberimport __init__
import os
import click
from pypiup.requirements import Requirements
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
@click.command()
@click.option('--requirement', '-r', default='requirements.txt', type=click.STRING, help='Specify the path of the requirements file. Defaults to "requirements.txt".')
@click.option('--demo', '-d', is_flag=True, help='Load the demo requirements.txt file that comes with the package.')
def cli(requirement, demo):
"""
PyPIup\n
Check whether your PyPI requirements are up to date.
"""
print("\n ______ __ __ ______ __ __ __ ______ ")
print("/\ == \ /\ \_\ \ /\ == \ /\ \ /\ \/\ \ /\ == \ ")
print("\ \ _-/ \ \____ \ \ \ _-/ \ \ \ \ \ \_\ \ \ \ _-/ ")
print(" \ \_\ \/\_____\ \ \_\ \ \_\ \ \_____\ \ \_\ ")
print(" \/_/ \/_____/ \/_/ \/_/ \/_____/ \/_/ ")
print("\nhttps://github.com/ekonstantinidis/pypiup")
print("Version %s" % __init__.__version__)
if demo:
demo_path = os.path.join(BASE_DIR, 'requirements/requirements-demo.txt')
return Requirements(demo_path)
Requirements(requirement)
if __name__ == '__main__':
cli()
|
<commit_before>import os
import click
from pypiup.requirements import Requirements
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
@click.command()
@click.option('--requirement', '-r', default='requirements.txt', type=click.STRING, help='Specify the path of the requirements file. Defaults to "requirements.txt".')
@click.option('--demo', '-d', is_flag=True, help='Load the demo requirements.txt file that comes with the package.')
def cli(requirement, demo):
"""
PyPIup\n
Check whether your PyPI requirements are up to date.
"""
if demo:
demo_path = os.path.join(BASE_DIR, 'requirements/requirements-demo.txt')
return Requirements(demo_path)
Requirements(requirement)
if __name__ == '__main__':
cli()
<commit_msg>Add Ascii Art & Version Number<commit_after>import __init__
import os
import click
from pypiup.requirements import Requirements
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
@click.command()
@click.option('--requirement', '-r', default='requirements.txt', type=click.STRING, help='Specify the path of the requirements file. Defaults to "requirements.txt".')
@click.option('--demo', '-d', is_flag=True, help='Load the demo requirements.txt file that comes with the package.')
def cli(requirement, demo):
"""
PyPIup\n
Check whether your PyPI requirements are up to date.
"""
print("\n ______ __ __ ______ __ __ __ ______ ")
print("/\ == \ /\ \_\ \ /\ == \ /\ \ /\ \/\ \ /\ == \ ")
print("\ \ _-/ \ \____ \ \ \ _-/ \ \ \ \ \ \_\ \ \ \ _-/ ")
print(" \ \_\ \/\_____\ \ \_\ \ \_\ \ \_____\ \ \_\ ")
print(" \/_/ \/_____/ \/_/ \/_/ \/_____/ \/_/ ")
print("\nhttps://github.com/ekonstantinidis/pypiup")
print("Version %s" % __init__.__version__)
if demo:
demo_path = os.path.join(BASE_DIR, 'requirements/requirements-demo.txt')
return Requirements(demo_path)
Requirements(requirement)
if __name__ == '__main__':
cli()
|
6c9a3e5133115a4724c8499380ee690a9cca0552
|
pmagpy/__init__.py
|
pmagpy/__init__.py
|
from __future__ import absolute_import
from . import pmag
from . import ipmag
from . import pmagplotlib
from . import find_pmag_dir
from . import version
from . import controlled_vocabularies2 as controlled_vocabularies
from . import data_model3
from . import new_builder
from . import mapping
#import set_env
__all__ = [pmag, ipmag, pmagplotlib, find_pmag_dir, version,
controlled_vocabularies, data_model3, new_builder,
mapping]
|
from __future__ import absolute_import
import sys
if sys.version_info <= (3,):
raise Exception("""
You are running Python {}.
This version of pmagpy is only compatible with Python 3.
Make sure you have pip >= 9.0 to avoid this kind of issue,
as well as setuptools >= 24.2:
$ pip install pip setuptools --upgrade
Then you should be able to download the correct version of pmagpy:
$ pip install pmagpy --upgrade
If this still gives you an error, please report the issue:
https://github.com/PmagPy/PmagPy/issues
Thanks!
""".format(sys.version))
from . import pmag
from . import ipmag
from . import pmagplotlib
from . import find_pmag_dir
from . import version
from . import controlled_vocabularies2 as controlled_vocabularies
from . import data_model3
from . import new_builder
from . import mapping
#import set_env
__all__ = [pmag, ipmag, pmagplotlib, find_pmag_dir, version,
controlled_vocabularies, data_model3, new_builder,
mapping]
|
Add Exception on import of pmagpy if using wrong Python version (should be impossible to install this version, but people are tricky….)
|
Add Exception on import of pmagpy if using wrong Python version (should be impossible to install this version, but people are tricky….)
|
Python
|
bsd-3-clause
|
lfairchild/PmagPy,lfairchild/PmagPy,Caoimhinmg/PmagPy,lfairchild/PmagPy,Caoimhinmg/PmagPy,Caoimhinmg/PmagPy
|
from __future__ import absolute_import
from . import pmag
from . import ipmag
from . import pmagplotlib
from . import find_pmag_dir
from . import version
from . import controlled_vocabularies2 as controlled_vocabularies
from . import data_model3
from . import new_builder
from . import mapping
#import set_env
__all__ = [pmag, ipmag, pmagplotlib, find_pmag_dir, version,
controlled_vocabularies, data_model3, new_builder,
mapping]
Add Exception on import of pmagpy if using wrong Python version (should be impossible to install this version, but people are tricky….)
|
from __future__ import absolute_import
import sys
if sys.version_info <= (3,):
raise Exception("""
You are running Python {}.
This version of pmagpy is only compatible with Python 3.
Make sure you have pip >= 9.0 to avoid this kind of issue,
as well as setuptools >= 24.2:
$ pip install pip setuptools --upgrade
Then you should be able to download the correct version of pmagpy:
$ pip install pmagpy --upgrade
If this still gives you an error, please report the issue:
https://github.com/PmagPy/PmagPy/issues
Thanks!
""".format(sys.version))
from . import pmag
from . import ipmag
from . import pmagplotlib
from . import find_pmag_dir
from . import version
from . import controlled_vocabularies2 as controlled_vocabularies
from . import data_model3
from . import new_builder
from . import mapping
#import set_env
__all__ = [pmag, ipmag, pmagplotlib, find_pmag_dir, version,
controlled_vocabularies, data_model3, new_builder,
mapping]
|
<commit_before>from __future__ import absolute_import
from . import pmag
from . import ipmag
from . import pmagplotlib
from . import find_pmag_dir
from . import version
from . import controlled_vocabularies2 as controlled_vocabularies
from . import data_model3
from . import new_builder
from . import mapping
#import set_env
__all__ = [pmag, ipmag, pmagplotlib, find_pmag_dir, version,
controlled_vocabularies, data_model3, new_builder,
mapping]
<commit_msg>Add Exception on import of pmagpy if using wrong Python version (should be impossible to install this version, but people are tricky….)<commit_after>
|
from __future__ import absolute_import
import sys
if sys.version_info <= (3,):
raise Exception("""
You are running Python {}.
This version of pmagpy is only compatible with Python 3.
Make sure you have pip >= 9.0 to avoid this kind of issue,
as well as setuptools >= 24.2:
$ pip install pip setuptools --upgrade
Then you should be able to download the correct version of pmagpy:
$ pip install pmagpy --upgrade
If this still gives you an error, please report the issue:
https://github.com/PmagPy/PmagPy/issues
Thanks!
""".format(sys.version))
from . import pmag
from . import ipmag
from . import pmagplotlib
from . import find_pmag_dir
from . import version
from . import controlled_vocabularies2 as controlled_vocabularies
from . import data_model3
from . import new_builder
from . import mapping
#import set_env
__all__ = [pmag, ipmag, pmagplotlib, find_pmag_dir, version,
controlled_vocabularies, data_model3, new_builder,
mapping]
|
from __future__ import absolute_import
from . import pmag
from . import ipmag
from . import pmagplotlib
from . import find_pmag_dir
from . import version
from . import controlled_vocabularies2 as controlled_vocabularies
from . import data_model3
from . import new_builder
from . import mapping
#import set_env
__all__ = [pmag, ipmag, pmagplotlib, find_pmag_dir, version,
controlled_vocabularies, data_model3, new_builder,
mapping]
Add Exception on import of pmagpy if using wrong Python version (should be impossible to install this version, but people are tricky….)from __future__ import absolute_import
import sys
if sys.version_info <= (3,):
raise Exception("""
You are running Python {}.
This version of pmagpy is only compatible with Python 3.
Make sure you have pip >= 9.0 to avoid this kind of issue,
as well as setuptools >= 24.2:
$ pip install pip setuptools --upgrade
Then you should be able to download the correct version of pmagpy:
$ pip install pmagpy --upgrade
If this still gives you an error, please report the issue:
https://github.com/PmagPy/PmagPy/issues
Thanks!
""".format(sys.version))
from . import pmag
from . import ipmag
from . import pmagplotlib
from . import find_pmag_dir
from . import version
from . import controlled_vocabularies2 as controlled_vocabularies
from . import data_model3
from . import new_builder
from . import mapping
#import set_env
__all__ = [pmag, ipmag, pmagplotlib, find_pmag_dir, version,
controlled_vocabularies, data_model3, new_builder,
mapping]
|
<commit_before>from __future__ import absolute_import
from . import pmag
from . import ipmag
from . import pmagplotlib
from . import find_pmag_dir
from . import version
from . import controlled_vocabularies2 as controlled_vocabularies
from . import data_model3
from . import new_builder
from . import mapping
#import set_env
__all__ = [pmag, ipmag, pmagplotlib, find_pmag_dir, version,
controlled_vocabularies, data_model3, new_builder,
mapping]
<commit_msg>Add Exception on import of pmagpy if using wrong Python version (should be impossible to install this version, but people are tricky….)<commit_after>from __future__ import absolute_import
import sys
if sys.version_info <= (3,):
raise Exception("""
You are running Python {}.
This version of pmagpy is only compatible with Python 3.
Make sure you have pip >= 9.0 to avoid this kind of issue,
as well as setuptools >= 24.2:
$ pip install pip setuptools --upgrade
Then you should be able to download the correct version of pmagpy:
$ pip install pmagpy --upgrade
If this still gives you an error, please report the issue:
https://github.com/PmagPy/PmagPy/issues
Thanks!
""".format(sys.version))
from . import pmag
from . import ipmag
from . import pmagplotlib
from . import find_pmag_dir
from . import version
from . import controlled_vocabularies2 as controlled_vocabularies
from . import data_model3
from . import new_builder
from . import mapping
#import set_env
__all__ = [pmag, ipmag, pmagplotlib, find_pmag_dir, version,
controlled_vocabularies, data_model3, new_builder,
mapping]
|
d3cbcfa3d134ef7ce158f229eff75a83418afc52
|
tools/dmqmc/extract_n_k.py
|
tools/dmqmc/extract_n_k.py
|
#!/usr/bin/env python
'''Extract the momentum distribution from a analysed DMQMC simulation.'''
import pandas as pd
import numpy as np
import sys
# [review] - JSS: use if __name__ == '__main__' and functions so code can easily be reused in another script if necessary.
if (len(sys.argv) < 2):
print ("Usage: extract_n_k.py file bval")
sys.exit()
bval = float(sys.argv[2])
data = pd.read_csv(sys.argv[1], sep=r'\s+').groupby('Beta').get_group(bval)
mom = [c for c in data.columns.values if 'n_' in c and '_error' not in c]
mome = [c for c in data.columns.values if 'n_' in c and '_error' in c]
vals = [float(c.split('_')[1]) for c in mom]
n_k = (data[mom].transpose()).values
n_k_error = (data[mome].transpose()).values
n_k_error[np.isnan(n_k_error)] = 0
frame = pd.DataFrame({'Beta': bval, 'k': vals, 'n_k': n_k.ravel(), 'n_k_error':
n_k_error.ravel()})
print (frame.to_string(index=False))
|
#!/usr/bin/env python
'''Extract the momentum distribution from an analysed DMQMC simulation.'''
import pandas as pd
import numpy as np
import sys
def main(args):
if (len(sys.argv) < 2):
print ("Usage: extract_n_k.py file bval")
sys.exit()
bval = float(sys.argv[2])
data = pd.read_csv(sys.argv[1], sep=r'\s+').groupby('Beta').get_group(bval)
mom = [c for c in data.columns.values if 'n_' in c and '_error' not in c]
mome = [c for c in data.columns.values if 'n_' in c and '_error' in c]
vals = [float(c.split('_')[1]) for c in mom]
n_k = (data[mom].transpose()).values
n_k_error = (data[mome].transpose()).values
n_k_error[np.isnan(n_k_error)] = 0
frame = pd.DataFrame({'Beta': bval, 'k': vals, 'n_k': n_k.ravel(), 'n_k_error':
n_k_error.ravel()})
print (frame.to_string(index=False))
if __name__ == '__main__':
main(sys.argv[1:])
|
Write the extraction script properly.
|
Write the extraction script properly.
|
Python
|
lgpl-2.1
|
hande-qmc/hande,hande-qmc/hande,hande-qmc/hande,hande-qmc/hande,hande-qmc/hande
|
#!/usr/bin/env python
'''Extract the momentum distribution from a analysed DMQMC simulation.'''
import pandas as pd
import numpy as np
import sys
# [review] - JSS: use if __name__ == '__main__' and functions so code can easily be reused in another script if necessary.
if (len(sys.argv) < 2):
print ("Usage: extract_n_k.py file bval")
sys.exit()
bval = float(sys.argv[2])
data = pd.read_csv(sys.argv[1], sep=r'\s+').groupby('Beta').get_group(bval)
mom = [c for c in data.columns.values if 'n_' in c and '_error' not in c]
mome = [c for c in data.columns.values if 'n_' in c and '_error' in c]
vals = [float(c.split('_')[1]) for c in mom]
n_k = (data[mom].transpose()).values
n_k_error = (data[mome].transpose()).values
n_k_error[np.isnan(n_k_error)] = 0
frame = pd.DataFrame({'Beta': bval, 'k': vals, 'n_k': n_k.ravel(), 'n_k_error':
n_k_error.ravel()})
print (frame.to_string(index=False))
Write the extraction script properly.
|
#!/usr/bin/env python
'''Extract the momentum distribution from an analysed DMQMC simulation.'''
import pandas as pd
import numpy as np
import sys
def main(args):
if (len(sys.argv) < 2):
print ("Usage: extract_n_k.py file bval")
sys.exit()
bval = float(sys.argv[2])
data = pd.read_csv(sys.argv[1], sep=r'\s+').groupby('Beta').get_group(bval)
mom = [c for c in data.columns.values if 'n_' in c and '_error' not in c]
mome = [c for c in data.columns.values if 'n_' in c and '_error' in c]
vals = [float(c.split('_')[1]) for c in mom]
n_k = (data[mom].transpose()).values
n_k_error = (data[mome].transpose()).values
n_k_error[np.isnan(n_k_error)] = 0
frame = pd.DataFrame({'Beta': bval, 'k': vals, 'n_k': n_k.ravel(), 'n_k_error':
n_k_error.ravel()})
print (frame.to_string(index=False))
if __name__ == '__main__':
main(sys.argv[1:])
|
<commit_before>#!/usr/bin/env python
'''Extract the momentum distribution from a analysed DMQMC simulation.'''
import pandas as pd
import numpy as np
import sys
# [review] - JSS: use if __name__ == '__main__' and functions so code can easily be reused in another script if necessary.
if (len(sys.argv) < 2):
print ("Usage: extract_n_k.py file bval")
sys.exit()
bval = float(sys.argv[2])
data = pd.read_csv(sys.argv[1], sep=r'\s+').groupby('Beta').get_group(bval)
mom = [c for c in data.columns.values if 'n_' in c and '_error' not in c]
mome = [c for c in data.columns.values if 'n_' in c and '_error' in c]
vals = [float(c.split('_')[1]) for c in mom]
n_k = (data[mom].transpose()).values
n_k_error = (data[mome].transpose()).values
n_k_error[np.isnan(n_k_error)] = 0
frame = pd.DataFrame({'Beta': bval, 'k': vals, 'n_k': n_k.ravel(), 'n_k_error':
n_k_error.ravel()})
print (frame.to_string(index=False))
<commit_msg>Write the extraction script properly.<commit_after>
|
#!/usr/bin/env python
'''Extract the momentum distribution from an analysed DMQMC simulation.'''
import pandas as pd
import numpy as np
import sys
def main(args):
if (len(sys.argv) < 2):
print ("Usage: extract_n_k.py file bval")
sys.exit()
bval = float(sys.argv[2])
data = pd.read_csv(sys.argv[1], sep=r'\s+').groupby('Beta').get_group(bval)
mom = [c for c in data.columns.values if 'n_' in c and '_error' not in c]
mome = [c for c in data.columns.values if 'n_' in c and '_error' in c]
vals = [float(c.split('_')[1]) for c in mom]
n_k = (data[mom].transpose()).values
n_k_error = (data[mome].transpose()).values
n_k_error[np.isnan(n_k_error)] = 0
frame = pd.DataFrame({'Beta': bval, 'k': vals, 'n_k': n_k.ravel(), 'n_k_error':
n_k_error.ravel()})
print (frame.to_string(index=False))
if __name__ == '__main__':
main(sys.argv[1:])
|
#!/usr/bin/env python
'''Extract the momentum distribution from a analysed DMQMC simulation.'''
import pandas as pd
import numpy as np
import sys
# [review] - JSS: use if __name__ == '__main__' and functions so code can easily be reused in another script if necessary.
if (len(sys.argv) < 2):
print ("Usage: extract_n_k.py file bval")
sys.exit()
bval = float(sys.argv[2])
data = pd.read_csv(sys.argv[1], sep=r'\s+').groupby('Beta').get_group(bval)
mom = [c for c in data.columns.values if 'n_' in c and '_error' not in c]
mome = [c for c in data.columns.values if 'n_' in c and '_error' in c]
vals = [float(c.split('_')[1]) for c in mom]
n_k = (data[mom].transpose()).values
n_k_error = (data[mome].transpose()).values
n_k_error[np.isnan(n_k_error)] = 0
frame = pd.DataFrame({'Beta': bval, 'k': vals, 'n_k': n_k.ravel(), 'n_k_error':
n_k_error.ravel()})
print (frame.to_string(index=False))
Write the extraction script properly.#!/usr/bin/env python
'''Extract the momentum distribution from an analysed DMQMC simulation.'''
import pandas as pd
import numpy as np
import sys
def main(args):
if (len(sys.argv) < 2):
print ("Usage: extract_n_k.py file bval")
sys.exit()
bval = float(sys.argv[2])
data = pd.read_csv(sys.argv[1], sep=r'\s+').groupby('Beta').get_group(bval)
mom = [c for c in data.columns.values if 'n_' in c and '_error' not in c]
mome = [c for c in data.columns.values if 'n_' in c and '_error' in c]
vals = [float(c.split('_')[1]) for c in mom]
n_k = (data[mom].transpose()).values
n_k_error = (data[mome].transpose()).values
n_k_error[np.isnan(n_k_error)] = 0
frame = pd.DataFrame({'Beta': bval, 'k': vals, 'n_k': n_k.ravel(), 'n_k_error':
n_k_error.ravel()})
print (frame.to_string(index=False))
if __name__ == '__main__':
main(sys.argv[1:])
|
<commit_before>#!/usr/bin/env python
'''Extract the momentum distribution from a analysed DMQMC simulation.'''
import pandas as pd
import numpy as np
import sys
# [review] - JSS: use if __name__ == '__main__' and functions so code can easily be reused in another script if necessary.
if (len(sys.argv) < 2):
print ("Usage: extract_n_k.py file bval")
sys.exit()
bval = float(sys.argv[2])
data = pd.read_csv(sys.argv[1], sep=r'\s+').groupby('Beta').get_group(bval)
mom = [c for c in data.columns.values if 'n_' in c and '_error' not in c]
mome = [c for c in data.columns.values if 'n_' in c and '_error' in c]
vals = [float(c.split('_')[1]) for c in mom]
n_k = (data[mom].transpose()).values
n_k_error = (data[mome].transpose()).values
n_k_error[np.isnan(n_k_error)] = 0
frame = pd.DataFrame({'Beta': bval, 'k': vals, 'n_k': n_k.ravel(), 'n_k_error':
n_k_error.ravel()})
print (frame.to_string(index=False))
<commit_msg>Write the extraction script properly.<commit_after>#!/usr/bin/env python
'''Extract the momentum distribution from an analysed DMQMC simulation.'''
import pandas as pd
import numpy as np
import sys
def main(args):
if (len(sys.argv) < 2):
print ("Usage: extract_n_k.py file bval")
sys.exit()
bval = float(sys.argv[2])
data = pd.read_csv(sys.argv[1], sep=r'\s+').groupby('Beta').get_group(bval)
mom = [c for c in data.columns.values if 'n_' in c and '_error' not in c]
mome = [c for c in data.columns.values if 'n_' in c and '_error' in c]
vals = [float(c.split('_')[1]) for c in mom]
n_k = (data[mom].transpose()).values
n_k_error = (data[mome].transpose()).values
n_k_error[np.isnan(n_k_error)] = 0
frame = pd.DataFrame({'Beta': bval, 'k': vals, 'n_k': n_k.ravel(), 'n_k_error':
n_k_error.ravel()})
print (frame.to_string(index=False))
if __name__ == '__main__':
main(sys.argv[1:])
|
bea80411c13ed72b1e7d5a5ac79fdba64b4b4661
|
benchmarks/benchmarks/sparse_csgraph_djisktra.py
|
benchmarks/benchmarks/sparse_csgraph_djisktra.py
|
"""benchmarks for the scipy.sparse.csgraph module"""
import numpy as np
import scipy.sparse
from .common import Benchmark, safe_import
with safe_import():
from scipy.sparse.csgraph import dijkstra
class Dijkstra(Benchmark):
params = [
[30, 300, 900],
[True, False]
]
param_names = ['n', 'min_only']
def setup(self, n, min_only):
rng = np.random.default_rng(1234)
# make a random connectivity matrix
data = scipy.sparse.rand(n, n, density=0.2, format='csc',
random_state=42, dtype=np.bool_)
data.setdiag(np.zeros(n, dtype=np.bool_))
self.data = data
# choose some random vertices
v = np.arange(n)
rng.shuffle(v)
self.indices = v[:int(n*.1)]
def time_dijkstra_multi(self, n, min_only):
dijkstra(self.data,
directed=False,
indices=self.indices,
min_only=min_only)
|
"""benchmarks for the scipy.sparse.csgraph module"""
import numpy as np
import scipy.sparse
from .common import Benchmark, safe_import
with safe_import():
from scipy.sparse.csgraph import dijkstra
class Dijkstra(Benchmark):
params = [
[30, 300, 900],
[True, False],
['random', 'star']
]
param_names = ['n', 'min_only', 'format']
def setup(self, n, min_only, format):
rng = np.random.default_rng(1234)
if format == 'random':
# make a random connectivity matrix
data = scipy.sparse.rand(n, n, density=0.2, format='csc',
random_state=42, dtype=np.bool_)
data.setdiag(np.zeros(n, dtype=np.bool_))
self.data = data
elif format == 'star':
rows = [0 for i in range(n - 1)] + [i + 1 for i in range(n - 1)]
cols = [i + 1 for i in range(n - 1)] + [0 for i in range(n - 1)]
weights = [i + 1 for i in range(n - 1)] * 2
self.data = scipy.sparse.csr_matrix((weights, (rows, cols)),
shape=(n, n))
# choose some random vertices
v = np.arange(n)
rng.shuffle(v)
self.indices = v[:int(n*.1)]
def time_dijkstra_multi(self, n, min_only, format):
dijkstra(self.data,
directed=False,
indices=self.indices,
min_only=min_only)
|
Add star graph for sparse.csgraph.dijkstra benchmark
|
ENH: Add star graph for sparse.csgraph.dijkstra benchmark
|
Python
|
bsd-3-clause
|
scipy/scipy,scipy/scipy,scipy/scipy,scipy/scipy,scipy/scipy,scipy/scipy
|
"""benchmarks for the scipy.sparse.csgraph module"""
import numpy as np
import scipy.sparse
from .common import Benchmark, safe_import
with safe_import():
from scipy.sparse.csgraph import dijkstra
class Dijkstra(Benchmark):
params = [
[30, 300, 900],
[True, False]
]
param_names = ['n', 'min_only']
def setup(self, n, min_only):
rng = np.random.default_rng(1234)
# make a random connectivity matrix
data = scipy.sparse.rand(n, n, density=0.2, format='csc',
random_state=42, dtype=np.bool_)
data.setdiag(np.zeros(n, dtype=np.bool_))
self.data = data
# choose some random vertices
v = np.arange(n)
rng.shuffle(v)
self.indices = v[:int(n*.1)]
def time_dijkstra_multi(self, n, min_only):
dijkstra(self.data,
directed=False,
indices=self.indices,
min_only=min_only)
ENH: Add star graph for sparse.csgraph.dijkstra benchmark
|
"""benchmarks for the scipy.sparse.csgraph module"""
import numpy as np
import scipy.sparse
from .common import Benchmark, safe_import
with safe_import():
from scipy.sparse.csgraph import dijkstra
class Dijkstra(Benchmark):
params = [
[30, 300, 900],
[True, False],
['random', 'star']
]
param_names = ['n', 'min_only', 'format']
def setup(self, n, min_only, format):
rng = np.random.default_rng(1234)
if format == 'random':
# make a random connectivity matrix
data = scipy.sparse.rand(n, n, density=0.2, format='csc',
random_state=42, dtype=np.bool_)
data.setdiag(np.zeros(n, dtype=np.bool_))
self.data = data
elif format == 'star':
rows = [0 for i in range(n - 1)] + [i + 1 for i in range(n - 1)]
cols = [i + 1 for i in range(n - 1)] + [0 for i in range(n - 1)]
weights = [i + 1 for i in range(n - 1)] * 2
self.data = scipy.sparse.csr_matrix((weights, (rows, cols)),
shape=(n, n))
# choose some random vertices
v = np.arange(n)
rng.shuffle(v)
self.indices = v[:int(n*.1)]
def time_dijkstra_multi(self, n, min_only, format):
dijkstra(self.data,
directed=False,
indices=self.indices,
min_only=min_only)
|
<commit_before>"""benchmarks for the scipy.sparse.csgraph module"""
import numpy as np
import scipy.sparse
from .common import Benchmark, safe_import
with safe_import():
from scipy.sparse.csgraph import dijkstra
class Dijkstra(Benchmark):
params = [
[30, 300, 900],
[True, False]
]
param_names = ['n', 'min_only']
def setup(self, n, min_only):
rng = np.random.default_rng(1234)
# make a random connectivity matrix
data = scipy.sparse.rand(n, n, density=0.2, format='csc',
random_state=42, dtype=np.bool_)
data.setdiag(np.zeros(n, dtype=np.bool_))
self.data = data
# choose some random vertices
v = np.arange(n)
rng.shuffle(v)
self.indices = v[:int(n*.1)]
def time_dijkstra_multi(self, n, min_only):
dijkstra(self.data,
directed=False,
indices=self.indices,
min_only=min_only)
<commit_msg>ENH: Add star graph for sparse.csgraph.dijkstra benchmark<commit_after>
|
"""benchmarks for the scipy.sparse.csgraph module"""
import numpy as np
import scipy.sparse
from .common import Benchmark, safe_import
with safe_import():
from scipy.sparse.csgraph import dijkstra
class Dijkstra(Benchmark):
params = [
[30, 300, 900],
[True, False],
['random', 'star']
]
param_names = ['n', 'min_only', 'format']
def setup(self, n, min_only, format):
rng = np.random.default_rng(1234)
if format == 'random':
# make a random connectivity matrix
data = scipy.sparse.rand(n, n, density=0.2, format='csc',
random_state=42, dtype=np.bool_)
data.setdiag(np.zeros(n, dtype=np.bool_))
self.data = data
elif format == 'star':
rows = [0 for i in range(n - 1)] + [i + 1 for i in range(n - 1)]
cols = [i + 1 for i in range(n - 1)] + [0 for i in range(n - 1)]
weights = [i + 1 for i in range(n - 1)] * 2
self.data = scipy.sparse.csr_matrix((weights, (rows, cols)),
shape=(n, n))
# choose some random vertices
v = np.arange(n)
rng.shuffle(v)
self.indices = v[:int(n*.1)]
def time_dijkstra_multi(self, n, min_only, format):
dijkstra(self.data,
directed=False,
indices=self.indices,
min_only=min_only)
|
"""benchmarks for the scipy.sparse.csgraph module"""
import numpy as np
import scipy.sparse
from .common import Benchmark, safe_import
with safe_import():
from scipy.sparse.csgraph import dijkstra
class Dijkstra(Benchmark):
params = [
[30, 300, 900],
[True, False]
]
param_names = ['n', 'min_only']
def setup(self, n, min_only):
rng = np.random.default_rng(1234)
# make a random connectivity matrix
data = scipy.sparse.rand(n, n, density=0.2, format='csc',
random_state=42, dtype=np.bool_)
data.setdiag(np.zeros(n, dtype=np.bool_))
self.data = data
# choose some random vertices
v = np.arange(n)
rng.shuffle(v)
self.indices = v[:int(n*.1)]
def time_dijkstra_multi(self, n, min_only):
dijkstra(self.data,
directed=False,
indices=self.indices,
min_only=min_only)
ENH: Add star graph for sparse.csgraph.dijkstra benchmark"""benchmarks for the scipy.sparse.csgraph module"""
import numpy as np
import scipy.sparse
from .common import Benchmark, safe_import
with safe_import():
from scipy.sparse.csgraph import dijkstra
class Dijkstra(Benchmark):
params = [
[30, 300, 900],
[True, False],
['random', 'star']
]
param_names = ['n', 'min_only', 'format']
def setup(self, n, min_only, format):
rng = np.random.default_rng(1234)
if format == 'random':
# make a random connectivity matrix
data = scipy.sparse.rand(n, n, density=0.2, format='csc',
random_state=42, dtype=np.bool_)
data.setdiag(np.zeros(n, dtype=np.bool_))
self.data = data
elif format == 'star':
rows = [0 for i in range(n - 1)] + [i + 1 for i in range(n - 1)]
cols = [i + 1 for i in range(n - 1)] + [0 for i in range(n - 1)]
weights = [i + 1 for i in range(n - 1)] * 2
self.data = scipy.sparse.csr_matrix((weights, (rows, cols)),
shape=(n, n))
# choose some random vertices
v = np.arange(n)
rng.shuffle(v)
self.indices = v[:int(n*.1)]
def time_dijkstra_multi(self, n, min_only, format):
dijkstra(self.data,
directed=False,
indices=self.indices,
min_only=min_only)
|
<commit_before>"""benchmarks for the scipy.sparse.csgraph module"""
import numpy as np
import scipy.sparse
from .common import Benchmark, safe_import
with safe_import():
from scipy.sparse.csgraph import dijkstra
class Dijkstra(Benchmark):
params = [
[30, 300, 900],
[True, False]
]
param_names = ['n', 'min_only']
def setup(self, n, min_only):
rng = np.random.default_rng(1234)
# make a random connectivity matrix
data = scipy.sparse.rand(n, n, density=0.2, format='csc',
random_state=42, dtype=np.bool_)
data.setdiag(np.zeros(n, dtype=np.bool_))
self.data = data
# choose some random vertices
v = np.arange(n)
rng.shuffle(v)
self.indices = v[:int(n*.1)]
def time_dijkstra_multi(self, n, min_only):
dijkstra(self.data,
directed=False,
indices=self.indices,
min_only=min_only)
<commit_msg>ENH: Add star graph for sparse.csgraph.dijkstra benchmark<commit_after>"""benchmarks for the scipy.sparse.csgraph module"""
import numpy as np
import scipy.sparse
from .common import Benchmark, safe_import
with safe_import():
from scipy.sparse.csgraph import dijkstra
class Dijkstra(Benchmark):
params = [
[30, 300, 900],
[True, False],
['random', 'star']
]
param_names = ['n', 'min_only', 'format']
def setup(self, n, min_only, format):
rng = np.random.default_rng(1234)
if format == 'random':
# make a random connectivity matrix
data = scipy.sparse.rand(n, n, density=0.2, format='csc',
random_state=42, dtype=np.bool_)
data.setdiag(np.zeros(n, dtype=np.bool_))
self.data = data
elif format == 'star':
rows = [0 for i in range(n - 1)] + [i + 1 for i in range(n - 1)]
cols = [i + 1 for i in range(n - 1)] + [0 for i in range(n - 1)]
weights = [i + 1 for i in range(n - 1)] * 2
self.data = scipy.sparse.csr_matrix((weights, (rows, cols)),
shape=(n, n))
# choose some random vertices
v = np.arange(n)
rng.shuffle(v)
self.indices = v[:int(n*.1)]
def time_dijkstra_multi(self, n, min_only, format):
dijkstra(self.data,
directed=False,
indices=self.indices,
min_only=min_only)
|
d1fd32946ba422e8f240bd44bffab3107f4d1057
|
pymoji/__init__.py
|
pymoji/__init__.py
|
"""Python Library Boilerplate contains all the boilerplate you need to create a Python package."""
__author__ = 'Michael Joseph'
__email__ = 'michaeljoseph@gmail.com'
__url__ = 'https://github.com/michaeljoseph/pymoji'
__version__ = '0.0.1'
def pymoji():
return 'Hello World!'
|
"""Emits HTML from emoji"""
__author__ = 'Michael Joseph'
__email__ = 'michaeljoseph@gmail.com'
__url__ = 'https://github.com/michaeljoseph/pymoji'
__version__ = '0.0.1'
from .emoji import emoji
def pymoji(text):
if text[0] <> text[:-1] and text[0] <> ':':
text = ':%s:' % text
return emoji(text)
|
Return the emoji and format it
|
Return the emoji and format it
|
Python
|
apache-2.0
|
michaeljoseph/pymoji,michaeljoseph/pymoji
|
"""Python Library Boilerplate contains all the boilerplate you need to create a Python package."""
__author__ = 'Michael Joseph'
__email__ = 'michaeljoseph@gmail.com'
__url__ = 'https://github.com/michaeljoseph/pymoji'
__version__ = '0.0.1'
def pymoji():
return 'Hello World!'
Return the emoji and format it
|
"""Emits HTML from emoji"""
__author__ = 'Michael Joseph'
__email__ = 'michaeljoseph@gmail.com'
__url__ = 'https://github.com/michaeljoseph/pymoji'
__version__ = '0.0.1'
from .emoji import emoji
def pymoji(text):
if text[0] <> text[:-1] and text[0] <> ':':
text = ':%s:' % text
return emoji(text)
|
<commit_before>"""Python Library Boilerplate contains all the boilerplate you need to create a Python package."""
__author__ = 'Michael Joseph'
__email__ = 'michaeljoseph@gmail.com'
__url__ = 'https://github.com/michaeljoseph/pymoji'
__version__ = '0.0.1'
def pymoji():
return 'Hello World!'
<commit_msg>Return the emoji and format it<commit_after>
|
"""Emits HTML from emoji"""
__author__ = 'Michael Joseph'
__email__ = 'michaeljoseph@gmail.com'
__url__ = 'https://github.com/michaeljoseph/pymoji'
__version__ = '0.0.1'
from .emoji import emoji
def pymoji(text):
if text[0] <> text[:-1] and text[0] <> ':':
text = ':%s:' % text
return emoji(text)
|
"""Python Library Boilerplate contains all the boilerplate you need to create a Python package."""
__author__ = 'Michael Joseph'
__email__ = 'michaeljoseph@gmail.com'
__url__ = 'https://github.com/michaeljoseph/pymoji'
__version__ = '0.0.1'
def pymoji():
return 'Hello World!'
Return the emoji and format it"""Emits HTML from emoji"""
__author__ = 'Michael Joseph'
__email__ = 'michaeljoseph@gmail.com'
__url__ = 'https://github.com/michaeljoseph/pymoji'
__version__ = '0.0.1'
from .emoji import emoji
def pymoji(text):
if text[0] <> text[:-1] and text[0] <> ':':
text = ':%s:' % text
return emoji(text)
|
<commit_before>"""Python Library Boilerplate contains all the boilerplate you need to create a Python package."""
__author__ = 'Michael Joseph'
__email__ = 'michaeljoseph@gmail.com'
__url__ = 'https://github.com/michaeljoseph/pymoji'
__version__ = '0.0.1'
def pymoji():
return 'Hello World!'
<commit_msg>Return the emoji and format it<commit_after>"""Emits HTML from emoji"""
__author__ = 'Michael Joseph'
__email__ = 'michaeljoseph@gmail.com'
__url__ = 'https://github.com/michaeljoseph/pymoji'
__version__ = '0.0.1'
from .emoji import emoji
def pymoji(text):
if text[0] <> text[:-1] and text[0] <> ':':
text = ':%s:' % text
return emoji(text)
|
bca6ca83ce43f6d9b96ac590bda9c6253384ab69
|
winthrop/people/viaf.py
|
winthrop/people/viaf.py
|
import requests
from django.conf import settings
class ViafAPI(object):
"""Wrapper for ViafAPI"""
def __init__(self):
default_url = 'https://www.viaf.org/viaf/AutoSuggest?query='
self.base_url = getattr(settings, "VIAF_AUTOSUGGEST_URL", default_url)
def search(self, query):
"""Do a GET request to pull in JSON"""
r = requests.get('%s%s' % (self.base_url, query))
# Check to make sure we have a sucesss (i.e. a 200 code)
if 200 <= r.status_code < 300:
return r.json()
else:
return None
@classmethod
def uri_from_id(cls, viaf_id):
return 'https://viaf.org/viaf/%s/' % viaf_id
|
import json
import requests
class ViafAPI(object):
"""Wrapper for Viaf API"""
def __init__(self):
self.base_url = "https://www.viaf.org/"
def suggest(self, query):
"""Do a GET request to pull in JSON"""
url = self.base_url + "viaf/AutoSuggest?query="
r = requests.get("%s%s" % (url, query))
# If result is empty, return an empty list instead of None
if not (r.json())['result']:
return json.dumps({'result': []})
return r.json()
@classmethod
def uri_from_id(cls, viaf_id):
return "https://viaf.org/viaf/%s/" % viaf_id
|
Refactor for other search options later (search -> suggest)
|
Refactor for other search options later (search -> suggest)
|
Python
|
apache-2.0
|
Princeton-CDH/winthrop-django,Princeton-CDH/winthrop-django,Princeton-CDH/winthrop-django
|
import requests
from django.conf import settings
class ViafAPI(object):
"""Wrapper for ViafAPI"""
def __init__(self):
default_url = 'https://www.viaf.org/viaf/AutoSuggest?query='
self.base_url = getattr(settings, "VIAF_AUTOSUGGEST_URL", default_url)
def search(self, query):
"""Do a GET request to pull in JSON"""
r = requests.get('%s%s' % (self.base_url, query))
# Check to make sure we have a sucesss (i.e. a 200 code)
if 200 <= r.status_code < 300:
return r.json()
else:
return None
@classmethod
def uri_from_id(cls, viaf_id):
return 'https://viaf.org/viaf/%s/' % viaf_id
Refactor for other search options later (search -> suggest)
|
import json
import requests
class ViafAPI(object):
"""Wrapper for Viaf API"""
def __init__(self):
self.base_url = "https://www.viaf.org/"
def suggest(self, query):
"""Do a GET request to pull in JSON"""
url = self.base_url + "viaf/AutoSuggest?query="
r = requests.get("%s%s" % (url, query))
# If result is empty, return an empty list instead of None
if not (r.json())['result']:
return json.dumps({'result': []})
return r.json()
@classmethod
def uri_from_id(cls, viaf_id):
return "https://viaf.org/viaf/%s/" % viaf_id
|
<commit_before>import requests
from django.conf import settings
class ViafAPI(object):
"""Wrapper for ViafAPI"""
def __init__(self):
default_url = 'https://www.viaf.org/viaf/AutoSuggest?query='
self.base_url = getattr(settings, "VIAF_AUTOSUGGEST_URL", default_url)
def search(self, query):
"""Do a GET request to pull in JSON"""
r = requests.get('%s%s' % (self.base_url, query))
# Check to make sure we have a sucesss (i.e. a 200 code)
if 200 <= r.status_code < 300:
return r.json()
else:
return None
@classmethod
def uri_from_id(cls, viaf_id):
return 'https://viaf.org/viaf/%s/' % viaf_id
<commit_msg>Refactor for other search options later (search -> suggest)<commit_after>
|
import json
import requests
class ViafAPI(object):
"""Wrapper for Viaf API"""
def __init__(self):
self.base_url = "https://www.viaf.org/"
def suggest(self, query):
"""Do a GET request to pull in JSON"""
url = self.base_url + "viaf/AutoSuggest?query="
r = requests.get("%s%s" % (url, query))
# If result is empty, return an empty list instead of None
if not (r.json())['result']:
return json.dumps({'result': []})
return r.json()
@classmethod
def uri_from_id(cls, viaf_id):
return "https://viaf.org/viaf/%s/" % viaf_id
|
import requests
from django.conf import settings
class ViafAPI(object):
"""Wrapper for ViafAPI"""
def __init__(self):
default_url = 'https://www.viaf.org/viaf/AutoSuggest?query='
self.base_url = getattr(settings, "VIAF_AUTOSUGGEST_URL", default_url)
def search(self, query):
"""Do a GET request to pull in JSON"""
r = requests.get('%s%s' % (self.base_url, query))
# Check to make sure we have a sucesss (i.e. a 200 code)
if 200 <= r.status_code < 300:
return r.json()
else:
return None
@classmethod
def uri_from_id(cls, viaf_id):
return 'https://viaf.org/viaf/%s/' % viaf_id
Refactor for other search options later (search -> suggest)import json
import requests
class ViafAPI(object):
"""Wrapper for Viaf API"""
def __init__(self):
self.base_url = "https://www.viaf.org/"
def suggest(self, query):
"""Do a GET request to pull in JSON"""
url = self.base_url + "viaf/AutoSuggest?query="
r = requests.get("%s%s" % (url, query))
# If result is empty, return an empty list instead of None
if not (r.json())['result']:
return json.dumps({'result': []})
return r.json()
@classmethod
def uri_from_id(cls, viaf_id):
return "https://viaf.org/viaf/%s/" % viaf_id
|
<commit_before>import requests
from django.conf import settings
class ViafAPI(object):
"""Wrapper for ViafAPI"""
def __init__(self):
default_url = 'https://www.viaf.org/viaf/AutoSuggest?query='
self.base_url = getattr(settings, "VIAF_AUTOSUGGEST_URL", default_url)
def search(self, query):
"""Do a GET request to pull in JSON"""
r = requests.get('%s%s' % (self.base_url, query))
# Check to make sure we have a sucesss (i.e. a 200 code)
if 200 <= r.status_code < 300:
return r.json()
else:
return None
@classmethod
def uri_from_id(cls, viaf_id):
return 'https://viaf.org/viaf/%s/' % viaf_id
<commit_msg>Refactor for other search options later (search -> suggest)<commit_after>import json
import requests
class ViafAPI(object):
"""Wrapper for Viaf API"""
def __init__(self):
self.base_url = "https://www.viaf.org/"
def suggest(self, query):
"""Do a GET request to pull in JSON"""
url = self.base_url + "viaf/AutoSuggest?query="
r = requests.get("%s%s" % (url, query))
# If result is empty, return an empty list instead of None
if not (r.json())['result']:
return json.dumps({'result': []})
return r.json()
@classmethod
def uri_from_id(cls, viaf_id):
return "https://viaf.org/viaf/%s/" % viaf_id
|
3e2be4a8a597cfaa11b625eb6a94a4a18061df9b
|
readux/__init__.py
|
readux/__init__.py
|
__version_info__ = (1, 6, 1, None)
# Dot-connect all but the last. Last is dash-connected if not None.
__version__ = '.'.join([str(i) for i in __version_info__[:-1]])
if __version_info__[-1] is not None:
__version__ += ('-%s' % (__version_info__[-1],))
# context processor to add version to the template environment
def context_extras(request):
socialauth_providers = []
# generate a list of social auth providers associated with this account,
# for use in displaying available backends
if not request.user.is_anonymous():
socialauth_providers = [auth.provider for auth in request.user.social_auth.all()]
return {
# software version
'SW_VERSION': __version__,
# Alternate names for social-auth backends,
# to be used for display and font-awesome icon (lowercased)
# If not entered here, backend name will be used as-is for
# icon and title-cased for display (i.e., twitter / Twitter).
'backend_names': {
'github': 'GitHub',
'google-oauth2': 'Google',
},
'user_socialauth_providers': socialauth_providers
}
|
__version_info__ = (1, 7, 0, 'dev')
# Dot-connect all but the last. Last is dash-connected if not None.
__version__ = '.'.join([str(i) for i in __version_info__[:-1]])
if __version_info__[-1] is not None:
__version__ += ('-%s' % (__version_info__[-1],))
# context processor to add version to the template environment
def context_extras(request):
socialauth_providers = []
# generate a list of social auth providers associated with this account,
# for use in displaying available backends
if not request.user.is_anonymous():
socialauth_providers = [auth.provider for auth in request.user.social_auth.all()]
return {
# software version
'SW_VERSION': __version__,
# Alternate names for social-auth backends,
# to be used for display and font-awesome icon (lowercased)
# If not entered here, backend name will be used as-is for
# icon and title-cased for display (i.e., twitter / Twitter).
'backend_names': {
'github': 'GitHub',
'google-oauth2': 'Google',
},
'user_socialauth_providers': socialauth_providers
}
|
Update develop version to 1.7-dev since 1.6 is in production
|
Update develop version to 1.7-dev since 1.6 is in production
|
Python
|
apache-2.0
|
emory-libraries/readux,emory-libraries/readux,emory-libraries/readux
|
__version_info__ = (1, 6, 1, None)
# Dot-connect all but the last. Last is dash-connected if not None.
__version__ = '.'.join([str(i) for i in __version_info__[:-1]])
if __version_info__[-1] is not None:
__version__ += ('-%s' % (__version_info__[-1],))
# context processor to add version to the template environment
def context_extras(request):
socialauth_providers = []
# generate a list of social auth providers associated with this account,
# for use in displaying available backends
if not request.user.is_anonymous():
socialauth_providers = [auth.provider for auth in request.user.social_auth.all()]
return {
# software version
'SW_VERSION': __version__,
# Alternate names for social-auth backends,
# to be used for display and font-awesome icon (lowercased)
# If not entered here, backend name will be used as-is for
# icon and title-cased for display (i.e., twitter / Twitter).
'backend_names': {
'github': 'GitHub',
'google-oauth2': 'Google',
},
'user_socialauth_providers': socialauth_providers
}
Update develop version to 1.7-dev since 1.6 is in production
|
__version_info__ = (1, 7, 0, 'dev')
# Dot-connect all but the last. Last is dash-connected if not None.
__version__ = '.'.join([str(i) for i in __version_info__[:-1]])
if __version_info__[-1] is not None:
__version__ += ('-%s' % (__version_info__[-1],))
# context processor to add version to the template environment
def context_extras(request):
socialauth_providers = []
# generate a list of social auth providers associated with this account,
# for use in displaying available backends
if not request.user.is_anonymous():
socialauth_providers = [auth.provider for auth in request.user.social_auth.all()]
return {
# software version
'SW_VERSION': __version__,
# Alternate names for social-auth backends,
# to be used for display and font-awesome icon (lowercased)
# If not entered here, backend name will be used as-is for
# icon and title-cased for display (i.e., twitter / Twitter).
'backend_names': {
'github': 'GitHub',
'google-oauth2': 'Google',
},
'user_socialauth_providers': socialauth_providers
}
|
<commit_before>__version_info__ = (1, 6, 1, None)
# Dot-connect all but the last. Last is dash-connected if not None.
__version__ = '.'.join([str(i) for i in __version_info__[:-1]])
if __version_info__[-1] is not None:
__version__ += ('-%s' % (__version_info__[-1],))
# context processor to add version to the template environment
def context_extras(request):
socialauth_providers = []
# generate a list of social auth providers associated with this account,
# for use in displaying available backends
if not request.user.is_anonymous():
socialauth_providers = [auth.provider for auth in request.user.social_auth.all()]
return {
# software version
'SW_VERSION': __version__,
# Alternate names for social-auth backends,
# to be used for display and font-awesome icon (lowercased)
# If not entered here, backend name will be used as-is for
# icon and title-cased for display (i.e., twitter / Twitter).
'backend_names': {
'github': 'GitHub',
'google-oauth2': 'Google',
},
'user_socialauth_providers': socialauth_providers
}
<commit_msg>Update develop version to 1.7-dev since 1.6 is in production<commit_after>
|
__version_info__ = (1, 7, 0, 'dev')
# Dot-connect all but the last. Last is dash-connected if not None.
__version__ = '.'.join([str(i) for i in __version_info__[:-1]])
if __version_info__[-1] is not None:
__version__ += ('-%s' % (__version_info__[-1],))
# context processor to add version to the template environment
def context_extras(request):
socialauth_providers = []
# generate a list of social auth providers associated with this account,
# for use in displaying available backends
if not request.user.is_anonymous():
socialauth_providers = [auth.provider for auth in request.user.social_auth.all()]
return {
# software version
'SW_VERSION': __version__,
# Alternate names for social-auth backends,
# to be used for display and font-awesome icon (lowercased)
# If not entered here, backend name will be used as-is for
# icon and title-cased for display (i.e., twitter / Twitter).
'backend_names': {
'github': 'GitHub',
'google-oauth2': 'Google',
},
'user_socialauth_providers': socialauth_providers
}
|
__version_info__ = (1, 6, 1, None)
# Dot-connect all but the last. Last is dash-connected if not None.
__version__ = '.'.join([str(i) for i in __version_info__[:-1]])
if __version_info__[-1] is not None:
__version__ += ('-%s' % (__version_info__[-1],))
# context processor to add version to the template environment
def context_extras(request):
socialauth_providers = []
# generate a list of social auth providers associated with this account,
# for use in displaying available backends
if not request.user.is_anonymous():
socialauth_providers = [auth.provider for auth in request.user.social_auth.all()]
return {
# software version
'SW_VERSION': __version__,
# Alternate names for social-auth backends,
# to be used for display and font-awesome icon (lowercased)
# If not entered here, backend name will be used as-is for
# icon and title-cased for display (i.e., twitter / Twitter).
'backend_names': {
'github': 'GitHub',
'google-oauth2': 'Google',
},
'user_socialauth_providers': socialauth_providers
}
Update develop version to 1.7-dev since 1.6 is in production__version_info__ = (1, 7, 0, 'dev')
# Dot-connect all but the last. Last is dash-connected if not None.
__version__ = '.'.join([str(i) for i in __version_info__[:-1]])
if __version_info__[-1] is not None:
__version__ += ('-%s' % (__version_info__[-1],))
# context processor to add version to the template environment
def context_extras(request):
socialauth_providers = []
# generate a list of social auth providers associated with this account,
# for use in displaying available backends
if not request.user.is_anonymous():
socialauth_providers = [auth.provider for auth in request.user.social_auth.all()]
return {
# software version
'SW_VERSION': __version__,
# Alternate names for social-auth backends,
# to be used for display and font-awesome icon (lowercased)
# If not entered here, backend name will be used as-is for
# icon and title-cased for display (i.e., twitter / Twitter).
'backend_names': {
'github': 'GitHub',
'google-oauth2': 'Google',
},
'user_socialauth_providers': socialauth_providers
}
|
<commit_before>__version_info__ = (1, 6, 1, None)
# Dot-connect all but the last. Last is dash-connected if not None.
__version__ = '.'.join([str(i) for i in __version_info__[:-1]])
if __version_info__[-1] is not None:
__version__ += ('-%s' % (__version_info__[-1],))
# context processor to add version to the template environment
def context_extras(request):
socialauth_providers = []
# generate a list of social auth providers associated with this account,
# for use in displaying available backends
if not request.user.is_anonymous():
socialauth_providers = [auth.provider for auth in request.user.social_auth.all()]
return {
# software version
'SW_VERSION': __version__,
# Alternate names for social-auth backends,
# to be used for display and font-awesome icon (lowercased)
# If not entered here, backend name will be used as-is for
# icon and title-cased for display (i.e., twitter / Twitter).
'backend_names': {
'github': 'GitHub',
'google-oauth2': 'Google',
},
'user_socialauth_providers': socialauth_providers
}
<commit_msg>Update develop version to 1.7-dev since 1.6 is in production<commit_after>__version_info__ = (1, 7, 0, 'dev')
# Dot-connect all but the last. Last is dash-connected if not None.
__version__ = '.'.join([str(i) for i in __version_info__[:-1]])
if __version_info__[-1] is not None:
__version__ += ('-%s' % (__version_info__[-1],))
# context processor to add version to the template environment
def context_extras(request):
socialauth_providers = []
# generate a list of social auth providers associated with this account,
# for use in displaying available backends
if not request.user.is_anonymous():
socialauth_providers = [auth.provider for auth in request.user.social_auth.all()]
return {
# software version
'SW_VERSION': __version__,
# Alternate names for social-auth backends,
# to be used for display and font-awesome icon (lowercased)
# If not entered here, backend name will be used as-is for
# icon and title-cased for display (i.e., twitter / Twitter).
'backend_names': {
'github': 'GitHub',
'google-oauth2': 'Google',
},
'user_socialauth_providers': socialauth_providers
}
|
b5068d644ffde56f302e9aee5b77e837a1d3e181
|
pyshelf/app.py
|
pyshelf/app.py
|
import flask
from pyshelf.routes.artifact import artifact
import pyshelf.response_map as response_map
app = flask.Flask(__name__)
app.register_blueprint(artifact)
@app.errorhandler(Exception)
def generic_exception_handler(error):
if not error.message:
error.message = "Internal Server Error"
return response_map.create_500(msg=error.message)
@app.after_request
def format_response(response):
if response.status_code == 404:
response = response_map.create_404()
data = response.get_data()
data += "\n"
response.set_data(data)
return response
|
import flask
from pyshelf.routes.artifact import artifact
import pyshelf.response_map as response_map
import logging
app = flask.Flask(__name__)
app.register_blueprint(artifact)
@app.errorhandler(Exception)
def generic_exception_handler(error):
"""
Prevents Exceptions flying all around the place.
"""
logger = logging.getLogger(__name__)
logger.debug("Exception was caught by the generic exception handler, {0} - {1}".format(type(error), error))
if not error.message:
error.message = "Internal Server Error"
return response_map.create_500(msg=error.message)
@app.after_request
def format_response(response):
if response.status_code == 404:
response = response_map.create_404()
data = response.get_data()
data += "\n"
response.set_data(data)
return response
|
Add some logging to generic error handler.
|
Add some logging to generic error handler.
|
Python
|
mit
|
not-nexus/shelf,kyle-long/pyshelf,not-nexus/shelf,kyle-long/pyshelf
|
import flask
from pyshelf.routes.artifact import artifact
import pyshelf.response_map as response_map
app = flask.Flask(__name__)
app.register_blueprint(artifact)
@app.errorhandler(Exception)
def generic_exception_handler(error):
if not error.message:
error.message = "Internal Server Error"
return response_map.create_500(msg=error.message)
@app.after_request
def format_response(response):
if response.status_code == 404:
response = response_map.create_404()
data = response.get_data()
data += "\n"
response.set_data(data)
return response
Add some logging to generic error handler.
|
import flask
from pyshelf.routes.artifact import artifact
import pyshelf.response_map as response_map
import logging
app = flask.Flask(__name__)
app.register_blueprint(artifact)
@app.errorhandler(Exception)
def generic_exception_handler(error):
"""
Prevents Exceptions flying all around the place.
"""
logger = logging.getLogger(__name__)
logger.debug("Exception was caught by the generic exception handler, {0} - {1}".format(type(error), error))
if not error.message:
error.message = "Internal Server Error"
return response_map.create_500(msg=error.message)
@app.after_request
def format_response(response):
if response.status_code == 404:
response = response_map.create_404()
data = response.get_data()
data += "\n"
response.set_data(data)
return response
|
<commit_before>import flask
from pyshelf.routes.artifact import artifact
import pyshelf.response_map as response_map
app = flask.Flask(__name__)
app.register_blueprint(artifact)
@app.errorhandler(Exception)
def generic_exception_handler(error):
if not error.message:
error.message = "Internal Server Error"
return response_map.create_500(msg=error.message)
@app.after_request
def format_response(response):
if response.status_code == 404:
response = response_map.create_404()
data = response.get_data()
data += "\n"
response.set_data(data)
return response
<commit_msg>Add some logging to generic error handler.<commit_after>
|
import flask
from pyshelf.routes.artifact import artifact
import pyshelf.response_map as response_map
import logging
app = flask.Flask(__name__)
app.register_blueprint(artifact)
@app.errorhandler(Exception)
def generic_exception_handler(error):
"""
Prevents Exceptions flying all around the place.
"""
logger = logging.getLogger(__name__)
logger.debug("Exception was caught by the generic exception handler, {0} - {1}".format(type(error), error))
if not error.message:
error.message = "Internal Server Error"
return response_map.create_500(msg=error.message)
@app.after_request
def format_response(response):
if response.status_code == 404:
response = response_map.create_404()
data = response.get_data()
data += "\n"
response.set_data(data)
return response
|
import flask
from pyshelf.routes.artifact import artifact
import pyshelf.response_map as response_map
app = flask.Flask(__name__)
app.register_blueprint(artifact)
@app.errorhandler(Exception)
def generic_exception_handler(error):
if not error.message:
error.message = "Internal Server Error"
return response_map.create_500(msg=error.message)
@app.after_request
def format_response(response):
if response.status_code == 404:
response = response_map.create_404()
data = response.get_data()
data += "\n"
response.set_data(data)
return response
Add some logging to generic error handler.import flask
from pyshelf.routes.artifact import artifact
import pyshelf.response_map as response_map
import logging
app = flask.Flask(__name__)
app.register_blueprint(artifact)
@app.errorhandler(Exception)
def generic_exception_handler(error):
"""
Prevents Exceptions flying all around the place.
"""
logger = logging.getLogger(__name__)
logger.debug("Exception was caught by the generic exception handler, {0} - {1}".format(type(error), error))
if not error.message:
error.message = "Internal Server Error"
return response_map.create_500(msg=error.message)
@app.after_request
def format_response(response):
if response.status_code == 404:
response = response_map.create_404()
data = response.get_data()
data += "\n"
response.set_data(data)
return response
|
<commit_before>import flask
from pyshelf.routes.artifact import artifact
import pyshelf.response_map as response_map
app = flask.Flask(__name__)
app.register_blueprint(artifact)
@app.errorhandler(Exception)
def generic_exception_handler(error):
if not error.message:
error.message = "Internal Server Error"
return response_map.create_500(msg=error.message)
@app.after_request
def format_response(response):
if response.status_code == 404:
response = response_map.create_404()
data = response.get_data()
data += "\n"
response.set_data(data)
return response
<commit_msg>Add some logging to generic error handler.<commit_after>import flask
from pyshelf.routes.artifact import artifact
import pyshelf.response_map as response_map
import logging
app = flask.Flask(__name__)
app.register_blueprint(artifact)
@app.errorhandler(Exception)
def generic_exception_handler(error):
"""
Prevents Exceptions flying all around the place.
"""
logger = logging.getLogger(__name__)
logger.debug("Exception was caught by the generic exception handler, {0} - {1}".format(type(error), error))
if not error.message:
error.message = "Internal Server Error"
return response_map.create_500(msg=error.message)
@app.after_request
def format_response(response):
if response.status_code == 404:
response = response_map.create_404()
data = response.get_data()
data += "\n"
response.set_data(data)
return response
|
e503ef58e801cfbc3ba72ba84bc2150c79a401d3
|
girder/molecules/molecules/models/geometry.py
|
girder/molecules/molecules/models/geometry.py
|
from bson.objectid import ObjectId
from girder.models.model_base import AccessControlledModel
from girder.constants import AccessType
from .molecule import Molecule as MoleculeModel
class Geometry(AccessControlledModel):
def __init__(self):
super(Geometry, self).__init__()
def initialize(self):
self.name = 'geometry'
self.ensureIndex('moleculeId')
self.exposeFields(level=AccessType.READ, fields=(
'_id', 'moleculeId', 'cjson', 'provenanceType', 'provenanceId'))
def validate(self, doc):
# If we have a moleculeId ensure it is valid.
if 'moleculeId' in doc:
mol = MoleculeModel().load(doc['moleculeId'], force=True)
doc['moleculeId'] = mol['_id']
return doc
def create(self, user, moleculeId, cjson, provenanceType=None,
provenanceId=None, public=False):
geometry = {
'moleculeId': moleculeId,
'cjson': cjson
}
if provenanceType is not None:
geometry['provenanceType'] = provenanceType
if provenanceId is not None:
geometry['provenanceId'] = provenanceId
self.setUserAccess(geometry, user=user, level=AccessType.ADMIN)
if public:
self.setPublic(geometry, True)
return self.save(geometry)
def find_geometries(self, moleculeId, user=None):
query = {
'moleculeId': ObjectId(moleculeId)
}
return self.findWithPermissions(query, user=user)
|
from bson.objectid import ObjectId
from girder.models.model_base import AccessControlledModel
from girder.constants import AccessType
from .molecule import Molecule as MoleculeModel
class Geometry(AccessControlledModel):
def __init__(self):
super(Geometry, self).__init__()
def initialize(self):
self.name = 'geometry'
self.ensureIndex('moleculeId')
self.exposeFields(level=AccessType.READ, fields=(
'_id', 'moleculeId', 'cjson', 'provenanceType', 'provenanceId'))
def validate(self, doc):
# If we have a moleculeId ensure it is valid.
if 'moleculeId' in doc:
mol = MoleculeModel().load(doc['moleculeId'], force=True)
doc['moleculeId'] = mol['_id']
return doc
def create(self, user, moleculeId, cjson, provenanceType=None,
provenanceId=None, public=False):
geometry = {
'moleculeId': moleculeId,
'cjson': cjson,
'creatorId': user['_id']
}
if provenanceType is not None:
geometry['provenanceType'] = provenanceType
if provenanceId is not None:
geometry['provenanceId'] = provenanceId
self.setUserAccess(geometry, user=user, level=AccessType.ADMIN)
if public:
self.setPublic(geometry, True)
return self.save(geometry)
def find_geometries(self, moleculeId, user=None):
query = {
'moleculeId': ObjectId(moleculeId)
}
return self.findWithPermissions(query, user=user)
|
Save creatorId as well for geometries
|
Save creatorId as well for geometries
This is to keep track of the creator, even when the provenance
is not the user.
Signed-off-by: Patrick Avery <743342299f279e7a8c3ff5eb40671fce3e95f13a@kitware.com>
|
Python
|
bsd-3-clause
|
OpenChemistry/mongochemserver
|
from bson.objectid import ObjectId
from girder.models.model_base import AccessControlledModel
from girder.constants import AccessType
from .molecule import Molecule as MoleculeModel
class Geometry(AccessControlledModel):
def __init__(self):
super(Geometry, self).__init__()
def initialize(self):
self.name = 'geometry'
self.ensureIndex('moleculeId')
self.exposeFields(level=AccessType.READ, fields=(
'_id', 'moleculeId', 'cjson', 'provenanceType', 'provenanceId'))
def validate(self, doc):
# If we have a moleculeId ensure it is valid.
if 'moleculeId' in doc:
mol = MoleculeModel().load(doc['moleculeId'], force=True)
doc['moleculeId'] = mol['_id']
return doc
def create(self, user, moleculeId, cjson, provenanceType=None,
provenanceId=None, public=False):
geometry = {
'moleculeId': moleculeId,
'cjson': cjson
}
if provenanceType is not None:
geometry['provenanceType'] = provenanceType
if provenanceId is not None:
geometry['provenanceId'] = provenanceId
self.setUserAccess(geometry, user=user, level=AccessType.ADMIN)
if public:
self.setPublic(geometry, True)
return self.save(geometry)
def find_geometries(self, moleculeId, user=None):
query = {
'moleculeId': ObjectId(moleculeId)
}
return self.findWithPermissions(query, user=user)
Save creatorId as well for geometries
This is to keep track of the creator, even when the provenance
is not the user.
Signed-off-by: Patrick Avery <743342299f279e7a8c3ff5eb40671fce3e95f13a@kitware.com>
|
from bson.objectid import ObjectId
from girder.models.model_base import AccessControlledModel
from girder.constants import AccessType
from .molecule import Molecule as MoleculeModel
class Geometry(AccessControlledModel):
def __init__(self):
super(Geometry, self).__init__()
def initialize(self):
self.name = 'geometry'
self.ensureIndex('moleculeId')
self.exposeFields(level=AccessType.READ, fields=(
'_id', 'moleculeId', 'cjson', 'provenanceType', 'provenanceId'))
def validate(self, doc):
# If we have a moleculeId ensure it is valid.
if 'moleculeId' in doc:
mol = MoleculeModel().load(doc['moleculeId'], force=True)
doc['moleculeId'] = mol['_id']
return doc
def create(self, user, moleculeId, cjson, provenanceType=None,
provenanceId=None, public=False):
geometry = {
'moleculeId': moleculeId,
'cjson': cjson,
'creatorId': user['_id']
}
if provenanceType is not None:
geometry['provenanceType'] = provenanceType
if provenanceId is not None:
geometry['provenanceId'] = provenanceId
self.setUserAccess(geometry, user=user, level=AccessType.ADMIN)
if public:
self.setPublic(geometry, True)
return self.save(geometry)
def find_geometries(self, moleculeId, user=None):
query = {
'moleculeId': ObjectId(moleculeId)
}
return self.findWithPermissions(query, user=user)
|
<commit_before>from bson.objectid import ObjectId
from girder.models.model_base import AccessControlledModel
from girder.constants import AccessType
from .molecule import Molecule as MoleculeModel
class Geometry(AccessControlledModel):
def __init__(self):
super(Geometry, self).__init__()
def initialize(self):
self.name = 'geometry'
self.ensureIndex('moleculeId')
self.exposeFields(level=AccessType.READ, fields=(
'_id', 'moleculeId', 'cjson', 'provenanceType', 'provenanceId'))
def validate(self, doc):
# If we have a moleculeId ensure it is valid.
if 'moleculeId' in doc:
mol = MoleculeModel().load(doc['moleculeId'], force=True)
doc['moleculeId'] = mol['_id']
return doc
def create(self, user, moleculeId, cjson, provenanceType=None,
provenanceId=None, public=False):
geometry = {
'moleculeId': moleculeId,
'cjson': cjson
}
if provenanceType is not None:
geometry['provenanceType'] = provenanceType
if provenanceId is not None:
geometry['provenanceId'] = provenanceId
self.setUserAccess(geometry, user=user, level=AccessType.ADMIN)
if public:
self.setPublic(geometry, True)
return self.save(geometry)
def find_geometries(self, moleculeId, user=None):
query = {
'moleculeId': ObjectId(moleculeId)
}
return self.findWithPermissions(query, user=user)
<commit_msg>Save creatorId as well for geometries
This is to keep track of the creator, even when the provenance
is not the user.
Signed-off-by: Patrick Avery <743342299f279e7a8c3ff5eb40671fce3e95f13a@kitware.com><commit_after>
|
from bson.objectid import ObjectId
from girder.models.model_base import AccessControlledModel
from girder.constants import AccessType
from .molecule import Molecule as MoleculeModel
class Geometry(AccessControlledModel):
def __init__(self):
super(Geometry, self).__init__()
def initialize(self):
self.name = 'geometry'
self.ensureIndex('moleculeId')
self.exposeFields(level=AccessType.READ, fields=(
'_id', 'moleculeId', 'cjson', 'provenanceType', 'provenanceId'))
def validate(self, doc):
# If we have a moleculeId ensure it is valid.
if 'moleculeId' in doc:
mol = MoleculeModel().load(doc['moleculeId'], force=True)
doc['moleculeId'] = mol['_id']
return doc
def create(self, user, moleculeId, cjson, provenanceType=None,
provenanceId=None, public=False):
geometry = {
'moleculeId': moleculeId,
'cjson': cjson,
'creatorId': user['_id']
}
if provenanceType is not None:
geometry['provenanceType'] = provenanceType
if provenanceId is not None:
geometry['provenanceId'] = provenanceId
self.setUserAccess(geometry, user=user, level=AccessType.ADMIN)
if public:
self.setPublic(geometry, True)
return self.save(geometry)
def find_geometries(self, moleculeId, user=None):
query = {
'moleculeId': ObjectId(moleculeId)
}
return self.findWithPermissions(query, user=user)
|
from bson.objectid import ObjectId
from girder.models.model_base import AccessControlledModel
from girder.constants import AccessType
from .molecule import Molecule as MoleculeModel
class Geometry(AccessControlledModel):
def __init__(self):
super(Geometry, self).__init__()
def initialize(self):
self.name = 'geometry'
self.ensureIndex('moleculeId')
self.exposeFields(level=AccessType.READ, fields=(
'_id', 'moleculeId', 'cjson', 'provenanceType', 'provenanceId'))
def validate(self, doc):
# If we have a moleculeId ensure it is valid.
if 'moleculeId' in doc:
mol = MoleculeModel().load(doc['moleculeId'], force=True)
doc['moleculeId'] = mol['_id']
return doc
def create(self, user, moleculeId, cjson, provenanceType=None,
provenanceId=None, public=False):
geometry = {
'moleculeId': moleculeId,
'cjson': cjson
}
if provenanceType is not None:
geometry['provenanceType'] = provenanceType
if provenanceId is not None:
geometry['provenanceId'] = provenanceId
self.setUserAccess(geometry, user=user, level=AccessType.ADMIN)
if public:
self.setPublic(geometry, True)
return self.save(geometry)
def find_geometries(self, moleculeId, user=None):
query = {
'moleculeId': ObjectId(moleculeId)
}
return self.findWithPermissions(query, user=user)
Save creatorId as well for geometries
This is to keep track of the creator, even when the provenance
is not the user.
Signed-off-by: Patrick Avery <743342299f279e7a8c3ff5eb40671fce3e95f13a@kitware.com>from bson.objectid import ObjectId
from girder.models.model_base import AccessControlledModel
from girder.constants import AccessType
from .molecule import Molecule as MoleculeModel
class Geometry(AccessControlledModel):
def __init__(self):
super(Geometry, self).__init__()
def initialize(self):
self.name = 'geometry'
self.ensureIndex('moleculeId')
self.exposeFields(level=AccessType.READ, fields=(
'_id', 'moleculeId', 'cjson', 'provenanceType', 'provenanceId'))
def validate(self, doc):
# If we have a moleculeId ensure it is valid.
if 'moleculeId' in doc:
mol = MoleculeModel().load(doc['moleculeId'], force=True)
doc['moleculeId'] = mol['_id']
return doc
def create(self, user, moleculeId, cjson, provenanceType=None,
provenanceId=None, public=False):
geometry = {
'moleculeId': moleculeId,
'cjson': cjson,
'creatorId': user['_id']
}
if provenanceType is not None:
geometry['provenanceType'] = provenanceType
if provenanceId is not None:
geometry['provenanceId'] = provenanceId
self.setUserAccess(geometry, user=user, level=AccessType.ADMIN)
if public:
self.setPublic(geometry, True)
return self.save(geometry)
def find_geometries(self, moleculeId, user=None):
query = {
'moleculeId': ObjectId(moleculeId)
}
return self.findWithPermissions(query, user=user)
|
<commit_before>from bson.objectid import ObjectId
from girder.models.model_base import AccessControlledModel
from girder.constants import AccessType
from .molecule import Molecule as MoleculeModel
class Geometry(AccessControlledModel):
def __init__(self):
super(Geometry, self).__init__()
def initialize(self):
self.name = 'geometry'
self.ensureIndex('moleculeId')
self.exposeFields(level=AccessType.READ, fields=(
'_id', 'moleculeId', 'cjson', 'provenanceType', 'provenanceId'))
def validate(self, doc):
# If we have a moleculeId ensure it is valid.
if 'moleculeId' in doc:
mol = MoleculeModel().load(doc['moleculeId'], force=True)
doc['moleculeId'] = mol['_id']
return doc
def create(self, user, moleculeId, cjson, provenanceType=None,
provenanceId=None, public=False):
geometry = {
'moleculeId': moleculeId,
'cjson': cjson
}
if provenanceType is not None:
geometry['provenanceType'] = provenanceType
if provenanceId is not None:
geometry['provenanceId'] = provenanceId
self.setUserAccess(geometry, user=user, level=AccessType.ADMIN)
if public:
self.setPublic(geometry, True)
return self.save(geometry)
def find_geometries(self, moleculeId, user=None):
query = {
'moleculeId': ObjectId(moleculeId)
}
return self.findWithPermissions(query, user=user)
<commit_msg>Save creatorId as well for geometries
This is to keep track of the creator, even when the provenance
is not the user.
Signed-off-by: Patrick Avery <743342299f279e7a8c3ff5eb40671fce3e95f13a@kitware.com><commit_after>from bson.objectid import ObjectId
from girder.models.model_base import AccessControlledModel
from girder.constants import AccessType
from .molecule import Molecule as MoleculeModel
class Geometry(AccessControlledModel):
def __init__(self):
super(Geometry, self).__init__()
def initialize(self):
self.name = 'geometry'
self.ensureIndex('moleculeId')
self.exposeFields(level=AccessType.READ, fields=(
'_id', 'moleculeId', 'cjson', 'provenanceType', 'provenanceId'))
def validate(self, doc):
# If we have a moleculeId ensure it is valid.
if 'moleculeId' in doc:
mol = MoleculeModel().load(doc['moleculeId'], force=True)
doc['moleculeId'] = mol['_id']
return doc
def create(self, user, moleculeId, cjson, provenanceType=None,
provenanceId=None, public=False):
geometry = {
'moleculeId': moleculeId,
'cjson': cjson,
'creatorId': user['_id']
}
if provenanceType is not None:
geometry['provenanceType'] = provenanceType
if provenanceId is not None:
geometry['provenanceId'] = provenanceId
self.setUserAccess(geometry, user=user, level=AccessType.ADMIN)
if public:
self.setPublic(geometry, True)
return self.save(geometry)
def find_geometries(self, moleculeId, user=None):
query = {
'moleculeId': ObjectId(moleculeId)
}
return self.findWithPermissions(query, user=user)
|
3429293244359b5635b7d060caf527a36850f3a2
|
orchestrator/__init__.py
|
orchestrator/__init__.py
|
from __future__ import absolute_import
from celery.signals import setup_logging
import orchestrator.logger
__version__ = '0.3.5'
__author__ = 'sukrit'
orchestrator.logger.init_logging()
setup_logging.connect(orchestrator.logger.init_celery_logging)
|
from __future__ import absolute_import
from celery.signals import setup_logging
import orchestrator.logger
__version__ = '0.3.6'
__author__ = 'sukrit'
orchestrator.logger.init_logging()
setup_logging.connect(orchestrator.logger.init_celery_logging)
|
Prepare for next dev version to incorporate encofing fixes in flask-hyperschema library
|
Prepare for next dev version to incorporate encofing fixes in flask-hyperschema library
|
Python
|
mit
|
totem/cluster-orchestrator,totem/cluster-orchestrator,totem/cluster-orchestrator
|
from __future__ import absolute_import
from celery.signals import setup_logging
import orchestrator.logger
__version__ = '0.3.5'
__author__ = 'sukrit'
orchestrator.logger.init_logging()
setup_logging.connect(orchestrator.logger.init_celery_logging)
Prepare for next dev version to incorporate encofing fixes in flask-hyperschema library
|
from __future__ import absolute_import
from celery.signals import setup_logging
import orchestrator.logger
__version__ = '0.3.6'
__author__ = 'sukrit'
orchestrator.logger.init_logging()
setup_logging.connect(orchestrator.logger.init_celery_logging)
|
<commit_before>from __future__ import absolute_import
from celery.signals import setup_logging
import orchestrator.logger
__version__ = '0.3.5'
__author__ = 'sukrit'
orchestrator.logger.init_logging()
setup_logging.connect(orchestrator.logger.init_celery_logging)
<commit_msg>Prepare for next dev version to incorporate encofing fixes in flask-hyperschema library<commit_after>
|
from __future__ import absolute_import
from celery.signals import setup_logging
import orchestrator.logger
__version__ = '0.3.6'
__author__ = 'sukrit'
orchestrator.logger.init_logging()
setup_logging.connect(orchestrator.logger.init_celery_logging)
|
from __future__ import absolute_import
from celery.signals import setup_logging
import orchestrator.logger
__version__ = '0.3.5'
__author__ = 'sukrit'
orchestrator.logger.init_logging()
setup_logging.connect(orchestrator.logger.init_celery_logging)
Prepare for next dev version to incorporate encofing fixes in flask-hyperschema libraryfrom __future__ import absolute_import
from celery.signals import setup_logging
import orchestrator.logger
__version__ = '0.3.6'
__author__ = 'sukrit'
orchestrator.logger.init_logging()
setup_logging.connect(orchestrator.logger.init_celery_logging)
|
<commit_before>from __future__ import absolute_import
from celery.signals import setup_logging
import orchestrator.logger
__version__ = '0.3.5'
__author__ = 'sukrit'
orchestrator.logger.init_logging()
setup_logging.connect(orchestrator.logger.init_celery_logging)
<commit_msg>Prepare for next dev version to incorporate encofing fixes in flask-hyperschema library<commit_after>from __future__ import absolute_import
from celery.signals import setup_logging
import orchestrator.logger
__version__ = '0.3.6'
__author__ = 'sukrit'
orchestrator.logger.init_logging()
setup_logging.connect(orchestrator.logger.init_celery_logging)
|
5006ba3124cd80a4529b9ed645aa8981d06a9886
|
publishconf.py
|
publishconf.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*- #
from __future__ import unicode_literals
# This file is only used if you use `make publish` or
# explicitly specify it as your config file.
import os
import sys
sys.path.append(os.curdir)
from pelicanconf import *
SITEURL = ''
RELATIVE_URLS = False
FEED_ALL_ATOM = 'feeds/all.atom.xml'
CATEGORY_FEED_ATOM = 'feeds/%s.atom.xml'
DELETE_OUTPUT_DIRECTORY = True
# Following items are often useful when publishing
#DISQUS_SITENAME = ""
#GOOGLE_ANALYTICS = ""
|
#!/usr/bin/env python3
# This file is only used if you use `make publish` or
# explicitly specify it as your config file.
import os
import sys
sys.path.append(os.curdir)
from pelicanconf import *
SITEURL = ''
RELATIVE_URLS = False
DELETE_OUTPUT_DIRECTORY = True
# Following items are often useful when publishing
#DISQUS_SITENAME = ""
#GOOGLE_ANALYTICS = ""
|
Stop generate feeds when publishing
|
Stop generate feeds when publishing
|
Python
|
mit
|
andrewheiss/scorecarddiplomacy-org,andrewheiss/scorecarddiplomacy-org,andrewheiss/scorecarddiplomacy-org,andrewheiss/scorecarddiplomacy-org
|
#!/usr/bin/env python
# -*- coding: utf-8 -*- #
from __future__ import unicode_literals
# This file is only used if you use `make publish` or
# explicitly specify it as your config file.
import os
import sys
sys.path.append(os.curdir)
from pelicanconf import *
SITEURL = ''
RELATIVE_URLS = False
FEED_ALL_ATOM = 'feeds/all.atom.xml'
CATEGORY_FEED_ATOM = 'feeds/%s.atom.xml'
DELETE_OUTPUT_DIRECTORY = True
# Following items are often useful when publishing
#DISQUS_SITENAME = ""
#GOOGLE_ANALYTICS = ""
Stop generate feeds when publishing
|
#!/usr/bin/env python3
# This file is only used if you use `make publish` or
# explicitly specify it as your config file.
import os
import sys
sys.path.append(os.curdir)
from pelicanconf import *
SITEURL = ''
RELATIVE_URLS = False
DELETE_OUTPUT_DIRECTORY = True
# Following items are often useful when publishing
#DISQUS_SITENAME = ""
#GOOGLE_ANALYTICS = ""
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*- #
from __future__ import unicode_literals
# This file is only used if you use `make publish` or
# explicitly specify it as your config file.
import os
import sys
sys.path.append(os.curdir)
from pelicanconf import *
SITEURL = ''
RELATIVE_URLS = False
FEED_ALL_ATOM = 'feeds/all.atom.xml'
CATEGORY_FEED_ATOM = 'feeds/%s.atom.xml'
DELETE_OUTPUT_DIRECTORY = True
# Following items are often useful when publishing
#DISQUS_SITENAME = ""
#GOOGLE_ANALYTICS = ""
<commit_msg>Stop generate feeds when publishing<commit_after>
|
#!/usr/bin/env python3
# This file is only used if you use `make publish` or
# explicitly specify it as your config file.
import os
import sys
sys.path.append(os.curdir)
from pelicanconf import *
SITEURL = ''
RELATIVE_URLS = False
DELETE_OUTPUT_DIRECTORY = True
# Following items are often useful when publishing
#DISQUS_SITENAME = ""
#GOOGLE_ANALYTICS = ""
|
#!/usr/bin/env python
# -*- coding: utf-8 -*- #
from __future__ import unicode_literals
# This file is only used if you use `make publish` or
# explicitly specify it as your config file.
import os
import sys
sys.path.append(os.curdir)
from pelicanconf import *
SITEURL = ''
RELATIVE_URLS = False
FEED_ALL_ATOM = 'feeds/all.atom.xml'
CATEGORY_FEED_ATOM = 'feeds/%s.atom.xml'
DELETE_OUTPUT_DIRECTORY = True
# Following items are often useful when publishing
#DISQUS_SITENAME = ""
#GOOGLE_ANALYTICS = ""
Stop generate feeds when publishing#!/usr/bin/env python3
# This file is only used if you use `make publish` or
# explicitly specify it as your config file.
import os
import sys
sys.path.append(os.curdir)
from pelicanconf import *
SITEURL = ''
RELATIVE_URLS = False
DELETE_OUTPUT_DIRECTORY = True
# Following items are often useful when publishing
#DISQUS_SITENAME = ""
#GOOGLE_ANALYTICS = ""
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*- #
from __future__ import unicode_literals
# This file is only used if you use `make publish` or
# explicitly specify it as your config file.
import os
import sys
sys.path.append(os.curdir)
from pelicanconf import *
SITEURL = ''
RELATIVE_URLS = False
FEED_ALL_ATOM = 'feeds/all.atom.xml'
CATEGORY_FEED_ATOM = 'feeds/%s.atom.xml'
DELETE_OUTPUT_DIRECTORY = True
# Following items are often useful when publishing
#DISQUS_SITENAME = ""
#GOOGLE_ANALYTICS = ""
<commit_msg>Stop generate feeds when publishing<commit_after>#!/usr/bin/env python3
# This file is only used if you use `make publish` or
# explicitly specify it as your config file.
import os
import sys
sys.path.append(os.curdir)
from pelicanconf import *
SITEURL = ''
RELATIVE_URLS = False
DELETE_OUTPUT_DIRECTORY = True
# Following items are often useful when publishing
#DISQUS_SITENAME = ""
#GOOGLE_ANALYTICS = ""
|
ddd3373ce078cf9bf40da7ebd8591995e819b750
|
phell/utils.py
|
phell/utils.py
|
# -*- coding: utf-8 -*-
#
# (c) 2016 Björn Ricks <bjoern.ricks@gmail.com>
#
# See LICENSE comming with the source of 'phell' for details.
#
import sys
def to_hex(value):
if sys.version_info.major < 3:
return value.encode('hex')
return "".join("%02x" % b for b in value)
def from_hex(value):
if sys.version_info.major < 3:
return value.decode('hex')
return bytes.fromhex(value)
# vim: set ts=4 sw=4 tw=80:
|
# -*- coding: utf-8 -*-
#
# (c) 2016 Björn Ricks <bjoern.ricks@gmail.com>
#
# See LICENSE comming with the source of 'phell' for details.
#
import sys
def to_hex(value):
if sys.version_info.major < 3:
return value.encode('hex')
return "".join("%02x" % b for b in value)
def from_hex(value):
if sys.version_info.major < 3:
return value.decode('hex')
return bytes.fromhex(value)
def swap_bytes(value):
if sys.version_info.major < 3:
return "".join([bytes(b) for b in reversed(value)])
return bytes(reversed(value))
# vim: set ts=4 sw=4 tw=80:
|
Add function to swap byte order
|
Add function to swap byte order
|
Python
|
mit
|
bjoernricks/phell
|
# -*- coding: utf-8 -*-
#
# (c) 2016 Björn Ricks <bjoern.ricks@gmail.com>
#
# See LICENSE comming with the source of 'phell' for details.
#
import sys
def to_hex(value):
if sys.version_info.major < 3:
return value.encode('hex')
return "".join("%02x" % b for b in value)
def from_hex(value):
if sys.version_info.major < 3:
return value.decode('hex')
return bytes.fromhex(value)
# vim: set ts=4 sw=4 tw=80:
Add function to swap byte order
|
# -*- coding: utf-8 -*-
#
# (c) 2016 Björn Ricks <bjoern.ricks@gmail.com>
#
# See LICENSE comming with the source of 'phell' for details.
#
import sys
def to_hex(value):
if sys.version_info.major < 3:
return value.encode('hex')
return "".join("%02x" % b for b in value)
def from_hex(value):
if sys.version_info.major < 3:
return value.decode('hex')
return bytes.fromhex(value)
def swap_bytes(value):
if sys.version_info.major < 3:
return "".join([bytes(b) for b in reversed(value)])
return bytes(reversed(value))
# vim: set ts=4 sw=4 tw=80:
|
<commit_before># -*- coding: utf-8 -*-
#
# (c) 2016 Björn Ricks <bjoern.ricks@gmail.com>
#
# See LICENSE comming with the source of 'phell' for details.
#
import sys
def to_hex(value):
if sys.version_info.major < 3:
return value.encode('hex')
return "".join("%02x" % b for b in value)
def from_hex(value):
if sys.version_info.major < 3:
return value.decode('hex')
return bytes.fromhex(value)
# vim: set ts=4 sw=4 tw=80:
<commit_msg>Add function to swap byte order<commit_after>
|
# -*- coding: utf-8 -*-
#
# (c) 2016 Björn Ricks <bjoern.ricks@gmail.com>
#
# See LICENSE comming with the source of 'phell' for details.
#
import sys
def to_hex(value):
if sys.version_info.major < 3:
return value.encode('hex')
return "".join("%02x" % b for b in value)
def from_hex(value):
if sys.version_info.major < 3:
return value.decode('hex')
return bytes.fromhex(value)
def swap_bytes(value):
if sys.version_info.major < 3:
return "".join([bytes(b) for b in reversed(value)])
return bytes(reversed(value))
# vim: set ts=4 sw=4 tw=80:
|
# -*- coding: utf-8 -*-
#
# (c) 2016 Björn Ricks <bjoern.ricks@gmail.com>
#
# See LICENSE comming with the source of 'phell' for details.
#
import sys
def to_hex(value):
if sys.version_info.major < 3:
return value.encode('hex')
return "".join("%02x" % b for b in value)
def from_hex(value):
if sys.version_info.major < 3:
return value.decode('hex')
return bytes.fromhex(value)
# vim: set ts=4 sw=4 tw=80:
Add function to swap byte order# -*- coding: utf-8 -*-
#
# (c) 2016 Björn Ricks <bjoern.ricks@gmail.com>
#
# See LICENSE comming with the source of 'phell' for details.
#
import sys
def to_hex(value):
if sys.version_info.major < 3:
return value.encode('hex')
return "".join("%02x" % b for b in value)
def from_hex(value):
if sys.version_info.major < 3:
return value.decode('hex')
return bytes.fromhex(value)
def swap_bytes(value):
if sys.version_info.major < 3:
return "".join([bytes(b) for b in reversed(value)])
return bytes(reversed(value))
# vim: set ts=4 sw=4 tw=80:
|
<commit_before># -*- coding: utf-8 -*-
#
# (c) 2016 Björn Ricks <bjoern.ricks@gmail.com>
#
# See LICENSE comming with the source of 'phell' for details.
#
import sys
def to_hex(value):
if sys.version_info.major < 3:
return value.encode('hex')
return "".join("%02x" % b for b in value)
def from_hex(value):
if sys.version_info.major < 3:
return value.decode('hex')
return bytes.fromhex(value)
# vim: set ts=4 sw=4 tw=80:
<commit_msg>Add function to swap byte order<commit_after># -*- coding: utf-8 -*-
#
# (c) 2016 Björn Ricks <bjoern.ricks@gmail.com>
#
# See LICENSE comming with the source of 'phell' for details.
#
import sys
def to_hex(value):
if sys.version_info.major < 3:
return value.encode('hex')
return "".join("%02x" % b for b in value)
def from_hex(value):
if sys.version_info.major < 3:
return value.decode('hex')
return bytes.fromhex(value)
def swap_bytes(value):
if sys.version_info.major < 3:
return "".join([bytes(b) for b in reversed(value)])
return bytes(reversed(value))
# vim: set ts=4 sw=4 tw=80:
|
c8ffd1fc4c4e06cd71e86d1d48749a3fe527a54e
|
biosys/apps/main/tests/api/test_serializers.py
|
biosys/apps/main/tests/api/test_serializers.py
|
from django.test import TestCase
from main.api.serializers import DatasetSerializer
from main.tests.api import helpers
class TestDatsetSerializer(helpers.BaseUserTestCase):
def test_name_uniqueness(self):
"""
Test that the serializer report an error if the dataset name is not unique within a project
"""
# create a dataset
dataset = self._create_dataset_from_rows([
['What', 'Comment'],
['what', 'comments']
])
dataset.name = 'Test'
dataset.save()
# Trye serializer with a dataset with the same name
data = {
'name': dataset.name,
'project': dataset.project.pk,
'data_package': dataset.data_package,
'type': 'generic'
}
ser = DatasetSerializer(data=data)
self.assertFalse(ser.is_valid(()))
# the errors should be of the form
# {'non_field_errors': ['The fields project, name must make a unique set.']}
errors = ser.errors
self.assertEquals(['non_field_errors'], list(errors.keys()))
self.assertEquals(1, len(errors.get('non_field_errors')))
self.assertIn('project, name must make a unique set', errors.get('non_field_errors')[0])
|
from django.test import TestCase
from main.api.serializers import DatasetSerializer
from main.tests.api import helpers
class TestDatsetSerializer(helpers.BaseUserTestCase):
def test_name_uniqueness(self):
"""
Test that the serializer report an error if the dataset name is not unique within a project
"""
# create a dataset
dataset = self._create_dataset_from_rows([
['What', 'Comment'],
['what', 'comments']
])
dataset.name = 'Test'
dataset.save()
# Trye serializer with a dataset with the same name
data = {
'name': dataset.name,
'project': dataset.project.pk,
'data_package': dataset.data_package,
'type': 'generic'
}
ser = DatasetSerializer(data=data)
self.assertFalse(ser.is_valid(()))
# the errors should be of the form
# {'non_field_errors': ['The fields project, name must make a unique set.']}
errors = ser.errors
self.assertEquals(['non_field_errors'], list(errors.keys()))
self.assertEquals(1, len(errors.get('non_field_errors')))
self.assertIn('A dataset with this name already exists in the project.', errors.get('non_field_errors')[0])
|
Fix test to accommodate change of error message.
|
Fix test to accommodate change of error message.
|
Python
|
apache-2.0
|
gaiaresources/biosys,parksandwildlife/biosys,gaiaresources/biosys,serge-gaia/biosys,ropable/biosys,parksandwildlife/biosys,serge-gaia/biosys,ropable/biosys,gaiaresources/biosys,ropable/biosys,serge-gaia/biosys,parksandwildlife/biosys
|
from django.test import TestCase
from main.api.serializers import DatasetSerializer
from main.tests.api import helpers
class TestDatsetSerializer(helpers.BaseUserTestCase):
def test_name_uniqueness(self):
"""
Test that the serializer report an error if the dataset name is not unique within a project
"""
# create a dataset
dataset = self._create_dataset_from_rows([
['What', 'Comment'],
['what', 'comments']
])
dataset.name = 'Test'
dataset.save()
# Trye serializer with a dataset with the same name
data = {
'name': dataset.name,
'project': dataset.project.pk,
'data_package': dataset.data_package,
'type': 'generic'
}
ser = DatasetSerializer(data=data)
self.assertFalse(ser.is_valid(()))
# the errors should be of the form
# {'non_field_errors': ['The fields project, name must make a unique set.']}
errors = ser.errors
self.assertEquals(['non_field_errors'], list(errors.keys()))
self.assertEquals(1, len(errors.get('non_field_errors')))
self.assertIn('project, name must make a unique set', errors.get('non_field_errors')[0])
Fix test to accommodate change of error message.
|
from django.test import TestCase
from main.api.serializers import DatasetSerializer
from main.tests.api import helpers
class TestDatsetSerializer(helpers.BaseUserTestCase):
def test_name_uniqueness(self):
"""
Test that the serializer report an error if the dataset name is not unique within a project
"""
# create a dataset
dataset = self._create_dataset_from_rows([
['What', 'Comment'],
['what', 'comments']
])
dataset.name = 'Test'
dataset.save()
# Trye serializer with a dataset with the same name
data = {
'name': dataset.name,
'project': dataset.project.pk,
'data_package': dataset.data_package,
'type': 'generic'
}
ser = DatasetSerializer(data=data)
self.assertFalse(ser.is_valid(()))
# the errors should be of the form
# {'non_field_errors': ['The fields project, name must make a unique set.']}
errors = ser.errors
self.assertEquals(['non_field_errors'], list(errors.keys()))
self.assertEquals(1, len(errors.get('non_field_errors')))
self.assertIn('A dataset with this name already exists in the project.', errors.get('non_field_errors')[0])
|
<commit_before>from django.test import TestCase
from main.api.serializers import DatasetSerializer
from main.tests.api import helpers
class TestDatsetSerializer(helpers.BaseUserTestCase):
def test_name_uniqueness(self):
"""
Test that the serializer report an error if the dataset name is not unique within a project
"""
# create a dataset
dataset = self._create_dataset_from_rows([
['What', 'Comment'],
['what', 'comments']
])
dataset.name = 'Test'
dataset.save()
# Trye serializer with a dataset with the same name
data = {
'name': dataset.name,
'project': dataset.project.pk,
'data_package': dataset.data_package,
'type': 'generic'
}
ser = DatasetSerializer(data=data)
self.assertFalse(ser.is_valid(()))
# the errors should be of the form
# {'non_field_errors': ['The fields project, name must make a unique set.']}
errors = ser.errors
self.assertEquals(['non_field_errors'], list(errors.keys()))
self.assertEquals(1, len(errors.get('non_field_errors')))
self.assertIn('project, name must make a unique set', errors.get('non_field_errors')[0])
<commit_msg>Fix test to accommodate change of error message.<commit_after>
|
from django.test import TestCase
from main.api.serializers import DatasetSerializer
from main.tests.api import helpers
class TestDatsetSerializer(helpers.BaseUserTestCase):
def test_name_uniqueness(self):
"""
Test that the serializer report an error if the dataset name is not unique within a project
"""
# create a dataset
dataset = self._create_dataset_from_rows([
['What', 'Comment'],
['what', 'comments']
])
dataset.name = 'Test'
dataset.save()
# Trye serializer with a dataset with the same name
data = {
'name': dataset.name,
'project': dataset.project.pk,
'data_package': dataset.data_package,
'type': 'generic'
}
ser = DatasetSerializer(data=data)
self.assertFalse(ser.is_valid(()))
# the errors should be of the form
# {'non_field_errors': ['The fields project, name must make a unique set.']}
errors = ser.errors
self.assertEquals(['non_field_errors'], list(errors.keys()))
self.assertEquals(1, len(errors.get('non_field_errors')))
self.assertIn('A dataset with this name already exists in the project.', errors.get('non_field_errors')[0])
|
from django.test import TestCase
from main.api.serializers import DatasetSerializer
from main.tests.api import helpers
class TestDatsetSerializer(helpers.BaseUserTestCase):
def test_name_uniqueness(self):
"""
Test that the serializer report an error if the dataset name is not unique within a project
"""
# create a dataset
dataset = self._create_dataset_from_rows([
['What', 'Comment'],
['what', 'comments']
])
dataset.name = 'Test'
dataset.save()
# Trye serializer with a dataset with the same name
data = {
'name': dataset.name,
'project': dataset.project.pk,
'data_package': dataset.data_package,
'type': 'generic'
}
ser = DatasetSerializer(data=data)
self.assertFalse(ser.is_valid(()))
# the errors should be of the form
# {'non_field_errors': ['The fields project, name must make a unique set.']}
errors = ser.errors
self.assertEquals(['non_field_errors'], list(errors.keys()))
self.assertEquals(1, len(errors.get('non_field_errors')))
self.assertIn('project, name must make a unique set', errors.get('non_field_errors')[0])
Fix test to accommodate change of error message.from django.test import TestCase
from main.api.serializers import DatasetSerializer
from main.tests.api import helpers
class TestDatsetSerializer(helpers.BaseUserTestCase):
def test_name_uniqueness(self):
"""
Test that the serializer report an error if the dataset name is not unique within a project
"""
# create a dataset
dataset = self._create_dataset_from_rows([
['What', 'Comment'],
['what', 'comments']
])
dataset.name = 'Test'
dataset.save()
# Trye serializer with a dataset with the same name
data = {
'name': dataset.name,
'project': dataset.project.pk,
'data_package': dataset.data_package,
'type': 'generic'
}
ser = DatasetSerializer(data=data)
self.assertFalse(ser.is_valid(()))
# the errors should be of the form
# {'non_field_errors': ['The fields project, name must make a unique set.']}
errors = ser.errors
self.assertEquals(['non_field_errors'], list(errors.keys()))
self.assertEquals(1, len(errors.get('non_field_errors')))
self.assertIn('A dataset with this name already exists in the project.', errors.get('non_field_errors')[0])
|
<commit_before>from django.test import TestCase
from main.api.serializers import DatasetSerializer
from main.tests.api import helpers
class TestDatsetSerializer(helpers.BaseUserTestCase):
def test_name_uniqueness(self):
"""
Test that the serializer report an error if the dataset name is not unique within a project
"""
# create a dataset
dataset = self._create_dataset_from_rows([
['What', 'Comment'],
['what', 'comments']
])
dataset.name = 'Test'
dataset.save()
# Trye serializer with a dataset with the same name
data = {
'name': dataset.name,
'project': dataset.project.pk,
'data_package': dataset.data_package,
'type': 'generic'
}
ser = DatasetSerializer(data=data)
self.assertFalse(ser.is_valid(()))
# the errors should be of the form
# {'non_field_errors': ['The fields project, name must make a unique set.']}
errors = ser.errors
self.assertEquals(['non_field_errors'], list(errors.keys()))
self.assertEquals(1, len(errors.get('non_field_errors')))
self.assertIn('project, name must make a unique set', errors.get('non_field_errors')[0])
<commit_msg>Fix test to accommodate change of error message.<commit_after>from django.test import TestCase
from main.api.serializers import DatasetSerializer
from main.tests.api import helpers
class TestDatsetSerializer(helpers.BaseUserTestCase):
def test_name_uniqueness(self):
"""
Test that the serializer report an error if the dataset name is not unique within a project
"""
# create a dataset
dataset = self._create_dataset_from_rows([
['What', 'Comment'],
['what', 'comments']
])
dataset.name = 'Test'
dataset.save()
# Trye serializer with a dataset with the same name
data = {
'name': dataset.name,
'project': dataset.project.pk,
'data_package': dataset.data_package,
'type': 'generic'
}
ser = DatasetSerializer(data=data)
self.assertFalse(ser.is_valid(()))
# the errors should be of the form
# {'non_field_errors': ['The fields project, name must make a unique set.']}
errors = ser.errors
self.assertEquals(['non_field_errors'], list(errors.keys()))
self.assertEquals(1, len(errors.get('non_field_errors')))
self.assertIn('A dataset with this name already exists in the project.', errors.get('non_field_errors')[0])
|
27c9da3129c6fbdd8d54276cf054c1f46e665aaf
|
flask_app.py
|
flask_app.py
|
from flask import Flask, abort, jsonify
from flask_caching import Cache
from flask_cors import CORS
import main
import slack
app = Flask(__name__)
cache = Cache(app, config={"CACHE_TYPE": "simple"})
cors = CORS(app, resources={r"/*": {"origins": "*"}})
app.register_blueprint(slack.blueprint, url_prefix="/api/slack")
@app.route("/api/")
@cache.cached(timeout=10800)
def list_entities():
return jsonify({"entities": ["restaurant"]})
@app.route("/api/restaurant/")
@cache.cached(timeout=10800)
def list_restaurants():
return jsonify({"restaurants": main.list_restaurants()})
@app.route("/api/restaurant/<name>/")
@cache.cached(timeout=10800)
def get_restaurant(name):
data = dict(main.get_restaurant(name))
if not data:
abort(status=404)
data["menu"] = [{"dish": entry} for entry in data["menu"]]
return jsonify({"restaurant": data})
|
import flask
import flask_caching
import flask_cors
import main
import slack
app = flask.Flask(__name__)
cache = flask_caching.Cache(app, config={"CACHE_TYPE": "simple"})
cors = flask_cors.CORS(app, resources={r"/*": {"origins": "*"}})
app.register_blueprint(slack.blueprint, url_prefix="/api/slack")
@app.route("/api")
@cache.cached(timeout=10800)
def list_entities():
return flask.jsonify({"entities": ["restaurant"],
"url": flask.url_for("list_entities", _external=True)})
@app.route("/api/restaurant")
@cache.cached(timeout=10800)
def list_restaurants():
return flask.jsonify({"restaurants": main.list_restaurants(),
"url": flask.url_for("list_restaurants", _external=True)})
@app.route("/api/restaurant/<name>")
@cache.cached(timeout=10800)
def get_restaurant(name):
data = dict(main.get_restaurant(name))
if not data:
abort(status=404)
data["menu"] = [{"dish": entry} for entry in data["menu"]]
return flask.jsonify({"restaurant": data,
"url": flask.url_for("get_restaurant", name=name, _external=True)})
|
Remove trailing slashes, add origin url to responses
|
Remove trailing slashes, add origin url to responses
|
Python
|
bsd-3-clause
|
talavis/kimenu
|
from flask import Flask, abort, jsonify
from flask_caching import Cache
from flask_cors import CORS
import main
import slack
app = Flask(__name__)
cache = Cache(app, config={"CACHE_TYPE": "simple"})
cors = CORS(app, resources={r"/*": {"origins": "*"}})
app.register_blueprint(slack.blueprint, url_prefix="/api/slack")
@app.route("/api/")
@cache.cached(timeout=10800)
def list_entities():
return jsonify({"entities": ["restaurant"]})
@app.route("/api/restaurant/")
@cache.cached(timeout=10800)
def list_restaurants():
return jsonify({"restaurants": main.list_restaurants()})
@app.route("/api/restaurant/<name>/")
@cache.cached(timeout=10800)
def get_restaurant(name):
data = dict(main.get_restaurant(name))
if not data:
abort(status=404)
data["menu"] = [{"dish": entry} for entry in data["menu"]]
return jsonify({"restaurant": data})
Remove trailing slashes, add origin url to responses
|
import flask
import flask_caching
import flask_cors
import main
import slack
app = flask.Flask(__name__)
cache = flask_caching.Cache(app, config={"CACHE_TYPE": "simple"})
cors = flask_cors.CORS(app, resources={r"/*": {"origins": "*"}})
app.register_blueprint(slack.blueprint, url_prefix="/api/slack")
@app.route("/api")
@cache.cached(timeout=10800)
def list_entities():
return flask.jsonify({"entities": ["restaurant"],
"url": flask.url_for("list_entities", _external=True)})
@app.route("/api/restaurant")
@cache.cached(timeout=10800)
def list_restaurants():
return flask.jsonify({"restaurants": main.list_restaurants(),
"url": flask.url_for("list_restaurants", _external=True)})
@app.route("/api/restaurant/<name>")
@cache.cached(timeout=10800)
def get_restaurant(name):
data = dict(main.get_restaurant(name))
if not data:
abort(status=404)
data["menu"] = [{"dish": entry} for entry in data["menu"]]
return flask.jsonify({"restaurant": data,
"url": flask.url_for("get_restaurant", name=name, _external=True)})
|
<commit_before>from flask import Flask, abort, jsonify
from flask_caching import Cache
from flask_cors import CORS
import main
import slack
app = Flask(__name__)
cache = Cache(app, config={"CACHE_TYPE": "simple"})
cors = CORS(app, resources={r"/*": {"origins": "*"}})
app.register_blueprint(slack.blueprint, url_prefix="/api/slack")
@app.route("/api/")
@cache.cached(timeout=10800)
def list_entities():
return jsonify({"entities": ["restaurant"]})
@app.route("/api/restaurant/")
@cache.cached(timeout=10800)
def list_restaurants():
return jsonify({"restaurants": main.list_restaurants()})
@app.route("/api/restaurant/<name>/")
@cache.cached(timeout=10800)
def get_restaurant(name):
data = dict(main.get_restaurant(name))
if not data:
abort(status=404)
data["menu"] = [{"dish": entry} for entry in data["menu"]]
return jsonify({"restaurant": data})
<commit_msg>Remove trailing slashes, add origin url to responses<commit_after>
|
import flask
import flask_caching
import flask_cors
import main
import slack
app = flask.Flask(__name__)
cache = flask_caching.Cache(app, config={"CACHE_TYPE": "simple"})
cors = flask_cors.CORS(app, resources={r"/*": {"origins": "*"}})
app.register_blueprint(slack.blueprint, url_prefix="/api/slack")
@app.route("/api")
@cache.cached(timeout=10800)
def list_entities():
return flask.jsonify({"entities": ["restaurant"],
"url": flask.url_for("list_entities", _external=True)})
@app.route("/api/restaurant")
@cache.cached(timeout=10800)
def list_restaurants():
return flask.jsonify({"restaurants": main.list_restaurants(),
"url": flask.url_for("list_restaurants", _external=True)})
@app.route("/api/restaurant/<name>")
@cache.cached(timeout=10800)
def get_restaurant(name):
data = dict(main.get_restaurant(name))
if not data:
abort(status=404)
data["menu"] = [{"dish": entry} for entry in data["menu"]]
return flask.jsonify({"restaurant": data,
"url": flask.url_for("get_restaurant", name=name, _external=True)})
|
from flask import Flask, abort, jsonify
from flask_caching import Cache
from flask_cors import CORS
import main
import slack
app = Flask(__name__)
cache = Cache(app, config={"CACHE_TYPE": "simple"})
cors = CORS(app, resources={r"/*": {"origins": "*"}})
app.register_blueprint(slack.blueprint, url_prefix="/api/slack")
@app.route("/api/")
@cache.cached(timeout=10800)
def list_entities():
return jsonify({"entities": ["restaurant"]})
@app.route("/api/restaurant/")
@cache.cached(timeout=10800)
def list_restaurants():
return jsonify({"restaurants": main.list_restaurants()})
@app.route("/api/restaurant/<name>/")
@cache.cached(timeout=10800)
def get_restaurant(name):
data = dict(main.get_restaurant(name))
if not data:
abort(status=404)
data["menu"] = [{"dish": entry} for entry in data["menu"]]
return jsonify({"restaurant": data})
Remove trailing slashes, add origin url to responsesimport flask
import flask_caching
import flask_cors
import main
import slack
app = flask.Flask(__name__)
cache = flask_caching.Cache(app, config={"CACHE_TYPE": "simple"})
cors = flask_cors.CORS(app, resources={r"/*": {"origins": "*"}})
app.register_blueprint(slack.blueprint, url_prefix="/api/slack")
@app.route("/api")
@cache.cached(timeout=10800)
def list_entities():
return flask.jsonify({"entities": ["restaurant"],
"url": flask.url_for("list_entities", _external=True)})
@app.route("/api/restaurant")
@cache.cached(timeout=10800)
def list_restaurants():
return flask.jsonify({"restaurants": main.list_restaurants(),
"url": flask.url_for("list_restaurants", _external=True)})
@app.route("/api/restaurant/<name>")
@cache.cached(timeout=10800)
def get_restaurant(name):
data = dict(main.get_restaurant(name))
if not data:
abort(status=404)
data["menu"] = [{"dish": entry} for entry in data["menu"]]
return flask.jsonify({"restaurant": data,
"url": flask.url_for("get_restaurant", name=name, _external=True)})
|
<commit_before>from flask import Flask, abort, jsonify
from flask_caching import Cache
from flask_cors import CORS
import main
import slack
app = Flask(__name__)
cache = Cache(app, config={"CACHE_TYPE": "simple"})
cors = CORS(app, resources={r"/*": {"origins": "*"}})
app.register_blueprint(slack.blueprint, url_prefix="/api/slack")
@app.route("/api/")
@cache.cached(timeout=10800)
def list_entities():
return jsonify({"entities": ["restaurant"]})
@app.route("/api/restaurant/")
@cache.cached(timeout=10800)
def list_restaurants():
return jsonify({"restaurants": main.list_restaurants()})
@app.route("/api/restaurant/<name>/")
@cache.cached(timeout=10800)
def get_restaurant(name):
data = dict(main.get_restaurant(name))
if not data:
abort(status=404)
data["menu"] = [{"dish": entry} for entry in data["menu"]]
return jsonify({"restaurant": data})
<commit_msg>Remove trailing slashes, add origin url to responses<commit_after>import flask
import flask_caching
import flask_cors
import main
import slack
app = flask.Flask(__name__)
cache = flask_caching.Cache(app, config={"CACHE_TYPE": "simple"})
cors = flask_cors.CORS(app, resources={r"/*": {"origins": "*"}})
app.register_blueprint(slack.blueprint, url_prefix="/api/slack")
@app.route("/api")
@cache.cached(timeout=10800)
def list_entities():
return flask.jsonify({"entities": ["restaurant"],
"url": flask.url_for("list_entities", _external=True)})
@app.route("/api/restaurant")
@cache.cached(timeout=10800)
def list_restaurants():
return flask.jsonify({"restaurants": main.list_restaurants(),
"url": flask.url_for("list_restaurants", _external=True)})
@app.route("/api/restaurant/<name>")
@cache.cached(timeout=10800)
def get_restaurant(name):
data = dict(main.get_restaurant(name))
if not data:
abort(status=404)
data["menu"] = [{"dish": entry} for entry in data["menu"]]
return flask.jsonify({"restaurant": data,
"url": flask.url_for("get_restaurant", name=name, _external=True)})
|
e2f83a6a5d43ebc52d03d4059a7526a579a425c1
|
darkoob/social/models.py
|
darkoob/social/models.py
|
from django.db import models
from django.contrib.auth.models import User
from django.db.models.signals import post_save
SEX_CHOICES = (
('Male', 'Male'),
('Female', 'Female'),
)
class UserProfile(models.Model):
user = models.OneToOneField(User)
sex = models.CharField(max_length = 6, choices = SEX_CHOICES)
birthday = models.DateField(null = True)
def create_user_profile(sender, instance, created, **kwargs):
if created:
UserProfile.objects.create(user = instance)
post_save.connect(create_user_profile, sender = User)
|
from django.db import models
from django.contrib.auth.models import User
from django.db.models.signals import post_save
SEX_CHOICES = (
('Male', 'Male'),
('Female', 'Female'),
)
class UserProfile(models.Model):
user = models.OneToOneField(User)
sex = models.CharField(max_length = 6, choices = SEX_CHOICES)
birthday = models.DateField(null = True)
def __unicode__(self):
return self.user.get_full_name()
def create_user_profile(sender, instance, created, **kwargs):
if created:
UserProfile.objects.create(user = instance)
post_save.connect(create_user_profile, sender = User)
|
Set User Profile Unicode Function
|
Set User Profile Unicode Function
|
Python
|
mit
|
s1na/darkoob,s1na/darkoob,s1na/darkoob
|
from django.db import models
from django.contrib.auth.models import User
from django.db.models.signals import post_save
SEX_CHOICES = (
('Male', 'Male'),
('Female', 'Female'),
)
class UserProfile(models.Model):
user = models.OneToOneField(User)
sex = models.CharField(max_length = 6, choices = SEX_CHOICES)
birthday = models.DateField(null = True)
def create_user_profile(sender, instance, created, **kwargs):
if created:
UserProfile.objects.create(user = instance)
post_save.connect(create_user_profile, sender = User)Set User Profile Unicode Function
|
from django.db import models
from django.contrib.auth.models import User
from django.db.models.signals import post_save
SEX_CHOICES = (
('Male', 'Male'),
('Female', 'Female'),
)
class UserProfile(models.Model):
user = models.OneToOneField(User)
sex = models.CharField(max_length = 6, choices = SEX_CHOICES)
birthday = models.DateField(null = True)
def __unicode__(self):
return self.user.get_full_name()
def create_user_profile(sender, instance, created, **kwargs):
if created:
UserProfile.objects.create(user = instance)
post_save.connect(create_user_profile, sender = User)
|
<commit_before>from django.db import models
from django.contrib.auth.models import User
from django.db.models.signals import post_save
SEX_CHOICES = (
('Male', 'Male'),
('Female', 'Female'),
)
class UserProfile(models.Model):
user = models.OneToOneField(User)
sex = models.CharField(max_length = 6, choices = SEX_CHOICES)
birthday = models.DateField(null = True)
def create_user_profile(sender, instance, created, **kwargs):
if created:
UserProfile.objects.create(user = instance)
post_save.connect(create_user_profile, sender = User)<commit_msg>Set User Profile Unicode Function<commit_after>
|
from django.db import models
from django.contrib.auth.models import User
from django.db.models.signals import post_save
SEX_CHOICES = (
('Male', 'Male'),
('Female', 'Female'),
)
class UserProfile(models.Model):
user = models.OneToOneField(User)
sex = models.CharField(max_length = 6, choices = SEX_CHOICES)
birthday = models.DateField(null = True)
def __unicode__(self):
return self.user.get_full_name()
def create_user_profile(sender, instance, created, **kwargs):
if created:
UserProfile.objects.create(user = instance)
post_save.connect(create_user_profile, sender = User)
|
from django.db import models
from django.contrib.auth.models import User
from django.db.models.signals import post_save
SEX_CHOICES = (
('Male', 'Male'),
('Female', 'Female'),
)
class UserProfile(models.Model):
user = models.OneToOneField(User)
sex = models.CharField(max_length = 6, choices = SEX_CHOICES)
birthday = models.DateField(null = True)
def create_user_profile(sender, instance, created, **kwargs):
if created:
UserProfile.objects.create(user = instance)
post_save.connect(create_user_profile, sender = User)Set User Profile Unicode Functionfrom django.db import models
from django.contrib.auth.models import User
from django.db.models.signals import post_save
SEX_CHOICES = (
('Male', 'Male'),
('Female', 'Female'),
)
class UserProfile(models.Model):
user = models.OneToOneField(User)
sex = models.CharField(max_length = 6, choices = SEX_CHOICES)
birthday = models.DateField(null = True)
def __unicode__(self):
return self.user.get_full_name()
def create_user_profile(sender, instance, created, **kwargs):
if created:
UserProfile.objects.create(user = instance)
post_save.connect(create_user_profile, sender = User)
|
<commit_before>from django.db import models
from django.contrib.auth.models import User
from django.db.models.signals import post_save
SEX_CHOICES = (
('Male', 'Male'),
('Female', 'Female'),
)
class UserProfile(models.Model):
user = models.OneToOneField(User)
sex = models.CharField(max_length = 6, choices = SEX_CHOICES)
birthday = models.DateField(null = True)
def create_user_profile(sender, instance, created, **kwargs):
if created:
UserProfile.objects.create(user = instance)
post_save.connect(create_user_profile, sender = User)<commit_msg>Set User Profile Unicode Function<commit_after>from django.db import models
from django.contrib.auth.models import User
from django.db.models.signals import post_save
SEX_CHOICES = (
('Male', 'Male'),
('Female', 'Female'),
)
class UserProfile(models.Model):
user = models.OneToOneField(User)
sex = models.CharField(max_length = 6, choices = SEX_CHOICES)
birthday = models.DateField(null = True)
def __unicode__(self):
return self.user.get_full_name()
def create_user_profile(sender, instance, created, **kwargs):
if created:
UserProfile.objects.create(user = instance)
post_save.connect(create_user_profile, sender = User)
|
74ca49c62ba63b7eb42f3825ea5c036e32b98d50
|
busstops/management/commands/import_tfl_stops.py
|
busstops/management/commands/import_tfl_stops.py
|
"""
Usage:
./manage.py import_tfl_stops < data/tfl/bus-stops.csv
"""
import requests
from titlecase import titlecase
from django.core.exceptions import ObjectDoesNotExist, MultipleObjectsReturned
from busstops.management.import_from_csv import ImportFromCSVCommand
from busstops.models import StopPoint
class Command(ImportFromCSVCommand):
@staticmethod
def get_name(atco_code):
"""
Given a stop's ATCO code, returns the best-formatted version of its common name from the
TfL API
"""
data = requests.get('https://api.tfl.gov.uk/StopPoint/%s' % atco_code).json()
return data.get('commonName')
def handle_row(self, row):
if row['Naptan_Atco'] in (None, '', 'NONE'):
return None
try:
stop = StopPoint.objects.get(pk=row['Naptan_Atco'])
except ObjectDoesNotExist:
try:
stop = StopPoint.objects.get(pk__contains=row['Naptan_Atco'])
except (ObjectDoesNotExist, MultipleObjectsReturned) as e:
print e, row
return None
if row['Heading'] != '':
stop.heading = row['Heading']
stop.common_name = self.get_name(stop.atco_code)
stop.tfl = True
if stop.street.isupper():
stop.street = titlecase(stop.street)
if stop.landmark.isupper():
stop.landmark = titlecase(stop.landmark)
stop.save()
|
"""
Usage:
./manage.py import_tfl_stops < data/tfl/bus-stops.csv
"""
import requests
from titlecase import titlecase
from django.core.exceptions import ObjectDoesNotExist, MultipleObjectsReturned
from busstops.management.import_from_csv import ImportFromCSVCommand
from busstops.models import StopPoint
class Command(ImportFromCSVCommand):
@staticmethod
def get_name(atco_code):
"""
Given a stop's ATCO code, returns the best-formatted version of its common name from the
TfL API
"""
data = requests.get('https://api.tfl.gov.uk/StopPoint/%s' % atco_code).json()
return data.get('commonName')
def handle_row(self, row):
if row['Naptan_Atco'] in (None, '', 'NONE'):
return None
try:
stop = StopPoint.objects.get(pk=row['Naptan_Atco'])
except ObjectDoesNotExist:
try:
stop = StopPoint.objects.get(pk__contains=row['Naptan_Atco'])
except (ObjectDoesNotExist, MultipleObjectsReturned) as e:
print e, row
return None
if row['Heading'] != '':
stop.heading = row['Heading']
stop.common_name = self.get_name(stop.atco_code) or stop.common_name
stop.tfl = True
if stop.street.isupper():
stop.street = titlecase(stop.street)
if stop.landmark.isupper():
stop.landmark = titlecase(stop.landmark)
stop.save()
|
Work around null TfL common names
|
Work around null TfL common names
|
Python
|
mpl-2.0
|
jclgoodwin/bustimes.org.uk,stev-0/bustimes.org.uk,jclgoodwin/bustimes.org.uk,stev-0/bustimes.org.uk,stev-0/bustimes.org.uk,jclgoodwin/bustimes.org.uk,stev-0/bustimes.org.uk,jclgoodwin/bustimes.org.uk,stev-0/bustimes.org.uk
|
"""
Usage:
./manage.py import_tfl_stops < data/tfl/bus-stops.csv
"""
import requests
from titlecase import titlecase
from django.core.exceptions import ObjectDoesNotExist, MultipleObjectsReturned
from busstops.management.import_from_csv import ImportFromCSVCommand
from busstops.models import StopPoint
class Command(ImportFromCSVCommand):
@staticmethod
def get_name(atco_code):
"""
Given a stop's ATCO code, returns the best-formatted version of its common name from the
TfL API
"""
data = requests.get('https://api.tfl.gov.uk/StopPoint/%s' % atco_code).json()
return data.get('commonName')
def handle_row(self, row):
if row['Naptan_Atco'] in (None, '', 'NONE'):
return None
try:
stop = StopPoint.objects.get(pk=row['Naptan_Atco'])
except ObjectDoesNotExist:
try:
stop = StopPoint.objects.get(pk__contains=row['Naptan_Atco'])
except (ObjectDoesNotExist, MultipleObjectsReturned) as e:
print e, row
return None
if row['Heading'] != '':
stop.heading = row['Heading']
stop.common_name = self.get_name(stop.atco_code)
stop.tfl = True
if stop.street.isupper():
stop.street = titlecase(stop.street)
if stop.landmark.isupper():
stop.landmark = titlecase(stop.landmark)
stop.save()
Work around null TfL common names
|
"""
Usage:
./manage.py import_tfl_stops < data/tfl/bus-stops.csv
"""
import requests
from titlecase import titlecase
from django.core.exceptions import ObjectDoesNotExist, MultipleObjectsReturned
from busstops.management.import_from_csv import ImportFromCSVCommand
from busstops.models import StopPoint
class Command(ImportFromCSVCommand):
@staticmethod
def get_name(atco_code):
"""
Given a stop's ATCO code, returns the best-formatted version of its common name from the
TfL API
"""
data = requests.get('https://api.tfl.gov.uk/StopPoint/%s' % atco_code).json()
return data.get('commonName')
def handle_row(self, row):
if row['Naptan_Atco'] in (None, '', 'NONE'):
return None
try:
stop = StopPoint.objects.get(pk=row['Naptan_Atco'])
except ObjectDoesNotExist:
try:
stop = StopPoint.objects.get(pk__contains=row['Naptan_Atco'])
except (ObjectDoesNotExist, MultipleObjectsReturned) as e:
print e, row
return None
if row['Heading'] != '':
stop.heading = row['Heading']
stop.common_name = self.get_name(stop.atco_code) or stop.common_name
stop.tfl = True
if stop.street.isupper():
stop.street = titlecase(stop.street)
if stop.landmark.isupper():
stop.landmark = titlecase(stop.landmark)
stop.save()
|
<commit_before>"""
Usage:
./manage.py import_tfl_stops < data/tfl/bus-stops.csv
"""
import requests
from titlecase import titlecase
from django.core.exceptions import ObjectDoesNotExist, MultipleObjectsReturned
from busstops.management.import_from_csv import ImportFromCSVCommand
from busstops.models import StopPoint
class Command(ImportFromCSVCommand):
@staticmethod
def get_name(atco_code):
"""
Given a stop's ATCO code, returns the best-formatted version of its common name from the
TfL API
"""
data = requests.get('https://api.tfl.gov.uk/StopPoint/%s' % atco_code).json()
return data.get('commonName')
def handle_row(self, row):
if row['Naptan_Atco'] in (None, '', 'NONE'):
return None
try:
stop = StopPoint.objects.get(pk=row['Naptan_Atco'])
except ObjectDoesNotExist:
try:
stop = StopPoint.objects.get(pk__contains=row['Naptan_Atco'])
except (ObjectDoesNotExist, MultipleObjectsReturned) as e:
print e, row
return None
if row['Heading'] != '':
stop.heading = row['Heading']
stop.common_name = self.get_name(stop.atco_code)
stop.tfl = True
if stop.street.isupper():
stop.street = titlecase(stop.street)
if stop.landmark.isupper():
stop.landmark = titlecase(stop.landmark)
stop.save()
<commit_msg>Work around null TfL common names<commit_after>
|
"""
Usage:
./manage.py import_tfl_stops < data/tfl/bus-stops.csv
"""
import requests
from titlecase import titlecase
from django.core.exceptions import ObjectDoesNotExist, MultipleObjectsReturned
from busstops.management.import_from_csv import ImportFromCSVCommand
from busstops.models import StopPoint
class Command(ImportFromCSVCommand):
@staticmethod
def get_name(atco_code):
"""
Given a stop's ATCO code, returns the best-formatted version of its common name from the
TfL API
"""
data = requests.get('https://api.tfl.gov.uk/StopPoint/%s' % atco_code).json()
return data.get('commonName')
def handle_row(self, row):
if row['Naptan_Atco'] in (None, '', 'NONE'):
return None
try:
stop = StopPoint.objects.get(pk=row['Naptan_Atco'])
except ObjectDoesNotExist:
try:
stop = StopPoint.objects.get(pk__contains=row['Naptan_Atco'])
except (ObjectDoesNotExist, MultipleObjectsReturned) as e:
print e, row
return None
if row['Heading'] != '':
stop.heading = row['Heading']
stop.common_name = self.get_name(stop.atco_code) or stop.common_name
stop.tfl = True
if stop.street.isupper():
stop.street = titlecase(stop.street)
if stop.landmark.isupper():
stop.landmark = titlecase(stop.landmark)
stop.save()
|
"""
Usage:
./manage.py import_tfl_stops < data/tfl/bus-stops.csv
"""
import requests
from titlecase import titlecase
from django.core.exceptions import ObjectDoesNotExist, MultipleObjectsReturned
from busstops.management.import_from_csv import ImportFromCSVCommand
from busstops.models import StopPoint
class Command(ImportFromCSVCommand):
@staticmethod
def get_name(atco_code):
"""
Given a stop's ATCO code, returns the best-formatted version of its common name from the
TfL API
"""
data = requests.get('https://api.tfl.gov.uk/StopPoint/%s' % atco_code).json()
return data.get('commonName')
def handle_row(self, row):
if row['Naptan_Atco'] in (None, '', 'NONE'):
return None
try:
stop = StopPoint.objects.get(pk=row['Naptan_Atco'])
except ObjectDoesNotExist:
try:
stop = StopPoint.objects.get(pk__contains=row['Naptan_Atco'])
except (ObjectDoesNotExist, MultipleObjectsReturned) as e:
print e, row
return None
if row['Heading'] != '':
stop.heading = row['Heading']
stop.common_name = self.get_name(stop.atco_code)
stop.tfl = True
if stop.street.isupper():
stop.street = titlecase(stop.street)
if stop.landmark.isupper():
stop.landmark = titlecase(stop.landmark)
stop.save()
Work around null TfL common names"""
Usage:
./manage.py import_tfl_stops < data/tfl/bus-stops.csv
"""
import requests
from titlecase import titlecase
from django.core.exceptions import ObjectDoesNotExist, MultipleObjectsReturned
from busstops.management.import_from_csv import ImportFromCSVCommand
from busstops.models import StopPoint
class Command(ImportFromCSVCommand):
@staticmethod
def get_name(atco_code):
"""
Given a stop's ATCO code, returns the best-formatted version of its common name from the
TfL API
"""
data = requests.get('https://api.tfl.gov.uk/StopPoint/%s' % atco_code).json()
return data.get('commonName')
def handle_row(self, row):
if row['Naptan_Atco'] in (None, '', 'NONE'):
return None
try:
stop = StopPoint.objects.get(pk=row['Naptan_Atco'])
except ObjectDoesNotExist:
try:
stop = StopPoint.objects.get(pk__contains=row['Naptan_Atco'])
except (ObjectDoesNotExist, MultipleObjectsReturned) as e:
print e, row
return None
if row['Heading'] != '':
stop.heading = row['Heading']
stop.common_name = self.get_name(stop.atco_code) or stop.common_name
stop.tfl = True
if stop.street.isupper():
stop.street = titlecase(stop.street)
if stop.landmark.isupper():
stop.landmark = titlecase(stop.landmark)
stop.save()
|
<commit_before>"""
Usage:
./manage.py import_tfl_stops < data/tfl/bus-stops.csv
"""
import requests
from titlecase import titlecase
from django.core.exceptions import ObjectDoesNotExist, MultipleObjectsReturned
from busstops.management.import_from_csv import ImportFromCSVCommand
from busstops.models import StopPoint
class Command(ImportFromCSVCommand):
@staticmethod
def get_name(atco_code):
"""
Given a stop's ATCO code, returns the best-formatted version of its common name from the
TfL API
"""
data = requests.get('https://api.tfl.gov.uk/StopPoint/%s' % atco_code).json()
return data.get('commonName')
def handle_row(self, row):
if row['Naptan_Atco'] in (None, '', 'NONE'):
return None
try:
stop = StopPoint.objects.get(pk=row['Naptan_Atco'])
except ObjectDoesNotExist:
try:
stop = StopPoint.objects.get(pk__contains=row['Naptan_Atco'])
except (ObjectDoesNotExist, MultipleObjectsReturned) as e:
print e, row
return None
if row['Heading'] != '':
stop.heading = row['Heading']
stop.common_name = self.get_name(stop.atco_code)
stop.tfl = True
if stop.street.isupper():
stop.street = titlecase(stop.street)
if stop.landmark.isupper():
stop.landmark = titlecase(stop.landmark)
stop.save()
<commit_msg>Work around null TfL common names<commit_after>"""
Usage:
./manage.py import_tfl_stops < data/tfl/bus-stops.csv
"""
import requests
from titlecase import titlecase
from django.core.exceptions import ObjectDoesNotExist, MultipleObjectsReturned
from busstops.management.import_from_csv import ImportFromCSVCommand
from busstops.models import StopPoint
class Command(ImportFromCSVCommand):
@staticmethod
def get_name(atco_code):
"""
Given a stop's ATCO code, returns the best-formatted version of its common name from the
TfL API
"""
data = requests.get('https://api.tfl.gov.uk/StopPoint/%s' % atco_code).json()
return data.get('commonName')
def handle_row(self, row):
if row['Naptan_Atco'] in (None, '', 'NONE'):
return None
try:
stop = StopPoint.objects.get(pk=row['Naptan_Atco'])
except ObjectDoesNotExist:
try:
stop = StopPoint.objects.get(pk__contains=row['Naptan_Atco'])
except (ObjectDoesNotExist, MultipleObjectsReturned) as e:
print e, row
return None
if row['Heading'] != '':
stop.heading = row['Heading']
stop.common_name = self.get_name(stop.atco_code) or stop.common_name
stop.tfl = True
if stop.street.isupper():
stop.street = titlecase(stop.street)
if stop.landmark.isupper():
stop.landmark = titlecase(stop.landmark)
stop.save()
|
9a8fd944fb78d582f06d7165f097c1e54cb870dc
|
project/asylum/mixins.py
|
project/asylum/mixins.py
|
from reversion import revisions
from django.db import transaction
# Monkeypatch the revisions
try:
revisions.create_revision
except AttributeError:
revisions.create_revision = revisions.revision_context_manager.create_revision
class AtomicVersionMixin(object):
def save(self, *args, **kwargs):
with transaction.atomic(), revisions.create_revision():
return super().save(*args, **kwargs)
def delete(self, *args, **kwargs):
with transaction.atomic(), revisions.create_revision():
return super().delete(*args, **kwargs)
|
from reversion import revisions
from django.db import transaction
# Monkeypatch the revisions
try:
revisions.create_revision
except AttributeError:
revisions.create_revision = revisions.revision_context_manager.create_revision
class AtomicVersionMixin(object):
"""Makes sure saves and deletes go via transactions and version control
even when objects are modified outside Django Admin"""
def save(self, *args, **kwargs):
with transaction.atomic(), revisions.create_revision():
return super().save(*args, **kwargs)
def delete(self, *args, **kwargs):
with transaction.atomic(), revisions.create_revision():
return super().delete(*args, **kwargs)
class CleanSaveMixin(object):
"""Makes sure clean() is checked before object is saved"""
def save(self, *args, **kwargs):
if not kwargs.pop('skip_clean', False):
self.full_clean()
return super().save(*args, **kwargs)
|
Add a mixin for calling full_clean() on save()
|
Add a mixin for calling full_clean() on save()
|
Python
|
mit
|
ojousima/asylum,rambo/asylum,HelsinkiHacklab/asylum,ojousima/asylum,hacklab-fi/asylum,hacklab-fi/asylum,jautero/asylum,HelsinkiHacklab/asylum,jautero/asylum,hacklab-fi/asylum,HelsinkiHacklab/asylum,rambo/asylum,jautero/asylum,rambo/asylum,HelsinkiHacklab/asylum,jautero/asylum,hacklab-fi/asylum,ojousima/asylum,rambo/asylum,ojousima/asylum
|
from reversion import revisions
from django.db import transaction
# Monkeypatch the revisions
try:
revisions.create_revision
except AttributeError:
revisions.create_revision = revisions.revision_context_manager.create_revision
class AtomicVersionMixin(object):
def save(self, *args, **kwargs):
with transaction.atomic(), revisions.create_revision():
return super().save(*args, **kwargs)
def delete(self, *args, **kwargs):
with transaction.atomic(), revisions.create_revision():
return super().delete(*args, **kwargs)
Add a mixin for calling full_clean() on save()
|
from reversion import revisions
from django.db import transaction
# Monkeypatch the revisions
try:
revisions.create_revision
except AttributeError:
revisions.create_revision = revisions.revision_context_manager.create_revision
class AtomicVersionMixin(object):
"""Makes sure saves and deletes go via transactions and version control
even when objects are modified outside Django Admin"""
def save(self, *args, **kwargs):
with transaction.atomic(), revisions.create_revision():
return super().save(*args, **kwargs)
def delete(self, *args, **kwargs):
with transaction.atomic(), revisions.create_revision():
return super().delete(*args, **kwargs)
class CleanSaveMixin(object):
"""Makes sure clean() is checked before object is saved"""
def save(self, *args, **kwargs):
if not kwargs.pop('skip_clean', False):
self.full_clean()
return super().save(*args, **kwargs)
|
<commit_before>from reversion import revisions
from django.db import transaction
# Monkeypatch the revisions
try:
revisions.create_revision
except AttributeError:
revisions.create_revision = revisions.revision_context_manager.create_revision
class AtomicVersionMixin(object):
def save(self, *args, **kwargs):
with transaction.atomic(), revisions.create_revision():
return super().save(*args, **kwargs)
def delete(self, *args, **kwargs):
with transaction.atomic(), revisions.create_revision():
return super().delete(*args, **kwargs)
<commit_msg>Add a mixin for calling full_clean() on save()<commit_after>
|
from reversion import revisions
from django.db import transaction
# Monkeypatch the revisions
try:
revisions.create_revision
except AttributeError:
revisions.create_revision = revisions.revision_context_manager.create_revision
class AtomicVersionMixin(object):
"""Makes sure saves and deletes go via transactions and version control
even when objects are modified outside Django Admin"""
def save(self, *args, **kwargs):
with transaction.atomic(), revisions.create_revision():
return super().save(*args, **kwargs)
def delete(self, *args, **kwargs):
with transaction.atomic(), revisions.create_revision():
return super().delete(*args, **kwargs)
class CleanSaveMixin(object):
"""Makes sure clean() is checked before object is saved"""
def save(self, *args, **kwargs):
if not kwargs.pop('skip_clean', False):
self.full_clean()
return super().save(*args, **kwargs)
|
from reversion import revisions
from django.db import transaction
# Monkeypatch the revisions
try:
revisions.create_revision
except AttributeError:
revisions.create_revision = revisions.revision_context_manager.create_revision
class AtomicVersionMixin(object):
def save(self, *args, **kwargs):
with transaction.atomic(), revisions.create_revision():
return super().save(*args, **kwargs)
def delete(self, *args, **kwargs):
with transaction.atomic(), revisions.create_revision():
return super().delete(*args, **kwargs)
Add a mixin for calling full_clean() on save()from reversion import revisions
from django.db import transaction
# Monkeypatch the revisions
try:
revisions.create_revision
except AttributeError:
revisions.create_revision = revisions.revision_context_manager.create_revision
class AtomicVersionMixin(object):
"""Makes sure saves and deletes go via transactions and version control
even when objects are modified outside Django Admin"""
def save(self, *args, **kwargs):
with transaction.atomic(), revisions.create_revision():
return super().save(*args, **kwargs)
def delete(self, *args, **kwargs):
with transaction.atomic(), revisions.create_revision():
return super().delete(*args, **kwargs)
class CleanSaveMixin(object):
"""Makes sure clean() is checked before object is saved"""
def save(self, *args, **kwargs):
if not kwargs.pop('skip_clean', False):
self.full_clean()
return super().save(*args, **kwargs)
|
<commit_before>from reversion import revisions
from django.db import transaction
# Monkeypatch the revisions
try:
revisions.create_revision
except AttributeError:
revisions.create_revision = revisions.revision_context_manager.create_revision
class AtomicVersionMixin(object):
def save(self, *args, **kwargs):
with transaction.atomic(), revisions.create_revision():
return super().save(*args, **kwargs)
def delete(self, *args, **kwargs):
with transaction.atomic(), revisions.create_revision():
return super().delete(*args, **kwargs)
<commit_msg>Add a mixin for calling full_clean() on save()<commit_after>from reversion import revisions
from django.db import transaction
# Monkeypatch the revisions
try:
revisions.create_revision
except AttributeError:
revisions.create_revision = revisions.revision_context_manager.create_revision
class AtomicVersionMixin(object):
"""Makes sure saves and deletes go via transactions and version control
even when objects are modified outside Django Admin"""
def save(self, *args, **kwargs):
with transaction.atomic(), revisions.create_revision():
return super().save(*args, **kwargs)
def delete(self, *args, **kwargs):
with transaction.atomic(), revisions.create_revision():
return super().delete(*args, **kwargs)
class CleanSaveMixin(object):
"""Makes sure clean() is checked before object is saved"""
def save(self, *args, **kwargs):
if not kwargs.pop('skip_clean', False):
self.full_clean()
return super().save(*args, **kwargs)
|
39f7530f9082062abdd530255ab34d642097b605
|
Python/Product/PythonTools/ptvsd/setup.py
|
Python/Product/PythonTools/ptvsd/setup.py
|
#!/usr/bin/env python
#-------------------------------------------------------------------------
# Copyright (c) Microsoft. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#--------------------------------------------------------------------------
from distutils.core import setup
setup(name='ptvsd',
version='2.2.0rc2',
description='Python Tools for Visual Studio remote debugging server',
license='Apache License 2.0',
author='Microsoft Corporation',
author_email='ptvshelp@microsoft.com',
url='https://aka.ms/ptvs',
classifiers=[
'Development Status :: 4 - Beta',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'License :: OSI Approved :: Apache Software License'],
packages=['ptvsd']
)
|
#!/usr/bin/env python
#-------------------------------------------------------------------------
# Copyright (c) Microsoft. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#--------------------------------------------------------------------------
from distutils.core import setup
setup(name='ptvsd',
version='2.2.0',
description='Python Tools for Visual Studio remote debugging server',
license='Apache License 2.0',
author='Microsoft Corporation',
author_email='ptvshelp@microsoft.com',
url='https://aka.ms/ptvs',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'License :: OSI Approved :: Apache Software License'],
packages=['ptvsd']
)
|
Update ptvsd version number for 2.2 release.
|
Update ptvsd version number for 2.2 release.
|
Python
|
apache-2.0
|
bolabola/PTVS,christer155/PTVS,gomiero/PTVS,DEVSENSE/PTVS,mlorbetske/PTVS,ChinaQuants/PTVS,christer155/PTVS,dut3062796s/PTVS,Microsoft/PTVS,huguesv/PTVS,msunardi/PTVS,fivejjs/PTVS,alanch-ms/PTVS,Habatchii/PTVS,Microsoft/PTVS,denfromufa/PTVS,fjxhkj/PTVS,crwilcox/PTVS,MetSystem/PTVS,bolabola/PTVS,denfromufa/PTVS,modulexcite/PTVS,denfromufa/PTVS,alanch-ms/PTVS,xNUTs/PTVS,DinoV/PTVS,Habatchii/PTVS,huguesv/PTVS,msunardi/PTVS,MetSystem/PTVS,MetSystem/PTVS,DEVSENSE/PTVS,alanch-ms/PTVS,zooba/PTVS,denfromufa/PTVS,crwilcox/PTVS,crwilcox/PTVS,int19h/PTVS,christer155/PTVS,jkorell/PTVS,dut3062796s/PTVS,Microsoft/PTVS,DEVSENSE/PTVS,modulexcite/PTVS,Habatchii/PTVS,xNUTs/PTVS,alanch-ms/PTVS,int19h/PTVS,DinoV/PTVS,Habatchii/PTVS,zooba/PTVS,MetSystem/PTVS,dut3062796s/PTVS,bolabola/PTVS,ChinaQuants/PTVS,alanch-ms/PTVS,xNUTs/PTVS,DinoV/PTVS,christer155/PTVS,bolabola/PTVS,modulexcite/PTVS,zooba/PTVS,DEVSENSE/PTVS,jkorell/PTVS,mlorbetske/PTVS,huguesv/PTVS,ChinaQuants/PTVS,DinoV/PTVS,MetSystem/PTVS,DinoV/PTVS,mlorbetske/PTVS,mlorbetske/PTVS,jkorell/PTVS,huguesv/PTVS,gomiero/PTVS,Habatchii/PTVS,fjxhkj/PTVS,crwilcox/PTVS,DEVSENSE/PTVS,ChinaQuants/PTVS,xNUTs/PTVS,xNUTs/PTVS,mlorbetske/PTVS,fjxhkj/PTVS,msunardi/PTVS,modulexcite/PTVS,DinoV/PTVS,int19h/PTVS,fivejjs/PTVS,int19h/PTVS,zooba/PTVS,dut3062796s/PTVS,msunardi/PTVS,xNUTs/PTVS,fjxhkj/PTVS,gomiero/PTVS,msunardi/PTVS,Microsoft/PTVS,fjxhkj/PTVS,gomiero/PTVS,christer155/PTVS,fivejjs/PTVS,DEVSENSE/PTVS,mlorbetske/PTVS,fjxhkj/PTVS,MetSystem/PTVS,crwilcox/PTVS,ChinaQuants/PTVS,gomiero/PTVS,Microsoft/PTVS,jkorell/PTVS,denfromufa/PTVS,alanch-ms/PTVS,jkorell/PTVS,modulexcite/PTVS,Habatchii/PTVS,zooba/PTVS,jkorell/PTVS,Microsoft/PTVS,fivejjs/PTVS,bolabola/PTVS,fivejjs/PTVS,modulexcite/PTVS,christer155/PTVS,fivejjs/PTVS,dut3062796s/PTVS,crwilcox/PTVS,gomiero/PTVS,int19h/PTVS,dut3062796s/PTVS,ChinaQuants/PTVS,msunardi/PTVS,denfromufa/PTVS,int19h/PTVS,bolabola/PTVS,zooba/PTVS,huguesv/PTVS,huguesv/PTVS
|
#!/usr/bin/env python
#-------------------------------------------------------------------------
# Copyright (c) Microsoft. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#--------------------------------------------------------------------------
from distutils.core import setup
setup(name='ptvsd',
version='2.2.0rc2',
description='Python Tools for Visual Studio remote debugging server',
license='Apache License 2.0',
author='Microsoft Corporation',
author_email='ptvshelp@microsoft.com',
url='https://aka.ms/ptvs',
classifiers=[
'Development Status :: 4 - Beta',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'License :: OSI Approved :: Apache Software License'],
packages=['ptvsd']
)
Update ptvsd version number for 2.2 release.
|
#!/usr/bin/env python
#-------------------------------------------------------------------------
# Copyright (c) Microsoft. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#--------------------------------------------------------------------------
from distutils.core import setup
setup(name='ptvsd',
version='2.2.0',
description='Python Tools for Visual Studio remote debugging server',
license='Apache License 2.0',
author='Microsoft Corporation',
author_email='ptvshelp@microsoft.com',
url='https://aka.ms/ptvs',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'License :: OSI Approved :: Apache Software License'],
packages=['ptvsd']
)
|
<commit_before>#!/usr/bin/env python
#-------------------------------------------------------------------------
# Copyright (c) Microsoft. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#--------------------------------------------------------------------------
from distutils.core import setup
setup(name='ptvsd',
version='2.2.0rc2',
description='Python Tools for Visual Studio remote debugging server',
license='Apache License 2.0',
author='Microsoft Corporation',
author_email='ptvshelp@microsoft.com',
url='https://aka.ms/ptvs',
classifiers=[
'Development Status :: 4 - Beta',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'License :: OSI Approved :: Apache Software License'],
packages=['ptvsd']
)
<commit_msg>Update ptvsd version number for 2.2 release.<commit_after>
|
#!/usr/bin/env python
#-------------------------------------------------------------------------
# Copyright (c) Microsoft. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#--------------------------------------------------------------------------
from distutils.core import setup
setup(name='ptvsd',
version='2.2.0',
description='Python Tools for Visual Studio remote debugging server',
license='Apache License 2.0',
author='Microsoft Corporation',
author_email='ptvshelp@microsoft.com',
url='https://aka.ms/ptvs',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'License :: OSI Approved :: Apache Software License'],
packages=['ptvsd']
)
|
#!/usr/bin/env python
#-------------------------------------------------------------------------
# Copyright (c) Microsoft. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#--------------------------------------------------------------------------
from distutils.core import setup
setup(name='ptvsd',
version='2.2.0rc2',
description='Python Tools for Visual Studio remote debugging server',
license='Apache License 2.0',
author='Microsoft Corporation',
author_email='ptvshelp@microsoft.com',
url='https://aka.ms/ptvs',
classifiers=[
'Development Status :: 4 - Beta',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'License :: OSI Approved :: Apache Software License'],
packages=['ptvsd']
)
Update ptvsd version number for 2.2 release.#!/usr/bin/env python
#-------------------------------------------------------------------------
# Copyright (c) Microsoft. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#--------------------------------------------------------------------------
from distutils.core import setup
setup(name='ptvsd',
version='2.2.0',
description='Python Tools for Visual Studio remote debugging server',
license='Apache License 2.0',
author='Microsoft Corporation',
author_email='ptvshelp@microsoft.com',
url='https://aka.ms/ptvs',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'License :: OSI Approved :: Apache Software License'],
packages=['ptvsd']
)
|
<commit_before>#!/usr/bin/env python
#-------------------------------------------------------------------------
# Copyright (c) Microsoft. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#--------------------------------------------------------------------------
from distutils.core import setup
setup(name='ptvsd',
version='2.2.0rc2',
description='Python Tools for Visual Studio remote debugging server',
license='Apache License 2.0',
author='Microsoft Corporation',
author_email='ptvshelp@microsoft.com',
url='https://aka.ms/ptvs',
classifiers=[
'Development Status :: 4 - Beta',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'License :: OSI Approved :: Apache Software License'],
packages=['ptvsd']
)
<commit_msg>Update ptvsd version number for 2.2 release.<commit_after>#!/usr/bin/env python
#-------------------------------------------------------------------------
# Copyright (c) Microsoft. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#--------------------------------------------------------------------------
from distutils.core import setup
setup(name='ptvsd',
version='2.2.0',
description='Python Tools for Visual Studio remote debugging server',
license='Apache License 2.0',
author='Microsoft Corporation',
author_email='ptvshelp@microsoft.com',
url='https://aka.ms/ptvs',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'License :: OSI Approved :: Apache Software License'],
packages=['ptvsd']
)
|
6794bb897e7e8730b1c3ab2fc6b856865887ac8b
|
scripts/upsrv_schema.py
|
scripts/upsrv_schema.py
|
#!/usr/bin/python
# Copyright (c) 2006 rPath, Inc
# All rights reserved
import sys
import os
import pwd
from conary.server import schema
from conary.lib import cfgtypes, tracelog
from conary.repository.netrepos.netserver import ServerConfig
from conary import dbstore
cnrPath = '/srv/conary/repository.cnr'
cfg = ServerConfig()
tracelog.initLog(filename='stderr', level=2)
try:
cfg.read(cnrPath)
except cfgtypes.CfgEnvironmentError:
print "Error reading %s" % cnrPath
sys.exit(1)
db = dbstore.connect(cfg.repositoryDB[1], cfg.repositoryDB[0])
schema.loadSchema(db, doMigrate=True)
if cfg.repositoryDB[0] == 'sqlite':
os.chown(cfg.repositoryDB[1], pwd.getpwnam('apache')[2],
pwd.getpwnam('apache')[3])
|
#!/usr/bin/python
# Copyright (c) 2006 rPath, Inc
# All rights reserved
import sys
import os
import pwd
from conary.server import schema
from conary.lib import cfgtypes, tracelog
from conary.repository.netrepos.netserver import ServerConfig
from conary import dbstore
class SimpleFileLog(tracelog.FileLog):
def printLog(self, level, msg):
self.fd.write("%s\n" % msg)
cnrPath = '/srv/conary/repository.cnr'
cfg = ServerConfig()
tracelog.FileLog = SimpleFileLog
tracelog.initLog(filename='stderr', level=2)
try:
cfg.read(cnrPath)
except cfgtypes.CfgEnvironmentError:
print "Error reading %s" % cnrPath
sys.exit(1)
db = dbstore.connect(cfg.repositoryDB[1], cfg.repositoryDB[0])
schema.loadSchema(db, doMigrate=True)
if cfg.repositoryDB[0] == 'sqlite':
os.chown(cfg.repositoryDB[1], pwd.getpwnam('apache')[2],
pwd.getpwnam('apache')[3])
|
Use a simpler trace logger that does not prepend timestamps
|
Use a simpler trace logger that does not prepend timestamps
|
Python
|
apache-2.0
|
sassoftware/rbm,sassoftware/rbm,sassoftware/rbm
|
#!/usr/bin/python
# Copyright (c) 2006 rPath, Inc
# All rights reserved
import sys
import os
import pwd
from conary.server import schema
from conary.lib import cfgtypes, tracelog
from conary.repository.netrepos.netserver import ServerConfig
from conary import dbstore
cnrPath = '/srv/conary/repository.cnr'
cfg = ServerConfig()
tracelog.initLog(filename='stderr', level=2)
try:
cfg.read(cnrPath)
except cfgtypes.CfgEnvironmentError:
print "Error reading %s" % cnrPath
sys.exit(1)
db = dbstore.connect(cfg.repositoryDB[1], cfg.repositoryDB[0])
schema.loadSchema(db, doMigrate=True)
if cfg.repositoryDB[0] == 'sqlite':
os.chown(cfg.repositoryDB[1], pwd.getpwnam('apache')[2],
pwd.getpwnam('apache')[3])
Use a simpler trace logger that does not prepend timestamps
|
#!/usr/bin/python
# Copyright (c) 2006 rPath, Inc
# All rights reserved
import sys
import os
import pwd
from conary.server import schema
from conary.lib import cfgtypes, tracelog
from conary.repository.netrepos.netserver import ServerConfig
from conary import dbstore
class SimpleFileLog(tracelog.FileLog):
def printLog(self, level, msg):
self.fd.write("%s\n" % msg)
cnrPath = '/srv/conary/repository.cnr'
cfg = ServerConfig()
tracelog.FileLog = SimpleFileLog
tracelog.initLog(filename='stderr', level=2)
try:
cfg.read(cnrPath)
except cfgtypes.CfgEnvironmentError:
print "Error reading %s" % cnrPath
sys.exit(1)
db = dbstore.connect(cfg.repositoryDB[1], cfg.repositoryDB[0])
schema.loadSchema(db, doMigrate=True)
if cfg.repositoryDB[0] == 'sqlite':
os.chown(cfg.repositoryDB[1], pwd.getpwnam('apache')[2],
pwd.getpwnam('apache')[3])
|
<commit_before>#!/usr/bin/python
# Copyright (c) 2006 rPath, Inc
# All rights reserved
import sys
import os
import pwd
from conary.server import schema
from conary.lib import cfgtypes, tracelog
from conary.repository.netrepos.netserver import ServerConfig
from conary import dbstore
cnrPath = '/srv/conary/repository.cnr'
cfg = ServerConfig()
tracelog.initLog(filename='stderr', level=2)
try:
cfg.read(cnrPath)
except cfgtypes.CfgEnvironmentError:
print "Error reading %s" % cnrPath
sys.exit(1)
db = dbstore.connect(cfg.repositoryDB[1], cfg.repositoryDB[0])
schema.loadSchema(db, doMigrate=True)
if cfg.repositoryDB[0] == 'sqlite':
os.chown(cfg.repositoryDB[1], pwd.getpwnam('apache')[2],
pwd.getpwnam('apache')[3])
<commit_msg>Use a simpler trace logger that does not prepend timestamps<commit_after>
|
#!/usr/bin/python
# Copyright (c) 2006 rPath, Inc
# All rights reserved
import sys
import os
import pwd
from conary.server import schema
from conary.lib import cfgtypes, tracelog
from conary.repository.netrepos.netserver import ServerConfig
from conary import dbstore
class SimpleFileLog(tracelog.FileLog):
def printLog(self, level, msg):
self.fd.write("%s\n" % msg)
cnrPath = '/srv/conary/repository.cnr'
cfg = ServerConfig()
tracelog.FileLog = SimpleFileLog
tracelog.initLog(filename='stderr', level=2)
try:
cfg.read(cnrPath)
except cfgtypes.CfgEnvironmentError:
print "Error reading %s" % cnrPath
sys.exit(1)
db = dbstore.connect(cfg.repositoryDB[1], cfg.repositoryDB[0])
schema.loadSchema(db, doMigrate=True)
if cfg.repositoryDB[0] == 'sqlite':
os.chown(cfg.repositoryDB[1], pwd.getpwnam('apache')[2],
pwd.getpwnam('apache')[3])
|
#!/usr/bin/python
# Copyright (c) 2006 rPath, Inc
# All rights reserved
import sys
import os
import pwd
from conary.server import schema
from conary.lib import cfgtypes, tracelog
from conary.repository.netrepos.netserver import ServerConfig
from conary import dbstore
cnrPath = '/srv/conary/repository.cnr'
cfg = ServerConfig()
tracelog.initLog(filename='stderr', level=2)
try:
cfg.read(cnrPath)
except cfgtypes.CfgEnvironmentError:
print "Error reading %s" % cnrPath
sys.exit(1)
db = dbstore.connect(cfg.repositoryDB[1], cfg.repositoryDB[0])
schema.loadSchema(db, doMigrate=True)
if cfg.repositoryDB[0] == 'sqlite':
os.chown(cfg.repositoryDB[1], pwd.getpwnam('apache')[2],
pwd.getpwnam('apache')[3])
Use a simpler trace logger that does not prepend timestamps#!/usr/bin/python
# Copyright (c) 2006 rPath, Inc
# All rights reserved
import sys
import os
import pwd
from conary.server import schema
from conary.lib import cfgtypes, tracelog
from conary.repository.netrepos.netserver import ServerConfig
from conary import dbstore
class SimpleFileLog(tracelog.FileLog):
def printLog(self, level, msg):
self.fd.write("%s\n" % msg)
cnrPath = '/srv/conary/repository.cnr'
cfg = ServerConfig()
tracelog.FileLog = SimpleFileLog
tracelog.initLog(filename='stderr', level=2)
try:
cfg.read(cnrPath)
except cfgtypes.CfgEnvironmentError:
print "Error reading %s" % cnrPath
sys.exit(1)
db = dbstore.connect(cfg.repositoryDB[1], cfg.repositoryDB[0])
schema.loadSchema(db, doMigrate=True)
if cfg.repositoryDB[0] == 'sqlite':
os.chown(cfg.repositoryDB[1], pwd.getpwnam('apache')[2],
pwd.getpwnam('apache')[3])
|
<commit_before>#!/usr/bin/python
# Copyright (c) 2006 rPath, Inc
# All rights reserved
import sys
import os
import pwd
from conary.server import schema
from conary.lib import cfgtypes, tracelog
from conary.repository.netrepos.netserver import ServerConfig
from conary import dbstore
cnrPath = '/srv/conary/repository.cnr'
cfg = ServerConfig()
tracelog.initLog(filename='stderr', level=2)
try:
cfg.read(cnrPath)
except cfgtypes.CfgEnvironmentError:
print "Error reading %s" % cnrPath
sys.exit(1)
db = dbstore.connect(cfg.repositoryDB[1], cfg.repositoryDB[0])
schema.loadSchema(db, doMigrate=True)
if cfg.repositoryDB[0] == 'sqlite':
os.chown(cfg.repositoryDB[1], pwd.getpwnam('apache')[2],
pwd.getpwnam('apache')[3])
<commit_msg>Use a simpler trace logger that does not prepend timestamps<commit_after>#!/usr/bin/python
# Copyright (c) 2006 rPath, Inc
# All rights reserved
import sys
import os
import pwd
from conary.server import schema
from conary.lib import cfgtypes, tracelog
from conary.repository.netrepos.netserver import ServerConfig
from conary import dbstore
class SimpleFileLog(tracelog.FileLog):
def printLog(self, level, msg):
self.fd.write("%s\n" % msg)
cnrPath = '/srv/conary/repository.cnr'
cfg = ServerConfig()
tracelog.FileLog = SimpleFileLog
tracelog.initLog(filename='stderr', level=2)
try:
cfg.read(cnrPath)
except cfgtypes.CfgEnvironmentError:
print "Error reading %s" % cnrPath
sys.exit(1)
db = dbstore.connect(cfg.repositoryDB[1], cfg.repositoryDB[0])
schema.loadSchema(db, doMigrate=True)
if cfg.repositoryDB[0] == 'sqlite':
os.chown(cfg.repositoryDB[1], pwd.getpwnam('apache')[2],
pwd.getpwnam('apache')[3])
|
31e4da5e782c29d7d0c893a3fc9af48260c50a3a
|
src/ansible/views.py
|
src/ansible/views.py
|
from django.shortcuts import get_object_or_404, render
from django.http import HttpResponseRedirect, HttpResponse
from formtools.wizard.views import SessionWizardView
from ansible.models import Github
def index(request):
return HttpResponse("200")
class PlaybookWizard(SessionWizardView):
def get_form_initial(self, step):
initial = {}
if step == '1':
prev_data = self.storage.get_step_data('0')
initial['name'] = prev_data['0-repository']
return self.initial_dict.get(step, initial)
return self.initial_dict.get(step, {})
def done(self, form_list, **kwargs):
form_data = {}
for form in form_list:
form.save()
return HttpResponseRedirect('/ansible')
|
from django.shortcuts import get_object_or_404, render
from django.http import HttpResponseRedirect, HttpResponse
from formtools.wizard.views import SessionWizardView
from ansible.models import Github, Playbook
import sys
def index(request):
return HttpResponse("200")
class PlaybookWizard(SessionWizardView):
def get_form_initial(self, step):
initial = {}
if step == '1':
prev_data = self.storage.get_step_data('0')
initial['name'] = prev_data['0-repository']
return self.initial_dict.get(step, initial)
return self.initial_dict.get(step, {})
def get_form_step_data(self, form):
data = {}
if self.get_form_prefix() == '0':
github = Github()
github.repository = form.data.dict()['0-repository']
github.username = form.data.dict()['0-username']
github.save()
if self.get_form_prefix() == '1':
playbook = Playbook()
playbook.name = form.data.dict()['1-name']
playbook.inventory = form.data.dict()['1-inventory']
playbook.user = form.data.dict()['1-user']
playbook.save()
return form.data
def done(self, form_list, **kwargs):
return HttpResponseRedirect('/ansible')
|
Save form data to DB on each step
|
Save form data to DB on each step
|
Python
|
bsd-3-clause
|
lozadaOmr/ansible-admin,lozadaOmr/ansible-admin,lozadaOmr/ansible-admin
|
from django.shortcuts import get_object_or_404, render
from django.http import HttpResponseRedirect, HttpResponse
from formtools.wizard.views import SessionWizardView
from ansible.models import Github
def index(request):
return HttpResponse("200")
class PlaybookWizard(SessionWizardView):
def get_form_initial(self, step):
initial = {}
if step == '1':
prev_data = self.storage.get_step_data('0')
initial['name'] = prev_data['0-repository']
return self.initial_dict.get(step, initial)
return self.initial_dict.get(step, {})
def done(self, form_list, **kwargs):
form_data = {}
for form in form_list:
form.save()
return HttpResponseRedirect('/ansible')
Save form data to DB on each step
|
from django.shortcuts import get_object_or_404, render
from django.http import HttpResponseRedirect, HttpResponse
from formtools.wizard.views import SessionWizardView
from ansible.models import Github, Playbook
import sys
def index(request):
return HttpResponse("200")
class PlaybookWizard(SessionWizardView):
def get_form_initial(self, step):
initial = {}
if step == '1':
prev_data = self.storage.get_step_data('0')
initial['name'] = prev_data['0-repository']
return self.initial_dict.get(step, initial)
return self.initial_dict.get(step, {})
def get_form_step_data(self, form):
data = {}
if self.get_form_prefix() == '0':
github = Github()
github.repository = form.data.dict()['0-repository']
github.username = form.data.dict()['0-username']
github.save()
if self.get_form_prefix() == '1':
playbook = Playbook()
playbook.name = form.data.dict()['1-name']
playbook.inventory = form.data.dict()['1-inventory']
playbook.user = form.data.dict()['1-user']
playbook.save()
return form.data
def done(self, form_list, **kwargs):
return HttpResponseRedirect('/ansible')
|
<commit_before>from django.shortcuts import get_object_or_404, render
from django.http import HttpResponseRedirect, HttpResponse
from formtools.wizard.views import SessionWizardView
from ansible.models import Github
def index(request):
return HttpResponse("200")
class PlaybookWizard(SessionWizardView):
def get_form_initial(self, step):
initial = {}
if step == '1':
prev_data = self.storage.get_step_data('0')
initial['name'] = prev_data['0-repository']
return self.initial_dict.get(step, initial)
return self.initial_dict.get(step, {})
def done(self, form_list, **kwargs):
form_data = {}
for form in form_list:
form.save()
return HttpResponseRedirect('/ansible')
<commit_msg>Save form data to DB on each step<commit_after>
|
from django.shortcuts import get_object_or_404, render
from django.http import HttpResponseRedirect, HttpResponse
from formtools.wizard.views import SessionWizardView
from ansible.models import Github, Playbook
import sys
def index(request):
return HttpResponse("200")
class PlaybookWizard(SessionWizardView):
def get_form_initial(self, step):
initial = {}
if step == '1':
prev_data = self.storage.get_step_data('0')
initial['name'] = prev_data['0-repository']
return self.initial_dict.get(step, initial)
return self.initial_dict.get(step, {})
def get_form_step_data(self, form):
data = {}
if self.get_form_prefix() == '0':
github = Github()
github.repository = form.data.dict()['0-repository']
github.username = form.data.dict()['0-username']
github.save()
if self.get_form_prefix() == '1':
playbook = Playbook()
playbook.name = form.data.dict()['1-name']
playbook.inventory = form.data.dict()['1-inventory']
playbook.user = form.data.dict()['1-user']
playbook.save()
return form.data
def done(self, form_list, **kwargs):
return HttpResponseRedirect('/ansible')
|
from django.shortcuts import get_object_or_404, render
from django.http import HttpResponseRedirect, HttpResponse
from formtools.wizard.views import SessionWizardView
from ansible.models import Github
def index(request):
return HttpResponse("200")
class PlaybookWizard(SessionWizardView):
def get_form_initial(self, step):
initial = {}
if step == '1':
prev_data = self.storage.get_step_data('0')
initial['name'] = prev_data['0-repository']
return self.initial_dict.get(step, initial)
return self.initial_dict.get(step, {})
def done(self, form_list, **kwargs):
form_data = {}
for form in form_list:
form.save()
return HttpResponseRedirect('/ansible')
Save form data to DB on each stepfrom django.shortcuts import get_object_or_404, render
from django.http import HttpResponseRedirect, HttpResponse
from formtools.wizard.views import SessionWizardView
from ansible.models import Github, Playbook
import sys
def index(request):
return HttpResponse("200")
class PlaybookWizard(SessionWizardView):
def get_form_initial(self, step):
initial = {}
if step == '1':
prev_data = self.storage.get_step_data('0')
initial['name'] = prev_data['0-repository']
return self.initial_dict.get(step, initial)
return self.initial_dict.get(step, {})
def get_form_step_data(self, form):
data = {}
if self.get_form_prefix() == '0':
github = Github()
github.repository = form.data.dict()['0-repository']
github.username = form.data.dict()['0-username']
github.save()
if self.get_form_prefix() == '1':
playbook = Playbook()
playbook.name = form.data.dict()['1-name']
playbook.inventory = form.data.dict()['1-inventory']
playbook.user = form.data.dict()['1-user']
playbook.save()
return form.data
def done(self, form_list, **kwargs):
return HttpResponseRedirect('/ansible')
|
<commit_before>from django.shortcuts import get_object_or_404, render
from django.http import HttpResponseRedirect, HttpResponse
from formtools.wizard.views import SessionWizardView
from ansible.models import Github
def index(request):
return HttpResponse("200")
class PlaybookWizard(SessionWizardView):
def get_form_initial(self, step):
initial = {}
if step == '1':
prev_data = self.storage.get_step_data('0')
initial['name'] = prev_data['0-repository']
return self.initial_dict.get(step, initial)
return self.initial_dict.get(step, {})
def done(self, form_list, **kwargs):
form_data = {}
for form in form_list:
form.save()
return HttpResponseRedirect('/ansible')
<commit_msg>Save form data to DB on each step<commit_after>from django.shortcuts import get_object_or_404, render
from django.http import HttpResponseRedirect, HttpResponse
from formtools.wizard.views import SessionWizardView
from ansible.models import Github, Playbook
import sys
def index(request):
return HttpResponse("200")
class PlaybookWizard(SessionWizardView):
def get_form_initial(self, step):
initial = {}
if step == '1':
prev_data = self.storage.get_step_data('0')
initial['name'] = prev_data['0-repository']
return self.initial_dict.get(step, initial)
return self.initial_dict.get(step, {})
def get_form_step_data(self, form):
data = {}
if self.get_form_prefix() == '0':
github = Github()
github.repository = form.data.dict()['0-repository']
github.username = form.data.dict()['0-username']
github.save()
if self.get_form_prefix() == '1':
playbook = Playbook()
playbook.name = form.data.dict()['1-name']
playbook.inventory = form.data.dict()['1-inventory']
playbook.user = form.data.dict()['1-user']
playbook.save()
return form.data
def done(self, form_list, **kwargs):
return HttpResponseRedirect('/ansible')
|
c99db48154ed327aead8a56211ac5125529f8ea0
|
count-inversions/count_inversions.py
|
count-inversions/count_inversions.py
|
from random import randint
import sys
def sort_and_count(arr):
n = len(arr)
if n == 1:
return 0
else:
first_half = arr[:n/2]
second_half = arr[n/2:]
return merge_and_count_split(sort_and_count(first_half), sort_and_count(second_half))
def merge_and_count_split(arr):
return 0
def main(arr_len):
test_arr = [randint(0,arr_len) for n in range(arr_len)]
return sort_and_count(test_arr)
if __name__ == '__main__':
try:
arr_len = int(sys.argv[1])
except (IndexError, ValueError):
print 'Format: python merge-sort.py <array-length>'
print main(arr_len)
|
from random import randint
import sys
def sort_and_count(arr):
n = len(arr)
if n == 1:
return 0
else:
first_half = arr[:n/2]
second_half = arr[n/2:]
return merge_and_count_split(sort_and_count(first_half), sort_and_count(second_half))
def merge_and_count_split(arr1, arr2):
return 0
def main(arr_len):
test_arr = [randint(0,arr_len) for n in range(arr_len)]
return sort_and_count(test_arr)
if __name__ == '__main__':
try:
arr_len = int(sys.argv[1])
except (IndexError, ValueError):
print 'Format: python merge-sort.py <array-length>'
print main(arr_len)
|
Change merge_and_count to accept 2 array arguments
|
Change merge_and_count to accept 2 array arguments
|
Python
|
mit
|
timpel/stanford-algs,timpel/stanford-algs
|
from random import randint
import sys
def sort_and_count(arr):
n = len(arr)
if n == 1:
return 0
else:
first_half = arr[:n/2]
second_half = arr[n/2:]
return merge_and_count_split(sort_and_count(first_half), sort_and_count(second_half))
def merge_and_count_split(arr):
return 0
def main(arr_len):
test_arr = [randint(0,arr_len) for n in range(arr_len)]
return sort_and_count(test_arr)
if __name__ == '__main__':
try:
arr_len = int(sys.argv[1])
except (IndexError, ValueError):
print 'Format: python merge-sort.py <array-length>'
print main(arr_len)Change merge_and_count to accept 2 array arguments
|
from random import randint
import sys
def sort_and_count(arr):
n = len(arr)
if n == 1:
return 0
else:
first_half = arr[:n/2]
second_half = arr[n/2:]
return merge_and_count_split(sort_and_count(first_half), sort_and_count(second_half))
def merge_and_count_split(arr1, arr2):
return 0
def main(arr_len):
test_arr = [randint(0,arr_len) for n in range(arr_len)]
return sort_and_count(test_arr)
if __name__ == '__main__':
try:
arr_len = int(sys.argv[1])
except (IndexError, ValueError):
print 'Format: python merge-sort.py <array-length>'
print main(arr_len)
|
<commit_before>from random import randint
import sys
def sort_and_count(arr):
n = len(arr)
if n == 1:
return 0
else:
first_half = arr[:n/2]
second_half = arr[n/2:]
return merge_and_count_split(sort_and_count(first_half), sort_and_count(second_half))
def merge_and_count_split(arr):
return 0
def main(arr_len):
test_arr = [randint(0,arr_len) for n in range(arr_len)]
return sort_and_count(test_arr)
if __name__ == '__main__':
try:
arr_len = int(sys.argv[1])
except (IndexError, ValueError):
print 'Format: python merge-sort.py <array-length>'
print main(arr_len)<commit_msg>Change merge_and_count to accept 2 array arguments<commit_after>
|
from random import randint
import sys
def sort_and_count(arr):
n = len(arr)
if n == 1:
return 0
else:
first_half = arr[:n/2]
second_half = arr[n/2:]
return merge_and_count_split(sort_and_count(first_half), sort_and_count(second_half))
def merge_and_count_split(arr1, arr2):
return 0
def main(arr_len):
test_arr = [randint(0,arr_len) for n in range(arr_len)]
return sort_and_count(test_arr)
if __name__ == '__main__':
try:
arr_len = int(sys.argv[1])
except (IndexError, ValueError):
print 'Format: python merge-sort.py <array-length>'
print main(arr_len)
|
from random import randint
import sys
def sort_and_count(arr):
n = len(arr)
if n == 1:
return 0
else:
first_half = arr[:n/2]
second_half = arr[n/2:]
return merge_and_count_split(sort_and_count(first_half), sort_and_count(second_half))
def merge_and_count_split(arr):
return 0
def main(arr_len):
test_arr = [randint(0,arr_len) for n in range(arr_len)]
return sort_and_count(test_arr)
if __name__ == '__main__':
try:
arr_len = int(sys.argv[1])
except (IndexError, ValueError):
print 'Format: python merge-sort.py <array-length>'
print main(arr_len)Change merge_and_count to accept 2 array argumentsfrom random import randint
import sys
def sort_and_count(arr):
n = len(arr)
if n == 1:
return 0
else:
first_half = arr[:n/2]
second_half = arr[n/2:]
return merge_and_count_split(sort_and_count(first_half), sort_and_count(second_half))
def merge_and_count_split(arr1, arr2):
return 0
def main(arr_len):
test_arr = [randint(0,arr_len) for n in range(arr_len)]
return sort_and_count(test_arr)
if __name__ == '__main__':
try:
arr_len = int(sys.argv[1])
except (IndexError, ValueError):
print 'Format: python merge-sort.py <array-length>'
print main(arr_len)
|
<commit_before>from random import randint
import sys
def sort_and_count(arr):
n = len(arr)
if n == 1:
return 0
else:
first_half = arr[:n/2]
second_half = arr[n/2:]
return merge_and_count_split(sort_and_count(first_half), sort_and_count(second_half))
def merge_and_count_split(arr):
return 0
def main(arr_len):
test_arr = [randint(0,arr_len) for n in range(arr_len)]
return sort_and_count(test_arr)
if __name__ == '__main__':
try:
arr_len = int(sys.argv[1])
except (IndexError, ValueError):
print 'Format: python merge-sort.py <array-length>'
print main(arr_len)<commit_msg>Change merge_and_count to accept 2 array arguments<commit_after>from random import randint
import sys
def sort_and_count(arr):
n = len(arr)
if n == 1:
return 0
else:
first_half = arr[:n/2]
second_half = arr[n/2:]
return merge_and_count_split(sort_and_count(first_half), sort_and_count(second_half))
def merge_and_count_split(arr1, arr2):
return 0
def main(arr_len):
test_arr = [randint(0,arr_len) for n in range(arr_len)]
return sort_and_count(test_arr)
if __name__ == '__main__':
try:
arr_len = int(sys.argv[1])
except (IndexError, ValueError):
print 'Format: python merge-sort.py <array-length>'
print main(arr_len)
|
aa026fb39bd4a053766988383e9374dba20fd7f5
|
scripts/init_tree.py
|
scripts/init_tree.py
|
import os
import shutil
def main():
cwd = os.getcwd()
if not cwd.endswith(os.path.join('FRENSIE', 'scripts')):
print 'This script must be run in \"FRENSIE/scipts\"'
return 1
os.chdir('../../')
os.mkdir('frensie_build_tree')
os.renames('FRENSIE', 'frensie_build_tree/FRENSIE')
os.chdir('frensie_build_tree')
os.symlink('FRENSIE', 'src')
os.mkdir('deps')
os.mkdir('deps/install')
os.mkdir('build')
shutil.copyfile('src/makefile.deps', 'deps/makefile')
shutil.copyfile('src/frensie.sh', 'build/frensie.sh')
print """
To build dependencies move necessary tars to deps/tars.
cd to frensie_build_tree/deps and run make all.
To once all dependecies exist in frensie_build_tree/deps/install
cd to frensie_build_tree/build and run bash frensie.sh
"""
if __name__ == '__main__':
main()
|
import os
import shutil
def main():
cwd = os.getcwd()
if not cwd.endswith(os.path.join('FRENSIE', 'scripts')):
print 'This script must be run in \"FRENSIE/scipts\"'
print 'Your CWD is', cwd
return 1
os.chdir('../../')
os.mkdir('frensie_build_tree')
#os.renames('FRENSIE', 'frensie_build_tree/FRENSIE')
os.symlink(os.path.abspath('FRENSIE'), 'frensie_build_tree/FRENSIE')
os.chdir('frensie_build_tree')
os.symlink('FRENSIE', 'src')
os.mkdir('deps')
os.mkdir('deps/install')
os.mkdir('build')
shutil.copyfile('src/scripts/makefile.deps', 'deps/makefile')
shutil.copyfile('src/scripts/frensie.sh', 'build/frensie.sh')
print """
To build dependencies move necessary tars to deps/tars.
cd to frensie_build_tree/deps and run make all.
To once all dependecies exist in frensie_build_tree/deps/install
cd to frensie_build_tree/build and run bash frensie.sh
"""
if __name__ == '__main__':
main()
|
Use symlinks to avoid weird behavior from removing the CWD while we're in it
|
Use symlinks to avoid weird behavior from removing the CWD while we're in it
|
Python
|
bsd-3-clause
|
lkersting/SCR-2123,lkersting/SCR-2123,lkersting/SCR-2123,lkersting/SCR-2123
|
import os
import shutil
def main():
cwd = os.getcwd()
if not cwd.endswith(os.path.join('FRENSIE', 'scripts')):
print 'This script must be run in \"FRENSIE/scipts\"'
return 1
os.chdir('../../')
os.mkdir('frensie_build_tree')
os.renames('FRENSIE', 'frensie_build_tree/FRENSIE')
os.chdir('frensie_build_tree')
os.symlink('FRENSIE', 'src')
os.mkdir('deps')
os.mkdir('deps/install')
os.mkdir('build')
shutil.copyfile('src/makefile.deps', 'deps/makefile')
shutil.copyfile('src/frensie.sh', 'build/frensie.sh')
print """
To build dependencies move necessary tars to deps/tars.
cd to frensie_build_tree/deps and run make all.
To once all dependecies exist in frensie_build_tree/deps/install
cd to frensie_build_tree/build and run bash frensie.sh
"""
if __name__ == '__main__':
main()
Use symlinks to avoid weird behavior from removing the CWD while we're in it
|
import os
import shutil
def main():
cwd = os.getcwd()
if not cwd.endswith(os.path.join('FRENSIE', 'scripts')):
print 'This script must be run in \"FRENSIE/scipts\"'
print 'Your CWD is', cwd
return 1
os.chdir('../../')
os.mkdir('frensie_build_tree')
#os.renames('FRENSIE', 'frensie_build_tree/FRENSIE')
os.symlink(os.path.abspath('FRENSIE'), 'frensie_build_tree/FRENSIE')
os.chdir('frensie_build_tree')
os.symlink('FRENSIE', 'src')
os.mkdir('deps')
os.mkdir('deps/install')
os.mkdir('build')
shutil.copyfile('src/scripts/makefile.deps', 'deps/makefile')
shutil.copyfile('src/scripts/frensie.sh', 'build/frensie.sh')
print """
To build dependencies move necessary tars to deps/tars.
cd to frensie_build_tree/deps and run make all.
To once all dependecies exist in frensie_build_tree/deps/install
cd to frensie_build_tree/build and run bash frensie.sh
"""
if __name__ == '__main__':
main()
|
<commit_before>import os
import shutil
def main():
cwd = os.getcwd()
if not cwd.endswith(os.path.join('FRENSIE', 'scripts')):
print 'This script must be run in \"FRENSIE/scipts\"'
return 1
os.chdir('../../')
os.mkdir('frensie_build_tree')
os.renames('FRENSIE', 'frensie_build_tree/FRENSIE')
os.chdir('frensie_build_tree')
os.symlink('FRENSIE', 'src')
os.mkdir('deps')
os.mkdir('deps/install')
os.mkdir('build')
shutil.copyfile('src/makefile.deps', 'deps/makefile')
shutil.copyfile('src/frensie.sh', 'build/frensie.sh')
print """
To build dependencies move necessary tars to deps/tars.
cd to frensie_build_tree/deps and run make all.
To once all dependecies exist in frensie_build_tree/deps/install
cd to frensie_build_tree/build and run bash frensie.sh
"""
if __name__ == '__main__':
main()
<commit_msg>Use symlinks to avoid weird behavior from removing the CWD while we're in it<commit_after>
|
import os
import shutil
def main():
cwd = os.getcwd()
if not cwd.endswith(os.path.join('FRENSIE', 'scripts')):
print 'This script must be run in \"FRENSIE/scipts\"'
print 'Your CWD is', cwd
return 1
os.chdir('../../')
os.mkdir('frensie_build_tree')
#os.renames('FRENSIE', 'frensie_build_tree/FRENSIE')
os.symlink(os.path.abspath('FRENSIE'), 'frensie_build_tree/FRENSIE')
os.chdir('frensie_build_tree')
os.symlink('FRENSIE', 'src')
os.mkdir('deps')
os.mkdir('deps/install')
os.mkdir('build')
shutil.copyfile('src/scripts/makefile.deps', 'deps/makefile')
shutil.copyfile('src/scripts/frensie.sh', 'build/frensie.sh')
print """
To build dependencies move necessary tars to deps/tars.
cd to frensie_build_tree/deps and run make all.
To once all dependecies exist in frensie_build_tree/deps/install
cd to frensie_build_tree/build and run bash frensie.sh
"""
if __name__ == '__main__':
main()
|
import os
import shutil
def main():
cwd = os.getcwd()
if not cwd.endswith(os.path.join('FRENSIE', 'scripts')):
print 'This script must be run in \"FRENSIE/scipts\"'
return 1
os.chdir('../../')
os.mkdir('frensie_build_tree')
os.renames('FRENSIE', 'frensie_build_tree/FRENSIE')
os.chdir('frensie_build_tree')
os.symlink('FRENSIE', 'src')
os.mkdir('deps')
os.mkdir('deps/install')
os.mkdir('build')
shutil.copyfile('src/makefile.deps', 'deps/makefile')
shutil.copyfile('src/frensie.sh', 'build/frensie.sh')
print """
To build dependencies move necessary tars to deps/tars.
cd to frensie_build_tree/deps and run make all.
To once all dependecies exist in frensie_build_tree/deps/install
cd to frensie_build_tree/build and run bash frensie.sh
"""
if __name__ == '__main__':
main()
Use symlinks to avoid weird behavior from removing the CWD while we're in itimport os
import shutil
def main():
cwd = os.getcwd()
if not cwd.endswith(os.path.join('FRENSIE', 'scripts')):
print 'This script must be run in \"FRENSIE/scipts\"'
print 'Your CWD is', cwd
return 1
os.chdir('../../')
os.mkdir('frensie_build_tree')
#os.renames('FRENSIE', 'frensie_build_tree/FRENSIE')
os.symlink(os.path.abspath('FRENSIE'), 'frensie_build_tree/FRENSIE')
os.chdir('frensie_build_tree')
os.symlink('FRENSIE', 'src')
os.mkdir('deps')
os.mkdir('deps/install')
os.mkdir('build')
shutil.copyfile('src/scripts/makefile.deps', 'deps/makefile')
shutil.copyfile('src/scripts/frensie.sh', 'build/frensie.sh')
print """
To build dependencies move necessary tars to deps/tars.
cd to frensie_build_tree/deps and run make all.
To once all dependecies exist in frensie_build_tree/deps/install
cd to frensie_build_tree/build and run bash frensie.sh
"""
if __name__ == '__main__':
main()
|
<commit_before>import os
import shutil
def main():
cwd = os.getcwd()
if not cwd.endswith(os.path.join('FRENSIE', 'scripts')):
print 'This script must be run in \"FRENSIE/scipts\"'
return 1
os.chdir('../../')
os.mkdir('frensie_build_tree')
os.renames('FRENSIE', 'frensie_build_tree/FRENSIE')
os.chdir('frensie_build_tree')
os.symlink('FRENSIE', 'src')
os.mkdir('deps')
os.mkdir('deps/install')
os.mkdir('build')
shutil.copyfile('src/makefile.deps', 'deps/makefile')
shutil.copyfile('src/frensie.sh', 'build/frensie.sh')
print """
To build dependencies move necessary tars to deps/tars.
cd to frensie_build_tree/deps and run make all.
To once all dependecies exist in frensie_build_tree/deps/install
cd to frensie_build_tree/build and run bash frensie.sh
"""
if __name__ == '__main__':
main()
<commit_msg>Use symlinks to avoid weird behavior from removing the CWD while we're in it<commit_after>import os
import shutil
def main():
cwd = os.getcwd()
if not cwd.endswith(os.path.join('FRENSIE', 'scripts')):
print 'This script must be run in \"FRENSIE/scipts\"'
print 'Your CWD is', cwd
return 1
os.chdir('../../')
os.mkdir('frensie_build_tree')
#os.renames('FRENSIE', 'frensie_build_tree/FRENSIE')
os.symlink(os.path.abspath('FRENSIE'), 'frensie_build_tree/FRENSIE')
os.chdir('frensie_build_tree')
os.symlink('FRENSIE', 'src')
os.mkdir('deps')
os.mkdir('deps/install')
os.mkdir('build')
shutil.copyfile('src/scripts/makefile.deps', 'deps/makefile')
shutil.copyfile('src/scripts/frensie.sh', 'build/frensie.sh')
print """
To build dependencies move necessary tars to deps/tars.
cd to frensie_build_tree/deps and run make all.
To once all dependecies exist in frensie_build_tree/deps/install
cd to frensie_build_tree/build and run bash frensie.sh
"""
if __name__ == '__main__':
main()
|
cd48c66406c39ca6dd6bdc6ba7c2be0df623e6ae
|
src/leap/mx/check_recipient_access.py
|
src/leap/mx/check_recipient_access.py
|
#!/usr/bin/env python
# -*- encoding: utf-8 -*-
# check_recipient_access.py
# Copyright (C) 2013 LEAP
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
Classes for resolving postfix recipient access
Test this with postmap -v -q "foo" tcp:localhost:2244
"""
from twisted.protocols import postfix
from leap.mx.alias_resolver import AliasResolverFactory
class LEAPPostFixTCPMapserverAccess(postfix.PostfixTCPMapServer):
def _cbGot(self, value):
if value is None:
self.sendCode(500, postfix.quote("NOT FOUND SORRY"))
else:
# We do not send the value in this case
self.sendCode(200)
class CheckRecipientAccessFactory(AliasResolverFactory):
protocol = LEAPPostFixTCPMapserverAccess
|
#!/usr/bin/env python
# -*- encoding: utf-8 -*-
# check_recipient_access.py
# Copyright (C) 2013 LEAP
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
Classes for resolving postfix recipient access
Test this with postmap -v -q "foo" tcp:localhost:2244
"""
from twisted.protocols import postfix
from leap.mx.alias_resolver import AliasResolverFactory
class LEAPPostFixTCPMapserverAccess(postfix.PostfixTCPMapServer):
def _cbGot(self, value):
# For more info, see:
# http://www.postfix.org/tcp_table.5.html
# http://www.postfix.org/access.5.html
if value is None:
self.sendCode(500, postfix.quote("REJECT"))
else:
self.sendCode(200, postfix.quote("OK"))
class CheckRecipientAccessFactory(AliasResolverFactory):
protocol = LEAPPostFixTCPMapserverAccess
|
Fix return codes for check recipient
|
Fix return codes for check recipient
|
Python
|
agpl-3.0
|
meskio/leap_mx,meskio/leap_mx,leapcode/leap_mx,micah/leap_mx,leapcode/leap_mx,micah/leap_mx
|
#!/usr/bin/env python
# -*- encoding: utf-8 -*-
# check_recipient_access.py
# Copyright (C) 2013 LEAP
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
Classes for resolving postfix recipient access
Test this with postmap -v -q "foo" tcp:localhost:2244
"""
from twisted.protocols import postfix
from leap.mx.alias_resolver import AliasResolverFactory
class LEAPPostFixTCPMapserverAccess(postfix.PostfixTCPMapServer):
def _cbGot(self, value):
if value is None:
self.sendCode(500, postfix.quote("NOT FOUND SORRY"))
else:
# We do not send the value in this case
self.sendCode(200)
class CheckRecipientAccessFactory(AliasResolverFactory):
protocol = LEAPPostFixTCPMapserverAccess
Fix return codes for check recipient
|
#!/usr/bin/env python
# -*- encoding: utf-8 -*-
# check_recipient_access.py
# Copyright (C) 2013 LEAP
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
Classes for resolving postfix recipient access
Test this with postmap -v -q "foo" tcp:localhost:2244
"""
from twisted.protocols import postfix
from leap.mx.alias_resolver import AliasResolverFactory
class LEAPPostFixTCPMapserverAccess(postfix.PostfixTCPMapServer):
def _cbGot(self, value):
# For more info, see:
# http://www.postfix.org/tcp_table.5.html
# http://www.postfix.org/access.5.html
if value is None:
self.sendCode(500, postfix.quote("REJECT"))
else:
self.sendCode(200, postfix.quote("OK"))
class CheckRecipientAccessFactory(AliasResolverFactory):
protocol = LEAPPostFixTCPMapserverAccess
|
<commit_before>#!/usr/bin/env python
# -*- encoding: utf-8 -*-
# check_recipient_access.py
# Copyright (C) 2013 LEAP
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
Classes for resolving postfix recipient access
Test this with postmap -v -q "foo" tcp:localhost:2244
"""
from twisted.protocols import postfix
from leap.mx.alias_resolver import AliasResolverFactory
class LEAPPostFixTCPMapserverAccess(postfix.PostfixTCPMapServer):
def _cbGot(self, value):
if value is None:
self.sendCode(500, postfix.quote("NOT FOUND SORRY"))
else:
# We do not send the value in this case
self.sendCode(200)
class CheckRecipientAccessFactory(AliasResolverFactory):
protocol = LEAPPostFixTCPMapserverAccess
<commit_msg>Fix return codes for check recipient<commit_after>
|
#!/usr/bin/env python
# -*- encoding: utf-8 -*-
# check_recipient_access.py
# Copyright (C) 2013 LEAP
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
Classes for resolving postfix recipient access
Test this with postmap -v -q "foo" tcp:localhost:2244
"""
from twisted.protocols import postfix
from leap.mx.alias_resolver import AliasResolverFactory
class LEAPPostFixTCPMapserverAccess(postfix.PostfixTCPMapServer):
def _cbGot(self, value):
# For more info, see:
# http://www.postfix.org/tcp_table.5.html
# http://www.postfix.org/access.5.html
if value is None:
self.sendCode(500, postfix.quote("REJECT"))
else:
self.sendCode(200, postfix.quote("OK"))
class CheckRecipientAccessFactory(AliasResolverFactory):
protocol = LEAPPostFixTCPMapserverAccess
|
#!/usr/bin/env python
# -*- encoding: utf-8 -*-
# check_recipient_access.py
# Copyright (C) 2013 LEAP
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
Classes for resolving postfix recipient access
Test this with postmap -v -q "foo" tcp:localhost:2244
"""
from twisted.protocols import postfix
from leap.mx.alias_resolver import AliasResolverFactory
class LEAPPostFixTCPMapserverAccess(postfix.PostfixTCPMapServer):
def _cbGot(self, value):
if value is None:
self.sendCode(500, postfix.quote("NOT FOUND SORRY"))
else:
# We do not send the value in this case
self.sendCode(200)
class CheckRecipientAccessFactory(AliasResolverFactory):
protocol = LEAPPostFixTCPMapserverAccess
Fix return codes for check recipient#!/usr/bin/env python
# -*- encoding: utf-8 -*-
# check_recipient_access.py
# Copyright (C) 2013 LEAP
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
Classes for resolving postfix recipient access
Test this with postmap -v -q "foo" tcp:localhost:2244
"""
from twisted.protocols import postfix
from leap.mx.alias_resolver import AliasResolverFactory
class LEAPPostFixTCPMapserverAccess(postfix.PostfixTCPMapServer):
def _cbGot(self, value):
# For more info, see:
# http://www.postfix.org/tcp_table.5.html
# http://www.postfix.org/access.5.html
if value is None:
self.sendCode(500, postfix.quote("REJECT"))
else:
self.sendCode(200, postfix.quote("OK"))
class CheckRecipientAccessFactory(AliasResolverFactory):
protocol = LEAPPostFixTCPMapserverAccess
|
<commit_before>#!/usr/bin/env python
# -*- encoding: utf-8 -*-
# check_recipient_access.py
# Copyright (C) 2013 LEAP
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
Classes for resolving postfix recipient access
Test this with postmap -v -q "foo" tcp:localhost:2244
"""
from twisted.protocols import postfix
from leap.mx.alias_resolver import AliasResolverFactory
class LEAPPostFixTCPMapserverAccess(postfix.PostfixTCPMapServer):
def _cbGot(self, value):
if value is None:
self.sendCode(500, postfix.quote("NOT FOUND SORRY"))
else:
# We do not send the value in this case
self.sendCode(200)
class CheckRecipientAccessFactory(AliasResolverFactory):
protocol = LEAPPostFixTCPMapserverAccess
<commit_msg>Fix return codes for check recipient<commit_after>#!/usr/bin/env python
# -*- encoding: utf-8 -*-
# check_recipient_access.py
# Copyright (C) 2013 LEAP
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
Classes for resolving postfix recipient access
Test this with postmap -v -q "foo" tcp:localhost:2244
"""
from twisted.protocols import postfix
from leap.mx.alias_resolver import AliasResolverFactory
class LEAPPostFixTCPMapserverAccess(postfix.PostfixTCPMapServer):
def _cbGot(self, value):
# For more info, see:
# http://www.postfix.org/tcp_table.5.html
# http://www.postfix.org/access.5.html
if value is None:
self.sendCode(500, postfix.quote("REJECT"))
else:
self.sendCode(200, postfix.quote("OK"))
class CheckRecipientAccessFactory(AliasResolverFactory):
protocol = LEAPPostFixTCPMapserverAccess
|
aefd972c7fb423396f59da03a1d460cd3559d1e1
|
duplicate_questions/data/tokenizers/word_tokenizers.py
|
duplicate_questions/data/tokenizers/word_tokenizers.py
|
class SpacyWordTokenizer():
"""
A Tokenizer splits strings into word tokens.
"""
def __init__(self):
# Import is here it's slow, and can be unnecessary.
import spacy
self.en_nlp = spacy.load('en')
# def tokenize(self, sentence: str) -> List[str]:
def tokenize(self, sentence):
return [str(token.lower_) for token in self.en_nlp.tokenizer(sentence)]
# def get_words_for_indexer(self, text: str) -> List[str]:
def get_words_for_indexer(self, text):
return self.tokenize(text)
# def index_text(self, text: str, data_indexer: DataIndexer) -> List:
def index_text(self, text, data_indexer):
return [data_indexer.get_word_index(word) for word in
self.get_words_for_indexer(text)]
|
class SpacyWordTokenizer():
"""
A Tokenizer splits strings into word tokens.
"""
def __init__(self):
# Import is here it's slow, and can be unnecessary.
import spacy
self.en_nlp = spacy.load('en')
def tokenize(self, sentence):
return [str(token.lower_) for token in self.en_nlp.tokenizer(sentence)]
def get_words_for_indexer(self, text):
return self.tokenize(text)
def index_text(self, text, data_indexer):
return [data_indexer.get_word_index(word) for word in
self.get_words_for_indexer(text)]
|
Remove unnecesssary comments of old function signatures
|
Remove unnecesssary comments of old function signatures
|
Python
|
mit
|
nelson-liu/paraphrase-id-tensorflow,nelson-liu/paraphrase-id-tensorflow
|
class SpacyWordTokenizer():
"""
A Tokenizer splits strings into word tokens.
"""
def __init__(self):
# Import is here it's slow, and can be unnecessary.
import spacy
self.en_nlp = spacy.load('en')
# def tokenize(self, sentence: str) -> List[str]:
def tokenize(self, sentence):
return [str(token.lower_) for token in self.en_nlp.tokenizer(sentence)]
# def get_words_for_indexer(self, text: str) -> List[str]:
def get_words_for_indexer(self, text):
return self.tokenize(text)
# def index_text(self, text: str, data_indexer: DataIndexer) -> List:
def index_text(self, text, data_indexer):
return [data_indexer.get_word_index(word) for word in
self.get_words_for_indexer(text)]
Remove unnecesssary comments of old function signatures
|
class SpacyWordTokenizer():
"""
A Tokenizer splits strings into word tokens.
"""
def __init__(self):
# Import is here it's slow, and can be unnecessary.
import spacy
self.en_nlp = spacy.load('en')
def tokenize(self, sentence):
return [str(token.lower_) for token in self.en_nlp.tokenizer(sentence)]
def get_words_for_indexer(self, text):
return self.tokenize(text)
def index_text(self, text, data_indexer):
return [data_indexer.get_word_index(word) for word in
self.get_words_for_indexer(text)]
|
<commit_before>class SpacyWordTokenizer():
"""
A Tokenizer splits strings into word tokens.
"""
def __init__(self):
# Import is here it's slow, and can be unnecessary.
import spacy
self.en_nlp = spacy.load('en')
# def tokenize(self, sentence: str) -> List[str]:
def tokenize(self, sentence):
return [str(token.lower_) for token in self.en_nlp.tokenizer(sentence)]
# def get_words_for_indexer(self, text: str) -> List[str]:
def get_words_for_indexer(self, text):
return self.tokenize(text)
# def index_text(self, text: str, data_indexer: DataIndexer) -> List:
def index_text(self, text, data_indexer):
return [data_indexer.get_word_index(word) for word in
self.get_words_for_indexer(text)]
<commit_msg>Remove unnecesssary comments of old function signatures<commit_after>
|
class SpacyWordTokenizer():
"""
A Tokenizer splits strings into word tokens.
"""
def __init__(self):
# Import is here it's slow, and can be unnecessary.
import spacy
self.en_nlp = spacy.load('en')
def tokenize(self, sentence):
return [str(token.lower_) for token in self.en_nlp.tokenizer(sentence)]
def get_words_for_indexer(self, text):
return self.tokenize(text)
def index_text(self, text, data_indexer):
return [data_indexer.get_word_index(word) for word in
self.get_words_for_indexer(text)]
|
class SpacyWordTokenizer():
"""
A Tokenizer splits strings into word tokens.
"""
def __init__(self):
# Import is here it's slow, and can be unnecessary.
import spacy
self.en_nlp = spacy.load('en')
# def tokenize(self, sentence: str) -> List[str]:
def tokenize(self, sentence):
return [str(token.lower_) for token in self.en_nlp.tokenizer(sentence)]
# def get_words_for_indexer(self, text: str) -> List[str]:
def get_words_for_indexer(self, text):
return self.tokenize(text)
# def index_text(self, text: str, data_indexer: DataIndexer) -> List:
def index_text(self, text, data_indexer):
return [data_indexer.get_word_index(word) for word in
self.get_words_for_indexer(text)]
Remove unnecesssary comments of old function signaturesclass SpacyWordTokenizer():
"""
A Tokenizer splits strings into word tokens.
"""
def __init__(self):
# Import is here it's slow, and can be unnecessary.
import spacy
self.en_nlp = spacy.load('en')
def tokenize(self, sentence):
return [str(token.lower_) for token in self.en_nlp.tokenizer(sentence)]
def get_words_for_indexer(self, text):
return self.tokenize(text)
def index_text(self, text, data_indexer):
return [data_indexer.get_word_index(word) for word in
self.get_words_for_indexer(text)]
|
<commit_before>class SpacyWordTokenizer():
"""
A Tokenizer splits strings into word tokens.
"""
def __init__(self):
# Import is here it's slow, and can be unnecessary.
import spacy
self.en_nlp = spacy.load('en')
# def tokenize(self, sentence: str) -> List[str]:
def tokenize(self, sentence):
return [str(token.lower_) for token in self.en_nlp.tokenizer(sentence)]
# def get_words_for_indexer(self, text: str) -> List[str]:
def get_words_for_indexer(self, text):
return self.tokenize(text)
# def index_text(self, text: str, data_indexer: DataIndexer) -> List:
def index_text(self, text, data_indexer):
return [data_indexer.get_word_index(word) for word in
self.get_words_for_indexer(text)]
<commit_msg>Remove unnecesssary comments of old function signatures<commit_after>class SpacyWordTokenizer():
"""
A Tokenizer splits strings into word tokens.
"""
def __init__(self):
# Import is here it's slow, and can be unnecessary.
import spacy
self.en_nlp = spacy.load('en')
def tokenize(self, sentence):
return [str(token.lower_) for token in self.en_nlp.tokenizer(sentence)]
def get_words_for_indexer(self, text):
return self.tokenize(text)
def index_text(self, text, data_indexer):
return [data_indexer.get_word_index(word) for word in
self.get_words_for_indexer(text)]
|
d9c677a35d18a878ef8d253a9453e93da3341e96
|
runTwircBot.py
|
runTwircBot.py
|
#!/usr/bin/env python3
from src.TwircBot import TwircBot
import sys
try:
bot = TwircBot(sys.argv[1])
except IndexError:
bot = TwircBot()
bot.print_config()
bot.start()
|
#!/usr/bin/env python3
from src.TwircBot import TwircBot
from src.CommandModule import CommandModule
import sys
try:
bot = TwircBot(sys.argv[1])
except IndexError:
bot = TwircBot()
module = CommandModule()
bot.print_config()
# bot.start()
|
Add extremely basic template for command modules
|
Add extremely basic template for command modules
|
Python
|
mit
|
johnmarcampbell/twircBot
|
#!/usr/bin/env python3
from src.TwircBot import TwircBot
import sys
try:
bot = TwircBot(sys.argv[1])
except IndexError:
bot = TwircBot()
bot.print_config()
bot.start()
Add extremely basic template for command modules
|
#!/usr/bin/env python3
from src.TwircBot import TwircBot
from src.CommandModule import CommandModule
import sys
try:
bot = TwircBot(sys.argv[1])
except IndexError:
bot = TwircBot()
module = CommandModule()
bot.print_config()
# bot.start()
|
<commit_before>#!/usr/bin/env python3
from src.TwircBot import TwircBot
import sys
try:
bot = TwircBot(sys.argv[1])
except IndexError:
bot = TwircBot()
bot.print_config()
bot.start()
<commit_msg>Add extremely basic template for command modules<commit_after>
|
#!/usr/bin/env python3
from src.TwircBot import TwircBot
from src.CommandModule import CommandModule
import sys
try:
bot = TwircBot(sys.argv[1])
except IndexError:
bot = TwircBot()
module = CommandModule()
bot.print_config()
# bot.start()
|
#!/usr/bin/env python3
from src.TwircBot import TwircBot
import sys
try:
bot = TwircBot(sys.argv[1])
except IndexError:
bot = TwircBot()
bot.print_config()
bot.start()
Add extremely basic template for command modules#!/usr/bin/env python3
from src.TwircBot import TwircBot
from src.CommandModule import CommandModule
import sys
try:
bot = TwircBot(sys.argv[1])
except IndexError:
bot = TwircBot()
module = CommandModule()
bot.print_config()
# bot.start()
|
<commit_before>#!/usr/bin/env python3
from src.TwircBot import TwircBot
import sys
try:
bot = TwircBot(sys.argv[1])
except IndexError:
bot = TwircBot()
bot.print_config()
bot.start()
<commit_msg>Add extremely basic template for command modules<commit_after>#!/usr/bin/env python3
from src.TwircBot import TwircBot
from src.CommandModule import CommandModule
import sys
try:
bot = TwircBot(sys.argv[1])
except IndexError:
bot = TwircBot()
module = CommandModule()
bot.print_config()
# bot.start()
|
16b3e30a88e9101db58c0549e515848df29f29b9
|
raygun4py-sample/test.py
|
raygun4py-sample/test.py
|
import sys, os
import traceback
from provider import raygunprovider
def handle_exception(exc_type, exc_value, exc_traceback):
cl = raygunprovider.RaygunSender("onPbQXtZKqJX38IuN4AQKA==")
cl.set_version("1.2")
print cl.send(exc_type, exc_value, exc_traceback, "myclass", ["tag1", "tag2"], {"key1": 1111, "key2": 2222})
def methodone():
methodtwo()
def methodtwo():
raise Exception("My exception")
sys.excepthook = handle_exception
methodone()
|
import sys, os, urllib2
import traceback
from provider import raygunprovider
def handle_exception(exc_type, exc_value, exc_traceback):
cl = raygunprovider.RaygunSender("onPbQXtZKqJX38IuN4AQKA==")
cl.set_version("1.2")
print cl.send(exc_type, exc_value, exc_traceback, "myclass", ["tag1", "tag2"], {"key1": 1111, "key2": 2222})
def methodone():
methodtwo()
def methodtwo():
urllib2.urlopen("gopher://nonsense.org").read()
sys.excepthook = handle_exception
methodone()
|
Set up sample project to throw web exceptions (for request oject)
|
Set up sample project to throw web exceptions (for request oject)
|
Python
|
mit
|
Osmose/raygun4py,MindscapeHQ/raygun4py,ferringb/raygun4py
|
import sys, os
import traceback
from provider import raygunprovider
def handle_exception(exc_type, exc_value, exc_traceback):
cl = raygunprovider.RaygunSender("onPbQXtZKqJX38IuN4AQKA==")
cl.set_version("1.2")
print cl.send(exc_type, exc_value, exc_traceback, "myclass", ["tag1", "tag2"], {"key1": 1111, "key2": 2222})
def methodone():
methodtwo()
def methodtwo():
raise Exception("My exception")
sys.excepthook = handle_exception
methodone()
Set up sample project to throw web exceptions (for request oject)
|
import sys, os, urllib2
import traceback
from provider import raygunprovider
def handle_exception(exc_type, exc_value, exc_traceback):
cl = raygunprovider.RaygunSender("onPbQXtZKqJX38IuN4AQKA==")
cl.set_version("1.2")
print cl.send(exc_type, exc_value, exc_traceback, "myclass", ["tag1", "tag2"], {"key1": 1111, "key2": 2222})
def methodone():
methodtwo()
def methodtwo():
urllib2.urlopen("gopher://nonsense.org").read()
sys.excepthook = handle_exception
methodone()
|
<commit_before>import sys, os
import traceback
from provider import raygunprovider
def handle_exception(exc_type, exc_value, exc_traceback):
cl = raygunprovider.RaygunSender("onPbQXtZKqJX38IuN4AQKA==")
cl.set_version("1.2")
print cl.send(exc_type, exc_value, exc_traceback, "myclass", ["tag1", "tag2"], {"key1": 1111, "key2": 2222})
def methodone():
methodtwo()
def methodtwo():
raise Exception("My exception")
sys.excepthook = handle_exception
methodone()
<commit_msg>Set up sample project to throw web exceptions (for request oject)<commit_after>
|
import sys, os, urllib2
import traceback
from provider import raygunprovider
def handle_exception(exc_type, exc_value, exc_traceback):
cl = raygunprovider.RaygunSender("onPbQXtZKqJX38IuN4AQKA==")
cl.set_version("1.2")
print cl.send(exc_type, exc_value, exc_traceback, "myclass", ["tag1", "tag2"], {"key1": 1111, "key2": 2222})
def methodone():
methodtwo()
def methodtwo():
urllib2.urlopen("gopher://nonsense.org").read()
sys.excepthook = handle_exception
methodone()
|
import sys, os
import traceback
from provider import raygunprovider
def handle_exception(exc_type, exc_value, exc_traceback):
cl = raygunprovider.RaygunSender("onPbQXtZKqJX38IuN4AQKA==")
cl.set_version("1.2")
print cl.send(exc_type, exc_value, exc_traceback, "myclass", ["tag1", "tag2"], {"key1": 1111, "key2": 2222})
def methodone():
methodtwo()
def methodtwo():
raise Exception("My exception")
sys.excepthook = handle_exception
methodone()
Set up sample project to throw web exceptions (for request oject)import sys, os, urllib2
import traceback
from provider import raygunprovider
def handle_exception(exc_type, exc_value, exc_traceback):
cl = raygunprovider.RaygunSender("onPbQXtZKqJX38IuN4AQKA==")
cl.set_version("1.2")
print cl.send(exc_type, exc_value, exc_traceback, "myclass", ["tag1", "tag2"], {"key1": 1111, "key2": 2222})
def methodone():
methodtwo()
def methodtwo():
urllib2.urlopen("gopher://nonsense.org").read()
sys.excepthook = handle_exception
methodone()
|
<commit_before>import sys, os
import traceback
from provider import raygunprovider
def handle_exception(exc_type, exc_value, exc_traceback):
cl = raygunprovider.RaygunSender("onPbQXtZKqJX38IuN4AQKA==")
cl.set_version("1.2")
print cl.send(exc_type, exc_value, exc_traceback, "myclass", ["tag1", "tag2"], {"key1": 1111, "key2": 2222})
def methodone():
methodtwo()
def methodtwo():
raise Exception("My exception")
sys.excepthook = handle_exception
methodone()
<commit_msg>Set up sample project to throw web exceptions (for request oject)<commit_after>import sys, os, urllib2
import traceback
from provider import raygunprovider
def handle_exception(exc_type, exc_value, exc_traceback):
cl = raygunprovider.RaygunSender("onPbQXtZKqJX38IuN4AQKA==")
cl.set_version("1.2")
print cl.send(exc_type, exc_value, exc_traceback, "myclass", ["tag1", "tag2"], {"key1": 1111, "key2": 2222})
def methodone():
methodtwo()
def methodtwo():
urllib2.urlopen("gopher://nonsense.org").read()
sys.excepthook = handle_exception
methodone()
|
9f3356d06067dbcc77a79afee6bccf80600dab28
|
server/systeminfo.py
|
server/systeminfo.py
|
#!/bin/python3
""" This script contains functions to access various system's info.
Author: Julien Delplanque
"""
import subprocess
def get_uptime():
""" Return the uptime of the system as a str using the command: $ uptime
"""
proc = subprocess.Popen(["uptime"], stdout=subprocess.PIPE, shell=True)
(output, error) = proc.communicate()
uptime = output.decode("utf-8").split(",")[0]
uptime = uptime[uptime.find("up")+3:len(uptime)] # extract uptime
return uptime
|
#!/bin/python3
""" This script contains functions to access various system's info.
Author: Julien Delplanque
"""
import subprocess
from datetime import timedelta
def get_uptime():
""" Return the uptime of the system as a timedelta object.
"""
proc = subprocess.Popen(["cat /proc/uptime"],
stdout=subprocess.PIPE, shell=True)
(output, error) = proc.communicate()
uptime = int(output.decode("utf-8").split()[0].split(".")[0])
s = uptime % 60
m = int((uptime/60) % 60)
h = int((uptime/(60*60) % 24))
return timedelta(hours=h, minutes=m, seconds=s)
def get_idletime():
""" Return the idle time of the system as a timedelta object.
"""
proc = subprocess.Popen(["cat /proc/uptime"],
stdout=subprocess.PIPE, shell=True)
(output, error) = proc.communicate()
idletime = int(output.decode("utf-8").split()[1].split(".")[0])
s = idletime % 60
m = int((idletime/60) % 60)
h = int((idletime/(60*60) % 24))
return timedelta(hours=h, minutes=m, seconds=s)
|
Add a method to get the idle time. Also data are directly readed in /proc/uptime.
|
Add a method to get the idle time. Also data are directly readed in /proc/uptime.
|
Python
|
mit
|
juliendelplanque/raspirestmonitor
|
#!/bin/python3
""" This script contains functions to access various system's info.
Author: Julien Delplanque
"""
import subprocess
def get_uptime():
""" Return the uptime of the system as a str using the command: $ uptime
"""
proc = subprocess.Popen(["uptime"], stdout=subprocess.PIPE, shell=True)
(output, error) = proc.communicate()
uptime = output.decode("utf-8").split(",")[0]
uptime = uptime[uptime.find("up")+3:len(uptime)] # extract uptime
return uptime
Add a method to get the idle time. Also data are directly readed in /proc/uptime.
|
#!/bin/python3
""" This script contains functions to access various system's info.
Author: Julien Delplanque
"""
import subprocess
from datetime import timedelta
def get_uptime():
""" Return the uptime of the system as a timedelta object.
"""
proc = subprocess.Popen(["cat /proc/uptime"],
stdout=subprocess.PIPE, shell=True)
(output, error) = proc.communicate()
uptime = int(output.decode("utf-8").split()[0].split(".")[0])
s = uptime % 60
m = int((uptime/60) % 60)
h = int((uptime/(60*60) % 24))
return timedelta(hours=h, minutes=m, seconds=s)
def get_idletime():
""" Return the idle time of the system as a timedelta object.
"""
proc = subprocess.Popen(["cat /proc/uptime"],
stdout=subprocess.PIPE, shell=True)
(output, error) = proc.communicate()
idletime = int(output.decode("utf-8").split()[1].split(".")[0])
s = idletime % 60
m = int((idletime/60) % 60)
h = int((idletime/(60*60) % 24))
return timedelta(hours=h, minutes=m, seconds=s)
|
<commit_before>#!/bin/python3
""" This script contains functions to access various system's info.
Author: Julien Delplanque
"""
import subprocess
def get_uptime():
""" Return the uptime of the system as a str using the command: $ uptime
"""
proc = subprocess.Popen(["uptime"], stdout=subprocess.PIPE, shell=True)
(output, error) = proc.communicate()
uptime = output.decode("utf-8").split(",")[0]
uptime = uptime[uptime.find("up")+3:len(uptime)] # extract uptime
return uptime
<commit_msg>Add a method to get the idle time. Also data are directly readed in /proc/uptime.<commit_after>
|
#!/bin/python3
""" This script contains functions to access various system's info.
Author: Julien Delplanque
"""
import subprocess
from datetime import timedelta
def get_uptime():
""" Return the uptime of the system as a timedelta object.
"""
proc = subprocess.Popen(["cat /proc/uptime"],
stdout=subprocess.PIPE, shell=True)
(output, error) = proc.communicate()
uptime = int(output.decode("utf-8").split()[0].split(".")[0])
s = uptime % 60
m = int((uptime/60) % 60)
h = int((uptime/(60*60) % 24))
return timedelta(hours=h, minutes=m, seconds=s)
def get_idletime():
""" Return the idle time of the system as a timedelta object.
"""
proc = subprocess.Popen(["cat /proc/uptime"],
stdout=subprocess.PIPE, shell=True)
(output, error) = proc.communicate()
idletime = int(output.decode("utf-8").split()[1].split(".")[0])
s = idletime % 60
m = int((idletime/60) % 60)
h = int((idletime/(60*60) % 24))
return timedelta(hours=h, minutes=m, seconds=s)
|
#!/bin/python3
""" This script contains functions to access various system's info.
Author: Julien Delplanque
"""
import subprocess
def get_uptime():
""" Return the uptime of the system as a str using the command: $ uptime
"""
proc = subprocess.Popen(["uptime"], stdout=subprocess.PIPE, shell=True)
(output, error) = proc.communicate()
uptime = output.decode("utf-8").split(",")[0]
uptime = uptime[uptime.find("up")+3:len(uptime)] # extract uptime
return uptime
Add a method to get the idle time. Also data are directly readed in /proc/uptime.#!/bin/python3
""" This script contains functions to access various system's info.
Author: Julien Delplanque
"""
import subprocess
from datetime import timedelta
def get_uptime():
""" Return the uptime of the system as a timedelta object.
"""
proc = subprocess.Popen(["cat /proc/uptime"],
stdout=subprocess.PIPE, shell=True)
(output, error) = proc.communicate()
uptime = int(output.decode("utf-8").split()[0].split(".")[0])
s = uptime % 60
m = int((uptime/60) % 60)
h = int((uptime/(60*60) % 24))
return timedelta(hours=h, minutes=m, seconds=s)
def get_idletime():
""" Return the idle time of the system as a timedelta object.
"""
proc = subprocess.Popen(["cat /proc/uptime"],
stdout=subprocess.PIPE, shell=True)
(output, error) = proc.communicate()
idletime = int(output.decode("utf-8").split()[1].split(".")[0])
s = idletime % 60
m = int((idletime/60) % 60)
h = int((idletime/(60*60) % 24))
return timedelta(hours=h, minutes=m, seconds=s)
|
<commit_before>#!/bin/python3
""" This script contains functions to access various system's info.
Author: Julien Delplanque
"""
import subprocess
def get_uptime():
""" Return the uptime of the system as a str using the command: $ uptime
"""
proc = subprocess.Popen(["uptime"], stdout=subprocess.PIPE, shell=True)
(output, error) = proc.communicate()
uptime = output.decode("utf-8").split(",")[0]
uptime = uptime[uptime.find("up")+3:len(uptime)] # extract uptime
return uptime
<commit_msg>Add a method to get the idle time. Also data are directly readed in /proc/uptime.<commit_after>#!/bin/python3
""" This script contains functions to access various system's info.
Author: Julien Delplanque
"""
import subprocess
from datetime import timedelta
def get_uptime():
""" Return the uptime of the system as a timedelta object.
"""
proc = subprocess.Popen(["cat /proc/uptime"],
stdout=subprocess.PIPE, shell=True)
(output, error) = proc.communicate()
uptime = int(output.decode("utf-8").split()[0].split(".")[0])
s = uptime % 60
m = int((uptime/60) % 60)
h = int((uptime/(60*60) % 24))
return timedelta(hours=h, minutes=m, seconds=s)
def get_idletime():
""" Return the idle time of the system as a timedelta object.
"""
proc = subprocess.Popen(["cat /proc/uptime"],
stdout=subprocess.PIPE, shell=True)
(output, error) = proc.communicate()
idletime = int(output.decode("utf-8").split()[1].split(".")[0])
s = idletime % 60
m = int((idletime/60) % 60)
h = int((idletime/(60*60) % 24))
return timedelta(hours=h, minutes=m, seconds=s)
|
595218c33892facf0cf26e5e6b3e16b2c02e737e
|
spring/settings.py
|
spring/settings.py
|
from urlparse import urlparse
from logger import logger
class WorkloadSettings(object):
def __init__(self, options):
self.creates = options.creates
self.reads = options.reads
self.updates = options.updates
self.deletes = options.deletes
self.cases = 0 # Stub for library compatibility
self.ops = options.ops
self.throughput = options.throughput
self.doc_gen = options.generator
self.size = options.size
self.items = options.items
self.expiration = options.expiration
self.working_set = options.working_set
self.working_set_access = options.working_set_access
self.async = options.async
self.workers = options.workers
# Stubs for library compatibility
self.query_workers = 0
self.subdoc_workers = 0
self.n1ql_workers = 0
self.operations = False
self.fts_config = None
self.index_type = None
self.ddocs = {}
self.qparams = {}
class TargetSettings(object):
def __init__(self, target_uri, prefix):
params = urlparse(target_uri)
if not params.hostname or not params.port or not params.path:
logger.interrupt('Invalid connection URI')
self.node = '{}:{}'.format(params.hostname, params.port)
self.bucket = params.path[1:]
self.password = params.password or ''
self.prefix = prefix
|
from urlparse import urlparse
from logger import logger
class WorkloadSettings(object):
def __init__(self, options):
self.creates = options.creates
self.reads = options.reads
self.updates = options.updates
self.deletes = options.deletes
self.cases = 0 # Stub for library compatibility
self.ops = options.ops
self.throughput = options.throughput
self.doc_gen = options.generator
self.size = options.size
self.items = options.items
self.expiration = options.expiration
self.working_set = options.working_set
self.working_set_access = options.working_set_access
self.async = options.async
self.workers = options.workers
# Stubs for library compatibility
self.query_workers = 0
self.subdoc_workers = 0
self.n1ql_workers = 0
self.operations = False
self.fts_config = None
self.fts_updates = 0
self.index_type = None
self.ddocs = {}
self.qparams = {}
class TargetSettings(object):
def __init__(self, target_uri, prefix):
params = urlparse(target_uri)
if not params.hostname or not params.port or not params.path:
logger.interrupt('Invalid connection URI')
self.node = '{}:{}'.format(params.hostname, params.port)
self.bucket = params.path[1:]
self.password = params.password or ''
self.prefix = prefix
|
Add a stub for fts_updates
|
Add a stub for fts_updates
Change-Id: Ieb48f98a0072dcd27de0b50027ff6c5f3ecc1513
Reviewed-on: http://review.couchbase.org/70413
Tested-by: buildbot <80754af91bfb6d1073585b046fe0a474ce868509@couchbase.com>
Reviewed-by: Pavel Paulau <dd88eded64e90046a680e3a6c0828ceb8fe8a0e7@gmail.com>
|
Python
|
apache-2.0
|
pavel-paulau/perfrunner,pavel-paulau/perfrunner,couchbase/perfrunner,pavel-paulau/perfrunner,couchbase/perfrunner,couchbase/perfrunner,couchbase/perfrunner,pavel-paulau/perfrunner,couchbase/perfrunner,couchbase/perfrunner,pavel-paulau/perfrunner
|
from urlparse import urlparse
from logger import logger
class WorkloadSettings(object):
def __init__(self, options):
self.creates = options.creates
self.reads = options.reads
self.updates = options.updates
self.deletes = options.deletes
self.cases = 0 # Stub for library compatibility
self.ops = options.ops
self.throughput = options.throughput
self.doc_gen = options.generator
self.size = options.size
self.items = options.items
self.expiration = options.expiration
self.working_set = options.working_set
self.working_set_access = options.working_set_access
self.async = options.async
self.workers = options.workers
# Stubs for library compatibility
self.query_workers = 0
self.subdoc_workers = 0
self.n1ql_workers = 0
self.operations = False
self.fts_config = None
self.index_type = None
self.ddocs = {}
self.qparams = {}
class TargetSettings(object):
def __init__(self, target_uri, prefix):
params = urlparse(target_uri)
if not params.hostname or not params.port or not params.path:
logger.interrupt('Invalid connection URI')
self.node = '{}:{}'.format(params.hostname, params.port)
self.bucket = params.path[1:]
self.password = params.password or ''
self.prefix = prefix
Add a stub for fts_updates
Change-Id: Ieb48f98a0072dcd27de0b50027ff6c5f3ecc1513
Reviewed-on: http://review.couchbase.org/70413
Tested-by: buildbot <80754af91bfb6d1073585b046fe0a474ce868509@couchbase.com>
Reviewed-by: Pavel Paulau <dd88eded64e90046a680e3a6c0828ceb8fe8a0e7@gmail.com>
|
from urlparse import urlparse
from logger import logger
class WorkloadSettings(object):
def __init__(self, options):
self.creates = options.creates
self.reads = options.reads
self.updates = options.updates
self.deletes = options.deletes
self.cases = 0 # Stub for library compatibility
self.ops = options.ops
self.throughput = options.throughput
self.doc_gen = options.generator
self.size = options.size
self.items = options.items
self.expiration = options.expiration
self.working_set = options.working_set
self.working_set_access = options.working_set_access
self.async = options.async
self.workers = options.workers
# Stubs for library compatibility
self.query_workers = 0
self.subdoc_workers = 0
self.n1ql_workers = 0
self.operations = False
self.fts_config = None
self.fts_updates = 0
self.index_type = None
self.ddocs = {}
self.qparams = {}
class TargetSettings(object):
def __init__(self, target_uri, prefix):
params = urlparse(target_uri)
if not params.hostname or not params.port or not params.path:
logger.interrupt('Invalid connection URI')
self.node = '{}:{}'.format(params.hostname, params.port)
self.bucket = params.path[1:]
self.password = params.password or ''
self.prefix = prefix
|
<commit_before>from urlparse import urlparse
from logger import logger
class WorkloadSettings(object):
def __init__(self, options):
self.creates = options.creates
self.reads = options.reads
self.updates = options.updates
self.deletes = options.deletes
self.cases = 0 # Stub for library compatibility
self.ops = options.ops
self.throughput = options.throughput
self.doc_gen = options.generator
self.size = options.size
self.items = options.items
self.expiration = options.expiration
self.working_set = options.working_set
self.working_set_access = options.working_set_access
self.async = options.async
self.workers = options.workers
# Stubs for library compatibility
self.query_workers = 0
self.subdoc_workers = 0
self.n1ql_workers = 0
self.operations = False
self.fts_config = None
self.index_type = None
self.ddocs = {}
self.qparams = {}
class TargetSettings(object):
def __init__(self, target_uri, prefix):
params = urlparse(target_uri)
if not params.hostname or not params.port or not params.path:
logger.interrupt('Invalid connection URI')
self.node = '{}:{}'.format(params.hostname, params.port)
self.bucket = params.path[1:]
self.password = params.password or ''
self.prefix = prefix
<commit_msg>Add a stub for fts_updates
Change-Id: Ieb48f98a0072dcd27de0b50027ff6c5f3ecc1513
Reviewed-on: http://review.couchbase.org/70413
Tested-by: buildbot <80754af91bfb6d1073585b046fe0a474ce868509@couchbase.com>
Reviewed-by: Pavel Paulau <dd88eded64e90046a680e3a6c0828ceb8fe8a0e7@gmail.com><commit_after>
|
from urlparse import urlparse
from logger import logger
class WorkloadSettings(object):
def __init__(self, options):
self.creates = options.creates
self.reads = options.reads
self.updates = options.updates
self.deletes = options.deletes
self.cases = 0 # Stub for library compatibility
self.ops = options.ops
self.throughput = options.throughput
self.doc_gen = options.generator
self.size = options.size
self.items = options.items
self.expiration = options.expiration
self.working_set = options.working_set
self.working_set_access = options.working_set_access
self.async = options.async
self.workers = options.workers
# Stubs for library compatibility
self.query_workers = 0
self.subdoc_workers = 0
self.n1ql_workers = 0
self.operations = False
self.fts_config = None
self.fts_updates = 0
self.index_type = None
self.ddocs = {}
self.qparams = {}
class TargetSettings(object):
def __init__(self, target_uri, prefix):
params = urlparse(target_uri)
if not params.hostname or not params.port or not params.path:
logger.interrupt('Invalid connection URI')
self.node = '{}:{}'.format(params.hostname, params.port)
self.bucket = params.path[1:]
self.password = params.password or ''
self.prefix = prefix
|
from urlparse import urlparse
from logger import logger
class WorkloadSettings(object):
def __init__(self, options):
self.creates = options.creates
self.reads = options.reads
self.updates = options.updates
self.deletes = options.deletes
self.cases = 0 # Stub for library compatibility
self.ops = options.ops
self.throughput = options.throughput
self.doc_gen = options.generator
self.size = options.size
self.items = options.items
self.expiration = options.expiration
self.working_set = options.working_set
self.working_set_access = options.working_set_access
self.async = options.async
self.workers = options.workers
# Stubs for library compatibility
self.query_workers = 0
self.subdoc_workers = 0
self.n1ql_workers = 0
self.operations = False
self.fts_config = None
self.index_type = None
self.ddocs = {}
self.qparams = {}
class TargetSettings(object):
def __init__(self, target_uri, prefix):
params = urlparse(target_uri)
if not params.hostname or not params.port or not params.path:
logger.interrupt('Invalid connection URI')
self.node = '{}:{}'.format(params.hostname, params.port)
self.bucket = params.path[1:]
self.password = params.password or ''
self.prefix = prefix
Add a stub for fts_updates
Change-Id: Ieb48f98a0072dcd27de0b50027ff6c5f3ecc1513
Reviewed-on: http://review.couchbase.org/70413
Tested-by: buildbot <80754af91bfb6d1073585b046fe0a474ce868509@couchbase.com>
Reviewed-by: Pavel Paulau <dd88eded64e90046a680e3a6c0828ceb8fe8a0e7@gmail.com>from urlparse import urlparse
from logger import logger
class WorkloadSettings(object):
def __init__(self, options):
self.creates = options.creates
self.reads = options.reads
self.updates = options.updates
self.deletes = options.deletes
self.cases = 0 # Stub for library compatibility
self.ops = options.ops
self.throughput = options.throughput
self.doc_gen = options.generator
self.size = options.size
self.items = options.items
self.expiration = options.expiration
self.working_set = options.working_set
self.working_set_access = options.working_set_access
self.async = options.async
self.workers = options.workers
# Stubs for library compatibility
self.query_workers = 0
self.subdoc_workers = 0
self.n1ql_workers = 0
self.operations = False
self.fts_config = None
self.fts_updates = 0
self.index_type = None
self.ddocs = {}
self.qparams = {}
class TargetSettings(object):
def __init__(self, target_uri, prefix):
params = urlparse(target_uri)
if not params.hostname or not params.port or not params.path:
logger.interrupt('Invalid connection URI')
self.node = '{}:{}'.format(params.hostname, params.port)
self.bucket = params.path[1:]
self.password = params.password or ''
self.prefix = prefix
|
<commit_before>from urlparse import urlparse
from logger import logger
class WorkloadSettings(object):
def __init__(self, options):
self.creates = options.creates
self.reads = options.reads
self.updates = options.updates
self.deletes = options.deletes
self.cases = 0 # Stub for library compatibility
self.ops = options.ops
self.throughput = options.throughput
self.doc_gen = options.generator
self.size = options.size
self.items = options.items
self.expiration = options.expiration
self.working_set = options.working_set
self.working_set_access = options.working_set_access
self.async = options.async
self.workers = options.workers
# Stubs for library compatibility
self.query_workers = 0
self.subdoc_workers = 0
self.n1ql_workers = 0
self.operations = False
self.fts_config = None
self.index_type = None
self.ddocs = {}
self.qparams = {}
class TargetSettings(object):
def __init__(self, target_uri, prefix):
params = urlparse(target_uri)
if not params.hostname or not params.port or not params.path:
logger.interrupt('Invalid connection URI')
self.node = '{}:{}'.format(params.hostname, params.port)
self.bucket = params.path[1:]
self.password = params.password or ''
self.prefix = prefix
<commit_msg>Add a stub for fts_updates
Change-Id: Ieb48f98a0072dcd27de0b50027ff6c5f3ecc1513
Reviewed-on: http://review.couchbase.org/70413
Tested-by: buildbot <80754af91bfb6d1073585b046fe0a474ce868509@couchbase.com>
Reviewed-by: Pavel Paulau <dd88eded64e90046a680e3a6c0828ceb8fe8a0e7@gmail.com><commit_after>from urlparse import urlparse
from logger import logger
class WorkloadSettings(object):
def __init__(self, options):
self.creates = options.creates
self.reads = options.reads
self.updates = options.updates
self.deletes = options.deletes
self.cases = 0 # Stub for library compatibility
self.ops = options.ops
self.throughput = options.throughput
self.doc_gen = options.generator
self.size = options.size
self.items = options.items
self.expiration = options.expiration
self.working_set = options.working_set
self.working_set_access = options.working_set_access
self.async = options.async
self.workers = options.workers
# Stubs for library compatibility
self.query_workers = 0
self.subdoc_workers = 0
self.n1ql_workers = 0
self.operations = False
self.fts_config = None
self.fts_updates = 0
self.index_type = None
self.ddocs = {}
self.qparams = {}
class TargetSettings(object):
def __init__(self, target_uri, prefix):
params = urlparse(target_uri)
if not params.hostname or not params.port or not params.path:
logger.interrupt('Invalid connection URI')
self.node = '{}:{}'.format(params.hostname, params.port)
self.bucket = params.path[1:]
self.password = params.password or ''
self.prefix = prefix
|
a578e2e738a77f8ca0073b2b337a8fa79794500b
|
skeleton/settings.py
|
skeleton/settings.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Flask application default config:
# http://flask.pocoo.org/docs/config/#configuring-from-files
# https://github.com/mbr/flask-appconfig
DEBUG = True
TESTING = False
JS_LOG_LEVEL = 3 # log (1) < debug (2) < info (3) < warn (4) < error (5)
APP_NAME = 'Skeleton'
# Servers and URLs
SERVER_NAME = 'skeleton.dev'
# Authentication etc
SECRET_KEY = 'some-secret-key'
# API
API_SERVER = 'skeleton.dev'
API_TOKEN = 'some-api-token'
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Flask application default config:
# http://flask.pocoo.org/docs/config/#configuring-from-files
# https://github.com/mbr/flask-appconfig
DEBUG = True
TESTING = False
JS_LOG_LEVEL = 3 # log (1) < debug (2) < info (3) < warn (4) < error (5)
APP_NAME = 'Skeleton'
# Servers and URLs
SERVER_NAME = 'localhost:5000'
# Authentication etc
SECRET_KEY = 'some-secret-key'
# API
API_SERVER = 'localhost:5000'
API_TOKEN = 'some-api-token'
|
Change default SERVER_NAME to localhost:5000
|
Change default SERVER_NAME to localhost:5000
This is to make it work more out of the box, without the need to edit /etc/hosts
|
Python
|
mit
|
peterhil/skeleton,peterhil/ninhursag,peterhil/skeleton,peterhil/ninhursag,peterhil/ninhursag,peterhil/ninhursag,peterhil/skeleton
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Flask application default config:
# http://flask.pocoo.org/docs/config/#configuring-from-files
# https://github.com/mbr/flask-appconfig
DEBUG = True
TESTING = False
JS_LOG_LEVEL = 3 # log (1) < debug (2) < info (3) < warn (4) < error (5)
APP_NAME = 'Skeleton'
# Servers and URLs
SERVER_NAME = 'skeleton.dev'
# Authentication etc
SECRET_KEY = 'some-secret-key'
# API
API_SERVER = 'skeleton.dev'
API_TOKEN = 'some-api-token'
Change default SERVER_NAME to localhost:5000
This is to make it work more out of the box, without the need to edit /etc/hosts
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Flask application default config:
# http://flask.pocoo.org/docs/config/#configuring-from-files
# https://github.com/mbr/flask-appconfig
DEBUG = True
TESTING = False
JS_LOG_LEVEL = 3 # log (1) < debug (2) < info (3) < warn (4) < error (5)
APP_NAME = 'Skeleton'
# Servers and URLs
SERVER_NAME = 'localhost:5000'
# Authentication etc
SECRET_KEY = 'some-secret-key'
# API
API_SERVER = 'localhost:5000'
API_TOKEN = 'some-api-token'
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Flask application default config:
# http://flask.pocoo.org/docs/config/#configuring-from-files
# https://github.com/mbr/flask-appconfig
DEBUG = True
TESTING = False
JS_LOG_LEVEL = 3 # log (1) < debug (2) < info (3) < warn (4) < error (5)
APP_NAME = 'Skeleton'
# Servers and URLs
SERVER_NAME = 'skeleton.dev'
# Authentication etc
SECRET_KEY = 'some-secret-key'
# API
API_SERVER = 'skeleton.dev'
API_TOKEN = 'some-api-token'
<commit_msg>Change default SERVER_NAME to localhost:5000
This is to make it work more out of the box, without the need to edit /etc/hosts<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Flask application default config:
# http://flask.pocoo.org/docs/config/#configuring-from-files
# https://github.com/mbr/flask-appconfig
DEBUG = True
TESTING = False
JS_LOG_LEVEL = 3 # log (1) < debug (2) < info (3) < warn (4) < error (5)
APP_NAME = 'Skeleton'
# Servers and URLs
SERVER_NAME = 'localhost:5000'
# Authentication etc
SECRET_KEY = 'some-secret-key'
# API
API_SERVER = 'localhost:5000'
API_TOKEN = 'some-api-token'
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Flask application default config:
# http://flask.pocoo.org/docs/config/#configuring-from-files
# https://github.com/mbr/flask-appconfig
DEBUG = True
TESTING = False
JS_LOG_LEVEL = 3 # log (1) < debug (2) < info (3) < warn (4) < error (5)
APP_NAME = 'Skeleton'
# Servers and URLs
SERVER_NAME = 'skeleton.dev'
# Authentication etc
SECRET_KEY = 'some-secret-key'
# API
API_SERVER = 'skeleton.dev'
API_TOKEN = 'some-api-token'
Change default SERVER_NAME to localhost:5000
This is to make it work more out of the box, without the need to edit /etc/hosts#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Flask application default config:
# http://flask.pocoo.org/docs/config/#configuring-from-files
# https://github.com/mbr/flask-appconfig
DEBUG = True
TESTING = False
JS_LOG_LEVEL = 3 # log (1) < debug (2) < info (3) < warn (4) < error (5)
APP_NAME = 'Skeleton'
# Servers and URLs
SERVER_NAME = 'localhost:5000'
# Authentication etc
SECRET_KEY = 'some-secret-key'
# API
API_SERVER = 'localhost:5000'
API_TOKEN = 'some-api-token'
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Flask application default config:
# http://flask.pocoo.org/docs/config/#configuring-from-files
# https://github.com/mbr/flask-appconfig
DEBUG = True
TESTING = False
JS_LOG_LEVEL = 3 # log (1) < debug (2) < info (3) < warn (4) < error (5)
APP_NAME = 'Skeleton'
# Servers and URLs
SERVER_NAME = 'skeleton.dev'
# Authentication etc
SECRET_KEY = 'some-secret-key'
# API
API_SERVER = 'skeleton.dev'
API_TOKEN = 'some-api-token'
<commit_msg>Change default SERVER_NAME to localhost:5000
This is to make it work more out of the box, without the need to edit /etc/hosts<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Flask application default config:
# http://flask.pocoo.org/docs/config/#configuring-from-files
# https://github.com/mbr/flask-appconfig
DEBUG = True
TESTING = False
JS_LOG_LEVEL = 3 # log (1) < debug (2) < info (3) < warn (4) < error (5)
APP_NAME = 'Skeleton'
# Servers and URLs
SERVER_NAME = 'localhost:5000'
# Authentication etc
SECRET_KEY = 'some-secret-key'
# API
API_SERVER = 'localhost:5000'
API_TOKEN = 'some-api-token'
|
963857463cd706260667995bd8817bd2facea5f0
|
setup.py
|
setup.py
|
#!/usr/bin/python
from setuptools import setup, find_packages
tests_require=[
'nose',
'mock',
]
setup(
name="sunspear",
license='Apache License 2.0',
version="0.1.0a",
description="Activity streams backed by Riak.",
zip_safe=False,
long_description=open('README.rst', 'r').read(),
author="Numan Sachwani",
author_email="numan856@gmail.com",
url="https://github.com/numan/sunspear",
packages=find_packages(exclude=['tests']),
test_suite='nose.collector',
install_requires=[
'nydus==0.10.4',
'riak==1.5.1',
'python-dateutil==1.5',
'protobuf==2.4.1',
],
dependency_links=[
'https://github.com/numan/nydus/tarball/0.10.4#egg=nydus-0.10.4',
],
options={'easy_install': {'allow_hosts': 'pypi.python.org'}},
tests_require=tests_require,
extras_require={"test": tests_require, "nosetests": tests_require},
include_package_data=True,
classifiers=[
"Intended Audience :: Developers",
'Intended Audience :: System Administrators',
"Programming Language :: Python",
"Topic :: Software Development",
"Topic :: Utilities",
],
)
|
#!/usr/bin/python
from setuptools import setup, find_packages
tests_require=[
'nose',
'mock',
]
setup(
name="sunspear",
license='Apache License 2.0',
version="0.1.0a",
description="Activity streams backed by Riak.",
zip_safe=False,
long_description=open('README.rst', 'r').read(),
author="Numan Sachwani",
author_email="numan856@gmail.com",
url="https://github.com/numan/sunspear",
packages=find_packages(exclude=['tests']),
test_suite='nose.collector',
install_requires=[
'nydus==0.10.4',
'riak==1.5.1',
'python-dateutil==1.5',
'protobuf==2.4.1',
],
dependency_links=[
'https://github.com/disqus/nydus/tarball/master#egg=nydus-0.10.4',
],
options={'easy_install': {'allow_hosts': 'pypi.python.org'}},
tests_require=tests_require,
extras_require={"test": tests_require, "nosetests": tests_require},
include_package_data=True,
classifiers=[
"Intended Audience :: Developers",
'Intended Audience :: System Administrators',
"Programming Language :: Python",
"Topic :: Software Development",
"Topic :: Utilities",
],
)
|
Include the official nydus release
|
Include the official nydus release
|
Python
|
apache-2.0
|
numan/sunspear
|
#!/usr/bin/python
from setuptools import setup, find_packages
tests_require=[
'nose',
'mock',
]
setup(
name="sunspear",
license='Apache License 2.0',
version="0.1.0a",
description="Activity streams backed by Riak.",
zip_safe=False,
long_description=open('README.rst', 'r').read(),
author="Numan Sachwani",
author_email="numan856@gmail.com",
url="https://github.com/numan/sunspear",
packages=find_packages(exclude=['tests']),
test_suite='nose.collector',
install_requires=[
'nydus==0.10.4',
'riak==1.5.1',
'python-dateutil==1.5',
'protobuf==2.4.1',
],
dependency_links=[
'https://github.com/numan/nydus/tarball/0.10.4#egg=nydus-0.10.4',
],
options={'easy_install': {'allow_hosts': 'pypi.python.org'}},
tests_require=tests_require,
extras_require={"test": tests_require, "nosetests": tests_require},
include_package_data=True,
classifiers=[
"Intended Audience :: Developers",
'Intended Audience :: System Administrators',
"Programming Language :: Python",
"Topic :: Software Development",
"Topic :: Utilities",
],
)
Include the official nydus release
|
#!/usr/bin/python
from setuptools import setup, find_packages
tests_require=[
'nose',
'mock',
]
setup(
name="sunspear",
license='Apache License 2.0',
version="0.1.0a",
description="Activity streams backed by Riak.",
zip_safe=False,
long_description=open('README.rst', 'r').read(),
author="Numan Sachwani",
author_email="numan856@gmail.com",
url="https://github.com/numan/sunspear",
packages=find_packages(exclude=['tests']),
test_suite='nose.collector',
install_requires=[
'nydus==0.10.4',
'riak==1.5.1',
'python-dateutil==1.5',
'protobuf==2.4.1',
],
dependency_links=[
'https://github.com/disqus/nydus/tarball/master#egg=nydus-0.10.4',
],
options={'easy_install': {'allow_hosts': 'pypi.python.org'}},
tests_require=tests_require,
extras_require={"test": tests_require, "nosetests": tests_require},
include_package_data=True,
classifiers=[
"Intended Audience :: Developers",
'Intended Audience :: System Administrators',
"Programming Language :: Python",
"Topic :: Software Development",
"Topic :: Utilities",
],
)
|
<commit_before>#!/usr/bin/python
from setuptools import setup, find_packages
tests_require=[
'nose',
'mock',
]
setup(
name="sunspear",
license='Apache License 2.0',
version="0.1.0a",
description="Activity streams backed by Riak.",
zip_safe=False,
long_description=open('README.rst', 'r').read(),
author="Numan Sachwani",
author_email="numan856@gmail.com",
url="https://github.com/numan/sunspear",
packages=find_packages(exclude=['tests']),
test_suite='nose.collector',
install_requires=[
'nydus==0.10.4',
'riak==1.5.1',
'python-dateutil==1.5',
'protobuf==2.4.1',
],
dependency_links=[
'https://github.com/numan/nydus/tarball/0.10.4#egg=nydus-0.10.4',
],
options={'easy_install': {'allow_hosts': 'pypi.python.org'}},
tests_require=tests_require,
extras_require={"test": tests_require, "nosetests": tests_require},
include_package_data=True,
classifiers=[
"Intended Audience :: Developers",
'Intended Audience :: System Administrators',
"Programming Language :: Python",
"Topic :: Software Development",
"Topic :: Utilities",
],
)
<commit_msg>Include the official nydus release<commit_after>
|
#!/usr/bin/python
from setuptools import setup, find_packages
tests_require=[
'nose',
'mock',
]
setup(
name="sunspear",
license='Apache License 2.0',
version="0.1.0a",
description="Activity streams backed by Riak.",
zip_safe=False,
long_description=open('README.rst', 'r').read(),
author="Numan Sachwani",
author_email="numan856@gmail.com",
url="https://github.com/numan/sunspear",
packages=find_packages(exclude=['tests']),
test_suite='nose.collector',
install_requires=[
'nydus==0.10.4',
'riak==1.5.1',
'python-dateutil==1.5',
'protobuf==2.4.1',
],
dependency_links=[
'https://github.com/disqus/nydus/tarball/master#egg=nydus-0.10.4',
],
options={'easy_install': {'allow_hosts': 'pypi.python.org'}},
tests_require=tests_require,
extras_require={"test": tests_require, "nosetests": tests_require},
include_package_data=True,
classifiers=[
"Intended Audience :: Developers",
'Intended Audience :: System Administrators',
"Programming Language :: Python",
"Topic :: Software Development",
"Topic :: Utilities",
],
)
|
#!/usr/bin/python
from setuptools import setup, find_packages
tests_require=[
'nose',
'mock',
]
setup(
name="sunspear",
license='Apache License 2.0',
version="0.1.0a",
description="Activity streams backed by Riak.",
zip_safe=False,
long_description=open('README.rst', 'r').read(),
author="Numan Sachwani",
author_email="numan856@gmail.com",
url="https://github.com/numan/sunspear",
packages=find_packages(exclude=['tests']),
test_suite='nose.collector',
install_requires=[
'nydus==0.10.4',
'riak==1.5.1',
'python-dateutil==1.5',
'protobuf==2.4.1',
],
dependency_links=[
'https://github.com/numan/nydus/tarball/0.10.4#egg=nydus-0.10.4',
],
options={'easy_install': {'allow_hosts': 'pypi.python.org'}},
tests_require=tests_require,
extras_require={"test": tests_require, "nosetests": tests_require},
include_package_data=True,
classifiers=[
"Intended Audience :: Developers",
'Intended Audience :: System Administrators',
"Programming Language :: Python",
"Topic :: Software Development",
"Topic :: Utilities",
],
)
Include the official nydus release#!/usr/bin/python
from setuptools import setup, find_packages
tests_require=[
'nose',
'mock',
]
setup(
name="sunspear",
license='Apache License 2.0',
version="0.1.0a",
description="Activity streams backed by Riak.",
zip_safe=False,
long_description=open('README.rst', 'r').read(),
author="Numan Sachwani",
author_email="numan856@gmail.com",
url="https://github.com/numan/sunspear",
packages=find_packages(exclude=['tests']),
test_suite='nose.collector',
install_requires=[
'nydus==0.10.4',
'riak==1.5.1',
'python-dateutil==1.5',
'protobuf==2.4.1',
],
dependency_links=[
'https://github.com/disqus/nydus/tarball/master#egg=nydus-0.10.4',
],
options={'easy_install': {'allow_hosts': 'pypi.python.org'}},
tests_require=tests_require,
extras_require={"test": tests_require, "nosetests": tests_require},
include_package_data=True,
classifiers=[
"Intended Audience :: Developers",
'Intended Audience :: System Administrators',
"Programming Language :: Python",
"Topic :: Software Development",
"Topic :: Utilities",
],
)
|
<commit_before>#!/usr/bin/python
from setuptools import setup, find_packages
tests_require=[
'nose',
'mock',
]
setup(
name="sunspear",
license='Apache License 2.0',
version="0.1.0a",
description="Activity streams backed by Riak.",
zip_safe=False,
long_description=open('README.rst', 'r').read(),
author="Numan Sachwani",
author_email="numan856@gmail.com",
url="https://github.com/numan/sunspear",
packages=find_packages(exclude=['tests']),
test_suite='nose.collector',
install_requires=[
'nydus==0.10.4',
'riak==1.5.1',
'python-dateutil==1.5',
'protobuf==2.4.1',
],
dependency_links=[
'https://github.com/numan/nydus/tarball/0.10.4#egg=nydus-0.10.4',
],
options={'easy_install': {'allow_hosts': 'pypi.python.org'}},
tests_require=tests_require,
extras_require={"test": tests_require, "nosetests": tests_require},
include_package_data=True,
classifiers=[
"Intended Audience :: Developers",
'Intended Audience :: System Administrators',
"Programming Language :: Python",
"Topic :: Software Development",
"Topic :: Utilities",
],
)
<commit_msg>Include the official nydus release<commit_after>#!/usr/bin/python
from setuptools import setup, find_packages
tests_require=[
'nose',
'mock',
]
setup(
name="sunspear",
license='Apache License 2.0',
version="0.1.0a",
description="Activity streams backed by Riak.",
zip_safe=False,
long_description=open('README.rst', 'r').read(),
author="Numan Sachwani",
author_email="numan856@gmail.com",
url="https://github.com/numan/sunspear",
packages=find_packages(exclude=['tests']),
test_suite='nose.collector',
install_requires=[
'nydus==0.10.4',
'riak==1.5.1',
'python-dateutil==1.5',
'protobuf==2.4.1',
],
dependency_links=[
'https://github.com/disqus/nydus/tarball/master#egg=nydus-0.10.4',
],
options={'easy_install': {'allow_hosts': 'pypi.python.org'}},
tests_require=tests_require,
extras_require={"test": tests_require, "nosetests": tests_require},
include_package_data=True,
classifiers=[
"Intended Audience :: Developers",
'Intended Audience :: System Administrators',
"Programming Language :: Python",
"Topic :: Software Development",
"Topic :: Utilities",
],
)
|
427292a82aea2a2291833ca0cb3f30cee2afd497
|
ixdjango/management/commands/newrelic_notify_deploy.py
|
ixdjango/management/commands/newrelic_notify_deploy.py
|
"""
Management command to enable New Relic notification of deployments
.. moduleauthor:: Infoxchange Development Team <development@infoxchange.net.au>
"""
import os
from subprocess import call, Popen, PIPE
from django.conf import settings
from django.core.management.base import NoArgsCommand
class Command(NoArgsCommand):
"""
Notify New Relic of the new version
"""
def handle_noargs(self, **options):
# get the current git version
git = Popen(('git', 'describe'), stdout=PIPE)
ver, _ = git.communicate()
ver = ver.strip()
# The the tagger name and email
git = Popen(('git', 'log', ver, '--format=%ae', '-1'), stdout=PIPE)
username, _ = git.communicate()
username = username.strip()
ini_file = os.environ.get('NEW_RELIC_CONFIG_FILE',
settings.NEW_RELIC_CONFIG)
print "Informing New Relic...",
call(['newrelic-admin',
'record-deploy',
ini_file,
ver, # description
ver, # revision
'', # changelog
username])
|
"""
Management command to enable New Relic notification of deployments
.. moduleauthor:: Infoxchange Development Team <development@infoxchange.net.au>
"""
import os
from subprocess import call, Popen, PIPE
from django.conf import settings
from django.core.management.base import NoArgsCommand
class Command(NoArgsCommand):
"""
Notify New Relic of the new version
"""
def handle_noargs(self, **options):
# get the current git version
git = Popen(('git', 'describe'), stdout=PIPE)
ver, _ = git.communicate()
ver = ver.strip()
# The the tagger name and email
git = Popen(('git', 'log', ver, '--format=%ae', '-1'), stdout=PIPE)
username, _ = git.communicate()
username = username.strip()
try:
ini_file = os.environ['NEW_RELIC_CONFIG_FILE']
except KeyError:
ini_file = settings.NEW_RELIC_CONFIG
print "Informing New Relic...",
call(['newrelic-admin',
'record-deploy',
ini_file,
ver, # description
ver, # revision
'', # changelog
username])
|
Fix NR deploy notification bug
|
Fix NR deploy notification bug
|
Python
|
mit
|
infoxchange/ixdjango
|
"""
Management command to enable New Relic notification of deployments
.. moduleauthor:: Infoxchange Development Team <development@infoxchange.net.au>
"""
import os
from subprocess import call, Popen, PIPE
from django.conf import settings
from django.core.management.base import NoArgsCommand
class Command(NoArgsCommand):
"""
Notify New Relic of the new version
"""
def handle_noargs(self, **options):
# get the current git version
git = Popen(('git', 'describe'), stdout=PIPE)
ver, _ = git.communicate()
ver = ver.strip()
# The the tagger name and email
git = Popen(('git', 'log', ver, '--format=%ae', '-1'), stdout=PIPE)
username, _ = git.communicate()
username = username.strip()
ini_file = os.environ.get('NEW_RELIC_CONFIG_FILE',
settings.NEW_RELIC_CONFIG)
print "Informing New Relic...",
call(['newrelic-admin',
'record-deploy',
ini_file,
ver, # description
ver, # revision
'', # changelog
username])
Fix NR deploy notification bug
|
"""
Management command to enable New Relic notification of deployments
.. moduleauthor:: Infoxchange Development Team <development@infoxchange.net.au>
"""
import os
from subprocess import call, Popen, PIPE
from django.conf import settings
from django.core.management.base import NoArgsCommand
class Command(NoArgsCommand):
"""
Notify New Relic of the new version
"""
def handle_noargs(self, **options):
# get the current git version
git = Popen(('git', 'describe'), stdout=PIPE)
ver, _ = git.communicate()
ver = ver.strip()
# The the tagger name and email
git = Popen(('git', 'log', ver, '--format=%ae', '-1'), stdout=PIPE)
username, _ = git.communicate()
username = username.strip()
try:
ini_file = os.environ['NEW_RELIC_CONFIG_FILE']
except KeyError:
ini_file = settings.NEW_RELIC_CONFIG
print "Informing New Relic...",
call(['newrelic-admin',
'record-deploy',
ini_file,
ver, # description
ver, # revision
'', # changelog
username])
|
<commit_before>"""
Management command to enable New Relic notification of deployments
.. moduleauthor:: Infoxchange Development Team <development@infoxchange.net.au>
"""
import os
from subprocess import call, Popen, PIPE
from django.conf import settings
from django.core.management.base import NoArgsCommand
class Command(NoArgsCommand):
"""
Notify New Relic of the new version
"""
def handle_noargs(self, **options):
# get the current git version
git = Popen(('git', 'describe'), stdout=PIPE)
ver, _ = git.communicate()
ver = ver.strip()
# The the tagger name and email
git = Popen(('git', 'log', ver, '--format=%ae', '-1'), stdout=PIPE)
username, _ = git.communicate()
username = username.strip()
ini_file = os.environ.get('NEW_RELIC_CONFIG_FILE',
settings.NEW_RELIC_CONFIG)
print "Informing New Relic...",
call(['newrelic-admin',
'record-deploy',
ini_file,
ver, # description
ver, # revision
'', # changelog
username])
<commit_msg>Fix NR deploy notification bug<commit_after>
|
"""
Management command to enable New Relic notification of deployments
.. moduleauthor:: Infoxchange Development Team <development@infoxchange.net.au>
"""
import os
from subprocess import call, Popen, PIPE
from django.conf import settings
from django.core.management.base import NoArgsCommand
class Command(NoArgsCommand):
"""
Notify New Relic of the new version
"""
def handle_noargs(self, **options):
# get the current git version
git = Popen(('git', 'describe'), stdout=PIPE)
ver, _ = git.communicate()
ver = ver.strip()
# The the tagger name and email
git = Popen(('git', 'log', ver, '--format=%ae', '-1'), stdout=PIPE)
username, _ = git.communicate()
username = username.strip()
try:
ini_file = os.environ['NEW_RELIC_CONFIG_FILE']
except KeyError:
ini_file = settings.NEW_RELIC_CONFIG
print "Informing New Relic...",
call(['newrelic-admin',
'record-deploy',
ini_file,
ver, # description
ver, # revision
'', # changelog
username])
|
"""
Management command to enable New Relic notification of deployments
.. moduleauthor:: Infoxchange Development Team <development@infoxchange.net.au>
"""
import os
from subprocess import call, Popen, PIPE
from django.conf import settings
from django.core.management.base import NoArgsCommand
class Command(NoArgsCommand):
"""
Notify New Relic of the new version
"""
def handle_noargs(self, **options):
# get the current git version
git = Popen(('git', 'describe'), stdout=PIPE)
ver, _ = git.communicate()
ver = ver.strip()
# The the tagger name and email
git = Popen(('git', 'log', ver, '--format=%ae', '-1'), stdout=PIPE)
username, _ = git.communicate()
username = username.strip()
ini_file = os.environ.get('NEW_RELIC_CONFIG_FILE',
settings.NEW_RELIC_CONFIG)
print "Informing New Relic...",
call(['newrelic-admin',
'record-deploy',
ini_file,
ver, # description
ver, # revision
'', # changelog
username])
Fix NR deploy notification bug"""
Management command to enable New Relic notification of deployments
.. moduleauthor:: Infoxchange Development Team <development@infoxchange.net.au>
"""
import os
from subprocess import call, Popen, PIPE
from django.conf import settings
from django.core.management.base import NoArgsCommand
class Command(NoArgsCommand):
"""
Notify New Relic of the new version
"""
def handle_noargs(self, **options):
# get the current git version
git = Popen(('git', 'describe'), stdout=PIPE)
ver, _ = git.communicate()
ver = ver.strip()
# The the tagger name and email
git = Popen(('git', 'log', ver, '--format=%ae', '-1'), stdout=PIPE)
username, _ = git.communicate()
username = username.strip()
try:
ini_file = os.environ['NEW_RELIC_CONFIG_FILE']
except KeyError:
ini_file = settings.NEW_RELIC_CONFIG
print "Informing New Relic...",
call(['newrelic-admin',
'record-deploy',
ini_file,
ver, # description
ver, # revision
'', # changelog
username])
|
<commit_before>"""
Management command to enable New Relic notification of deployments
.. moduleauthor:: Infoxchange Development Team <development@infoxchange.net.au>
"""
import os
from subprocess import call, Popen, PIPE
from django.conf import settings
from django.core.management.base import NoArgsCommand
class Command(NoArgsCommand):
"""
Notify New Relic of the new version
"""
def handle_noargs(self, **options):
# get the current git version
git = Popen(('git', 'describe'), stdout=PIPE)
ver, _ = git.communicate()
ver = ver.strip()
# The the tagger name and email
git = Popen(('git', 'log', ver, '--format=%ae', '-1'), stdout=PIPE)
username, _ = git.communicate()
username = username.strip()
ini_file = os.environ.get('NEW_RELIC_CONFIG_FILE',
settings.NEW_RELIC_CONFIG)
print "Informing New Relic...",
call(['newrelic-admin',
'record-deploy',
ini_file,
ver, # description
ver, # revision
'', # changelog
username])
<commit_msg>Fix NR deploy notification bug<commit_after>"""
Management command to enable New Relic notification of deployments
.. moduleauthor:: Infoxchange Development Team <development@infoxchange.net.au>
"""
import os
from subprocess import call, Popen, PIPE
from django.conf import settings
from django.core.management.base import NoArgsCommand
class Command(NoArgsCommand):
"""
Notify New Relic of the new version
"""
def handle_noargs(self, **options):
# get the current git version
git = Popen(('git', 'describe'), stdout=PIPE)
ver, _ = git.communicate()
ver = ver.strip()
# The the tagger name and email
git = Popen(('git', 'log', ver, '--format=%ae', '-1'), stdout=PIPE)
username, _ = git.communicate()
username = username.strip()
try:
ini_file = os.environ['NEW_RELIC_CONFIG_FILE']
except KeyError:
ini_file = settings.NEW_RELIC_CONFIG
print "Informing New Relic...",
call(['newrelic-admin',
'record-deploy',
ini_file,
ver, # description
ver, # revision
'', # changelog
username])
|
f6f9ce94c33ad959cfd3f9cbb1b19e8bff17126d
|
solar/solar/utils.py
|
solar/solar/utils.py
|
import io
import glob
import yaml
import logging
import os
from uuid import uuid4
from jinja2 import Template
logger = logging.getLogger(__name__)
def create_dir(dir_path):
logger.debug(u'Creating directory %s', dir_path)
if not os.path.isdir(dir_path):
os.makedirs(dir_path)
def yaml_load(file_path):
with io.open(file_path) as f:
result = yaml.load(f)
return result
def yaml_dump(yaml_data):
return yaml.dump(yaml_data, default_flow_style=False)
def write_to_file(data, file_path):
with open(file_path, 'w') as f:
f.write(data)
def yaml_dump_to(data, file_path):
write_to_file(yaml_dump(data), file_path)
def find_by_mask(mask):
for file_path in glob.glob(mask):
yield os.path.abspath(file_path)
def load_by_mask(mask):
result = []
for file_path in find_by_mask(mask):
result.append(yaml_load(file_path))
return result
def generate_uuid():
return str(uuid4())
def render_template(template_path, params):
with io.open(template_path) as f:
temp = Template(f.read())
return temp.render(**params)
def read_config():
return yaml_load('/vagrant/config.yml')
|
import io
import glob
import yaml
import logging
import os
from uuid import uuid4
from jinja2 import Template
logger = logging.getLogger(__name__)
def create_dir(dir_path):
logger.debug(u'Creating directory %s', dir_path)
if not os.path.isdir(dir_path):
os.makedirs(dir_path)
def yaml_load(file_path):
with io.open(file_path) as f:
result = yaml.load(f)
return result
def yaml_dump(yaml_data):
return yaml.safe_dump(yaml_data, default_flow_style=False)
def write_to_file(data, file_path):
with open(file_path, 'w') as f:
f.write(data)
def yaml_dump_to(data, file_path):
write_to_file(yaml_dump(data), file_path)
def find_by_mask(mask):
for file_path in glob.glob(mask):
yield os.path.abspath(file_path)
def load_by_mask(mask):
result = []
for file_path in find_by_mask(mask):
result.append(yaml_load(file_path))
return result
def generate_uuid():
return str(uuid4())
def render_template(template_path, params):
with io.open(template_path) as f:
temp = Template(f.read())
return temp.render(**params)
def read_config():
return yaml_load('/vagrant/config.yml')
|
Fix python object searilization problem in yaml
|
Fix python object searilization problem in yaml
|
Python
|
apache-2.0
|
torgartor21/solar,Mirantis/solar,torgartor21/solar,loles/solar,zen/solar,openstack/solar,pigmej/solar,Mirantis/solar,loles/solar,dshulyak/solar,loles/solar,pigmej/solar,zen/solar,zen/solar,Mirantis/solar,Mirantis/solar,openstack/solar,CGenie/solar,pigmej/solar,openstack/solar,dshulyak/solar,CGenie/solar,loles/solar,zen/solar
|
import io
import glob
import yaml
import logging
import os
from uuid import uuid4
from jinja2 import Template
logger = logging.getLogger(__name__)
def create_dir(dir_path):
logger.debug(u'Creating directory %s', dir_path)
if not os.path.isdir(dir_path):
os.makedirs(dir_path)
def yaml_load(file_path):
with io.open(file_path) as f:
result = yaml.load(f)
return result
def yaml_dump(yaml_data):
return yaml.dump(yaml_data, default_flow_style=False)
def write_to_file(data, file_path):
with open(file_path, 'w') as f:
f.write(data)
def yaml_dump_to(data, file_path):
write_to_file(yaml_dump(data), file_path)
def find_by_mask(mask):
for file_path in glob.glob(mask):
yield os.path.abspath(file_path)
def load_by_mask(mask):
result = []
for file_path in find_by_mask(mask):
result.append(yaml_load(file_path))
return result
def generate_uuid():
return str(uuid4())
def render_template(template_path, params):
with io.open(template_path) as f:
temp = Template(f.read())
return temp.render(**params)
def read_config():
return yaml_load('/vagrant/config.yml')
Fix python object searilization problem in yaml
|
import io
import glob
import yaml
import logging
import os
from uuid import uuid4
from jinja2 import Template
logger = logging.getLogger(__name__)
def create_dir(dir_path):
logger.debug(u'Creating directory %s', dir_path)
if not os.path.isdir(dir_path):
os.makedirs(dir_path)
def yaml_load(file_path):
with io.open(file_path) as f:
result = yaml.load(f)
return result
def yaml_dump(yaml_data):
return yaml.safe_dump(yaml_data, default_flow_style=False)
def write_to_file(data, file_path):
with open(file_path, 'w') as f:
f.write(data)
def yaml_dump_to(data, file_path):
write_to_file(yaml_dump(data), file_path)
def find_by_mask(mask):
for file_path in glob.glob(mask):
yield os.path.abspath(file_path)
def load_by_mask(mask):
result = []
for file_path in find_by_mask(mask):
result.append(yaml_load(file_path))
return result
def generate_uuid():
return str(uuid4())
def render_template(template_path, params):
with io.open(template_path) as f:
temp = Template(f.read())
return temp.render(**params)
def read_config():
return yaml_load('/vagrant/config.yml')
|
<commit_before>import io
import glob
import yaml
import logging
import os
from uuid import uuid4
from jinja2 import Template
logger = logging.getLogger(__name__)
def create_dir(dir_path):
logger.debug(u'Creating directory %s', dir_path)
if not os.path.isdir(dir_path):
os.makedirs(dir_path)
def yaml_load(file_path):
with io.open(file_path) as f:
result = yaml.load(f)
return result
def yaml_dump(yaml_data):
return yaml.dump(yaml_data, default_flow_style=False)
def write_to_file(data, file_path):
with open(file_path, 'w') as f:
f.write(data)
def yaml_dump_to(data, file_path):
write_to_file(yaml_dump(data), file_path)
def find_by_mask(mask):
for file_path in glob.glob(mask):
yield os.path.abspath(file_path)
def load_by_mask(mask):
result = []
for file_path in find_by_mask(mask):
result.append(yaml_load(file_path))
return result
def generate_uuid():
return str(uuid4())
def render_template(template_path, params):
with io.open(template_path) as f:
temp = Template(f.read())
return temp.render(**params)
def read_config():
return yaml_load('/vagrant/config.yml')
<commit_msg>Fix python object searilization problem in yaml<commit_after>
|
import io
import glob
import yaml
import logging
import os
from uuid import uuid4
from jinja2 import Template
logger = logging.getLogger(__name__)
def create_dir(dir_path):
logger.debug(u'Creating directory %s', dir_path)
if not os.path.isdir(dir_path):
os.makedirs(dir_path)
def yaml_load(file_path):
with io.open(file_path) as f:
result = yaml.load(f)
return result
def yaml_dump(yaml_data):
return yaml.safe_dump(yaml_data, default_flow_style=False)
def write_to_file(data, file_path):
with open(file_path, 'w') as f:
f.write(data)
def yaml_dump_to(data, file_path):
write_to_file(yaml_dump(data), file_path)
def find_by_mask(mask):
for file_path in glob.glob(mask):
yield os.path.abspath(file_path)
def load_by_mask(mask):
result = []
for file_path in find_by_mask(mask):
result.append(yaml_load(file_path))
return result
def generate_uuid():
return str(uuid4())
def render_template(template_path, params):
with io.open(template_path) as f:
temp = Template(f.read())
return temp.render(**params)
def read_config():
return yaml_load('/vagrant/config.yml')
|
import io
import glob
import yaml
import logging
import os
from uuid import uuid4
from jinja2 import Template
logger = logging.getLogger(__name__)
def create_dir(dir_path):
logger.debug(u'Creating directory %s', dir_path)
if not os.path.isdir(dir_path):
os.makedirs(dir_path)
def yaml_load(file_path):
with io.open(file_path) as f:
result = yaml.load(f)
return result
def yaml_dump(yaml_data):
return yaml.dump(yaml_data, default_flow_style=False)
def write_to_file(data, file_path):
with open(file_path, 'w') as f:
f.write(data)
def yaml_dump_to(data, file_path):
write_to_file(yaml_dump(data), file_path)
def find_by_mask(mask):
for file_path in glob.glob(mask):
yield os.path.abspath(file_path)
def load_by_mask(mask):
result = []
for file_path in find_by_mask(mask):
result.append(yaml_load(file_path))
return result
def generate_uuid():
return str(uuid4())
def render_template(template_path, params):
with io.open(template_path) as f:
temp = Template(f.read())
return temp.render(**params)
def read_config():
return yaml_load('/vagrant/config.yml')
Fix python object searilization problem in yamlimport io
import glob
import yaml
import logging
import os
from uuid import uuid4
from jinja2 import Template
logger = logging.getLogger(__name__)
def create_dir(dir_path):
logger.debug(u'Creating directory %s', dir_path)
if not os.path.isdir(dir_path):
os.makedirs(dir_path)
def yaml_load(file_path):
with io.open(file_path) as f:
result = yaml.load(f)
return result
def yaml_dump(yaml_data):
return yaml.safe_dump(yaml_data, default_flow_style=False)
def write_to_file(data, file_path):
with open(file_path, 'w') as f:
f.write(data)
def yaml_dump_to(data, file_path):
write_to_file(yaml_dump(data), file_path)
def find_by_mask(mask):
for file_path in glob.glob(mask):
yield os.path.abspath(file_path)
def load_by_mask(mask):
result = []
for file_path in find_by_mask(mask):
result.append(yaml_load(file_path))
return result
def generate_uuid():
return str(uuid4())
def render_template(template_path, params):
with io.open(template_path) as f:
temp = Template(f.read())
return temp.render(**params)
def read_config():
return yaml_load('/vagrant/config.yml')
|
<commit_before>import io
import glob
import yaml
import logging
import os
from uuid import uuid4
from jinja2 import Template
logger = logging.getLogger(__name__)
def create_dir(dir_path):
logger.debug(u'Creating directory %s', dir_path)
if not os.path.isdir(dir_path):
os.makedirs(dir_path)
def yaml_load(file_path):
with io.open(file_path) as f:
result = yaml.load(f)
return result
def yaml_dump(yaml_data):
return yaml.dump(yaml_data, default_flow_style=False)
def write_to_file(data, file_path):
with open(file_path, 'w') as f:
f.write(data)
def yaml_dump_to(data, file_path):
write_to_file(yaml_dump(data), file_path)
def find_by_mask(mask):
for file_path in glob.glob(mask):
yield os.path.abspath(file_path)
def load_by_mask(mask):
result = []
for file_path in find_by_mask(mask):
result.append(yaml_load(file_path))
return result
def generate_uuid():
return str(uuid4())
def render_template(template_path, params):
with io.open(template_path) as f:
temp = Template(f.read())
return temp.render(**params)
def read_config():
return yaml_load('/vagrant/config.yml')
<commit_msg>Fix python object searilization problem in yaml<commit_after>import io
import glob
import yaml
import logging
import os
from uuid import uuid4
from jinja2 import Template
logger = logging.getLogger(__name__)
def create_dir(dir_path):
logger.debug(u'Creating directory %s', dir_path)
if not os.path.isdir(dir_path):
os.makedirs(dir_path)
def yaml_load(file_path):
with io.open(file_path) as f:
result = yaml.load(f)
return result
def yaml_dump(yaml_data):
return yaml.safe_dump(yaml_data, default_flow_style=False)
def write_to_file(data, file_path):
with open(file_path, 'w') as f:
f.write(data)
def yaml_dump_to(data, file_path):
write_to_file(yaml_dump(data), file_path)
def find_by_mask(mask):
for file_path in glob.glob(mask):
yield os.path.abspath(file_path)
def load_by_mask(mask):
result = []
for file_path in find_by_mask(mask):
result.append(yaml_load(file_path))
return result
def generate_uuid():
return str(uuid4())
def render_template(template_path, params):
with io.open(template_path) as f:
temp = Template(f.read())
return temp.render(**params)
def read_config():
return yaml_load('/vagrant/config.yml')
|
329e74f280537aab41d5b810f8650bfd8d6d81f5
|
tests/test_generate_files.py
|
tests/test_generate_files.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_generate_files
-------------------
Test formerly known from a unittest residing in test_generate.py named
TestGenerateFiles.test_generate_files_nontemplated_exception
"""
import pytest
from cookiecutter import generate
from cookiecutter import exceptions
@pytest.mark.usefixtures("clean_system")
def test_generate_files_nontemplated_exception():
with pytest.raises(exceptions.NonTemplatedInputDirException):
generate.generate_files(
context={'cookiecutter': {'food': 'pizza'}},
repo_dir='tests/test-generate-files-nontemplated'
)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_generate_files
-------------------
Test formerly known from a unittest residing in test_generate.py named
TestGenerateFiles.test_generate_files_nontemplated_exception
"""
from __future__ import unicode_literals
import os
import pytest
from cookiecutter import generate
from cookiecutter import exceptions
from cookiecutter import utils
@pytest.fixture(scope="function")
def clean_system_remove_additional_folders(request, clean_system):
def remove_additional_folders():
if os.path.exists('inputpizzä'):
utils.rmtree('inputpizzä')
if os.path.exists('inputgreen'):
utils.rmtree('inputgreen')
if os.path.exists('inputbinary_files'):
utils.rmtree('inputbinary_files')
if os.path.exists('tests/custom_output_dir'):
utils.rmtree('tests/custom_output_dir')
if os.path.exists('inputpermissions'):
utils.rmtree('inputpermissions')
request.addfinalizer(remove_additional_folders)
@pytest.mark.usefixtures("clean_system_remove_additional_folders")
def test_generate_files_nontemplated_exception():
with pytest.raises(exceptions.NonTemplatedInputDirException):
generate.generate_files(
context={'cookiecutter': {'food': 'pizza'}},
repo_dir='tests/test-generate-files-nontemplated'
)
|
Add teardown specific to the former TestCase class
|
Add teardown specific to the former TestCase class
|
Python
|
bsd-3-clause
|
michaeljoseph/cookiecutter,christabor/cookiecutter,cguardia/cookiecutter,janusnic/cookiecutter,michaeljoseph/cookiecutter,cguardia/cookiecutter,vincentbernat/cookiecutter,drgarcia1986/cookiecutter,Vauxoo/cookiecutter,cichm/cookiecutter,benthomasson/cookiecutter,0k/cookiecutter,terryjbates/cookiecutter,atlassian/cookiecutter,lucius-feng/cookiecutter,Springerle/cookiecutter,hackebrot/cookiecutter,moi65/cookiecutter,0k/cookiecutter,willingc/cookiecutter,venumech/cookiecutter,jhermann/cookiecutter,ramiroluz/cookiecutter,kkujawinski/cookiecutter,agconti/cookiecutter,sp1rs/cookiecutter,lgp171188/cookiecutter,kkujawinski/cookiecutter,jhermann/cookiecutter,venumech/cookiecutter,sp1rs/cookiecutter,luzfcb/cookiecutter,janusnic/cookiecutter,vintasoftware/cookiecutter,atlassian/cookiecutter,stevepiercy/cookiecutter,pjbull/cookiecutter,ionelmc/cookiecutter,takeflight/cookiecutter,letolab/cookiecutter,letolab/cookiecutter,pjbull/cookiecutter,hackebrot/cookiecutter,luzfcb/cookiecutter,audreyr/cookiecutter,takeflight/cookiecutter,lgp171188/cookiecutter,agconti/cookiecutter,vintasoftware/cookiecutter,Springerle/cookiecutter,cichm/cookiecutter,ionelmc/cookiecutter,benthomasson/cookiecutter,lucius-feng/cookiecutter,audreyr/cookiecutter,terryjbates/cookiecutter,foodszhang/cookiecutter,foodszhang/cookiecutter,vincentbernat/cookiecutter,ramiroluz/cookiecutter,tylerdave/cookiecutter,tylerdave/cookiecutter,nhomar/cookiecutter,dajose/cookiecutter,stevepiercy/cookiecutter,nhomar/cookiecutter,willingc/cookiecutter,Vauxoo/cookiecutter,drgarcia1986/cookiecutter,moi65/cookiecutter,christabor/cookiecutter,dajose/cookiecutter
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_generate_files
-------------------
Test formerly known from a unittest residing in test_generate.py named
TestGenerateFiles.test_generate_files_nontemplated_exception
"""
import pytest
from cookiecutter import generate
from cookiecutter import exceptions
@pytest.mark.usefixtures("clean_system")
def test_generate_files_nontemplated_exception():
with pytest.raises(exceptions.NonTemplatedInputDirException):
generate.generate_files(
context={'cookiecutter': {'food': 'pizza'}},
repo_dir='tests/test-generate-files-nontemplated'
)
Add teardown specific to the former TestCase class
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_generate_files
-------------------
Test formerly known from a unittest residing in test_generate.py named
TestGenerateFiles.test_generate_files_nontemplated_exception
"""
from __future__ import unicode_literals
import os
import pytest
from cookiecutter import generate
from cookiecutter import exceptions
from cookiecutter import utils
@pytest.fixture(scope="function")
def clean_system_remove_additional_folders(request, clean_system):
def remove_additional_folders():
if os.path.exists('inputpizzä'):
utils.rmtree('inputpizzä')
if os.path.exists('inputgreen'):
utils.rmtree('inputgreen')
if os.path.exists('inputbinary_files'):
utils.rmtree('inputbinary_files')
if os.path.exists('tests/custom_output_dir'):
utils.rmtree('tests/custom_output_dir')
if os.path.exists('inputpermissions'):
utils.rmtree('inputpermissions')
request.addfinalizer(remove_additional_folders)
@pytest.mark.usefixtures("clean_system_remove_additional_folders")
def test_generate_files_nontemplated_exception():
with pytest.raises(exceptions.NonTemplatedInputDirException):
generate.generate_files(
context={'cookiecutter': {'food': 'pizza'}},
repo_dir='tests/test-generate-files-nontemplated'
)
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_generate_files
-------------------
Test formerly known from a unittest residing in test_generate.py named
TestGenerateFiles.test_generate_files_nontemplated_exception
"""
import pytest
from cookiecutter import generate
from cookiecutter import exceptions
@pytest.mark.usefixtures("clean_system")
def test_generate_files_nontemplated_exception():
with pytest.raises(exceptions.NonTemplatedInputDirException):
generate.generate_files(
context={'cookiecutter': {'food': 'pizza'}},
repo_dir='tests/test-generate-files-nontemplated'
)
<commit_msg>Add teardown specific to the former TestCase class<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_generate_files
-------------------
Test formerly known from a unittest residing in test_generate.py named
TestGenerateFiles.test_generate_files_nontemplated_exception
"""
from __future__ import unicode_literals
import os
import pytest
from cookiecutter import generate
from cookiecutter import exceptions
from cookiecutter import utils
@pytest.fixture(scope="function")
def clean_system_remove_additional_folders(request, clean_system):
def remove_additional_folders():
if os.path.exists('inputpizzä'):
utils.rmtree('inputpizzä')
if os.path.exists('inputgreen'):
utils.rmtree('inputgreen')
if os.path.exists('inputbinary_files'):
utils.rmtree('inputbinary_files')
if os.path.exists('tests/custom_output_dir'):
utils.rmtree('tests/custom_output_dir')
if os.path.exists('inputpermissions'):
utils.rmtree('inputpermissions')
request.addfinalizer(remove_additional_folders)
@pytest.mark.usefixtures("clean_system_remove_additional_folders")
def test_generate_files_nontemplated_exception():
with pytest.raises(exceptions.NonTemplatedInputDirException):
generate.generate_files(
context={'cookiecutter': {'food': 'pizza'}},
repo_dir='tests/test-generate-files-nontemplated'
)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_generate_files
-------------------
Test formerly known from a unittest residing in test_generate.py named
TestGenerateFiles.test_generate_files_nontemplated_exception
"""
import pytest
from cookiecutter import generate
from cookiecutter import exceptions
@pytest.mark.usefixtures("clean_system")
def test_generate_files_nontemplated_exception():
with pytest.raises(exceptions.NonTemplatedInputDirException):
generate.generate_files(
context={'cookiecutter': {'food': 'pizza'}},
repo_dir='tests/test-generate-files-nontemplated'
)
Add teardown specific to the former TestCase class#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_generate_files
-------------------
Test formerly known from a unittest residing in test_generate.py named
TestGenerateFiles.test_generate_files_nontemplated_exception
"""
from __future__ import unicode_literals
import os
import pytest
from cookiecutter import generate
from cookiecutter import exceptions
from cookiecutter import utils
@pytest.fixture(scope="function")
def clean_system_remove_additional_folders(request, clean_system):
def remove_additional_folders():
if os.path.exists('inputpizzä'):
utils.rmtree('inputpizzä')
if os.path.exists('inputgreen'):
utils.rmtree('inputgreen')
if os.path.exists('inputbinary_files'):
utils.rmtree('inputbinary_files')
if os.path.exists('tests/custom_output_dir'):
utils.rmtree('tests/custom_output_dir')
if os.path.exists('inputpermissions'):
utils.rmtree('inputpermissions')
request.addfinalizer(remove_additional_folders)
@pytest.mark.usefixtures("clean_system_remove_additional_folders")
def test_generate_files_nontemplated_exception():
with pytest.raises(exceptions.NonTemplatedInputDirException):
generate.generate_files(
context={'cookiecutter': {'food': 'pizza'}},
repo_dir='tests/test-generate-files-nontemplated'
)
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_generate_files
-------------------
Test formerly known from a unittest residing in test_generate.py named
TestGenerateFiles.test_generate_files_nontemplated_exception
"""
import pytest
from cookiecutter import generate
from cookiecutter import exceptions
@pytest.mark.usefixtures("clean_system")
def test_generate_files_nontemplated_exception():
with pytest.raises(exceptions.NonTemplatedInputDirException):
generate.generate_files(
context={'cookiecutter': {'food': 'pizza'}},
repo_dir='tests/test-generate-files-nontemplated'
)
<commit_msg>Add teardown specific to the former TestCase class<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_generate_files
-------------------
Test formerly known from a unittest residing in test_generate.py named
TestGenerateFiles.test_generate_files_nontemplated_exception
"""
from __future__ import unicode_literals
import os
import pytest
from cookiecutter import generate
from cookiecutter import exceptions
from cookiecutter import utils
@pytest.fixture(scope="function")
def clean_system_remove_additional_folders(request, clean_system):
def remove_additional_folders():
if os.path.exists('inputpizzä'):
utils.rmtree('inputpizzä')
if os.path.exists('inputgreen'):
utils.rmtree('inputgreen')
if os.path.exists('inputbinary_files'):
utils.rmtree('inputbinary_files')
if os.path.exists('tests/custom_output_dir'):
utils.rmtree('tests/custom_output_dir')
if os.path.exists('inputpermissions'):
utils.rmtree('inputpermissions')
request.addfinalizer(remove_additional_folders)
@pytest.mark.usefixtures("clean_system_remove_additional_folders")
def test_generate_files_nontemplated_exception():
with pytest.raises(exceptions.NonTemplatedInputDirException):
generate.generate_files(
context={'cookiecutter': {'food': 'pizza'}},
repo_dir='tests/test-generate-files-nontemplated'
)
|
2c0947fcba8e260e01420182fe0da307af5a8426
|
tests/test_online_checker.py
|
tests/test_online_checker.py
|
"""This module is for testing the online checker."""
import sys
from unittest import TestCase
ONLINE_CHECKER = sys.modules["Rainmeter.http.online_checker"]
class TestRmDocOnlineChecker(TestCase):
"""Test of the online checks for Rainmeter Documentation using unittest."""
def test_is_rm_doc_online(self):
"""Rainmeter Documentation should be up to synchronize with it."""
is_online = ONLINE_CHECKER.is_rm_doc_online()
self.assertTrue(is_online)
class TestGithubOnlineChecker(TestCase):
"""Test of the online checks for Github using unittest."""
def test_is_gh_online(self):
"""Github should be up to download stuff from it."""
is_online = ONLINE_CHECKER.is_gh_online()
self.assertTrue(is_online)
class TestRawGithubOnlineChecker(TestCase):
"""Test of the online checks for Raw Github using unittest since raw is served from different service."""
def test_is_raw_gh_online(self):
"""Raw Github should be up to download stuff from it."""
is_online = ONLINE_CHECKER.is_gh_raw_online()
self.assertTrue(is_online)
|
"""This module is for testing the online checker."""
import sys
from unittest import TestCase
ONLINE_CHECKER = sys.modules["Rainmeter.web.online_checker"]
class TestRmDocOnlineChecker(TestCase):
"""Test of the online checks for Rainmeter Documentation using unittest."""
def test_is_rm_doc_online(self):
"""Rainmeter Documentation should be up to synchronize with it."""
is_online = ONLINE_CHECKER.is_rm_doc_online()
self.assertTrue(is_online)
class TestGithubOnlineChecker(TestCase):
"""Test of the online checks for Github using unittest."""
def test_is_gh_online(self):
"""Github should be up to download stuff from it."""
is_online = ONLINE_CHECKER.is_gh_online()
self.assertTrue(is_online)
class TestRawGithubOnlineChecker(TestCase):
"""Test of the online checks for Raw Github using unittest since raw is served from different service."""
def test_is_raw_gh_online(self):
"""Raw Github should be up to download stuff from it."""
is_online = ONLINE_CHECKER.is_gh_raw_online()
self.assertTrue(is_online)
|
Rename sys modules access to web module
|
Rename sys modules access to web module
|
Python
|
mit
|
thatsIch/sublime-rainmeter
|
"""This module is for testing the online checker."""
import sys
from unittest import TestCase
ONLINE_CHECKER = sys.modules["Rainmeter.http.online_checker"]
class TestRmDocOnlineChecker(TestCase):
"""Test of the online checks for Rainmeter Documentation using unittest."""
def test_is_rm_doc_online(self):
"""Rainmeter Documentation should be up to synchronize with it."""
is_online = ONLINE_CHECKER.is_rm_doc_online()
self.assertTrue(is_online)
class TestGithubOnlineChecker(TestCase):
"""Test of the online checks for Github using unittest."""
def test_is_gh_online(self):
"""Github should be up to download stuff from it."""
is_online = ONLINE_CHECKER.is_gh_online()
self.assertTrue(is_online)
class TestRawGithubOnlineChecker(TestCase):
"""Test of the online checks for Raw Github using unittest since raw is served from different service."""
def test_is_raw_gh_online(self):
"""Raw Github should be up to download stuff from it."""
is_online = ONLINE_CHECKER.is_gh_raw_online()
self.assertTrue(is_online)
Rename sys modules access to web module
|
"""This module is for testing the online checker."""
import sys
from unittest import TestCase
ONLINE_CHECKER = sys.modules["Rainmeter.web.online_checker"]
class TestRmDocOnlineChecker(TestCase):
"""Test of the online checks for Rainmeter Documentation using unittest."""
def test_is_rm_doc_online(self):
"""Rainmeter Documentation should be up to synchronize with it."""
is_online = ONLINE_CHECKER.is_rm_doc_online()
self.assertTrue(is_online)
class TestGithubOnlineChecker(TestCase):
"""Test of the online checks for Github using unittest."""
def test_is_gh_online(self):
"""Github should be up to download stuff from it."""
is_online = ONLINE_CHECKER.is_gh_online()
self.assertTrue(is_online)
class TestRawGithubOnlineChecker(TestCase):
"""Test of the online checks for Raw Github using unittest since raw is served from different service."""
def test_is_raw_gh_online(self):
"""Raw Github should be up to download stuff from it."""
is_online = ONLINE_CHECKER.is_gh_raw_online()
self.assertTrue(is_online)
|
<commit_before>"""This module is for testing the online checker."""
import sys
from unittest import TestCase
ONLINE_CHECKER = sys.modules["Rainmeter.http.online_checker"]
class TestRmDocOnlineChecker(TestCase):
"""Test of the online checks for Rainmeter Documentation using unittest."""
def test_is_rm_doc_online(self):
"""Rainmeter Documentation should be up to synchronize with it."""
is_online = ONLINE_CHECKER.is_rm_doc_online()
self.assertTrue(is_online)
class TestGithubOnlineChecker(TestCase):
"""Test of the online checks for Github using unittest."""
def test_is_gh_online(self):
"""Github should be up to download stuff from it."""
is_online = ONLINE_CHECKER.is_gh_online()
self.assertTrue(is_online)
class TestRawGithubOnlineChecker(TestCase):
"""Test of the online checks for Raw Github using unittest since raw is served from different service."""
def test_is_raw_gh_online(self):
"""Raw Github should be up to download stuff from it."""
is_online = ONLINE_CHECKER.is_gh_raw_online()
self.assertTrue(is_online)
<commit_msg>Rename sys modules access to web module<commit_after>
|
"""This module is for testing the online checker."""
import sys
from unittest import TestCase
ONLINE_CHECKER = sys.modules["Rainmeter.web.online_checker"]
class TestRmDocOnlineChecker(TestCase):
"""Test of the online checks for Rainmeter Documentation using unittest."""
def test_is_rm_doc_online(self):
"""Rainmeter Documentation should be up to synchronize with it."""
is_online = ONLINE_CHECKER.is_rm_doc_online()
self.assertTrue(is_online)
class TestGithubOnlineChecker(TestCase):
"""Test of the online checks for Github using unittest."""
def test_is_gh_online(self):
"""Github should be up to download stuff from it."""
is_online = ONLINE_CHECKER.is_gh_online()
self.assertTrue(is_online)
class TestRawGithubOnlineChecker(TestCase):
"""Test of the online checks for Raw Github using unittest since raw is served from different service."""
def test_is_raw_gh_online(self):
"""Raw Github should be up to download stuff from it."""
is_online = ONLINE_CHECKER.is_gh_raw_online()
self.assertTrue(is_online)
|
"""This module is for testing the online checker."""
import sys
from unittest import TestCase
ONLINE_CHECKER = sys.modules["Rainmeter.http.online_checker"]
class TestRmDocOnlineChecker(TestCase):
"""Test of the online checks for Rainmeter Documentation using unittest."""
def test_is_rm_doc_online(self):
"""Rainmeter Documentation should be up to synchronize with it."""
is_online = ONLINE_CHECKER.is_rm_doc_online()
self.assertTrue(is_online)
class TestGithubOnlineChecker(TestCase):
"""Test of the online checks for Github using unittest."""
def test_is_gh_online(self):
"""Github should be up to download stuff from it."""
is_online = ONLINE_CHECKER.is_gh_online()
self.assertTrue(is_online)
class TestRawGithubOnlineChecker(TestCase):
"""Test of the online checks for Raw Github using unittest since raw is served from different service."""
def test_is_raw_gh_online(self):
"""Raw Github should be up to download stuff from it."""
is_online = ONLINE_CHECKER.is_gh_raw_online()
self.assertTrue(is_online)
Rename sys modules access to web module"""This module is for testing the online checker."""
import sys
from unittest import TestCase
ONLINE_CHECKER = sys.modules["Rainmeter.web.online_checker"]
class TestRmDocOnlineChecker(TestCase):
"""Test of the online checks for Rainmeter Documentation using unittest."""
def test_is_rm_doc_online(self):
"""Rainmeter Documentation should be up to synchronize with it."""
is_online = ONLINE_CHECKER.is_rm_doc_online()
self.assertTrue(is_online)
class TestGithubOnlineChecker(TestCase):
"""Test of the online checks for Github using unittest."""
def test_is_gh_online(self):
"""Github should be up to download stuff from it."""
is_online = ONLINE_CHECKER.is_gh_online()
self.assertTrue(is_online)
class TestRawGithubOnlineChecker(TestCase):
"""Test of the online checks for Raw Github using unittest since raw is served from different service."""
def test_is_raw_gh_online(self):
"""Raw Github should be up to download stuff from it."""
is_online = ONLINE_CHECKER.is_gh_raw_online()
self.assertTrue(is_online)
|
<commit_before>"""This module is for testing the online checker."""
import sys
from unittest import TestCase
ONLINE_CHECKER = sys.modules["Rainmeter.http.online_checker"]
class TestRmDocOnlineChecker(TestCase):
"""Test of the online checks for Rainmeter Documentation using unittest."""
def test_is_rm_doc_online(self):
"""Rainmeter Documentation should be up to synchronize with it."""
is_online = ONLINE_CHECKER.is_rm_doc_online()
self.assertTrue(is_online)
class TestGithubOnlineChecker(TestCase):
"""Test of the online checks for Github using unittest."""
def test_is_gh_online(self):
"""Github should be up to download stuff from it."""
is_online = ONLINE_CHECKER.is_gh_online()
self.assertTrue(is_online)
class TestRawGithubOnlineChecker(TestCase):
"""Test of the online checks for Raw Github using unittest since raw is served from different service."""
def test_is_raw_gh_online(self):
"""Raw Github should be up to download stuff from it."""
is_online = ONLINE_CHECKER.is_gh_raw_online()
self.assertTrue(is_online)
<commit_msg>Rename sys modules access to web module<commit_after>"""This module is for testing the online checker."""
import sys
from unittest import TestCase
ONLINE_CHECKER = sys.modules["Rainmeter.web.online_checker"]
class TestRmDocOnlineChecker(TestCase):
"""Test of the online checks for Rainmeter Documentation using unittest."""
def test_is_rm_doc_online(self):
"""Rainmeter Documentation should be up to synchronize with it."""
is_online = ONLINE_CHECKER.is_rm_doc_online()
self.assertTrue(is_online)
class TestGithubOnlineChecker(TestCase):
"""Test of the online checks for Github using unittest."""
def test_is_gh_online(self):
"""Github should be up to download stuff from it."""
is_online = ONLINE_CHECKER.is_gh_online()
self.assertTrue(is_online)
class TestRawGithubOnlineChecker(TestCase):
"""Test of the online checks for Raw Github using unittest since raw is served from different service."""
def test_is_raw_gh_online(self):
"""Raw Github should be up to download stuff from it."""
is_online = ONLINE_CHECKER.is_gh_raw_online()
self.assertTrue(is_online)
|
e8e3a7daaa1e6afc4c8f9853f6db77dcd557f4d3
|
examples/black-whitelist/whitelist_generator.py
|
examples/black-whitelist/whitelist_generator.py
|
"""
instabot example
Whitelist generator: generates a list of users which
will not be unfollowed.
"""
import sys
import os
import random
sys.path.append(os.path.join(sys.path[0], '../../'))
from instabot import Bot
bot = Bot()
bot.login()
print("This script will generate whitelist.txt file with users"
"who will not be unfollowed by bot. "
"Press Y to add user to whitelist. Ctrl + C to exit.")
your_following = bot.following
already_whitelisted = bot.read_list_from_file("whitelist.txt")
rest_users = list(set(your_following) - set(already_whitelisted))
random.shuffle(rest_users)
with open("whitelist.txt", "a") as f:
for user_id in rest_users:
user_info = bot.get_user_info(user_id)
print(user_info["username"])
print(user_info["full_name"])
input_line = sys.stdin.readline().lower()
if "y" in input_line:
f.write(str(user_id) + "\n")
print("ADDED.\r")
|
"""
instabot example
Whitelist generator: generates a list of users which
will not be unfollowed.
"""
import sys
import os
import random
sys.path.append(os.path.join(sys.path[0], '../../'))
from instabot import Bot
bot = Bot()
bot.login()
print("This script will generate whitelist.txt file with users"
"who will not be unfollowed by bot. "
"Press Y to add user to whitelist. Ctrl + C to exit.")
your_following = bot.following
already_whitelisted = bot.read_list_from_file("whitelist.txt")
rest_users = list(set(your_following) - set(already_whitelisted))
random.shuffle(rest_users)
with open("whitelist.txt", "a") as f:
for user_id in rest_users:
user_info = bot.get_user_info(user_id)
print(user_info["username"])
print(user_info["full_name"])
input_line = sys.stdin.readline().lower()
if "y" in input_line.lower():
f.write(str(user_id) + "\n")
print("ADDED.\r")
|
Call 'lower()' on the input
|
Call 'lower()' on the input
|
Python
|
apache-2.0
|
instagrambot/instabot,ohld/instabot,instagrambot/instabot
|
"""
instabot example
Whitelist generator: generates a list of users which
will not be unfollowed.
"""
import sys
import os
import random
sys.path.append(os.path.join(sys.path[0], '../../'))
from instabot import Bot
bot = Bot()
bot.login()
print("This script will generate whitelist.txt file with users"
"who will not be unfollowed by bot. "
"Press Y to add user to whitelist. Ctrl + C to exit.")
your_following = bot.following
already_whitelisted = bot.read_list_from_file("whitelist.txt")
rest_users = list(set(your_following) - set(already_whitelisted))
random.shuffle(rest_users)
with open("whitelist.txt", "a") as f:
for user_id in rest_users:
user_info = bot.get_user_info(user_id)
print(user_info["username"])
print(user_info["full_name"])
input_line = sys.stdin.readline().lower()
if "y" in input_line:
f.write(str(user_id) + "\n")
print("ADDED.\r")
Call 'lower()' on the input
|
"""
instabot example
Whitelist generator: generates a list of users which
will not be unfollowed.
"""
import sys
import os
import random
sys.path.append(os.path.join(sys.path[0], '../../'))
from instabot import Bot
bot = Bot()
bot.login()
print("This script will generate whitelist.txt file with users"
"who will not be unfollowed by bot. "
"Press Y to add user to whitelist. Ctrl + C to exit.")
your_following = bot.following
already_whitelisted = bot.read_list_from_file("whitelist.txt")
rest_users = list(set(your_following) - set(already_whitelisted))
random.shuffle(rest_users)
with open("whitelist.txt", "a") as f:
for user_id in rest_users:
user_info = bot.get_user_info(user_id)
print(user_info["username"])
print(user_info["full_name"])
input_line = sys.stdin.readline().lower()
if "y" in input_line.lower():
f.write(str(user_id) + "\n")
print("ADDED.\r")
|
<commit_before>"""
instabot example
Whitelist generator: generates a list of users which
will not be unfollowed.
"""
import sys
import os
import random
sys.path.append(os.path.join(sys.path[0], '../../'))
from instabot import Bot
bot = Bot()
bot.login()
print("This script will generate whitelist.txt file with users"
"who will not be unfollowed by bot. "
"Press Y to add user to whitelist. Ctrl + C to exit.")
your_following = bot.following
already_whitelisted = bot.read_list_from_file("whitelist.txt")
rest_users = list(set(your_following) - set(already_whitelisted))
random.shuffle(rest_users)
with open("whitelist.txt", "a") as f:
for user_id in rest_users:
user_info = bot.get_user_info(user_id)
print(user_info["username"])
print(user_info["full_name"])
input_line = sys.stdin.readline().lower()
if "y" in input_line:
f.write(str(user_id) + "\n")
print("ADDED.\r")
<commit_msg>Call 'lower()' on the input<commit_after>
|
"""
instabot example
Whitelist generator: generates a list of users which
will not be unfollowed.
"""
import sys
import os
import random
sys.path.append(os.path.join(sys.path[0], '../../'))
from instabot import Bot
bot = Bot()
bot.login()
print("This script will generate whitelist.txt file with users"
"who will not be unfollowed by bot. "
"Press Y to add user to whitelist. Ctrl + C to exit.")
your_following = bot.following
already_whitelisted = bot.read_list_from_file("whitelist.txt")
rest_users = list(set(your_following) - set(already_whitelisted))
random.shuffle(rest_users)
with open("whitelist.txt", "a") as f:
for user_id in rest_users:
user_info = bot.get_user_info(user_id)
print(user_info["username"])
print(user_info["full_name"])
input_line = sys.stdin.readline().lower()
if "y" in input_line.lower():
f.write(str(user_id) + "\n")
print("ADDED.\r")
|
"""
instabot example
Whitelist generator: generates a list of users which
will not be unfollowed.
"""
import sys
import os
import random
sys.path.append(os.path.join(sys.path[0], '../../'))
from instabot import Bot
bot = Bot()
bot.login()
print("This script will generate whitelist.txt file with users"
"who will not be unfollowed by bot. "
"Press Y to add user to whitelist. Ctrl + C to exit.")
your_following = bot.following
already_whitelisted = bot.read_list_from_file("whitelist.txt")
rest_users = list(set(your_following) - set(already_whitelisted))
random.shuffle(rest_users)
with open("whitelist.txt", "a") as f:
for user_id in rest_users:
user_info = bot.get_user_info(user_id)
print(user_info["username"])
print(user_info["full_name"])
input_line = sys.stdin.readline().lower()
if "y" in input_line:
f.write(str(user_id) + "\n")
print("ADDED.\r")
Call 'lower()' on the input"""
instabot example
Whitelist generator: generates a list of users which
will not be unfollowed.
"""
import sys
import os
import random
sys.path.append(os.path.join(sys.path[0], '../../'))
from instabot import Bot
bot = Bot()
bot.login()
print("This script will generate whitelist.txt file with users"
"who will not be unfollowed by bot. "
"Press Y to add user to whitelist. Ctrl + C to exit.")
your_following = bot.following
already_whitelisted = bot.read_list_from_file("whitelist.txt")
rest_users = list(set(your_following) - set(already_whitelisted))
random.shuffle(rest_users)
with open("whitelist.txt", "a") as f:
for user_id in rest_users:
user_info = bot.get_user_info(user_id)
print(user_info["username"])
print(user_info["full_name"])
input_line = sys.stdin.readline().lower()
if "y" in input_line.lower():
f.write(str(user_id) + "\n")
print("ADDED.\r")
|
<commit_before>"""
instabot example
Whitelist generator: generates a list of users which
will not be unfollowed.
"""
import sys
import os
import random
sys.path.append(os.path.join(sys.path[0], '../../'))
from instabot import Bot
bot = Bot()
bot.login()
print("This script will generate whitelist.txt file with users"
"who will not be unfollowed by bot. "
"Press Y to add user to whitelist. Ctrl + C to exit.")
your_following = bot.following
already_whitelisted = bot.read_list_from_file("whitelist.txt")
rest_users = list(set(your_following) - set(already_whitelisted))
random.shuffle(rest_users)
with open("whitelist.txt", "a") as f:
for user_id in rest_users:
user_info = bot.get_user_info(user_id)
print(user_info["username"])
print(user_info["full_name"])
input_line = sys.stdin.readline().lower()
if "y" in input_line:
f.write(str(user_id) + "\n")
print("ADDED.\r")
<commit_msg>Call 'lower()' on the input<commit_after>"""
instabot example
Whitelist generator: generates a list of users which
will not be unfollowed.
"""
import sys
import os
import random
sys.path.append(os.path.join(sys.path[0], '../../'))
from instabot import Bot
bot = Bot()
bot.login()
print("This script will generate whitelist.txt file with users"
"who will not be unfollowed by bot. "
"Press Y to add user to whitelist. Ctrl + C to exit.")
your_following = bot.following
already_whitelisted = bot.read_list_from_file("whitelist.txt")
rest_users = list(set(your_following) - set(already_whitelisted))
random.shuffle(rest_users)
with open("whitelist.txt", "a") as f:
for user_id in rest_users:
user_info = bot.get_user_info(user_id)
print(user_info["username"])
print(user_info["full_name"])
input_line = sys.stdin.readline().lower()
if "y" in input_line.lower():
f.write(str(user_id) + "\n")
print("ADDED.\r")
|
56bc9c79522fd534f2a756bd5a18193635e2adae
|
tests/test_default_security_groups.py
|
tests/test_default_security_groups.py
|
"""Test default Security Groups."""
from unittest import mock
from foremast.securitygroup.create_securitygroup import SpinnakerSecurityGroup
@mock.patch('foremast.securitygroup.create_securitygroup.get_properties')
def test_default_security_groups(mock_properties):
"""Make sure default Security Groups are added to the ingress rules."""
ingress = {
'test_app': [
{
'start_port': 30,
'end_port': 30,
},
],
}
mock_properties.return_value = {
'security_group': {
'ingress': ingress,
'description': '',
},
}
test_sg = {'myapp': [{'start_port': '22', 'end_port': '22', 'protocol': 'tcp' }]}
with mock.patch.dict('foremast.securitygroup.create_securitygroup.DEFAULT_SECURITYGROUP_RULES', test_sg):
test_sg = SpinnakerSecurityGroup()
ingress = test_sg.update_default_securitygroup_rules()
assert 'myapp' in ingress
|
"""Test default Security Groups."""
from unittest import mock
from foremast.securitygroup.create_securitygroup import SpinnakerSecurityGroup
@mock.patch('foremast.securitygroup.create_securitygroup.get_details')
@mock.patch('foremast.securitygroup.create_securitygroup.get_properties')
def test_default_security_groups(mock_properties, mock_details):
"""Make sure default Security Groups are added to the ingress rules."""
ingress = {
'test_app': [
{
'start_port': 30,
'end_port': 30,
},
],
}
mock_properties.return_value = {
'security_group': {
'ingress': ingress,
'description': '',
},
}
test_sg = {
'myapp': [
{'start_port': '22', 'end_port': '22', 'protocol': 'tcp'},
]
}
with mock.patch.dict('foremast.securitygroup.create_securitygroup.DEFAULT_SECURITYGROUP_RULES', test_sg):
sg = SpinnakerSecurityGroup()
ingress = sg.update_default_securitygroup_rules()
assert 'myapp' in ingress
|
Fix missing mock and rename variable
|
tests: Fix missing mock and rename variable
|
Python
|
apache-2.0
|
gogoair/foremast,gogoair/foremast
|
"""Test default Security Groups."""
from unittest import mock
from foremast.securitygroup.create_securitygroup import SpinnakerSecurityGroup
@mock.patch('foremast.securitygroup.create_securitygroup.get_properties')
def test_default_security_groups(mock_properties):
"""Make sure default Security Groups are added to the ingress rules."""
ingress = {
'test_app': [
{
'start_port': 30,
'end_port': 30,
},
],
}
mock_properties.return_value = {
'security_group': {
'ingress': ingress,
'description': '',
},
}
test_sg = {'myapp': [{'start_port': '22', 'end_port': '22', 'protocol': 'tcp' }]}
with mock.patch.dict('foremast.securitygroup.create_securitygroup.DEFAULT_SECURITYGROUP_RULES', test_sg):
test_sg = SpinnakerSecurityGroup()
ingress = test_sg.update_default_securitygroup_rules()
assert 'myapp' in ingress
tests: Fix missing mock and rename variable
|
"""Test default Security Groups."""
from unittest import mock
from foremast.securitygroup.create_securitygroup import SpinnakerSecurityGroup
@mock.patch('foremast.securitygroup.create_securitygroup.get_details')
@mock.patch('foremast.securitygroup.create_securitygroup.get_properties')
def test_default_security_groups(mock_properties, mock_details):
"""Make sure default Security Groups are added to the ingress rules."""
ingress = {
'test_app': [
{
'start_port': 30,
'end_port': 30,
},
],
}
mock_properties.return_value = {
'security_group': {
'ingress': ingress,
'description': '',
},
}
test_sg = {
'myapp': [
{'start_port': '22', 'end_port': '22', 'protocol': 'tcp'},
]
}
with mock.patch.dict('foremast.securitygroup.create_securitygroup.DEFAULT_SECURITYGROUP_RULES', test_sg):
sg = SpinnakerSecurityGroup()
ingress = sg.update_default_securitygroup_rules()
assert 'myapp' in ingress
|
<commit_before>"""Test default Security Groups."""
from unittest import mock
from foremast.securitygroup.create_securitygroup import SpinnakerSecurityGroup
@mock.patch('foremast.securitygroup.create_securitygroup.get_properties')
def test_default_security_groups(mock_properties):
"""Make sure default Security Groups are added to the ingress rules."""
ingress = {
'test_app': [
{
'start_port': 30,
'end_port': 30,
},
],
}
mock_properties.return_value = {
'security_group': {
'ingress': ingress,
'description': '',
},
}
test_sg = {'myapp': [{'start_port': '22', 'end_port': '22', 'protocol': 'tcp' }]}
with mock.patch.dict('foremast.securitygroup.create_securitygroup.DEFAULT_SECURITYGROUP_RULES', test_sg):
test_sg = SpinnakerSecurityGroup()
ingress = test_sg.update_default_securitygroup_rules()
assert 'myapp' in ingress
<commit_msg>tests: Fix missing mock and rename variable<commit_after>
|
"""Test default Security Groups."""
from unittest import mock
from foremast.securitygroup.create_securitygroup import SpinnakerSecurityGroup
@mock.patch('foremast.securitygroup.create_securitygroup.get_details')
@mock.patch('foremast.securitygroup.create_securitygroup.get_properties')
def test_default_security_groups(mock_properties, mock_details):
"""Make sure default Security Groups are added to the ingress rules."""
ingress = {
'test_app': [
{
'start_port': 30,
'end_port': 30,
},
],
}
mock_properties.return_value = {
'security_group': {
'ingress': ingress,
'description': '',
},
}
test_sg = {
'myapp': [
{'start_port': '22', 'end_port': '22', 'protocol': 'tcp'},
]
}
with mock.patch.dict('foremast.securitygroup.create_securitygroup.DEFAULT_SECURITYGROUP_RULES', test_sg):
sg = SpinnakerSecurityGroup()
ingress = sg.update_default_securitygroup_rules()
assert 'myapp' in ingress
|
"""Test default Security Groups."""
from unittest import mock
from foremast.securitygroup.create_securitygroup import SpinnakerSecurityGroup
@mock.patch('foremast.securitygroup.create_securitygroup.get_properties')
def test_default_security_groups(mock_properties):
"""Make sure default Security Groups are added to the ingress rules."""
ingress = {
'test_app': [
{
'start_port': 30,
'end_port': 30,
},
],
}
mock_properties.return_value = {
'security_group': {
'ingress': ingress,
'description': '',
},
}
test_sg = {'myapp': [{'start_port': '22', 'end_port': '22', 'protocol': 'tcp' }]}
with mock.patch.dict('foremast.securitygroup.create_securitygroup.DEFAULT_SECURITYGROUP_RULES', test_sg):
test_sg = SpinnakerSecurityGroup()
ingress = test_sg.update_default_securitygroup_rules()
assert 'myapp' in ingress
tests: Fix missing mock and rename variable"""Test default Security Groups."""
from unittest import mock
from foremast.securitygroup.create_securitygroup import SpinnakerSecurityGroup
@mock.patch('foremast.securitygroup.create_securitygroup.get_details')
@mock.patch('foremast.securitygroup.create_securitygroup.get_properties')
def test_default_security_groups(mock_properties, mock_details):
"""Make sure default Security Groups are added to the ingress rules."""
ingress = {
'test_app': [
{
'start_port': 30,
'end_port': 30,
},
],
}
mock_properties.return_value = {
'security_group': {
'ingress': ingress,
'description': '',
},
}
test_sg = {
'myapp': [
{'start_port': '22', 'end_port': '22', 'protocol': 'tcp'},
]
}
with mock.patch.dict('foremast.securitygroup.create_securitygroup.DEFAULT_SECURITYGROUP_RULES', test_sg):
sg = SpinnakerSecurityGroup()
ingress = sg.update_default_securitygroup_rules()
assert 'myapp' in ingress
|
<commit_before>"""Test default Security Groups."""
from unittest import mock
from foremast.securitygroup.create_securitygroup import SpinnakerSecurityGroup
@mock.patch('foremast.securitygroup.create_securitygroup.get_properties')
def test_default_security_groups(mock_properties):
"""Make sure default Security Groups are added to the ingress rules."""
ingress = {
'test_app': [
{
'start_port': 30,
'end_port': 30,
},
],
}
mock_properties.return_value = {
'security_group': {
'ingress': ingress,
'description': '',
},
}
test_sg = {'myapp': [{'start_port': '22', 'end_port': '22', 'protocol': 'tcp' }]}
with mock.patch.dict('foremast.securitygroup.create_securitygroup.DEFAULT_SECURITYGROUP_RULES', test_sg):
test_sg = SpinnakerSecurityGroup()
ingress = test_sg.update_default_securitygroup_rules()
assert 'myapp' in ingress
<commit_msg>tests: Fix missing mock and rename variable<commit_after>"""Test default Security Groups."""
from unittest import mock
from foremast.securitygroup.create_securitygroup import SpinnakerSecurityGroup
@mock.patch('foremast.securitygroup.create_securitygroup.get_details')
@mock.patch('foremast.securitygroup.create_securitygroup.get_properties')
def test_default_security_groups(mock_properties, mock_details):
"""Make sure default Security Groups are added to the ingress rules."""
ingress = {
'test_app': [
{
'start_port': 30,
'end_port': 30,
},
],
}
mock_properties.return_value = {
'security_group': {
'ingress': ingress,
'description': '',
},
}
test_sg = {
'myapp': [
{'start_port': '22', 'end_port': '22', 'protocol': 'tcp'},
]
}
with mock.patch.dict('foremast.securitygroup.create_securitygroup.DEFAULT_SECURITYGROUP_RULES', test_sg):
sg = SpinnakerSecurityGroup()
ingress = sg.update_default_securitygroup_rules()
assert 'myapp' in ingress
|
6cf4901344033b50c6e56a9c878a7e89f33d3880
|
2to3/fix_reload.py
|
2to3/fix_reload.py
|
# Based on fix_intern.py. Original copyright:
# Copyright 2006 Georg Brandl.
# Licensed to PSF under a Contributor Agreement.
"""Fixer for intern().
intern(s) -> sys.intern(s)"""
# Local imports
from lib2to3 import pytree
from lib2to3 import fixer_base
from lib2to3.fixer_util import Name, Attr, touch_import
class FixReload(fixer_base.BaseFix):
BM_compatible = True
order = "pre"
PATTERN = """
power< 'reload'
after=any*
>
"""
def transform(self, node, results):
touch_import('imp', u'reload', node)
return node
|
# Based on fix_intern.py. Original copyright:
# Copyright 2006 Georg Brandl.
# Licensed to PSF under a Contributor Agreement.
"""Fixer for intern().
intern(s) -> sys.intern(s)"""
# Local imports
from lib2to3 import pytree
from lib2to3 import fixer_base
from lib2to3.fixer_util import Name, Attr, touch_import
class FixReload(fixer_base.BaseFix):
BM_compatible = True
order = "pre"
PATTERN = """
power< 'reload'
after=any*
>
"""
def transform(self, node, results):
touch_import('imp', 'reload', node)
return node
|
Fix 2to3 fixers to work with Python 3.
|
Fix 2to3 fixers to work with Python 3.
|
Python
|
bsd-3-clause
|
ProgVal/Limnoria-test,Ban3/Limnoria,ProgVal/Limnoria-test,Ban3/Limnoria
|
# Based on fix_intern.py. Original copyright:
# Copyright 2006 Georg Brandl.
# Licensed to PSF under a Contributor Agreement.
"""Fixer for intern().
intern(s) -> sys.intern(s)"""
# Local imports
from lib2to3 import pytree
from lib2to3 import fixer_base
from lib2to3.fixer_util import Name, Attr, touch_import
class FixReload(fixer_base.BaseFix):
BM_compatible = True
order = "pre"
PATTERN = """
power< 'reload'
after=any*
>
"""
def transform(self, node, results):
touch_import('imp', u'reload', node)
return node
Fix 2to3 fixers to work with Python 3.
|
# Based on fix_intern.py. Original copyright:
# Copyright 2006 Georg Brandl.
# Licensed to PSF under a Contributor Agreement.
"""Fixer for intern().
intern(s) -> sys.intern(s)"""
# Local imports
from lib2to3 import pytree
from lib2to3 import fixer_base
from lib2to3.fixer_util import Name, Attr, touch_import
class FixReload(fixer_base.BaseFix):
BM_compatible = True
order = "pre"
PATTERN = """
power< 'reload'
after=any*
>
"""
def transform(self, node, results):
touch_import('imp', 'reload', node)
return node
|
<commit_before># Based on fix_intern.py. Original copyright:
# Copyright 2006 Georg Brandl.
# Licensed to PSF under a Contributor Agreement.
"""Fixer for intern().
intern(s) -> sys.intern(s)"""
# Local imports
from lib2to3 import pytree
from lib2to3 import fixer_base
from lib2to3.fixer_util import Name, Attr, touch_import
class FixReload(fixer_base.BaseFix):
BM_compatible = True
order = "pre"
PATTERN = """
power< 'reload'
after=any*
>
"""
def transform(self, node, results):
touch_import('imp', u'reload', node)
return node
<commit_msg>Fix 2to3 fixers to work with Python 3.<commit_after>
|
# Based on fix_intern.py. Original copyright:
# Copyright 2006 Georg Brandl.
# Licensed to PSF under a Contributor Agreement.
"""Fixer for intern().
intern(s) -> sys.intern(s)"""
# Local imports
from lib2to3 import pytree
from lib2to3 import fixer_base
from lib2to3.fixer_util import Name, Attr, touch_import
class FixReload(fixer_base.BaseFix):
BM_compatible = True
order = "pre"
PATTERN = """
power< 'reload'
after=any*
>
"""
def transform(self, node, results):
touch_import('imp', 'reload', node)
return node
|
# Based on fix_intern.py. Original copyright:
# Copyright 2006 Georg Brandl.
# Licensed to PSF under a Contributor Agreement.
"""Fixer for intern().
intern(s) -> sys.intern(s)"""
# Local imports
from lib2to3 import pytree
from lib2to3 import fixer_base
from lib2to3.fixer_util import Name, Attr, touch_import
class FixReload(fixer_base.BaseFix):
BM_compatible = True
order = "pre"
PATTERN = """
power< 'reload'
after=any*
>
"""
def transform(self, node, results):
touch_import('imp', u'reload', node)
return node
Fix 2to3 fixers to work with Python 3.# Based on fix_intern.py. Original copyright:
# Copyright 2006 Georg Brandl.
# Licensed to PSF under a Contributor Agreement.
"""Fixer for intern().
intern(s) -> sys.intern(s)"""
# Local imports
from lib2to3 import pytree
from lib2to3 import fixer_base
from lib2to3.fixer_util import Name, Attr, touch_import
class FixReload(fixer_base.BaseFix):
BM_compatible = True
order = "pre"
PATTERN = """
power< 'reload'
after=any*
>
"""
def transform(self, node, results):
touch_import('imp', 'reload', node)
return node
|
<commit_before># Based on fix_intern.py. Original copyright:
# Copyright 2006 Georg Brandl.
# Licensed to PSF under a Contributor Agreement.
"""Fixer for intern().
intern(s) -> sys.intern(s)"""
# Local imports
from lib2to3 import pytree
from lib2to3 import fixer_base
from lib2to3.fixer_util import Name, Attr, touch_import
class FixReload(fixer_base.BaseFix):
BM_compatible = True
order = "pre"
PATTERN = """
power< 'reload'
after=any*
>
"""
def transform(self, node, results):
touch_import('imp', u'reload', node)
return node
<commit_msg>Fix 2to3 fixers to work with Python 3.<commit_after># Based on fix_intern.py. Original copyright:
# Copyright 2006 Georg Brandl.
# Licensed to PSF under a Contributor Agreement.
"""Fixer for intern().
intern(s) -> sys.intern(s)"""
# Local imports
from lib2to3 import pytree
from lib2to3 import fixer_base
from lib2to3.fixer_util import Name, Attr, touch_import
class FixReload(fixer_base.BaseFix):
BM_compatible = True
order = "pre"
PATTERN = """
power< 'reload'
after=any*
>
"""
def transform(self, node, results):
touch_import('imp', 'reload', node)
return node
|
84929e01bfb9236fd0f51d82ee514d513d018408
|
triangle/triangle.py
|
triangle/triangle.py
|
class TriangleError(Exception):
pass
class Triangle(object):
def __init__(self, *dims):
if not self.is_valid(dims):
raise TriangleError("Invalid dimensions: {}".format(dims))
self.dims = dims
def kind(self):
a, b, c = self.dims
if a == b and b == c:
return "equilateral"
elif a == b or b == c or a == c:
return "isosceles"
else:
return "scalene"
@staticmethod
def is_valid(dims):
if len(dims) != 3:
return False
a, b, c = dims
return (a > 0 and b > 0 and c > 0) \
and (a + b > c and a + c > b and b + c > a)
|
class TriangleError(Exception):
pass
class Triangle(object):
def __init__(self, *dims):
if not self.is_valid(dims):
raise TriangleError("Invalid dimensions: {}".format(dims))
self.dims = sorted(dims)
def kind(self):
a, b, c = self.dims
if a == b and b == c: # implies a == c
return "equilateral"
elif a == b or b == c: # sorted, so a < c here unless a == c above
return "isosceles"
else:
return "scalene"
@staticmethod
def is_valid(dims):
if len(dims) != 3:
raise ValueError("Triangles have 3 sides")
a, b, c = sorted(dims)
return a > 0 and a + b > c
|
Sort dimensins to reduce code
|
Sort dimensins to reduce code
|
Python
|
agpl-3.0
|
CubicComet/exercism-python-solutions
|
class TriangleError(Exception):
pass
class Triangle(object):
def __init__(self, *dims):
if not self.is_valid(dims):
raise TriangleError("Invalid dimensions: {}".format(dims))
self.dims = dims
def kind(self):
a, b, c = self.dims
if a == b and b == c:
return "equilateral"
elif a == b or b == c or a == c:
return "isosceles"
else:
return "scalene"
@staticmethod
def is_valid(dims):
if len(dims) != 3:
return False
a, b, c = dims
return (a > 0 and b > 0 and c > 0) \
and (a + b > c and a + c > b and b + c > a)
Sort dimensins to reduce code
|
class TriangleError(Exception):
pass
class Triangle(object):
def __init__(self, *dims):
if not self.is_valid(dims):
raise TriangleError("Invalid dimensions: {}".format(dims))
self.dims = sorted(dims)
def kind(self):
a, b, c = self.dims
if a == b and b == c: # implies a == c
return "equilateral"
elif a == b or b == c: # sorted, so a < c here unless a == c above
return "isosceles"
else:
return "scalene"
@staticmethod
def is_valid(dims):
if len(dims) != 3:
raise ValueError("Triangles have 3 sides")
a, b, c = sorted(dims)
return a > 0 and a + b > c
|
<commit_before>class TriangleError(Exception):
pass
class Triangle(object):
def __init__(self, *dims):
if not self.is_valid(dims):
raise TriangleError("Invalid dimensions: {}".format(dims))
self.dims = dims
def kind(self):
a, b, c = self.dims
if a == b and b == c:
return "equilateral"
elif a == b or b == c or a == c:
return "isosceles"
else:
return "scalene"
@staticmethod
def is_valid(dims):
if len(dims) != 3:
return False
a, b, c = dims
return (a > 0 and b > 0 and c > 0) \
and (a + b > c and a + c > b and b + c > a)
<commit_msg>Sort dimensins to reduce code<commit_after>
|
class TriangleError(Exception):
pass
class Triangle(object):
def __init__(self, *dims):
if not self.is_valid(dims):
raise TriangleError("Invalid dimensions: {}".format(dims))
self.dims = sorted(dims)
def kind(self):
a, b, c = self.dims
if a == b and b == c: # implies a == c
return "equilateral"
elif a == b or b == c: # sorted, so a < c here unless a == c above
return "isosceles"
else:
return "scalene"
@staticmethod
def is_valid(dims):
if len(dims) != 3:
raise ValueError("Triangles have 3 sides")
a, b, c = sorted(dims)
return a > 0 and a + b > c
|
class TriangleError(Exception):
pass
class Triangle(object):
def __init__(self, *dims):
if not self.is_valid(dims):
raise TriangleError("Invalid dimensions: {}".format(dims))
self.dims = dims
def kind(self):
a, b, c = self.dims
if a == b and b == c:
return "equilateral"
elif a == b or b == c or a == c:
return "isosceles"
else:
return "scalene"
@staticmethod
def is_valid(dims):
if len(dims) != 3:
return False
a, b, c = dims
return (a > 0 and b > 0 and c > 0) \
and (a + b > c and a + c > b and b + c > a)
Sort dimensins to reduce codeclass TriangleError(Exception):
pass
class Triangle(object):
def __init__(self, *dims):
if not self.is_valid(dims):
raise TriangleError("Invalid dimensions: {}".format(dims))
self.dims = sorted(dims)
def kind(self):
a, b, c = self.dims
if a == b and b == c: # implies a == c
return "equilateral"
elif a == b or b == c: # sorted, so a < c here unless a == c above
return "isosceles"
else:
return "scalene"
@staticmethod
def is_valid(dims):
if len(dims) != 3:
raise ValueError("Triangles have 3 sides")
a, b, c = sorted(dims)
return a > 0 and a + b > c
|
<commit_before>class TriangleError(Exception):
pass
class Triangle(object):
def __init__(self, *dims):
if not self.is_valid(dims):
raise TriangleError("Invalid dimensions: {}".format(dims))
self.dims = dims
def kind(self):
a, b, c = self.dims
if a == b and b == c:
return "equilateral"
elif a == b or b == c or a == c:
return "isosceles"
else:
return "scalene"
@staticmethod
def is_valid(dims):
if len(dims) != 3:
return False
a, b, c = dims
return (a > 0 and b > 0 and c > 0) \
and (a + b > c and a + c > b and b + c > a)
<commit_msg>Sort dimensins to reduce code<commit_after>class TriangleError(Exception):
pass
class Triangle(object):
def __init__(self, *dims):
if not self.is_valid(dims):
raise TriangleError("Invalid dimensions: {}".format(dims))
self.dims = sorted(dims)
def kind(self):
a, b, c = self.dims
if a == b and b == c: # implies a == c
return "equilateral"
elif a == b or b == c: # sorted, so a < c here unless a == c above
return "isosceles"
else:
return "scalene"
@staticmethod
def is_valid(dims):
if len(dims) != 3:
raise ValueError("Triangles have 3 sides")
a, b, c = sorted(dims)
return a > 0 and a + b > c
|
3dd23df07d7d1f84e361c87345aafcfefeff636a
|
jsk_2016_01_baxter_apc/node_scripts/control_vacuum_gripper.py
|
jsk_2016_01_baxter_apc/node_scripts/control_vacuum_gripper.py
|
#!/usr/bin/env python
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import argparse
import rospy
from std_msgs.msg import Bool
def main():
parser = argparse.ArgumentParser()
parser.add_argument('action', type=str, choices=['start', 'stop'])
limbs = ['left', 'right']
parser.add_argument('limb', type=str, choices=limbs, nargs='?')
args = parser.parse_args()
action = args.action
limbs = ['left', 'right'] if args.limb is None else [args.limb]
rospy.init_node('control_vacuum_gripper')
pubs = []
for limb in limbs:
pub = rospy.Publisher(
'/vacuum_gripper/limb/{}'.format(limb), Bool, queue_size=1)
pubs.append(pub)
# this sleep is necessary to register publisher in actual
rospy.sleep(1)
for limb, pub in zip(limbs, pubs):
print('{action}-ing {limb} hand vacuum gripper'
.format(action=action, limb=limb))
pub.publish(Bool(data=action == 'start'))
if __name__ == '__main__':
main()
|
#!/usr/bin/env python
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import argparse
import rospy
from std_msgs.msg import Bool
def main():
parser = argparse.ArgumentParser()
parser.add_argument('-l', '--left', action='store_true',
help='Control left gripper')
parser.add_argument('-r', '--right', action='store_true',
help='Control right gripper')
parser.add_argument('-t', '--start', action='store_true',
help='Start vacuum gripper')
parser.add_argument('-p', '--stop', action='store_true',
help='Stop vacuum gripper')
args = parser.parse_args()
if args.start and not args.stop:
action = 'start'
elif args.stop:
action = 'stop'
else:
print('Please specify one of start or stop action.')
parser.print_help()
quit(1)
if args.left and not args.right:
limbs = ['left']
elif args.right:
limbs = ['right']
else:
limbs = ['left', 'right']
rospy.init_node('control_vacuum_gripper')
pubs = []
for limb in limbs:
pub = rospy.Publisher(
'/vacuum_gripper/limb/{}'.format(limb), Bool, queue_size=1)
pubs.append(pub)
# this sleep is necessary to register publisher in actual
rospy.sleep(1)
for limb, pub in zip(limbs, pubs):
print('{action}-ing {limb} hand vacuum gripper'
.format(action=action, limb=limb))
pub.publish(Bool(data=action == 'start'))
if __name__ == '__main__':
main()
|
Order agonistic options to control vacuum gripper
|
Order agonistic options to control vacuum gripper
|
Python
|
bsd-3-clause
|
pazeshun/jsk_apc,pazeshun/jsk_apc,pazeshun/jsk_apc,pazeshun/jsk_apc,pazeshun/jsk_apc
|
#!/usr/bin/env python
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import argparse
import rospy
from std_msgs.msg import Bool
def main():
parser = argparse.ArgumentParser()
parser.add_argument('action', type=str, choices=['start', 'stop'])
limbs = ['left', 'right']
parser.add_argument('limb', type=str, choices=limbs, nargs='?')
args = parser.parse_args()
action = args.action
limbs = ['left', 'right'] if args.limb is None else [args.limb]
rospy.init_node('control_vacuum_gripper')
pubs = []
for limb in limbs:
pub = rospy.Publisher(
'/vacuum_gripper/limb/{}'.format(limb), Bool, queue_size=1)
pubs.append(pub)
# this sleep is necessary to register publisher in actual
rospy.sleep(1)
for limb, pub in zip(limbs, pubs):
print('{action}-ing {limb} hand vacuum gripper'
.format(action=action, limb=limb))
pub.publish(Bool(data=action == 'start'))
if __name__ == '__main__':
main()
Order agonistic options to control vacuum gripper
|
#!/usr/bin/env python
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import argparse
import rospy
from std_msgs.msg import Bool
def main():
parser = argparse.ArgumentParser()
parser.add_argument('-l', '--left', action='store_true',
help='Control left gripper')
parser.add_argument('-r', '--right', action='store_true',
help='Control right gripper')
parser.add_argument('-t', '--start', action='store_true',
help='Start vacuum gripper')
parser.add_argument('-p', '--stop', action='store_true',
help='Stop vacuum gripper')
args = parser.parse_args()
if args.start and not args.stop:
action = 'start'
elif args.stop:
action = 'stop'
else:
print('Please specify one of start or stop action.')
parser.print_help()
quit(1)
if args.left and not args.right:
limbs = ['left']
elif args.right:
limbs = ['right']
else:
limbs = ['left', 'right']
rospy.init_node('control_vacuum_gripper')
pubs = []
for limb in limbs:
pub = rospy.Publisher(
'/vacuum_gripper/limb/{}'.format(limb), Bool, queue_size=1)
pubs.append(pub)
# this sleep is necessary to register publisher in actual
rospy.sleep(1)
for limb, pub in zip(limbs, pubs):
print('{action}-ing {limb} hand vacuum gripper'
.format(action=action, limb=limb))
pub.publish(Bool(data=action == 'start'))
if __name__ == '__main__':
main()
|
<commit_before>#!/usr/bin/env python
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import argparse
import rospy
from std_msgs.msg import Bool
def main():
parser = argparse.ArgumentParser()
parser.add_argument('action', type=str, choices=['start', 'stop'])
limbs = ['left', 'right']
parser.add_argument('limb', type=str, choices=limbs, nargs='?')
args = parser.parse_args()
action = args.action
limbs = ['left', 'right'] if args.limb is None else [args.limb]
rospy.init_node('control_vacuum_gripper')
pubs = []
for limb in limbs:
pub = rospy.Publisher(
'/vacuum_gripper/limb/{}'.format(limb), Bool, queue_size=1)
pubs.append(pub)
# this sleep is necessary to register publisher in actual
rospy.sleep(1)
for limb, pub in zip(limbs, pubs):
print('{action}-ing {limb} hand vacuum gripper'
.format(action=action, limb=limb))
pub.publish(Bool(data=action == 'start'))
if __name__ == '__main__':
main()
<commit_msg>Order agonistic options to control vacuum gripper<commit_after>
|
#!/usr/bin/env python
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import argparse
import rospy
from std_msgs.msg import Bool
def main():
parser = argparse.ArgumentParser()
parser.add_argument('-l', '--left', action='store_true',
help='Control left gripper')
parser.add_argument('-r', '--right', action='store_true',
help='Control right gripper')
parser.add_argument('-t', '--start', action='store_true',
help='Start vacuum gripper')
parser.add_argument('-p', '--stop', action='store_true',
help='Stop vacuum gripper')
args = parser.parse_args()
if args.start and not args.stop:
action = 'start'
elif args.stop:
action = 'stop'
else:
print('Please specify one of start or stop action.')
parser.print_help()
quit(1)
if args.left and not args.right:
limbs = ['left']
elif args.right:
limbs = ['right']
else:
limbs = ['left', 'right']
rospy.init_node('control_vacuum_gripper')
pubs = []
for limb in limbs:
pub = rospy.Publisher(
'/vacuum_gripper/limb/{}'.format(limb), Bool, queue_size=1)
pubs.append(pub)
# this sleep is necessary to register publisher in actual
rospy.sleep(1)
for limb, pub in zip(limbs, pubs):
print('{action}-ing {limb} hand vacuum gripper'
.format(action=action, limb=limb))
pub.publish(Bool(data=action == 'start'))
if __name__ == '__main__':
main()
|
#!/usr/bin/env python
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import argparse
import rospy
from std_msgs.msg import Bool
def main():
parser = argparse.ArgumentParser()
parser.add_argument('action', type=str, choices=['start', 'stop'])
limbs = ['left', 'right']
parser.add_argument('limb', type=str, choices=limbs, nargs='?')
args = parser.parse_args()
action = args.action
limbs = ['left', 'right'] if args.limb is None else [args.limb]
rospy.init_node('control_vacuum_gripper')
pubs = []
for limb in limbs:
pub = rospy.Publisher(
'/vacuum_gripper/limb/{}'.format(limb), Bool, queue_size=1)
pubs.append(pub)
# this sleep is necessary to register publisher in actual
rospy.sleep(1)
for limb, pub in zip(limbs, pubs):
print('{action}-ing {limb} hand vacuum gripper'
.format(action=action, limb=limb))
pub.publish(Bool(data=action == 'start'))
if __name__ == '__main__':
main()
Order agonistic options to control vacuum gripper#!/usr/bin/env python
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import argparse
import rospy
from std_msgs.msg import Bool
def main():
parser = argparse.ArgumentParser()
parser.add_argument('-l', '--left', action='store_true',
help='Control left gripper')
parser.add_argument('-r', '--right', action='store_true',
help='Control right gripper')
parser.add_argument('-t', '--start', action='store_true',
help='Start vacuum gripper')
parser.add_argument('-p', '--stop', action='store_true',
help='Stop vacuum gripper')
args = parser.parse_args()
if args.start and not args.stop:
action = 'start'
elif args.stop:
action = 'stop'
else:
print('Please specify one of start or stop action.')
parser.print_help()
quit(1)
if args.left and not args.right:
limbs = ['left']
elif args.right:
limbs = ['right']
else:
limbs = ['left', 'right']
rospy.init_node('control_vacuum_gripper')
pubs = []
for limb in limbs:
pub = rospy.Publisher(
'/vacuum_gripper/limb/{}'.format(limb), Bool, queue_size=1)
pubs.append(pub)
# this sleep is necessary to register publisher in actual
rospy.sleep(1)
for limb, pub in zip(limbs, pubs):
print('{action}-ing {limb} hand vacuum gripper'
.format(action=action, limb=limb))
pub.publish(Bool(data=action == 'start'))
if __name__ == '__main__':
main()
|
<commit_before>#!/usr/bin/env python
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import argparse
import rospy
from std_msgs.msg import Bool
def main():
parser = argparse.ArgumentParser()
parser.add_argument('action', type=str, choices=['start', 'stop'])
limbs = ['left', 'right']
parser.add_argument('limb', type=str, choices=limbs, nargs='?')
args = parser.parse_args()
action = args.action
limbs = ['left', 'right'] if args.limb is None else [args.limb]
rospy.init_node('control_vacuum_gripper')
pubs = []
for limb in limbs:
pub = rospy.Publisher(
'/vacuum_gripper/limb/{}'.format(limb), Bool, queue_size=1)
pubs.append(pub)
# this sleep is necessary to register publisher in actual
rospy.sleep(1)
for limb, pub in zip(limbs, pubs):
print('{action}-ing {limb} hand vacuum gripper'
.format(action=action, limb=limb))
pub.publish(Bool(data=action == 'start'))
if __name__ == '__main__':
main()
<commit_msg>Order agonistic options to control vacuum gripper<commit_after>#!/usr/bin/env python
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import argparse
import rospy
from std_msgs.msg import Bool
def main():
parser = argparse.ArgumentParser()
parser.add_argument('-l', '--left', action='store_true',
help='Control left gripper')
parser.add_argument('-r', '--right', action='store_true',
help='Control right gripper')
parser.add_argument('-t', '--start', action='store_true',
help='Start vacuum gripper')
parser.add_argument('-p', '--stop', action='store_true',
help='Stop vacuum gripper')
args = parser.parse_args()
if args.start and not args.stop:
action = 'start'
elif args.stop:
action = 'stop'
else:
print('Please specify one of start or stop action.')
parser.print_help()
quit(1)
if args.left and not args.right:
limbs = ['left']
elif args.right:
limbs = ['right']
else:
limbs = ['left', 'right']
rospy.init_node('control_vacuum_gripper')
pubs = []
for limb in limbs:
pub = rospy.Publisher(
'/vacuum_gripper/limb/{}'.format(limb), Bool, queue_size=1)
pubs.append(pub)
# this sleep is necessary to register publisher in actual
rospy.sleep(1)
for limb, pub in zip(limbs, pubs):
print('{action}-ing {limb} hand vacuum gripper'
.format(action=action, limb=limb))
pub.publish(Bool(data=action == 'start'))
if __name__ == '__main__':
main()
|
74e9f4f4f64ed0c9501d2527b3948dbcde423cc6
|
jetcomcrawl/modes/items.py
|
jetcomcrawl/modes/items.py
|
from bs4 import BeautifulSoup
import logging
from jetcomcrawl import browser
import jetcomcrawl.libs.queue
class Worker(object):
def __init__(self):
self.queue_categories = jetcomcrawl.libs.queue.Queue('queue_categories')
self.queue_items = jetcomcrawl.libs.queue.Queue('queue_items')
def work(self):
'''Keeps running indefinitely, retrieving jobs from sqs'''
while True:
# TODO: Handle no items left in queue
data = self.queue_categories.retrieve()
cid = data['cid']
page = data['page']
logging.info('Finding products for category {}, page {}'.format(cid, page))
html = browser.get('https://jet.com/search/results?category={}&page={}'.format(cid, page))
soup = BeautifulSoup(html.text, 'html.parser')
results = []
for item in soup.find('div', {'class': 'products'}).findAll('div', {'class': 'product mobile'}):
url = item.a['href']
uid = url.split('/')[-1]
results.append({'uid': uid, 'url': url})
logging.info('{} products found for category {}, page {}, inserting into sqs'.format(len(results), cid, page))
self.queue_items.insert_bulk(results)
self.queue_categories.remove_processed()
|
from bs4 import BeautifulSoup
import logging
from jetcomcrawl import browser
import jetcomcrawl.libs.queue
class Worker(object):
def __init__(self):
self.queue_categories = jetcomcrawl.libs.queue.Queue('queue_categories')
self.queue_items = jetcomcrawl.libs.queue.Queue('queue_items')
def work(self):
'''Keeps running indefinitely, retrieving jobs from sqs'''
while True:
# TODO: Handle no items left in queue
data = self.queue_categories.retrieve()
cid = data['cid']
page = data['page']
logging.info('Finding products for category {}, page {}'.format(cid, page))
html = browser.get('https://jet.com/search/results?category={}&page={}'.format(cid, page))
try:
soup = BeautifulSoup(html.text, 'html.parser')
results = []
for item in soup.find('div', {'class': 'products'}).findAll('div', {'class': 'product mobile'}):
url = item.a['href']
uid = url.split('/')[-1]
results.append({'uid': uid, 'url': url})
except:
logging.info(html.text)
raise
logging.info('{} products found for category {}, page {}, inserting into sqs'.format(len(results), cid, page))
self.queue_items.insert_bulk(results)
self.queue_categories.remove_processed()
|
Increase log verbosity on exception
|
Increase log verbosity on exception
|
Python
|
mit
|
tdickman/jetcom-crawl
|
from bs4 import BeautifulSoup
import logging
from jetcomcrawl import browser
import jetcomcrawl.libs.queue
class Worker(object):
def __init__(self):
self.queue_categories = jetcomcrawl.libs.queue.Queue('queue_categories')
self.queue_items = jetcomcrawl.libs.queue.Queue('queue_items')
def work(self):
'''Keeps running indefinitely, retrieving jobs from sqs'''
while True:
# TODO: Handle no items left in queue
data = self.queue_categories.retrieve()
cid = data['cid']
page = data['page']
logging.info('Finding products for category {}, page {}'.format(cid, page))
html = browser.get('https://jet.com/search/results?category={}&page={}'.format(cid, page))
soup = BeautifulSoup(html.text, 'html.parser')
results = []
for item in soup.find('div', {'class': 'products'}).findAll('div', {'class': 'product mobile'}):
url = item.a['href']
uid = url.split('/')[-1]
results.append({'uid': uid, 'url': url})
logging.info('{} products found for category {}, page {}, inserting into sqs'.format(len(results), cid, page))
self.queue_items.insert_bulk(results)
self.queue_categories.remove_processed()
Increase log verbosity on exception
|
from bs4 import BeautifulSoup
import logging
from jetcomcrawl import browser
import jetcomcrawl.libs.queue
class Worker(object):
def __init__(self):
self.queue_categories = jetcomcrawl.libs.queue.Queue('queue_categories')
self.queue_items = jetcomcrawl.libs.queue.Queue('queue_items')
def work(self):
'''Keeps running indefinitely, retrieving jobs from sqs'''
while True:
# TODO: Handle no items left in queue
data = self.queue_categories.retrieve()
cid = data['cid']
page = data['page']
logging.info('Finding products for category {}, page {}'.format(cid, page))
html = browser.get('https://jet.com/search/results?category={}&page={}'.format(cid, page))
try:
soup = BeautifulSoup(html.text, 'html.parser')
results = []
for item in soup.find('div', {'class': 'products'}).findAll('div', {'class': 'product mobile'}):
url = item.a['href']
uid = url.split('/')[-1]
results.append({'uid': uid, 'url': url})
except:
logging.info(html.text)
raise
logging.info('{} products found for category {}, page {}, inserting into sqs'.format(len(results), cid, page))
self.queue_items.insert_bulk(results)
self.queue_categories.remove_processed()
|
<commit_before>from bs4 import BeautifulSoup
import logging
from jetcomcrawl import browser
import jetcomcrawl.libs.queue
class Worker(object):
def __init__(self):
self.queue_categories = jetcomcrawl.libs.queue.Queue('queue_categories')
self.queue_items = jetcomcrawl.libs.queue.Queue('queue_items')
def work(self):
'''Keeps running indefinitely, retrieving jobs from sqs'''
while True:
# TODO: Handle no items left in queue
data = self.queue_categories.retrieve()
cid = data['cid']
page = data['page']
logging.info('Finding products for category {}, page {}'.format(cid, page))
html = browser.get('https://jet.com/search/results?category={}&page={}'.format(cid, page))
soup = BeautifulSoup(html.text, 'html.parser')
results = []
for item in soup.find('div', {'class': 'products'}).findAll('div', {'class': 'product mobile'}):
url = item.a['href']
uid = url.split('/')[-1]
results.append({'uid': uid, 'url': url})
logging.info('{} products found for category {}, page {}, inserting into sqs'.format(len(results), cid, page))
self.queue_items.insert_bulk(results)
self.queue_categories.remove_processed()
<commit_msg>Increase log verbosity on exception<commit_after>
|
from bs4 import BeautifulSoup
import logging
from jetcomcrawl import browser
import jetcomcrawl.libs.queue
class Worker(object):
def __init__(self):
self.queue_categories = jetcomcrawl.libs.queue.Queue('queue_categories')
self.queue_items = jetcomcrawl.libs.queue.Queue('queue_items')
def work(self):
'''Keeps running indefinitely, retrieving jobs from sqs'''
while True:
# TODO: Handle no items left in queue
data = self.queue_categories.retrieve()
cid = data['cid']
page = data['page']
logging.info('Finding products for category {}, page {}'.format(cid, page))
html = browser.get('https://jet.com/search/results?category={}&page={}'.format(cid, page))
try:
soup = BeautifulSoup(html.text, 'html.parser')
results = []
for item in soup.find('div', {'class': 'products'}).findAll('div', {'class': 'product mobile'}):
url = item.a['href']
uid = url.split('/')[-1]
results.append({'uid': uid, 'url': url})
except:
logging.info(html.text)
raise
logging.info('{} products found for category {}, page {}, inserting into sqs'.format(len(results), cid, page))
self.queue_items.insert_bulk(results)
self.queue_categories.remove_processed()
|
from bs4 import BeautifulSoup
import logging
from jetcomcrawl import browser
import jetcomcrawl.libs.queue
class Worker(object):
def __init__(self):
self.queue_categories = jetcomcrawl.libs.queue.Queue('queue_categories')
self.queue_items = jetcomcrawl.libs.queue.Queue('queue_items')
def work(self):
'''Keeps running indefinitely, retrieving jobs from sqs'''
while True:
# TODO: Handle no items left in queue
data = self.queue_categories.retrieve()
cid = data['cid']
page = data['page']
logging.info('Finding products for category {}, page {}'.format(cid, page))
html = browser.get('https://jet.com/search/results?category={}&page={}'.format(cid, page))
soup = BeautifulSoup(html.text, 'html.parser')
results = []
for item in soup.find('div', {'class': 'products'}).findAll('div', {'class': 'product mobile'}):
url = item.a['href']
uid = url.split('/')[-1]
results.append({'uid': uid, 'url': url})
logging.info('{} products found for category {}, page {}, inserting into sqs'.format(len(results), cid, page))
self.queue_items.insert_bulk(results)
self.queue_categories.remove_processed()
Increase log verbosity on exceptionfrom bs4 import BeautifulSoup
import logging
from jetcomcrawl import browser
import jetcomcrawl.libs.queue
class Worker(object):
def __init__(self):
self.queue_categories = jetcomcrawl.libs.queue.Queue('queue_categories')
self.queue_items = jetcomcrawl.libs.queue.Queue('queue_items')
def work(self):
'''Keeps running indefinitely, retrieving jobs from sqs'''
while True:
# TODO: Handle no items left in queue
data = self.queue_categories.retrieve()
cid = data['cid']
page = data['page']
logging.info('Finding products for category {}, page {}'.format(cid, page))
html = browser.get('https://jet.com/search/results?category={}&page={}'.format(cid, page))
try:
soup = BeautifulSoup(html.text, 'html.parser')
results = []
for item in soup.find('div', {'class': 'products'}).findAll('div', {'class': 'product mobile'}):
url = item.a['href']
uid = url.split('/')[-1]
results.append({'uid': uid, 'url': url})
except:
logging.info(html.text)
raise
logging.info('{} products found for category {}, page {}, inserting into sqs'.format(len(results), cid, page))
self.queue_items.insert_bulk(results)
self.queue_categories.remove_processed()
|
<commit_before>from bs4 import BeautifulSoup
import logging
from jetcomcrawl import browser
import jetcomcrawl.libs.queue
class Worker(object):
def __init__(self):
self.queue_categories = jetcomcrawl.libs.queue.Queue('queue_categories')
self.queue_items = jetcomcrawl.libs.queue.Queue('queue_items')
def work(self):
'''Keeps running indefinitely, retrieving jobs from sqs'''
while True:
# TODO: Handle no items left in queue
data = self.queue_categories.retrieve()
cid = data['cid']
page = data['page']
logging.info('Finding products for category {}, page {}'.format(cid, page))
html = browser.get('https://jet.com/search/results?category={}&page={}'.format(cid, page))
soup = BeautifulSoup(html.text, 'html.parser')
results = []
for item in soup.find('div', {'class': 'products'}).findAll('div', {'class': 'product mobile'}):
url = item.a['href']
uid = url.split('/')[-1]
results.append({'uid': uid, 'url': url})
logging.info('{} products found for category {}, page {}, inserting into sqs'.format(len(results), cid, page))
self.queue_items.insert_bulk(results)
self.queue_categories.remove_processed()
<commit_msg>Increase log verbosity on exception<commit_after>from bs4 import BeautifulSoup
import logging
from jetcomcrawl import browser
import jetcomcrawl.libs.queue
class Worker(object):
def __init__(self):
self.queue_categories = jetcomcrawl.libs.queue.Queue('queue_categories')
self.queue_items = jetcomcrawl.libs.queue.Queue('queue_items')
def work(self):
'''Keeps running indefinitely, retrieving jobs from sqs'''
while True:
# TODO: Handle no items left in queue
data = self.queue_categories.retrieve()
cid = data['cid']
page = data['page']
logging.info('Finding products for category {}, page {}'.format(cid, page))
html = browser.get('https://jet.com/search/results?category={}&page={}'.format(cid, page))
try:
soup = BeautifulSoup(html.text, 'html.parser')
results = []
for item in soup.find('div', {'class': 'products'}).findAll('div', {'class': 'product mobile'}):
url = item.a['href']
uid = url.split('/')[-1]
results.append({'uid': uid, 'url': url})
except:
logging.info(html.text)
raise
logging.info('{} products found for category {}, page {}, inserting into sqs'.format(len(results), cid, page))
self.queue_items.insert_bulk(results)
self.queue_categories.remove_processed()
|
bf0e192b190efbde1b594cdf85c6552b343c2f0c
|
run_samples.py
|
run_samples.py
|
#!/usr/bin/python3
'''
Run simulations with parameter samples.
'''
import model
countries = model.datasheet.get_country_list()
# Move these to the front.
countries_to_plot = ['United States of America',
'South Africa',
'Uganda',
'Nigeria',
'India',
'Rwanda']
for c in countries_to_plot:
countries.remove(c)
countries = countries_to_plot + countries
def _run_country(country, target):
print('Running {}, {!s}.'.format(country, target))
parametersamples = model.parameters.Samples(country)
multisim = model.multisim.MultiSim(parametersamples, target)
return multisim
def _main():
for country in countries:
for target in model.target.all_:
if not model.results.exists(country, target):
results = _run_country(country, target)
model.results.dump(results)
if __name__ == '__main__':
_main()
|
#!/usr/bin/python3
'''
Run simulations with parameter samples.
'''
import model
countries = model.datasheet.get_country_list()
# Move these to the front.
countries_to_plot = ['United States of America',
'South Africa',
'Uganda',
'Nigeria',
'India',
'Rwanda']
for c in countries_to_plot:
countries.remove(c)
countries = countries_to_plot + countries
def _run_country(country, target):
print('Running {}, {!s}.'.format(country, target))
parametersamples = model.parameters.Samples(country)
multisim = model.simulation.MultiSim(parametersamples, target)
return multisim
def _main():
for country in countries:
for target in model.target.all_:
if not model.results.exists(country, target):
results = _run_country(country, target)
model.results.dump(results)
if __name__ == '__main__':
_main()
|
Use new API correctly, v2...
|
Use new API correctly, v2...
|
Python
|
agpl-3.0
|
janmedlock/HIV-95-vaccine
|
#!/usr/bin/python3
'''
Run simulations with parameter samples.
'''
import model
countries = model.datasheet.get_country_list()
# Move these to the front.
countries_to_plot = ['United States of America',
'South Africa',
'Uganda',
'Nigeria',
'India',
'Rwanda']
for c in countries_to_plot:
countries.remove(c)
countries = countries_to_plot + countries
def _run_country(country, target):
print('Running {}, {!s}.'.format(country, target))
parametersamples = model.parameters.Samples(country)
multisim = model.multisim.MultiSim(parametersamples, target)
return multisim
def _main():
for country in countries:
for target in model.target.all_:
if not model.results.exists(country, target):
results = _run_country(country, target)
model.results.dump(results)
if __name__ == '__main__':
_main()
Use new API correctly, v2...
|
#!/usr/bin/python3
'''
Run simulations with parameter samples.
'''
import model
countries = model.datasheet.get_country_list()
# Move these to the front.
countries_to_plot = ['United States of America',
'South Africa',
'Uganda',
'Nigeria',
'India',
'Rwanda']
for c in countries_to_plot:
countries.remove(c)
countries = countries_to_plot + countries
def _run_country(country, target):
print('Running {}, {!s}.'.format(country, target))
parametersamples = model.parameters.Samples(country)
multisim = model.simulation.MultiSim(parametersamples, target)
return multisim
def _main():
for country in countries:
for target in model.target.all_:
if not model.results.exists(country, target):
results = _run_country(country, target)
model.results.dump(results)
if __name__ == '__main__':
_main()
|
<commit_before>#!/usr/bin/python3
'''
Run simulations with parameter samples.
'''
import model
countries = model.datasheet.get_country_list()
# Move these to the front.
countries_to_plot = ['United States of America',
'South Africa',
'Uganda',
'Nigeria',
'India',
'Rwanda']
for c in countries_to_plot:
countries.remove(c)
countries = countries_to_plot + countries
def _run_country(country, target):
print('Running {}, {!s}.'.format(country, target))
parametersamples = model.parameters.Samples(country)
multisim = model.multisim.MultiSim(parametersamples, target)
return multisim
def _main():
for country in countries:
for target in model.target.all_:
if not model.results.exists(country, target):
results = _run_country(country, target)
model.results.dump(results)
if __name__ == '__main__':
_main()
<commit_msg>Use new API correctly, v2...<commit_after>
|
#!/usr/bin/python3
'''
Run simulations with parameter samples.
'''
import model
countries = model.datasheet.get_country_list()
# Move these to the front.
countries_to_plot = ['United States of America',
'South Africa',
'Uganda',
'Nigeria',
'India',
'Rwanda']
for c in countries_to_plot:
countries.remove(c)
countries = countries_to_plot + countries
def _run_country(country, target):
print('Running {}, {!s}.'.format(country, target))
parametersamples = model.parameters.Samples(country)
multisim = model.simulation.MultiSim(parametersamples, target)
return multisim
def _main():
for country in countries:
for target in model.target.all_:
if not model.results.exists(country, target):
results = _run_country(country, target)
model.results.dump(results)
if __name__ == '__main__':
_main()
|
#!/usr/bin/python3
'''
Run simulations with parameter samples.
'''
import model
countries = model.datasheet.get_country_list()
# Move these to the front.
countries_to_plot = ['United States of America',
'South Africa',
'Uganda',
'Nigeria',
'India',
'Rwanda']
for c in countries_to_plot:
countries.remove(c)
countries = countries_to_plot + countries
def _run_country(country, target):
print('Running {}, {!s}.'.format(country, target))
parametersamples = model.parameters.Samples(country)
multisim = model.multisim.MultiSim(parametersamples, target)
return multisim
def _main():
for country in countries:
for target in model.target.all_:
if not model.results.exists(country, target):
results = _run_country(country, target)
model.results.dump(results)
if __name__ == '__main__':
_main()
Use new API correctly, v2...#!/usr/bin/python3
'''
Run simulations with parameter samples.
'''
import model
countries = model.datasheet.get_country_list()
# Move these to the front.
countries_to_plot = ['United States of America',
'South Africa',
'Uganda',
'Nigeria',
'India',
'Rwanda']
for c in countries_to_plot:
countries.remove(c)
countries = countries_to_plot + countries
def _run_country(country, target):
print('Running {}, {!s}.'.format(country, target))
parametersamples = model.parameters.Samples(country)
multisim = model.simulation.MultiSim(parametersamples, target)
return multisim
def _main():
for country in countries:
for target in model.target.all_:
if not model.results.exists(country, target):
results = _run_country(country, target)
model.results.dump(results)
if __name__ == '__main__':
_main()
|
<commit_before>#!/usr/bin/python3
'''
Run simulations with parameter samples.
'''
import model
countries = model.datasheet.get_country_list()
# Move these to the front.
countries_to_plot = ['United States of America',
'South Africa',
'Uganda',
'Nigeria',
'India',
'Rwanda']
for c in countries_to_plot:
countries.remove(c)
countries = countries_to_plot + countries
def _run_country(country, target):
print('Running {}, {!s}.'.format(country, target))
parametersamples = model.parameters.Samples(country)
multisim = model.multisim.MultiSim(parametersamples, target)
return multisim
def _main():
for country in countries:
for target in model.target.all_:
if not model.results.exists(country, target):
results = _run_country(country, target)
model.results.dump(results)
if __name__ == '__main__':
_main()
<commit_msg>Use new API correctly, v2...<commit_after>#!/usr/bin/python3
'''
Run simulations with parameter samples.
'''
import model
countries = model.datasheet.get_country_list()
# Move these to the front.
countries_to_plot = ['United States of America',
'South Africa',
'Uganda',
'Nigeria',
'India',
'Rwanda']
for c in countries_to_plot:
countries.remove(c)
countries = countries_to_plot + countries
def _run_country(country, target):
print('Running {}, {!s}.'.format(country, target))
parametersamples = model.parameters.Samples(country)
multisim = model.simulation.MultiSim(parametersamples, target)
return multisim
def _main():
for country in countries:
for target in model.target.all_:
if not model.results.exists(country, target):
results = _run_country(country, target)
model.results.dump(results)
if __name__ == '__main__':
_main()
|
27b0a5b95e188a5bd77ae662bbb43e06dfde4749
|
slack/views.py
|
slack/views.py
|
from flask import Flask, request
import requests
from urllib import unquote
app = Flask(__name__)
@app.route("/")
def meme():
domain = request.args["team_domain"]
slackbot = request.args["slackbot"]
text = request.args["text"]
channel = request.args["channel_name"]
text = text[:-1] if text[-1] == ";" else text
params = text.split(";")
params = [x.strip().replace(" ", "-") for x in params]
params = [unquote(x) for x in params]
if not len(params) == 3:
response = "Your syntax should be in the form: /meme template; top; bottom;"
else:
template = params[0]
top = params[1]
bottom = params[2]
response = "http://memegen.link/{0}/{1}/{2}.jpg".format(template, top, bottom)
url = "https://{0}.slack.com/services/hooks/slackbot?token={1}&channel=%23{2}".format(domain, slackbot, channel)
requests.post(url, data=response)
return "ok", 200
|
from flask import Flask, request
import requests
from urllib import unquote
app = Flask(__name__)
@app.route("/")
def meme():
domain = request.args["team_domain"]
slackbot = request.args["slackbot"]
text = request.args["text"]
channel = request.args["channel_id"]
text = unquote(text)
text = text[:-1] if text[-1] == ";" else text
params = text.split(";")
params = [x.strip().replace(" ", "-") for x in params]
if not len(params) == 3:
response = "Your syntax should be in the form: /meme template; top; bottom;"
else:
template = params[0]
top = params[1]
bottom = params[2]
response = "http://memegen.link/{0}/{1}/{2}.jpg".format(template, top, bottom)
url = "https://{0}.slack.com/services/hooks/slackbot?token={1}&channel={2}".format(domain, slackbot, channel)
requests.post(url, data=response)
return "ok", 200
|
Use the id of the channel and unquote all of the text first.
|
Use the id of the channel and unquote all of the text first.
|
Python
|
mit
|
DuaneGarber/slack-meme,joeynebula/slack-meme,tezzutezzu/slack-meme,nicolewhite/slack-meme
|
from flask import Flask, request
import requests
from urllib import unquote
app = Flask(__name__)
@app.route("/")
def meme():
domain = request.args["team_domain"]
slackbot = request.args["slackbot"]
text = request.args["text"]
channel = request.args["channel_name"]
text = text[:-1] if text[-1] == ";" else text
params = text.split(";")
params = [x.strip().replace(" ", "-") for x in params]
params = [unquote(x) for x in params]
if not len(params) == 3:
response = "Your syntax should be in the form: /meme template; top; bottom;"
else:
template = params[0]
top = params[1]
bottom = params[2]
response = "http://memegen.link/{0}/{1}/{2}.jpg".format(template, top, bottom)
url = "https://{0}.slack.com/services/hooks/slackbot?token={1}&channel=%23{2}".format(domain, slackbot, channel)
requests.post(url, data=response)
return "ok", 200Use the id of the channel and unquote all of the text first.
|
from flask import Flask, request
import requests
from urllib import unquote
app = Flask(__name__)
@app.route("/")
def meme():
domain = request.args["team_domain"]
slackbot = request.args["slackbot"]
text = request.args["text"]
channel = request.args["channel_id"]
text = unquote(text)
text = text[:-1] if text[-1] == ";" else text
params = text.split(";")
params = [x.strip().replace(" ", "-") for x in params]
if not len(params) == 3:
response = "Your syntax should be in the form: /meme template; top; bottom;"
else:
template = params[0]
top = params[1]
bottom = params[2]
response = "http://memegen.link/{0}/{1}/{2}.jpg".format(template, top, bottom)
url = "https://{0}.slack.com/services/hooks/slackbot?token={1}&channel={2}".format(domain, slackbot, channel)
requests.post(url, data=response)
return "ok", 200
|
<commit_before>from flask import Flask, request
import requests
from urllib import unquote
app = Flask(__name__)
@app.route("/")
def meme():
domain = request.args["team_domain"]
slackbot = request.args["slackbot"]
text = request.args["text"]
channel = request.args["channel_name"]
text = text[:-1] if text[-1] == ";" else text
params = text.split(";")
params = [x.strip().replace(" ", "-") for x in params]
params = [unquote(x) for x in params]
if not len(params) == 3:
response = "Your syntax should be in the form: /meme template; top; bottom;"
else:
template = params[0]
top = params[1]
bottom = params[2]
response = "http://memegen.link/{0}/{1}/{2}.jpg".format(template, top, bottom)
url = "https://{0}.slack.com/services/hooks/slackbot?token={1}&channel=%23{2}".format(domain, slackbot, channel)
requests.post(url, data=response)
return "ok", 200<commit_msg>Use the id of the channel and unquote all of the text first.<commit_after>
|
from flask import Flask, request
import requests
from urllib import unquote
app = Flask(__name__)
@app.route("/")
def meme():
domain = request.args["team_domain"]
slackbot = request.args["slackbot"]
text = request.args["text"]
channel = request.args["channel_id"]
text = unquote(text)
text = text[:-1] if text[-1] == ";" else text
params = text.split(";")
params = [x.strip().replace(" ", "-") for x in params]
if not len(params) == 3:
response = "Your syntax should be in the form: /meme template; top; bottom;"
else:
template = params[0]
top = params[1]
bottom = params[2]
response = "http://memegen.link/{0}/{1}/{2}.jpg".format(template, top, bottom)
url = "https://{0}.slack.com/services/hooks/slackbot?token={1}&channel={2}".format(domain, slackbot, channel)
requests.post(url, data=response)
return "ok", 200
|
from flask import Flask, request
import requests
from urllib import unquote
app = Flask(__name__)
@app.route("/")
def meme():
domain = request.args["team_domain"]
slackbot = request.args["slackbot"]
text = request.args["text"]
channel = request.args["channel_name"]
text = text[:-1] if text[-1] == ";" else text
params = text.split(";")
params = [x.strip().replace(" ", "-") for x in params]
params = [unquote(x) for x in params]
if not len(params) == 3:
response = "Your syntax should be in the form: /meme template; top; bottom;"
else:
template = params[0]
top = params[1]
bottom = params[2]
response = "http://memegen.link/{0}/{1}/{2}.jpg".format(template, top, bottom)
url = "https://{0}.slack.com/services/hooks/slackbot?token={1}&channel=%23{2}".format(domain, slackbot, channel)
requests.post(url, data=response)
return "ok", 200Use the id of the channel and unquote all of the text first.from flask import Flask, request
import requests
from urllib import unquote
app = Flask(__name__)
@app.route("/")
def meme():
domain = request.args["team_domain"]
slackbot = request.args["slackbot"]
text = request.args["text"]
channel = request.args["channel_id"]
text = unquote(text)
text = text[:-1] if text[-1] == ";" else text
params = text.split(";")
params = [x.strip().replace(" ", "-") for x in params]
if not len(params) == 3:
response = "Your syntax should be in the form: /meme template; top; bottom;"
else:
template = params[0]
top = params[1]
bottom = params[2]
response = "http://memegen.link/{0}/{1}/{2}.jpg".format(template, top, bottom)
url = "https://{0}.slack.com/services/hooks/slackbot?token={1}&channel={2}".format(domain, slackbot, channel)
requests.post(url, data=response)
return "ok", 200
|
<commit_before>from flask import Flask, request
import requests
from urllib import unquote
app = Flask(__name__)
@app.route("/")
def meme():
domain = request.args["team_domain"]
slackbot = request.args["slackbot"]
text = request.args["text"]
channel = request.args["channel_name"]
text = text[:-1] if text[-1] == ";" else text
params = text.split(";")
params = [x.strip().replace(" ", "-") for x in params]
params = [unquote(x) for x in params]
if not len(params) == 3:
response = "Your syntax should be in the form: /meme template; top; bottom;"
else:
template = params[0]
top = params[1]
bottom = params[2]
response = "http://memegen.link/{0}/{1}/{2}.jpg".format(template, top, bottom)
url = "https://{0}.slack.com/services/hooks/slackbot?token={1}&channel=%23{2}".format(domain, slackbot, channel)
requests.post(url, data=response)
return "ok", 200<commit_msg>Use the id of the channel and unquote all of the text first.<commit_after>from flask import Flask, request
import requests
from urllib import unquote
app = Flask(__name__)
@app.route("/")
def meme():
domain = request.args["team_domain"]
slackbot = request.args["slackbot"]
text = request.args["text"]
channel = request.args["channel_id"]
text = unquote(text)
text = text[:-1] if text[-1] == ";" else text
params = text.split(";")
params = [x.strip().replace(" ", "-") for x in params]
if not len(params) == 3:
response = "Your syntax should be in the form: /meme template; top; bottom;"
else:
template = params[0]
top = params[1]
bottom = params[2]
response = "http://memegen.link/{0}/{1}/{2}.jpg".format(template, top, bottom)
url = "https://{0}.slack.com/services/hooks/slackbot?token={1}&channel={2}".format(domain, slackbot, channel)
requests.post(url, data=response)
return "ok", 200
|
ddd45afa0708682bb11d606e03e38aed111d7b9c
|
fireplace/cards/game/all.py
|
fireplace/cards/game/all.py
|
"""
GAME set and other special cards
"""
from ..utils import *
# The Coin
class GAME_005:
play = ManaThisTurn(CONTROLLER, 1)
|
"""
GAME set and other special cards
"""
from ..utils import *
# The Coin
class GAME_005:
play = ManaThisTurn(CONTROLLER, 1)
# Big Banana
class TB_006:
play = Buff(TARGET, "TB_006e")
# Deviate Banana
class TB_007:
play = Buff(TARGET, "TB_007e")
# Rotten Banana
class TB_008:
play = Hit(TARGET, 1)
|
Implement Big Banana, Deviate Banana, Rotten Banana
|
Implement Big Banana, Deviate Banana, Rotten Banana
|
Python
|
agpl-3.0
|
liujimj/fireplace,Ragowit/fireplace,butozerca/fireplace,butozerca/fireplace,smallnamespace/fireplace,amw2104/fireplace,smallnamespace/fireplace,beheh/fireplace,NightKev/fireplace,Meerkov/fireplace,Meerkov/fireplace,liujimj/fireplace,oftc-ftw/fireplace,Ragowit/fireplace,amw2104/fireplace,jleclanche/fireplace,oftc-ftw/fireplace
|
"""
GAME set and other special cards
"""
from ..utils import *
# The Coin
class GAME_005:
play = ManaThisTurn(CONTROLLER, 1)
Implement Big Banana, Deviate Banana, Rotten Banana
|
"""
GAME set and other special cards
"""
from ..utils import *
# The Coin
class GAME_005:
play = ManaThisTurn(CONTROLLER, 1)
# Big Banana
class TB_006:
play = Buff(TARGET, "TB_006e")
# Deviate Banana
class TB_007:
play = Buff(TARGET, "TB_007e")
# Rotten Banana
class TB_008:
play = Hit(TARGET, 1)
|
<commit_before>"""
GAME set and other special cards
"""
from ..utils import *
# The Coin
class GAME_005:
play = ManaThisTurn(CONTROLLER, 1)
<commit_msg>Implement Big Banana, Deviate Banana, Rotten Banana<commit_after>
|
"""
GAME set and other special cards
"""
from ..utils import *
# The Coin
class GAME_005:
play = ManaThisTurn(CONTROLLER, 1)
# Big Banana
class TB_006:
play = Buff(TARGET, "TB_006e")
# Deviate Banana
class TB_007:
play = Buff(TARGET, "TB_007e")
# Rotten Banana
class TB_008:
play = Hit(TARGET, 1)
|
"""
GAME set and other special cards
"""
from ..utils import *
# The Coin
class GAME_005:
play = ManaThisTurn(CONTROLLER, 1)
Implement Big Banana, Deviate Banana, Rotten Banana"""
GAME set and other special cards
"""
from ..utils import *
# The Coin
class GAME_005:
play = ManaThisTurn(CONTROLLER, 1)
# Big Banana
class TB_006:
play = Buff(TARGET, "TB_006e")
# Deviate Banana
class TB_007:
play = Buff(TARGET, "TB_007e")
# Rotten Banana
class TB_008:
play = Hit(TARGET, 1)
|
<commit_before>"""
GAME set and other special cards
"""
from ..utils import *
# The Coin
class GAME_005:
play = ManaThisTurn(CONTROLLER, 1)
<commit_msg>Implement Big Banana, Deviate Banana, Rotten Banana<commit_after>"""
GAME set and other special cards
"""
from ..utils import *
# The Coin
class GAME_005:
play = ManaThisTurn(CONTROLLER, 1)
# Big Banana
class TB_006:
play = Buff(TARGET, "TB_006e")
# Deviate Banana
class TB_007:
play = Buff(TARGET, "TB_007e")
# Rotten Banana
class TB_008:
play = Hit(TARGET, 1)
|
b0bde22e3ff0d2df2773f41aeaf8eb0ba6d0fa3f
|
tools/getapifield.py
|
tools/getapifield.py
|
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import sys
import argparse
parser = argparse.ArgumentParser(description='Fetches a field from a single API in the catalog')
parser.add_argument('file', help='File to load')
parser.add_argument('id', help='ID of API to fetch')
parser.add_argument('field', help='Field to find and output')
args = parser.parse_args()
filename = sys.argv[1]
file = open(filename, "r")
catalog = json.load(file)
query = [api.get(args.field) for api in catalog["apis"] if api["id"] == args.id]
if len(query) != 1:
raise Exception(f"API {args.id} not found (or has duplicate definitions)")
elif not query[0]:
raise Exception(f"API {args.id} has no field {args.field}")
else:
print(query[0])
|
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import sys
import argparse
parser = argparse.ArgumentParser(description='Fetches a field from a single API in the catalog')
parser.add_argument('file', help='File to load')
parser.add_argument('id', help='ID of API to fetch')
parser.add_argument('field', help='Field to find and output')
parser.add_argument('--default', help='Default value to output if field is not present')
args = parser.parse_args()
filename = sys.argv[1]
file = open(filename, "r")
catalog = json.load(file)
query = [api.get(args.field) for api in catalog["apis"] if api["id"] == args.id]
if len(query) != 1:
raise Exception(f"API {args.id} not found (or has duplicate definitions)")
elif not query[0] and args.default:
print(args.default)
elif not query[0]:
raise Exception(f"API {args.id} has no field {args.field}")
else:
print(query[0])
|
Allow a default value to be specified when fetching a field value
|
Allow a default value to be specified when fetching a field value
|
Python
|
apache-2.0
|
jskeet/gcloud-dotnet,jskeet/google-cloud-dotnet,googleapis/google-cloud-dotnet,googleapis/google-cloud-dotnet,jskeet/google-cloud-dotnet,googleapis/google-cloud-dotnet,jskeet/google-cloud-dotnet,jskeet/google-cloud-dotnet,jskeet/google-cloud-dotnet
|
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import sys
import argparse
parser = argparse.ArgumentParser(description='Fetches a field from a single API in the catalog')
parser.add_argument('file', help='File to load')
parser.add_argument('id', help='ID of API to fetch')
parser.add_argument('field', help='Field to find and output')
args = parser.parse_args()
filename = sys.argv[1]
file = open(filename, "r")
catalog = json.load(file)
query = [api.get(args.field) for api in catalog["apis"] if api["id"] == args.id]
if len(query) != 1:
raise Exception(f"API {args.id} not found (or has duplicate definitions)")
elif not query[0]:
raise Exception(f"API {args.id} has no field {args.field}")
else:
print(query[0])
Allow a default value to be specified when fetching a field value
|
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import sys
import argparse
parser = argparse.ArgumentParser(description='Fetches a field from a single API in the catalog')
parser.add_argument('file', help='File to load')
parser.add_argument('id', help='ID of API to fetch')
parser.add_argument('field', help='Field to find and output')
parser.add_argument('--default', help='Default value to output if field is not present')
args = parser.parse_args()
filename = sys.argv[1]
file = open(filename, "r")
catalog = json.load(file)
query = [api.get(args.field) for api in catalog["apis"] if api["id"] == args.id]
if len(query) != 1:
raise Exception(f"API {args.id} not found (or has duplicate definitions)")
elif not query[0] and args.default:
print(args.default)
elif not query[0]:
raise Exception(f"API {args.id} has no field {args.field}")
else:
print(query[0])
|
<commit_before># Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import sys
import argparse
parser = argparse.ArgumentParser(description='Fetches a field from a single API in the catalog')
parser.add_argument('file', help='File to load')
parser.add_argument('id', help='ID of API to fetch')
parser.add_argument('field', help='Field to find and output')
args = parser.parse_args()
filename = sys.argv[1]
file = open(filename, "r")
catalog = json.load(file)
query = [api.get(args.field) for api in catalog["apis"] if api["id"] == args.id]
if len(query) != 1:
raise Exception(f"API {args.id} not found (or has duplicate definitions)")
elif not query[0]:
raise Exception(f"API {args.id} has no field {args.field}")
else:
print(query[0])
<commit_msg>Allow a default value to be specified when fetching a field value<commit_after>
|
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import sys
import argparse
parser = argparse.ArgumentParser(description='Fetches a field from a single API in the catalog')
parser.add_argument('file', help='File to load')
parser.add_argument('id', help='ID of API to fetch')
parser.add_argument('field', help='Field to find and output')
parser.add_argument('--default', help='Default value to output if field is not present')
args = parser.parse_args()
filename = sys.argv[1]
file = open(filename, "r")
catalog = json.load(file)
query = [api.get(args.field) for api in catalog["apis"] if api["id"] == args.id]
if len(query) != 1:
raise Exception(f"API {args.id} not found (or has duplicate definitions)")
elif not query[0] and args.default:
print(args.default)
elif not query[0]:
raise Exception(f"API {args.id} has no field {args.field}")
else:
print(query[0])
|
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import sys
import argparse
parser = argparse.ArgumentParser(description='Fetches a field from a single API in the catalog')
parser.add_argument('file', help='File to load')
parser.add_argument('id', help='ID of API to fetch')
parser.add_argument('field', help='Field to find and output')
args = parser.parse_args()
filename = sys.argv[1]
file = open(filename, "r")
catalog = json.load(file)
query = [api.get(args.field) for api in catalog["apis"] if api["id"] == args.id]
if len(query) != 1:
raise Exception(f"API {args.id} not found (or has duplicate definitions)")
elif not query[0]:
raise Exception(f"API {args.id} has no field {args.field}")
else:
print(query[0])
Allow a default value to be specified when fetching a field value# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import sys
import argparse
parser = argparse.ArgumentParser(description='Fetches a field from a single API in the catalog')
parser.add_argument('file', help='File to load')
parser.add_argument('id', help='ID of API to fetch')
parser.add_argument('field', help='Field to find and output')
parser.add_argument('--default', help='Default value to output if field is not present')
args = parser.parse_args()
filename = sys.argv[1]
file = open(filename, "r")
catalog = json.load(file)
query = [api.get(args.field) for api in catalog["apis"] if api["id"] == args.id]
if len(query) != 1:
raise Exception(f"API {args.id} not found (or has duplicate definitions)")
elif not query[0] and args.default:
print(args.default)
elif not query[0]:
raise Exception(f"API {args.id} has no field {args.field}")
else:
print(query[0])
|
<commit_before># Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import sys
import argparse
parser = argparse.ArgumentParser(description='Fetches a field from a single API in the catalog')
parser.add_argument('file', help='File to load')
parser.add_argument('id', help='ID of API to fetch')
parser.add_argument('field', help='Field to find and output')
args = parser.parse_args()
filename = sys.argv[1]
file = open(filename, "r")
catalog = json.load(file)
query = [api.get(args.field) for api in catalog["apis"] if api["id"] == args.id]
if len(query) != 1:
raise Exception(f"API {args.id} not found (or has duplicate definitions)")
elif not query[0]:
raise Exception(f"API {args.id} has no field {args.field}")
else:
print(query[0])
<commit_msg>Allow a default value to be specified when fetching a field value<commit_after># Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import sys
import argparse
parser = argparse.ArgumentParser(description='Fetches a field from a single API in the catalog')
parser.add_argument('file', help='File to load')
parser.add_argument('id', help='ID of API to fetch')
parser.add_argument('field', help='Field to find and output')
parser.add_argument('--default', help='Default value to output if field is not present')
args = parser.parse_args()
filename = sys.argv[1]
file = open(filename, "r")
catalog = json.load(file)
query = [api.get(args.field) for api in catalog["apis"] if api["id"] == args.id]
if len(query) != 1:
raise Exception(f"API {args.id} not found (or has duplicate definitions)")
elif not query[0] and args.default:
print(args.default)
elif not query[0]:
raise Exception(f"API {args.id} has no field {args.field}")
else:
print(query[0])
|
f51915a6c373de39785d8273b2a9f6e11ff67b9e
|
test_dimuon.py
|
test_dimuon.py
|
from dimuon import find_pairs
def test_find_pairs():
particles = None
pairs = find_pairs(particles)
def test_no_particles():
particles = []
pairs = find_pairs(particles)
assert len(pairs) == 0
|
from dimuon import find_pairs
def test_find_pairs():
particles = None
pairs = find_pairs(particles)
def test_no_particles():
particles = []
pairs = find_pairs(particles)
assert len(pairs) == 0
def test_one_particle():
particles = [None]
pairs = find_pairs(particles)
assert len(pairs) == 0
|
Test for no pairs from one particle
|
Test for no pairs from one particle
|
Python
|
mit
|
benwaugh/dimuon
|
from dimuon import find_pairs
def test_find_pairs():
particles = None
pairs = find_pairs(particles)
def test_no_particles():
particles = []
pairs = find_pairs(particles)
assert len(pairs) == 0
Test for no pairs from one particle
|
from dimuon import find_pairs
def test_find_pairs():
particles = None
pairs = find_pairs(particles)
def test_no_particles():
particles = []
pairs = find_pairs(particles)
assert len(pairs) == 0
def test_one_particle():
particles = [None]
pairs = find_pairs(particles)
assert len(pairs) == 0
|
<commit_before>from dimuon import find_pairs
def test_find_pairs():
particles = None
pairs = find_pairs(particles)
def test_no_particles():
particles = []
pairs = find_pairs(particles)
assert len(pairs) == 0
<commit_msg>Test for no pairs from one particle<commit_after>
|
from dimuon import find_pairs
def test_find_pairs():
particles = None
pairs = find_pairs(particles)
def test_no_particles():
particles = []
pairs = find_pairs(particles)
assert len(pairs) == 0
def test_one_particle():
particles = [None]
pairs = find_pairs(particles)
assert len(pairs) == 0
|
from dimuon import find_pairs
def test_find_pairs():
particles = None
pairs = find_pairs(particles)
def test_no_particles():
particles = []
pairs = find_pairs(particles)
assert len(pairs) == 0
Test for no pairs from one particlefrom dimuon import find_pairs
def test_find_pairs():
particles = None
pairs = find_pairs(particles)
def test_no_particles():
particles = []
pairs = find_pairs(particles)
assert len(pairs) == 0
def test_one_particle():
particles = [None]
pairs = find_pairs(particles)
assert len(pairs) == 0
|
<commit_before>from dimuon import find_pairs
def test_find_pairs():
particles = None
pairs = find_pairs(particles)
def test_no_particles():
particles = []
pairs = find_pairs(particles)
assert len(pairs) == 0
<commit_msg>Test for no pairs from one particle<commit_after>from dimuon import find_pairs
def test_find_pairs():
particles = None
pairs = find_pairs(particles)
def test_no_particles():
particles = []
pairs = find_pairs(particles)
assert len(pairs) == 0
def test_one_particle():
particles = [None]
pairs = find_pairs(particles)
assert len(pairs) == 0
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.