content
stringlengths 128
6.74k
|
---|
<commit_msg>Mark vectors test as xfail (temporary)
<commit_before>
from __future__ import unicode_literals
from io import StringIO
word2vec_str = """, -0.046107 -0.035951 -0.560418
de -0.648927 -0.400976 -0.527124
. 0.113685 0.439990 -0.634510
-1.499184 -0.184280 -0.598371"""
def test_issue834(en_vocab):
f = StringIO(word2vec_str)
vector_length = en_vocab.load_vectors(f)
assert vector_length == 3
<commit_after>
from __future__ import unicode_literals
from io import StringIO
import pytest
word2vec_str = """, -0.046107 -0.035951 -0.560418
de -0.648927 -0.400976 -0.527124
. 0.113685 0.439990 -0.634510
-1.499184 -0.184280 -0.598371"""
@pytest.mark.xfail
def test_issue834(en_vocab):
f = StringIO(word2vec_str)
vector_length = en_vocab.load_vectors(f)
assert vector_length == 3
|
<commit_msg>Update bulk image generation command
<commit_before>from django.core.management.base import BaseCommand
import easy_thumbnails
from mediaman.models import ArtefactRepresentation
import os
class Command(BaseCommand):
help = "Generate thumbnails for Artefact Representations"
def handle(self, *args, **options):
unbuffered = os.fdopen(self.stdout.fileno(), 'w', 0)
self.stdout = unbuffered
ars = ArtefactRepresentation.objects.all()
self.stdout.write("Found %s images\n" % ars.count())
for ar in ars:
# self.stdout.write(str(ar.image) + "\n")
if ar.image.storage.exists(ar.image):
easy_thumbnails.files.generate_all_aliases(
ar.image, include_global=True)
self.stdout.write('.')
else:
self.stdout.write('n')
self.stdout.write("\nProcessed all images\n")
<commit_after>from django.core.management.base import BaseCommand
import easy_thumbnails
from mediaman.models import ArtefactRepresentation
import os
#import ImageFile
from PIL import ImageFile
class Command(BaseCommand):
help = "Generate thumbnails for Artefact Representations"
def handle(self, *args, **options):
unbuffered = os.fdopen(self.stdout.fileno(), 'w', 0)
self.stdout = unbuffered
ImageFile.MAXBLOCK = 1024 * 1024 * 10 # default is 64k, fixes "Suspension not allowed here" error from PIL
ars = ArtefactRepresentation.objects.filter(public=True)
self.stdout.write("Found %s public images\n" % ars.count())
for ar in ars:
# self.stdout.write(str(ar.image) + "\n")
if ar.image.storage.exists(ar.image):
easy_thumbnails.files.generate_all_aliases(
ar.image, include_global=True)
self.stdout.write('.')
else:
self.stdout.write('n')
self.stdout.write("\nProcessed all images\n")
|
<commit_msg>Move judges info to separate folder
Judges JSON contains not only declarations
<commit_before>const NAVBAR = [
{
title : 'Про нас',
state : 'about'
},
{
title : 'Головна',
state : 'home'
},
{
title : 'Судді',
state : 'list'
}
];
const SOURCE = '/source';
const URLS = {
listUrl : `${SOURCE}/judges.json`,
dictionaryUrl : `${SOURCE}/dictionary.json`,
dictionaryTimeStamp : `${SOURCE}/dictionary.json.timestamp`,
textUrl : `${SOURCE}/texts.json`,
textTimeStamp : `${SOURCE}/dictionary.json.timestamp`,
details : `/declarations/:key.json`
};
export { URLS, NAVBAR };
<commit_after>const NAVBAR = [
{
title : 'Про нас',
state : 'about'
},
{
title : 'Головна',
state : 'home'
},
{
title : 'Судді',
state : 'list'
}
];
const SOURCE = '/source';
const URLS = {
listUrl : `${SOURCE}/judges.json`,
dictionaryUrl : `${SOURCE}/dictionary.json`,
dictionaryTimeStamp : `${SOURCE}/dictionary.json.timestamp`,
textUrl : `${SOURCE}/texts.json`,
textTimeStamp : `${SOURCE}/dictionary.json.timestamp`,
details : `/judges/:key.json`
};
export { URLS, NAVBAR };
|
<commit_msg>Add rig_assets.json as package data.
<commit_before>version = '0.1.0'
with open('requirements.txt', 'r') as f:
install_requires = [x.strip() for x in f.readlines()]
from setuptools import setup, find_packages
setup(
name='bodylabs-rigger',
version=version,
author='Body Labs',
author_email='david.smith@bodylabs.com',
description="Utilities for rigging a mesh from Body Labs' BodyKit API.",
url='https://github.com/bodylabs/rigger',
license='BSD',
packages=find_packages(),
install_requires=install_requires,
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules'
]
)
<commit_after>version = '0.1.0'
with open('requirements.txt', 'r') as f:
install_requires = [x.strip() for x in f.readlines()]
from setuptools import setup, find_packages
setup(
name='bodylabs-rigger',
version=version,
author='Body Labs',
author_email='david.smith@bodylabs.com',
description="Utilities for rigging a mesh from Body Labs' BodyKit API.",
url='https://github.com/bodylabs/rigger',
license='BSD',
packages=find_packages(),
package_data={
'bodylabs_rigger.static': ['rig_assets.json']
},
install_requires=install_requires,
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules'
]
)
|
<commit_msg>Include README.md and LICENSE in the package
<commit_before>
import os
from distutils.core import setup
from evelink import __version__
__readme_path = os.path.join(os.path.dirname(__file__), "README.md")
__readme_contents = open(__readme_path).read()
setup(
name="EVELink",
version=__version__,
description="Python Bindings for the EVE Online API",
long_description=__readme_contents,
license="MIT License",
author="Valkyries of Night",
author_email="d-eve-lopment@googlegroups.com",
maintainer="Amber Yust",
maintainer_email="amber.yust@gmail.com",
url="https://github.com/eve-val/evelink",
download_url="https://github.com/eve-val/evelink/downloads",
packages=[
"evelink",
"evelink.cache",
"evelink.parsing",
],
scripts=["bin/evelink"],
provides=["evelink"],
classifiers=[
"Development Status :: 4 - Beta",
"License :: OSI Approved :: MIT License",
"Intended Audience :: Developers",
"Operating System :: OS Independent",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Topic :: Games/Entertainment",
"Topic :: Software Development :: Libraries :: Python Modules",
],
)
# vim: set et ts=4 sts=4 sw=4:
<commit_after>
import os
from distutils.core import setup
from evelink import __version__
__readme_path = os.path.join(os.path.dirname(__file__), "README.md")
__readme_contents = open(__readme_path).read()
setup(
name="EVELink",
version=__version__,
description="Python Bindings for the EVE Online API",
long_description=__readme_contents,
license="MIT License",
author="Valkyries of Night",
author_email="d-eve-lopment@googlegroups.com",
maintainer="Amber Yust",
maintainer_email="amber.yust@gmail.com",
url="https://github.com/eve-val/evelink",
download_url="https://github.com/eve-val/evelink/downloads",
packages=[
"evelink",
"evelink.cache",
"evelink.parsing",
],
data_files=[
('', ['README.md', 'LICENSE']),
],
scripts=["bin/evelink"],
provides=["evelink"],
classifiers=[
"Development Status :: 4 - Beta",
"License :: OSI Approved :: MIT License",
"Intended Audience :: Developers",
"Operating System :: OS Independent",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Topic :: Games/Entertainment",
"Topic :: Software Development :: Libraries :: Python Modules",
],
)
# vim: set et ts=4 sts=4 sw=4:
|
<commit_msg>Use array if fixed size length for errors
<commit_before>package lib
import (
"errors"
"strings"
)
// MultiError implements error interface.
// An instance of MultiError has zero or more errors.
type MultiError struct {
errs []error
}
// Push adds an error to MultiError.
func (m *MultiError) Push(errString string) {
m.errs = append(m.errs, errors.New(errString))
}
// HasError checks if MultiError has any error.
func (m *MultiError) HasError() *MultiError {
if len(m.errs) == 0 {
return nil
}
return m
}
// Error implements error interface.
func (m *MultiError) Error() string {
var formattedError []string
for _, e := range m.errs {
formattedError = append(formattedError, e.Error())
}
return strings.Join(formattedError, "\n")
}
<commit_after>package lib
import (
"errors"
"strings"
)
// MultiError implements error interface.
// An instance of MultiError has zero or more errors.
type MultiError struct {
errs []error
}
// Push adds an error to MultiError.
func (m *MultiError) Push(errString string) {
m.errs = append(m.errs, errors.New(errString))
}
// HasError checks if MultiError has any error.
func (m *MultiError) HasError() *MultiError {
if len(m.errs) == 0 {
return nil
}
return m
}
// Error implements error interface.
func (m *MultiError) Error() string {
formattedError := make([]string, len(m.errs))
for i, e := range m.errs {
formattedError[i] = e.Error()
}
return strings.Join(formattedError, "\n")
}
|
<commit_msg>Remove use of deprecated types.
Reviewer: Hugo Parente Lima <e250cbdf6b5a11059e9d944a6e5e9282be80d14c@openbossa.org>,
Luciano Wolf <luciano.wolf@openbossa.org>
<commit_before>'''Test cases for QDeclarativeView'''
import unittest
from PySide.QtCore import QUrl, QStringList, QVariant
from PySide.QtGui import QPushButton
from PySide.QtDeclarative import QDeclarativeView
from helper import adjust_filename, TimedQApplication
class TestQDeclarativeView(TimedQApplication):
def testQDeclarativeViewList(self):
view = QDeclarativeView()
dataList = QStringList(["Item 1", "Item 2", "Item 3", "Item 4"])
ctxt = view.rootContext()
ctxt.setContextProperty("myModel", dataList)
url = QUrl.fromLocalFile(adjust_filename('view.qml', __file__))
view.setSource(url)
view.show()
self.assertEqual(view.status(), QDeclarativeView.Ready)
self.app.exec_()
if __name__ == '__main__':
unittest.main()
<commit_after>'''Test cases for QDeclarativeView'''
import unittest
from PySide.QtCore import QUrl
from PySide.QtDeclarative import QDeclarativeView
from helper import adjust_filename, TimedQApplication
class TestQDeclarativeView(TimedQApplication):
def testQDeclarativeViewList(self):
view = QDeclarativeView()
dataList = ["Item 1", "Item 2", "Item 3", "Item 4"]
ctxt = view.rootContext()
ctxt.setContextProperty("myModel", dataList)
url = QUrl.fromLocalFile(adjust_filename('view.qml', __file__))
view.setSource(url)
view.show()
self.assertEqual(view.status(), QDeclarativeView.Ready)
self.app.exec_()
if __name__ == '__main__':
unittest.main()
|
<commit_msg>Exclude tests and cmt folder from installed packages.
<commit_before>from setuptools import setup, find_packages
import versioneer
def read_requirements():
import os
path = os.path.dirname(os.path.abspath(__file__))
requirements_file = os.path.join(path, 'requirements.txt')
try:
with open(requirements_file, 'r') as req_fp:
requires = req_fp.read().split()
except IOError:
return []
else:
return [require.split() for require in requires]
setup(name='PyMT',
version=versioneer.get_version(),
cmdclass=versioneer.get_cmdclass(),
description='The CSDMS Python Modeling Toolkit',
author='Eric Hutton',
author_email='huttone@colorado.edu',
url='http://csdms.colorado.edu',
setup_requires=['setuptools', ],
packages=find_packages(),
entry_points={
'console_scripts': [
'cmt-config=cmt.cmd.cmt_config:main',
],
},
)
<commit_after>from setuptools import setup, find_packages
import versioneer
def read_requirements():
import os
path = os.path.dirname(os.path.abspath(__file__))
requirements_file = os.path.join(path, 'requirements.txt')
try:
with open(requirements_file, 'r') as req_fp:
requires = req_fp.read().split()
except IOError:
return []
else:
return [require.split() for require in requires]
setup(name='PyMT',
version=versioneer.get_version(),
cmdclass=versioneer.get_cmdclass(),
description='The CSDMS Python Modeling Toolkit',
author='Eric Hutton',
author_email='huttone@colorado.edu',
url='http://csdms.colorado.edu',
setup_requires=['setuptools', ],
packages=find_packages(exclude=("tests*", "cmt")),
entry_points={
'console_scripts': [
'cmt-config=cmt.cmd.cmt_config:main',
],
},
)
|
<commit_msg>Add the missing license header
<commit_before>package com.woinject;
import com.google.inject.Injector;
/**
* @author <a href="mailto:hprange@gmail.com">Henrique Prange</a>
*/
public class Interceptor {
public static Object injectMembers(Object object) {
Injector injector = InjectableApplication.application().injector();
if (injector == null) {
return object;
}
injector.injectMembers(object);
return object;
}
// public static Object intercept(Class<?> clazz, Object[] parameters) {
// System.out.println("\tClass: " + clazz + " Parameters: " +
// java.util.Arrays.toString(parameters));
//
// if (WOSession.class.isAssignableFrom(clazz)) {
// System.out.println("Creating an instace of " + clazz.getName());
//
// Object instance = injector().getInstance(clazz);
//
// return instance;
// }
//
// if (EOEnterpriseObject.class.isAssignableFrom(clazz)) {
// System.out.println("Creating an instace of " + clazz.getName());
//
// Object instance = injector().getInstance(clazz);
//
// return instance;
// }
//
// return null;
// }
}
<commit_after>/**
* Copyright (C) 2010 hprange <hprange@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.woinject;
import com.google.inject.Injector;
/**
* @author <a href="mailto:hprange@gmail.com">Henrique Prange</a>
*/
public class Interceptor {
public static Object injectMembers(Object object) {
Injector injector = InjectableApplication.application().injector();
if (injector == null) {
return object;
}
injector.injectMembers(object);
return object;
}
// public static Object intercept(Class<?> clazz, Object[] parameters) {
// System.out.println("\tClass: " + clazz + " Parameters: " +
// java.util.Arrays.toString(parameters));
//
// if (WOSession.class.isAssignableFrom(clazz)) {
// System.out.println("Creating an instace of " + clazz.getName());
//
// Object instance = injector().getInstance(clazz);
//
// return instance;
// }
//
// if (EOEnterpriseObject.class.isAssignableFrom(clazz)) {
// System.out.println("Creating an instace of " + clazz.getName());
//
// Object instance = injector().getInstance(clazz);
//
// return instance;
// }
//
// return null;
// }
}
|
<commit_msg>Make Public Body document search an EdgeNgram Field to improve search<commit_before>from haystack import indexes
from haystack import site
from publicbody.models import PublicBody
class PublicBodyIndex(indexes.SearchIndex):
text = indexes.CharField(document=True, use_template=True)
name = indexes.CharField(model_attr='name')
geography = indexes.CharField(model_attr='geography')
topic_auto = indexes.EdgeNgramField(model_attr='topic')
name_auto = indexes.EdgeNgramField(model_attr='name')
url = indexes.CharField(model_attr='get_absolute_url')
def get_queryset(self):
"""Used when the entire index for model is updated."""
return PublicBody.objects.get_for_search_index()
site.register(PublicBody, PublicBodyIndex)
<commit_after>from haystack import indexes
from haystack import site
from publicbody.models import PublicBody
class PublicBodyIndex(indexes.SearchIndex):
text = indexes.EdgeNgramField(document=True, use_template=True)
name = indexes.CharField(model_attr='name')
geography = indexes.CharField(model_attr='geography')
topic_auto = indexes.EdgeNgramField(model_attr='topic')
name_auto = indexes.EdgeNgramField(model_attr='name')
url = indexes.CharField(model_attr='get_absolute_url')
def get_queryset(self):
"""Used when the entire index for model is updated."""
return PublicBody.objects.get_for_search_index()
site.register(PublicBody, PublicBodyIndex)
|
<commit_msg>Add test for implied timescales plot
<commit_before>import numpy as np
from msmbuilder.msm import MarkovStateModel, BayesianMarkovStateModel
from matplotlib.axes import SubplotBase
from seaborn.apionly import JointGrid
from ..plots import plot_pop_resids, plot_msm_network, plot_timescales
rs = np.random.RandomState(42)
data = rs.randint(low=0, high=10, size=100000)
msm = MarkovStateModel()
msm.fit(data)
bmsm = BayesianMarkovStateModel()
bmsm.fit(data)
def test_plot_pop_resids():
ax = plot_pop_resids(msm)
assert isinstance(ax, JointGrid)
def test_plot_msm_network():
ax = plot_msm_network(msm)
assert isinstance(ax, SubplotBase)
def test_plot_timescales_msm():
ax = plot_timescales(msm, n_timescales=3, xlabel='x', ylabel='y')
assert isinstance(ax, SubplotBase)
def test_plot_timescales_bmsm():
ax = plot_timescales(bmsm)
assert isinstance(ax, SubplotBase)
<commit_after>import numpy as np
from msmbuilder.msm import MarkovStateModel, BayesianMarkovStateModel
from matplotlib.axes import SubplotBase
from seaborn.apionly import JointGrid
from ..plots import plot_pop_resids, plot_msm_network, plot_timescales, plot_implied_timescales
rs = np.random.RandomState(42)
data = rs.randint(low=0, high=10, size=100000)
msm = MarkovStateModel()
msm.fit(data)
bmsm = BayesianMarkovStateModel()
bmsm.fit(data)
def test_plot_pop_resids():
ax = plot_pop_resids(msm)
assert isinstance(ax, JointGrid)
def test_plot_msm_network():
ax = plot_msm_network(msm)
assert isinstance(ax, SubplotBase)
def test_plot_timescales_msm():
ax = plot_timescales(msm, n_timescales=3, xlabel='x', ylabel='y')
assert isinstance(ax, SubplotBase)
def test_plot_timescales_bmsm():
ax = plot_timescales(bmsm)
assert isinstance(ax, SubplotBase)
def test_plot_implied_timescales():
lag_times = [1, 10, 50, 100, 200, 250, 500]
msm_objs = []
for lag in lag_times:
# Construct MSM
msm = MarkovStateModel(lag_time=lag, n_timescales=5)
msm.fit(clustered_trajs)
msm_objs.append(msm)
ax = plot_implied_timescales(msm_objs)
assert isinstance(ax, SubplotBase)
|
<commit_msg>Remove unused import of EAgainException
<commit_before>package nanomsg;
import nanomsg.exceptions.IOException;
import nanomsg.exceptions.EAgainException;
/**
* Common interface that should implement all sockets.
*/
public interface ISocket {
public void close();
public int getNativeSocket();
public void bind(final String dir) throws IOException;
public void connect(final String dir) throws IOException;
public void subscribe(final String data) throws IOException;
public int sendString(final String data, final boolean blocking) throws IOException, IOException;
public int sendString(final String data) throws IOException, IOException;
public int sendBytes(final byte[] data, final boolean blocking) throws IOException, IOException;
public int sendBytes(final byte[] data) throws IOException, IOException;
public String recvString(final boolean blocking) throws IOException, IOException;
public String recvString() throws IOException, IOException;
public byte[] recvBytes(final boolean blocking) throws IOException, IOException;
public byte[] recvBytes() throws IOException, IOException;
public int getFd(final int flag) throws IOException;
public void setSendTimeout(final int milis);
public void setRecvTimeout(final int milis);
}
<commit_after>package nanomsg;
import nanomsg.exceptions.IOException;
/**
* Common interface that should implement all sockets.
*/
public interface ISocket {
public void close();
public int getNativeSocket();
public void bind(final String dir) throws IOException;
public void connect(final String dir) throws IOException;
public void subscribe(final String data) throws IOException;
public int sendString(final String data, final boolean blocking) throws IOException, IOException;
public int sendString(final String data) throws IOException, IOException;
public int sendBytes(final byte[] data, final boolean blocking) throws IOException, IOException;
public int sendBytes(final byte[] data) throws IOException, IOException;
public String recvString(final boolean blocking) throws IOException, IOException;
public String recvString() throws IOException, IOException;
public byte[] recvBytes(final boolean blocking) throws IOException, IOException;
public byte[] recvBytes() throws IOException, IOException;
public int getFd(final int flag) throws IOException;
public void setSendTimeout(final int milis);
public void setRecvTimeout(final int milis);
}
|
<commit_msg>Fix code that gets post-processed command-line args
<commit_before>// Command aws-gen-gocli parses a JSON description of an AWS API and generates a
// Go file containing a client for the API.
//
// aws-gen-gocli EC2 apis/ec2/2014-10-01.api.json service/ec2/ec2.go
package main
import (
"flag"
"fmt"
"os"
"github.com/awslabs/aws-sdk-go/model"
)
func main() {
var svcPath string
var forceService bool
flag.StringVar(&svcPath, "path", "service", "generate in a specific directory (default: 'service')")
flag.BoolVar(&forceService, "force", false, "force re-generation of PACKAGE/service.go")
flag.Parse()
api := os.Args[len(os.Args)-flag.NArg()]
in, err := os.Open(api)
if err != nil {
panic(err)
}
defer in.Close()
if err := model.Load(in); err != nil {
panic(err)
}
if err := model.Generate(svcPath, forceService); err != nil {
fmt.Fprintf(os.Stderr, "error generating %s\n", os.Args[1])
panic(err)
}
}
<commit_after>// Command aws-gen-gocli parses a JSON description of an AWS API and generates a
// Go file containing a client for the API.
//
// aws-gen-gocli EC2 apis/ec2/2014-10-01.api.json service/ec2/ec2.go
package main
import (
"flag"
"fmt"
"os"
"github.com/awslabs/aws-sdk-go/model"
)
func main() {
var svcPath string
var forceService bool
flag.StringVar(&svcPath, "path", "service", "generate in a specific directory (default: 'service')")
flag.BoolVar(&forceService, "force", false, "force re-generation of PACKAGE/service.go")
flag.Parse()
api := flag.Arg(0)
in, err := os.Open(api)
if err != nil {
panic(err)
}
defer in.Close()
if err := model.Load(in); err != nil {
panic(err)
}
if err := model.Generate(svcPath, forceService); err != nil {
fmt.Fprintf(os.Stderr, "error generating %s\n", api)
panic(err)
}
}
|
<commit_msg>Remove 'is_live' from draft visibility check.
<commit_before>OWNER_PERMISSIONS = ['challenges.%s_submission' % v for v in ['edit', 'delete']]
class SubmissionBackend(object):
"""Provide custom permission logic for submissions."""
supports_object_permissions = True
supports_anonymous_user = True
def authenticate(self):
"""This backend doesn't provide any authentication functionality."""
return None
def has_perm(self, user_obj, perm, obj=None):
if perm in OWNER_PERMISSIONS:
# Owners can edit and delete their own submissions
if obj is not None and user_obj == obj.created_by.user:
return True
if perm == 'challenges.view_submission' and obj is not None:
# Live, non-draft submissions are visible to anyone. Other
# submissions are visible only to admins and their owners
return ((obj.is_live and not obj.is_draft) or
user_obj == obj.created_by.user)
return False
<commit_after>OWNER_PERMISSIONS = ['challenges.%s_submission' % v for v in ['edit', 'delete']]
class SubmissionBackend(object):
"""Provide custom permission logic for submissions."""
supports_object_permissions = True
supports_anonymous_user = True
def authenticate(self):
"""This backend doesn't provide any authentication functionality."""
return None
def has_perm(self, user_obj, perm, obj=None):
if perm in OWNER_PERMISSIONS:
# Owners can edit and delete their own submissions
if obj is not None and user_obj == obj.created_by.user:
return True
if perm == 'challenges.view_submission' and obj is not None:
# Live, non-draft submissions are visible to anyone. Other
# submissions are visible only to admins and their owners
return ((not obj.is_draft) or user_obj == obj.created_by.user)
return False
|
<commit_msg>Add TARGET_PLATFORM macro for target selection
<commit_before>
void checkErr(cl_int err, const char *name)
{
if (err == CL_SUCCESS)
return;
std::cerr << "[ERROR] In " << name << ", with code: " << err << std::endl;
exit(1);
}
int main(int argc, char **argv)
{
cl_int err;
std::vector<cl::Platform> platformList;
cl::Platform::get(&platformList);
checkErr(platformList.size() != 0 ? CL_SUCCESS : -1, "cl::Platform::get");
std::clog << "Platform number is: " << platformList.size() << std::endl;std::string platformVendor;
platformList[0].getInfo((cl_platform_info)CL_PLATFORM_VENDOR, &platformVendor);
std::clog << "Platform is by: " << platformVendor << "\n";
cl_context_properties cprops[] = {CL_CONTEXT_PLATFORM, (cl_context_properties)(platformList[0])(), 0};
cl::Context context(CL_DEVICE_TYPE_GPU, cprops, NULL, NULL, &err);
checkErr(err, "Context::Context()");
return 0;
}
<commit_after>
void checkErr(cl_int err, const char *name)
{
if (err == CL_SUCCESS)
return;
std::cerr << "[ERROR] In " << name << ", with code: " << err << std::endl;
exit(1);
}
int main(int argc, char **argv)
{
cl_int err;
std::vector<cl::Platform> platformList;
cl::Platform::get(&platformList);
checkErr(platformList.size() != 0 ? CL_SUCCESS : -1, "cl::Platform::get");
std::clog << "Platform number is: " << platformList.size() << std::endl;std::string platformVendor;
platformList[0].getInfo((cl_platform_info)CL_PLATFORM_VENDOR, &platformVendor);
std::clog << "Platform is by: " << platformVendor << "\n";
cl_context_properties cprops[] = {CL_CONTEXT_PLATFORM, (cl_context_properties)(platformList[0])(), 0};
cl::Context context(GET_TARGET_PLATFORM, cprops, NULL, NULL, &err);
checkErr(err, "Context::Context()");
return 0;
}
|
<commit_msg>Use argparse (instead of getopt) to get the usage information
<commit_before>
import sys
import getopt
import Convert
def usage():
print "Usage: to be done."
def main(argv):
try:
opts, args = getopt.getopt(argv, "hb:d", ["help", "book="])
except getopt.GetoptError:
usage()
sys.exit(2)
for opt, arg in opts:
if opt in ("-h", "--help"):
usage()
sys.exit(2)
elif opt == "-d":
global _debug
_debug = 1
elif opt in ("-b","--book"):
filePath = arg
try:
content = None
with open(filePath,'r') as file:
content = file.read().decode("utf-8")
content = Convert.Convert(content)
with open(filePath,'w') as file:
file.write(content.encode("utf-8"))
except IOError:
print ("IOError occurs while handling the file (" + filePath + ").")
if __name__ == '__main__':
main(sys.argv[1:])<commit_after>
import sys
import argparse
import Convert
def main():
parser = argparse.ArgumentParser(description='Generate a classic book with the desired format.')
parser.add_argument('book', type=str, help='a book file')
args = parser.parse_args()
filePath = args.book
try:
content = None
with open(filePath,'r') as file:
content = file.read().decode("utf-8")
content = Convert.Convert(content)
with open(filePath,'w') as file:
file.write(content.encode("utf-8"))
except IOError:
print ("IOError occurs while handling the file (" + filePath + ").")
if __name__ == '__main__':
main()
|
<commit_msg>Use relaxed memory ordering for speed.
<commit_before>// Copyright 2017 Yahoo Holdings. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root.
#include "retrytransienterrorspolicy.h"
#include <vespa/messagebus/errorcode.h>
namespace mbus {
RetryTransientErrorsPolicy::RetryTransientErrorsPolicy() :
_enabled(true),
_baseDelay(1.0)
{}
RetryTransientErrorsPolicy &
RetryTransientErrorsPolicy::setEnabled(bool enabled) {
_enabled = enabled;
return *this;
}
RetryTransientErrorsPolicy &
RetryTransientErrorsPolicy::setBaseDelay(double baseDelay) {
_baseDelay = baseDelay;
return *this;
}
bool
RetryTransientErrorsPolicy::canRetry(uint32_t errorCode) const {
return _enabled && errorCode < ErrorCode::FATAL_ERROR;
}
double
RetryTransientErrorsPolicy::getRetryDelay(uint32_t retry) const {
return _baseDelay * retry;
}
}
<commit_after>// Copyright 2017 Yahoo Holdings. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root.
#include "retrytransienterrorspolicy.h"
#include <vespa/messagebus/errorcode.h>
namespace mbus {
RetryTransientErrorsPolicy::RetryTransientErrorsPolicy() :
_enabled(true),
_baseDelay(1.0)
{}
RetryTransientErrorsPolicy &
RetryTransientErrorsPolicy::setEnabled(bool enabled) {
_enabled = enabled;
return *this;
}
RetryTransientErrorsPolicy &
RetryTransientErrorsPolicy::setBaseDelay(double baseDelay) {
_baseDelay = baseDelay;
return *this;
}
bool
RetryTransientErrorsPolicy::canRetry(uint32_t errorCode) const {
return _enabled.load(std::memory_order_relaxed) && errorCode < ErrorCode::FATAL_ERROR;
}
double
RetryTransientErrorsPolicy::getRetryDelay(uint32_t retry) const {
return _baseDelay.load(std::memory_order_relaxed) * retry;
}
}
|
<commit_msg>[FIX] medical_patient_species: Remove tests for is_person (default is False, human set to True in xml).
Re-add test ensuring warning raised if trying to unlink Human.
<commit_before>
from openerp.tests.common import TransactionCase
class TestMedicalPatientSpecies(TransactionCase):
def setUp(self):
super(TestMedicalPatientSpecies, self).setUp()
self.human = self.env.ref('medical_patient_species.human')
self.dog = self.env.ref('medical_patient_species.dog')
def test_create_is_person(self):
''' Tests on creation if Human, is_person is True '''
self.assertTrue(
self.human.is_person, 'Should be True if Human'
)
def test_create_not_is_person(self):
''' Tests on creation if not Human, is_person is False '''
self.assertFalse(
self.dog.is_person, 'Should be False if not Human'
)
<commit_after>
from openerp.tests.common import TransactionCase
from openerp.exceptions import Warning
class TestMedicalPatientSpecies(TransactionCase):
def setUp(self):
super(TestMedicalPatientSpecies, self).setUp()
self.human = self.env.ref('medical_patient_species.human')
self.dog = self.env.ref('medical_patient_species.dog')
def test_unlink_human(self):
''' Test raises Warning if unlinking human '''
with self.assertRaises(Warning):
self.human.unlink()
|
<commit_msg>Make it more clear that users should enter courses
<commit_before>from django import forms
from django.utils.translation import ugettext_lazy as _
from dal import autocomplete
from .models import Course, Options
class OptionsForm(forms.ModelForm):
"""
A form solely used for autocompleting Courses in the admin,
using django-autocomplete-light,
"""
self_chosen_courses = forms.ModelMultipleChoiceField(
label=_('Dine fag'),
queryset=Course.objects.all(),
widget=autocomplete.ModelSelect2Multiple(
url='semesterpage-course-autocomplete',
attrs = {
'data-placeholder': _('Tast inn fagkode eller fagnavn'),
# Only trigger autocompletion after 3 characters have been typed
'data-minimum-input-length': 3,
},
)
)
class Meta:
model = Options
# The fields are further restricted in .admin.py
fields = ('__all__')
<commit_after>from django import forms
from django.utils.translation import ugettext_lazy as _
from dal import autocomplete
from .models import Course, Options
class OptionsForm(forms.ModelForm):
"""
A form solely used for autocompleting Courses in the admin,
using django-autocomplete-light,
"""
self_chosen_courses = forms.ModelMultipleChoiceField(
label=_('Skriv inn dine fag'),
queryset=Course.objects.all(),
widget=autocomplete.ModelSelect2Multiple(
url='semesterpage-course-autocomplete',
attrs = {
'data-placeholder': _('Tast inn fagkode eller fagnavn'),
# Only trigger autocompletion after 3 characters have been typed
'data-minimum-input-length': 3,
},
)
)
def __init__(self, *args, **kwargs):
super(OptionsForm, self).__init__(*args, **kwargs)
self.fields['self_chosen_courses'].help_text = _(
'Tast inn fagkode eller fagnavn for å legge til et nytt fag\
på hjemmesiden din.'
)
class Meta:
model = Options
# The fields are further restricted in .admin.py
fields = ('__all__')
|
<commit_msg>Fix narrowing conversion from double to float
<commit_before>
constexpr inline float Deg2Rad(float x) noexcept { return x * float{M_PI/180.0}; }
constexpr inline float Rad2Deg(float x) noexcept { return x * float{180.0/M_PI}; }
#endif /* AL_MATH_DEFS_H */
<commit_after>
constexpr inline float Deg2Rad(float x) noexcept { return x * static_cast<float>(M_PI/180.0); }
constexpr inline float Rad2Deg(float x) noexcept { return x * static_cast<float>(180.0/M_PI); }
#endif /* AL_MATH_DEFS_H */
|
<commit_msg>Update depracted flask extension code
<commit_before>from auth import jwt
from spark import Spark
spark = Spark()
from gc3pie import GC3Pie
gc3pie = GC3Pie()
from flask.ext.uwsgi_websocket import GeventWebSocket
websocket = GeventWebSocket()
from flask.ext.redis import FlaskRedis
redis_store = FlaskRedis()
<commit_after>from auth import jwt
from spark import Spark
spark = Spark()
from gc3pie import GC3Pie
gc3pie = GC3Pie()
from flask_uwsgi_websocket import GeventWebSocket
websocket = GeventWebSocket()
from flask_redis import FlaskRedis
redis_store = FlaskRedis()
|
<commit_msg>Fix module exports related warning
src\core\se\jbee\inject\bootstrap\Supply.java:52:
warning: [exports] class Logger in module java.logging
is not indirectly exported using requires transitive
public static final Supplier<Logger> LOGGER = (dep, context) //
<commit_before>/*
* Copyright (c) 2012-2020, Jan Bernitt
*
* Licensed under the Apache License, Version 2.0, http://www.apache.org/licenses/LICENSE-2.0
*/
/**
* Defines the API of Silk DI.
*
* @uses se.jbee.inject.declare.Bundle
* @uses se.jbee.inject.declare.ModuleWith
*/
module se.jbee.inject {
exports se.jbee.inject;
exports se.jbee.inject.action;
exports se.jbee.inject.bind;
exports se.jbee.inject.bind.serviceloader;
exports se.jbee.inject.bootstrap;
exports se.jbee.inject.config;
exports se.jbee.inject.container;
exports se.jbee.inject.declare;
exports se.jbee.inject.event;
exports se.jbee.inject.extend;
exports se.jbee.inject.scope;
requires java.logging;
requires java.desktop;
requires java.management;
uses se.jbee.inject.declare.Bundle;
uses se.jbee.inject.declare.ModuleWith;
}
<commit_after>/*
* Copyright (c) 2012-2020, Jan Bernitt
*
* Licensed under the Apache License, Version 2.0, http://www.apache.org/licenses/LICENSE-2.0
*/
/**
* Defines the API of Silk DI.
*
* @uses se.jbee.inject.declare.Bundle
* @uses se.jbee.inject.declare.ModuleWith
*/
module se.jbee.inject {
exports se.jbee.inject;
exports se.jbee.inject.action;
exports se.jbee.inject.bind;
exports se.jbee.inject.bind.serviceloader;
exports se.jbee.inject.bootstrap;
exports se.jbee.inject.config;
exports se.jbee.inject.container;
exports se.jbee.inject.declare;
exports se.jbee.inject.event;
exports se.jbee.inject.extend;
exports se.jbee.inject.scope;
requires transitive java.logging;
requires java.desktop;
requires java.management;
uses se.jbee.inject.declare.Bundle;
uses se.jbee.inject.declare.ModuleWith;
}
|
<commit_msg>Add API for datatype conversion.
git-svn-id: 77a43f9646713b91fea7788fad5dfbf67e151ece@6920 94b884b6-d6fd-0310-90d3-974f1d3f35e1
<commit_before>
NPY_NO_EXPORT PyObject *
PyArray_CastToType(PyArrayObject *mp, PyArray_Descr *at, int fortran);
#endif
<commit_after>
NPY_NO_EXPORT PyObject *
PyArray_CastToType(PyArrayObject *mp, PyArray_Descr *at, int fortran);
NPY_NO_EXPORT int
PyArray_CastTo(PyArrayObject *out, PyArrayObject *mp);
NPY_NO_EXPORT PyArray_VectorUnaryFunc *
PyArray_GetCastFunc(PyArray_Descr *descr, int type_num);
NPY_NO_EXPORT int
PyArray_CanCastSafely(int fromtype, int totype);
NPY_NO_EXPORT Bool
PyArray_CanCastTo(PyArray_Descr *from, PyArray_Descr *to);
NPY_NO_EXPORT int
PyArray_ObjectType(PyObject *op, int minimum_type);
NPY_NO_EXPORT PyArrayObject **
PyArray_ConvertToCommonType(PyObject *op, int *retn);
NPY_NO_EXPORT int
PyArray_ValidType(int type);
#endif
|
<commit_msg>Add git sha to version if available
<commit_before>
__all__ = ('VERSION', 'Client', 'load')
try:
VERSION = __import__('pkg_resources') \
.get_distribution('raven').version
except Exception as e:
VERSION = 'unknown'
from raven.base import * # NOQA
from raven.conf import * # NOQA
<commit_after>
import os
import os.path
from raven.base import * # NOQA
from raven.conf import * # NOQA
__all__ = ('VERSION', 'Client', 'load', 'get_version')
try:
VERSION = __import__('pkg_resources') \
.get_distribution('raven').version
except Exception as e:
VERSION = 'unknown'
def _get_git_revision(path):
revision_file = os.path.join(path, 'refs', 'heads', 'master')
if not os.path.exists(revision_file):
return None
fh = open(revision_file, 'r')
try:
return fh.read().strip()[:7]
finally:
fh.close()
def get_revision():
"""
:returns: Revision number of this branch/checkout, if available. None if
no revision number can be determined.
"""
package_dir = os.path.dirname(__file__)
checkout_dir = os.path.normpath(os.path.join(package_dir, os.pardir, os.pardir))
path = os.path.join(checkout_dir, '.git')
if os.path.exists(path):
return _get_git_revision(path)
return None
def get_version():
base = VERSION
if __build__:
base = '%s (%s)' % (base, __build__)
return base
__build__ = get_revision()
__docformat__ = 'restructuredtext en'
|
<commit_msg>Add changes to correct changes
<commit_before>int getCursorPosition(int *rows, int *cols)
{
}
<commit_after>
//
// Use the ESC [6n escape sequence to query the horizontal cursor position
// and return it. On error -1 is returned, on success the position of the
// cursor is stored at *rows and *cols and 0 is returned.
//
int getCursorPosition(int ifd, int ofd, int *rows, int *cols) {
char buf[32];
unsigned int i = 0;
/* Report cursor location */
if (write(ofd, "\x1b[6n", 4) != 4) return -1;
/* Read the response: ESC [ rows ; cols R */
while (i < sizeof(buf)-1) {
if (read(ifd,buf+i,1) != 1) break;
if (buf[i] == 'R') break;
i++;
}
buf[i] = '\0';
/* Parse it. */
if (buf[0] != ESC || buf[1] != '[') return -1;
if (sscanf(buf+2,"%d;%d",rows,cols) != 2) return -1;
return 0;
}
|
<commit_msg>Add missing header file to the list of sources<commit_before>try:
from setuptools import setup
from setuptools.extension import Extension
except ImportError:
from distutils.core import setup, Extension
def main():
module = Extension('rrdtool',
sources=['rrdtoolmodule.c'],
include_dirs=['/usr/local/include'],
library_dirs=['/usr/local/lib'],
libraries=['rrd'])
kwargs = dict(
name='rrdtool',
version='0.1.7',
description='Python bindings for rrdtool',
keywords=['rrdtool'],
author='Christian Kroeger, Hye-Shik Chang',
author_email='commx@commx.ws',
license='LGPL',
url='https://github.com/commx/python-rrdtool',
ext_modules=[module],
test_suite="tests"
)
setup(**kwargs)
if __name__ == '__main__':
main()
<commit_after>try:
from setuptools import setup
from setuptools.extension import Extension
except ImportError:
from distutils.core import setup, Extension
def main():
module = Extension('rrdtool',
sources=['rrdtoolmodule.h', 'rrdtoolmodule.c'],
include_dirs=['/usr/local/include'],
library_dirs=['/usr/local/lib'],
libraries=['rrd'])
kwargs = dict(
name='rrdtool',
version='0.1.7',
description='Python bindings for rrdtool',
keywords=['rrdtool'],
author='Christian Kroeger, Hye-Shik Chang',
author_email='commx@commx.ws',
license='LGPL',
url='https://github.com/commx/python-rrdtool',
ext_modules=[module],
test_suite="tests"
)
setup(**kwargs)
if __name__ == '__main__':
main()
|
<commit_msg>Make all changes to app/ run on all trybot platforms, not just the big three.
Anyone who's changing a header here may break the chromeos build.
BUG=none
TEST=none
Review URL: http://codereview.chromium.org/2838027
git-svn-id: http://src.chromium.org/svn/trunk/src@51000 4ff67af0-8c30-449e-8e8b-ad334ec8d88c
Former-commit-id: 7a0c0e6ed56e847b7b6300c1a0b4a427f26b296d<commit_before>
"""Makes sure that the app/ code is cpplint clean."""
INCLUDE_CPP_FILES_ONLY = (
r'.*\.cc$', r'.*\.h$'
)
EXCLUDE = (
# Autogenerated window resources files are off limits
r'.*resource.h$',
)
def CheckChangeOnUpload(input_api, output_api):
results = []
black_list = input_api.DEFAULT_BLACK_LIST + EXCLUDE
sources = lambda x: input_api.FilterSourceFile(
x, white_list=INCLUDE_CPP_FILES_ONLY, black_list=black_list)
results.extend(input_api.canned_checks.CheckChangeLintsClean(
input_api, output_api, sources))
return results
<commit_after>
"""Makes sure that the app/ code is cpplint clean."""
INCLUDE_CPP_FILES_ONLY = (
r'.*\.cc$', r'.*\.h$'
)
EXCLUDE = (
# Autogenerated window resources files are off limits
r'.*resource.h$',
)
def CheckChangeOnUpload(input_api, output_api):
results = []
black_list = input_api.DEFAULT_BLACK_LIST + EXCLUDE
sources = lambda x: input_api.FilterSourceFile(
x, white_list=INCLUDE_CPP_FILES_ONLY, black_list=black_list)
results.extend(input_api.canned_checks.CheckChangeLintsClean(
input_api, output_api, sources))
return results
def GetPreferredTrySlaves():
return ['win', 'linux', 'linux_view', 'linux_chromeos', 'mac']
|
<commit_msg>Add some tests for new decimal() params
<commit_before>from rich import filesize
def test_traditional():
assert filesize.decimal(0) == "0 bytes"
assert filesize.decimal(1) == "1 byte"
assert filesize.decimal(2) == "2 bytes"
assert filesize.decimal(1000) == "1.0 kB"
assert filesize.decimal(1.5 * 1000 * 1000) == "1.5 MB"
def test_pick_unit_and_suffix():
units = ["bytes", "KB", "MB", "GB", "TB", "PB", "EB", "ZB", "YB"]
assert filesize.pick_unit_and_suffix(50, units, 1024) == (1, "bytes")
assert filesize.pick_unit_and_suffix(2048, units, 1024) == (1024, "KB")
<commit_after>from rich import filesize
def test_traditional():
assert filesize.decimal(0) == "0 bytes"
assert filesize.decimal(1) == "1 byte"
assert filesize.decimal(2) == "2 bytes"
assert filesize.decimal(1000) == "1.0 kB"
assert filesize.decimal(1.5 * 1000 * 1000) == "1.5 MB"
assert filesize.decimal(0, precision=2) == "0 bytes"
assert filesize.decimal(1111, precision=0) == "1 kB"
assert filesize.decimal(1111, precision=1) == "1.1 kB"
assert filesize.decimal(1111, precision=2) == "1.11 kB"
assert filesize.decimal(1111, separator="") == "1.1kB"
def test_pick_unit_and_suffix():
units = ["bytes", "KB", "MB", "GB", "TB", "PB", "EB", "ZB", "YB"]
assert filesize.pick_unit_and_suffix(50, units, 1024) == (1, "bytes")
assert filesize.pick_unit_and_suffix(2048, units, 1024) == (1024, "KB")
|
<commit_msg>Add comment to new test
<commit_before>from __future__ import unicode_literals
import numpy
from ...pipeline import Tagger
from ...vectors import Vectors
from ...vocab import Vocab
from ..util import make_tempdir
def test_issue1727():
data = numpy.ones((3, 300), dtype='f')
keys = [u'I', u'am', u'Matt']
vectors = Vectors(data=data, keys=keys)
tagger = Tagger(Vocab())
tagger.add_label('PRP')
tagger.begin_training()
assert tagger.cfg.get('pretrained_dims', 0) == 0
tagger.vocab.vectors = vectors
with make_tempdir() as path:
tagger.to_disk(path)
tagger = Tagger(Vocab()).from_disk(path)
assert tagger.cfg.get('pretrained_dims', 0) == 0
<commit_after>'''Test that models with no pretrained vectors can be deserialized correctly
after vectors are added.'''
from __future__ import unicode_literals
import numpy
from ...pipeline import Tagger
from ...vectors import Vectors
from ...vocab import Vocab
from ..util import make_tempdir
def test_issue1727():
data = numpy.ones((3, 300), dtype='f')
keys = [u'I', u'am', u'Matt']
vectors = Vectors(data=data, keys=keys)
tagger = Tagger(Vocab())
tagger.add_label('PRP')
tagger.begin_training()
assert tagger.cfg.get('pretrained_dims', 0) == 0
tagger.vocab.vectors = vectors
with make_tempdir() as path:
tagger.to_disk(path)
tagger = Tagger(Vocab()).from_disk(path)
assert tagger.cfg.get('pretrained_dims', 0) == 0
|
<commit_msg>Use date-format function for file-name
<commit_before>import os
from datetime import datetime
from time import time
class Result:
def __init__(self, directory):
date = datetime.fromtimestamp(time())
name = '%d-%d-%d_%d-%d-%d' % (
date.year,
date.month,
date.day,
date.hour,
date.minute,
date.second)
self.file = File(directory, name)
class File:
def __init__(self, directory, name):
if not os.path.exists(directory):
os.makedirs(directory)
self.path = os.path.join(directory, name)
if not os.path.exists(self.path):
open(self.path, 'w').close()
def write_line(self, text):
stream = open(self.path, 'a')
stream.write('%s\n' % text)
stream.close()
<commit_after>import os
from datetime import datetime
from time import time
class Result:
def __init__(self, directory):
date = datetime.fromtimestamp(time())
self.file = File(directory, date.strftime('%Y-%m-%d_%H-%M-%S'))
class File:
def __init__(self, directory, name):
if not os.path.exists(directory):
os.makedirs(directory)
self.path = os.path.join(directory, name)
if not os.path.exists(self.path):
open(self.path, 'w').close()
def write_line(self, text):
stream = open(self.path, 'a')
stream.write('%s\n' % text)
stream.close()
|
<commit_msg>Make compiler invocation more Makefile-friendly
<commit_before>from redux.codegenerator import compile_script
from argparse import ArgumentParser
from os.path import splitext
parser = ArgumentParser(description='Compile a Redux script to Rescript.')
parser.add_argument('filenames', metavar='FILE', nargs='+',
help='script to be compiled to Rescript')
args = parser.parse_args()
for filename in args.filenames:
with open(filename, "rt") as file_:
input_code = file_.read()
base_filename, extension = splitext(filename)
with open(base_filename + ".ais", "wt") as file_:
file_.write(compile_script(filename, input_code))
<commit_after>from redux.codegenerator import compile_script
from argparse import ArgumentParser
from os.path import splitext
parser = ArgumentParser(description='Compile a Redux script to Rescript.')
parser.add_argument('input_filename', metavar='FILE',
help='script to be compiled to Rescript')
parser.add_argument('output_filename', metavar='FILE',
help='script to be compiled to Rescript')
args = parser.parse_args()
filename = args.input_filename
assert filename, "no input file given"
with open(filename, "rt") as file_:
input_code = file_.read()
output_code = compile_script(filename, input_code)
base_filename, extension = splitext(filename)
with open(args.output_filename, "wt") as file_:
file_.write(output_code)
|
<commit_msg>Add magicPower to WeaponStats fragment
<commit_before>/*
* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/.
*
*/
import gql from 'graphql-tag';
export const WeaponStatsFragment = gql`
fragment WeaponStats on WeaponStat_Single {
piercingDamage
piercingBleed
piercingArmorPenetration
slashingDamage
slashingBleed
slashingArmorPenetration
crushingDamage
fallbackCrushingDamage
disruption
deflectionAmount
physicalProjectileSpeed
knockbackAmount
stability
falloffMinDistance
falloffMaxDistance
falloffReduction
deflectionRecovery
staminaCost
physicalPreparationTime
physicalRecoveryTime
range
}
`;
<commit_after>/*
* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/.
*
*/
import gql from 'graphql-tag';
export const WeaponStatsFragment = gql`
fragment WeaponStats on WeaponStat_Single {
piercingDamage
piercingBleed
piercingArmorPenetration
slashingDamage
slashingBleed
slashingArmorPenetration
crushingDamage
fallbackCrushingDamage
magicPower
disruption
deflectionAmount
physicalProjectileSpeed
knockbackAmount
stability
falloffMinDistance
falloffMaxDistance
falloffReduction
deflectionRecovery
staminaCost
physicalPreparationTime
physicalRecoveryTime
range
}
`;
|
<commit_msg>Support Google Apps drive urls
<commit_before>import re
__author__ = 'tigge'
from wand.image import Image
def fix_image(filename, max_width):
with Image(filename=filename) as img:
img.auto_orient()
if img.width > max_width:
ratio = img.height / img.width
img.resize(width=max_width, height=round(max_width * ratio))
img.type = 'optimize'
img.compression_quality = 80
img.save(filename=filename)
def fix_google_drive_download_url(url):
url = re.sub(r"https://drive\.google\.com/file/d/(.*?)/view\?usp=.*",
r"https://docs.google.com/uc?authuser=0&id=\1&export=download", url)
return url
def fix_dropbox_download_url(url):
url = url[:-5] + "?dl=1" if url.startswith("https://www.dropbox.com") and url.endswith("?dl=0") else url
return url
<commit_after>import re
__author__ = 'tigge'
from wand.image import Image
def fix_image(filename, max_width):
with Image(filename=filename) as img:
img.auto_orient()
if img.width > max_width:
ratio = img.height / img.width
img.resize(width=max_width, height=round(max_width * ratio))
img.type = 'optimize'
img.compression_quality = 80
img.save(filename=filename)
def fix_google_drive_download_url(url):
url = re.sub(r"https://drive\.google\.com/(?:a/.*){0,1}file/d/(.*?)/view\?usp=.*",
r"https://drive.google.com/uc?authuser=0&id=\1&export=download", url)
return url
def fix_dropbox_download_url(url):
url = url[:-5] + "?dl=1" if url.startswith("https://www.dropbox.com") and url.endswith("?dl=0") else url
return url
|
<commit_msg>Revert "Added a prod option to the rquirements checker"
This reverts commit 5b9ae76d157d068ef456d5caa5c4352a139f528b.
<commit_before>import subprocess
from django.core.management.base import BaseCommand
class Command(BaseCommand):
help = 'Ensure that all installed packages are in requirements.txt'
def _get_file_contents(self, name):
req_file = open('requirements/%s.txt' % name)
reqs = req_file.read()
req_file.close()
req_list = reqs.split('\n')
if req_list[0].startswith('-r'):
req_list = req_list[1:]
return req_list
def handle(self, *args, **options):
check_prod = False
if len(args) == 1:
if args[0] == 'prod':
check_prod = True
else:
print "Unrecognized option %s; defaulting to checking dev requirements." % args[0]
proc = subprocess.Popen(['pip', 'freeze'], stdout=subprocess.PIPE)
freeze_results = proc.communicate()[0].split('\n')
req_list = self._get_file_contents('common')
if check_prod:
req_list.extend(self._get_file_contents('prod'))
else:
req_list.extend(self._get_file_contents('dev'))
sorted(freeze_results)
sorted(req_list)
for freeze_item in freeze_results:
if freeze_item not in req_list:
print "Item is missing from requirements files: %s" % freeze_item
for req_item in req_list:
if req_item not in freeze_results:
print "Required item is not installed: %s" % req_item
<commit_after>import subprocess
from django.core.management.base import BaseCommand
class Command(BaseCommand):
help = 'Ensure that all installed packages are in requirements.txt'
def handle(self, *args, **options):
proc = subprocess.Popen(['pip', 'freeze'], stdout=subprocess.PIPE)
freeze_results = proc.communicate()[0].split('\n')
common_file = open('requirements/common.txt')
reqs = common_file.read()
common_file.close()
req_list = reqs.split('\n')
dev_file = open('requirements/dev.txt')
reqs = dev_file.read()
dev_file.close()
req_list.extend(reqs.split('\n')[1:])
sorted(freeze_results)
sorted(req_list)
for freeze_item in freeze_results:
if freeze_item not in req_list:
print "Item is missing from requirements files: %s" % freeze_item
|
<commit_msg>Update metadata used by pypi
<commit_before>from setuptools import setup, find_packages
setup(
name='aweber_api',
version='1.1.3',
packages=find_packages(exclude=['tests']),
url='https://github.com/aweber/AWeber-API-Python-Library',
install_requires = [
'httplib2>=0.7.0',
'oauth2>=1.2',
],
tests_require = [
'dingus',
'coverage',
],
setup_requires = [
'nose',
],
include_package_data=True
)
<commit_after>from setuptools import setup, find_packages
from sys import version
if version < '2.2.3':
from distutils.dist import DistributionMetadata
DistributionMetadata.classifiers = None
DistributionMetadata.download_url = None
setup(
name='aweber_api',
version='1.1.3',
author='AWeber Dev Team',
author_email='api@aweber.com',
maintainer='AWeber API Team',
maintainer_email='api@aweber.com',
url='https://github.com/aweber/AWeber-API-Python-Library',
download_url='http://pypi.python.org/pypi/aweber_api',
description='The AWeber API Python Library allows you to quickly get up '
'and running with integrating access to the AWeber API into your '
'Python applications.',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Environment :: Web Environment',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Software Development :: Libraries :: Python Modules',
'License :: OSI Approved :: BSD License',
],
packages=find_packages(exclude=['tests']),
install_requires=[
'httplib2>=0.7.0',
'oauth2>=1.2',
],
tests_require=[
'dingus',
'coverage',
],
setup_requires=[
'nose',
],
include_package_data=True
)
|
<commit_msg>Add test for reading XML with UTF8 BOM
<commit_before>package org.zwobble.mammoth.tests;
import org.junit.Test;
import org.zwobble.mammoth.Mammoth;
import org.zwobble.mammoth.results.Result;
import java.io.File;
import static org.hamcrest.Matchers.is;
import static org.junit.Assert.assertThat;
import static org.zwobble.mammoth.results.Result.success;
import static org.zwobble.mammoth.tests.DeepReflectionMatcher.deepEquals;
public class MammothTests {
@Test
public void emptyParagraphsAreIgnoredByDefault() {
assertThat(
convertToHtml("empty.docx").getValue(),
is(""));
}
@Test
public void docxContainingOneParagraphIsConvertedToSingleParagraphElement() {
assertThat(
convertToHtml("single-paragraph.docx"),
deepEquals(success("<p>Walking on imported air</p>")));
}
private Result<String> convertToHtml(String name) {
File file = TestData.file(name);
return Mammoth.convertToHtml(file);
}
}
<commit_after>package org.zwobble.mammoth.tests;
import org.junit.Test;
import org.zwobble.mammoth.Mammoth;
import org.zwobble.mammoth.results.Result;
import java.io.File;
import static org.hamcrest.Matchers.is;
import static org.junit.Assert.assertThat;
import static org.zwobble.mammoth.results.Result.success;
import static org.zwobble.mammoth.tests.DeepReflectionMatcher.deepEquals;
public class MammothTests {
@Test
public void emptyParagraphsAreIgnoredByDefault() {
assertThat(
convertToHtml("empty.docx").getValue(),
is(""));
}
@Test
public void docxContainingOneParagraphIsConvertedToSingleParagraphElement() {
assertThat(
convertToHtml("single-paragraph.docx"),
deepEquals(success("<p>Walking on imported air</p>")));
}
@Test
public void canReadFilesWithUtf8Bom() {
assertThat(
convertToHtml("utf8-bom.docx"),
deepEquals(success("<p>This XML has a byte order mark.</p>")));
}
private Result<String> convertToHtml(String name) {
File file = TestData.file(name);
return Mammoth.convertToHtml(file);
}
}
|
<commit_msg>Make sql validators private static
<commit_before>package com.yahoo.squidb.data;
import android.database.Cursor;
import android.database.sqlite.SQLiteDatabase;
import android.os.Build.VERSION;
import android.os.Build.VERSION_CODES;
import com.yahoo.squidb.data.SqlValidatorFactory.SqlValidator;
/*package*/ class SqlValidatorFactory {
interface SqlValidator {
void compileStatement(SQLiteDatabase db, String sql);
}
private static final SqlValidator INSTANCE;
static {
int version = VERSION.SDK_INT;
if (version >= VERSION_CODES.JELLY_BEAN) {
INSTANCE = new DefaultSqlValidator();
} else if (version >= VERSION_CODES.ICE_CREAM_SANDWICH) {
INSTANCE = new IcsSqlValidator();
} else {
// included for forks that reduce minSdk below 14
INSTANCE = new DefaultSqlValidator();
}
}
private SqlValidatorFactory() {
//no instance
}
static SqlValidator getValidator() {
return INSTANCE;
}
}
/*package*/ class DefaultSqlValidator implements SqlValidator {
@Override
public void compileStatement(SQLiteDatabase db, String sql) {
db.compileStatement(sql);
}
}
/*package*/ class IcsSqlValidator implements SqlValidator {
@Override
public void compileStatement(SQLiteDatabase db, String sql) {
Cursor c = db.rawQuery(sql, null);
if (c != null) {
c.close();
}
}
}
<commit_after>package com.yahoo.squidb.data;
import android.database.Cursor;
import android.database.sqlite.SQLiteDatabase;
import android.os.Build.VERSION;
import android.os.Build.VERSION_CODES;
/*package*/ class SqlValidatorFactory {
interface SqlValidator {
void compileStatement(SQLiteDatabase db, String sql);
}
private static final SqlValidator INSTANCE;
static {
int version = VERSION.SDK_INT;
if (version >= VERSION_CODES.JELLY_BEAN) {
INSTANCE = new DefaultSqlValidator();
} else if (version >= VERSION_CODES.ICE_CREAM_SANDWICH) {
INSTANCE = new IcsSqlValidator();
} else {
// included for forks that reduce minSdk below 14
INSTANCE = new DefaultSqlValidator();
}
}
private SqlValidatorFactory() {
//no instance
}
static SqlValidator getValidator() {
return INSTANCE;
}
private static class DefaultSqlValidator implements SqlValidator {
@Override
public void compileStatement(SQLiteDatabase db, String sql) {
db.compileStatement(sql);
}
}
private static class IcsSqlValidator implements SqlValidator {
@Override
public void compileStatement(SQLiteDatabase db, String sql) {
Cursor c = db.rawQuery(sql, null);
if (c != null) {
c.close();
}
}
}
}
|
<commit_msg>Return some text on a health check
<commit_before>from django.http import HttpResponse
def health_view(request):
return HttpResponse()
<commit_after>from django.http import HttpResponse
def health_view(request):
return HttpResponse("I am okay.", content_type="text/plain")
|
<commit_msg>Add tests for StoreContext() and LoadContext()
<commit_before>package passhash
import (
"testing"
)
func TestDummyCredentialStoreStore(t *testing.T) {
store := DummyCredentialStore{}
credential := &Credential{}
if err := store.Store(credential); err != nil {
t.Error("Got error storing credential.", err)
}
}
func TestDummyCredentialStoreLoad(t *testing.T) {
store := DummyCredentialStore{}
userID := UserID(0)
credential, err := store.Load(userID)
if err == nil {
t.Error("Got error loading credential.", err)
}
if credential != nil {
t.Error("DummyCredentialStore provided credential.", credential)
}
}
<commit_after>package passhash
import (
"context"
"testing"
)
func TestDummyCredentialStoreStore(t *testing.T) {
store := DummyCredentialStore{}
credential := &Credential{}
if err := store.Store(credential); err != nil {
t.Error("Got error storing credential.", err)
}
}
func TestDummyCredentialStoreStoreContext(t *testing.T) {
store := DummyCredentialStore{}
credential := &Credential{}
if err := store.StoreContext(context.Background(), credential); err != nil {
t.Error("Got error storing credential.", err)
}
}
func TestDummyCredentialStoreLoad(t *testing.T) {
store := DummyCredentialStore{}
userID := UserID(0)
credential, err := store.Load(userID)
if err == nil {
t.Error("Got error loading credential.", err)
}
if credential != nil {
t.Error("DummyCredentialStore provided credential.", credential)
}
}
func TestDummyCredentialStoreLoadContext(t *testing.T) {
store := DummyCredentialStore{}
userID := UserID(0)
credential, err := store.LoadContext(context.Background(), userID)
if err == nil {
t.Error("Got error loading credential.", err)
}
if credential != nil {
t.Error("DummyCredentialStore provided credential.", credential)
}
}
|
<commit_msg>Fix imports style and made feed_name optional
<commit_before>from __future__ import unicode_literals
from django.core.management import CommandError, BaseCommand
from saleor.integrations.feeds import SaleorFeed
from saleor.integrations import utils
class Command(BaseCommand):
help = 'Updates integration feeds. '
feed_classes = {'saleor': SaleorFeed}
def add_arguments(self, parser):
parser.add_argument('feed_name', nargs='+', type=str)
def handle(self, *args, **options):
feed_names = options['feed_name'] or self.feed_classes.keys()
for feed_name in feed_names:
feed = self.feed_classes.get(feed_name)
if feed is None:
raise CommandError('Feed "%s" does not exist' % feed_name)
utils.update_feed(feed())
<commit_after>from __future__ import unicode_literals
from django.core.management import CommandError, BaseCommand
from ....integrations.feeds import SaleorFeed
from ....integrations import utils
class Command(BaseCommand):
help = ('Updates integration feeds.'
'If feed name not provided, updates all available feeds')
feed_classes = {'saleor': SaleorFeed}
def add_arguments(self, parser):
parser.add_argument('feed_name', nargs='*', type=str, default=None)
def handle(self, *args, **options):
feed_names = options.get('feed_name') or self.feed_classes.keys()
for feed_name in feed_names:
feed = self.feed_classes.get(feed_name)
if feed is None:
raise CommandError('Feed "%s" does not exist' % feed_name)
utils.update_feed(feed())
|
<commit_msg>[Join] Check if we're already in the channel; Improved parameter parsing
<commit_before>from CommandTemplate import CommandTemplate
from IrcMessage import IrcMessage
class Command(CommandTemplate):
triggers = ['join']
helptext = "Makes me join another channel, if I'm allowed to at least"
def execute(self, message):
"""
:type message: IrcMessage
"""
replytext = ""
if message.messagePartsLength < 1:
replytext = "Please provide a channel for me to join"
else:
channel = message.messageParts[0]
if channel.replace('#', '') not in message.bot.factory.settings['allowedChannels'] and not message.bot.factory.isUserAdmin(message.user, message.userNickname, message.userAddress):
replytext = "I'm sorry, I'm not allowed to go there. Please ask my admin(s) for permission"
else:
replytext = "All right, I'll go to {}. See you there!".format(channel)
message.bot.join(channel)
message.reply(replytext, "say")<commit_after>from CommandTemplate import CommandTemplate
from IrcMessage import IrcMessage
class Command(CommandTemplate):
triggers = ['join']
helptext = "Makes me join another channel, if I'm allowed to at least"
def execute(self, message):
"""
:type message: IrcMessage
"""
replytext = ""
if message.messagePartsLength < 1:
replytext = "Please provide a channel for me to join"
else:
channel = message.messageParts[0].lower()
if channel.startswith('#'):
channel = channel.lstrip('#')
if '#' + channel in message.bot.channelsUserList:
replytext = "I'm already there, waiting for you. You're welcome!"
elif channel not in message.bot.factory.settings['allowedChannels'] and not message.bot.factory.isUserAdmin(message.user, message.userNickname, message.userAddress):
replytext = "I'm sorry, I'm not allowed to go there. Please ask my admin(s) for permission"
else:
replytext = "All right, I'll go to #{}. See you there!".format(channel)
message.bot.join(channel)
message.reply(replytext, "say")
|
<commit_msg>Add ProvisionalUser, which goes in the db
<commit_before>-- | Core definitions for the Auth subsite (see AuthSite.hs)
module AuthSiteTypes where
import Yesod.Core
-- | The loginDest parameter is the route that users should be directed to
-- after logging in.
data AuthSite loginDest = AuthSite loginDest
mkYesodSubData "AuthSite" [parseRoutes|
/login LoginR GET POST
/logout LogoutR POST
|]
<commit_after>-- | Core definitions for the Auth subsite (see AuthSite.hs)
module AuthSiteTypes where
import ClassyPrelude
import Database.Persist.TH
import Yesod.Core
-- | The loginDest parameter is the route that users should be directed to
-- after logging in.
data AuthSite loginDest = AuthSite loginDest
mkYesodSubData "AuthSite" [parseRoutes|
/login LoginR GET POST
/logout LogoutR POST
|]
share [mkPersist sqlSettings{mpsPrefixFields = False}
, mkMigrate "migrateAuth"
] [persistLowerCase|
ProvisionalUser
provisionalEmail Text
provisionalDigest ByteString
verificationDigest ByteString
puCreationTime UTCTime
|]
|
<commit_msg>Add default test database name to default settings
<commit_before>import pkg_resources # part of setuptools
USER_AGENT_NAME = 'bookmarks'
VERSION_NUMBER = pkg_resources.require('bookmarks')[0].version
SECRET_KEY = 'development key'
DATABASE_USERNAME = 'bookmarks'
DATABASE_PASSWORD = ''
DATABASE_HOST = 'localhost'
DATABASE_NAME = 'bookmarks'
<commit_after>import pkg_resources # part of setuptools
USER_AGENT_NAME = 'bookmarks'
VERSION_NUMBER = pkg_resources.require('bookmarks')[0].version
SECRET_KEY = 'development key'
DATABASE_USERNAME = 'bookmarks'
DATABASE_PASSWORD = ''
DATABASE_HOST = 'localhost'
DATABASE_NAME = 'bookmarks'
TEST_DATABASE_NAME = 'bookmarks_test'
|
<commit_msg>Add factory for capakey and utility method for doing a request.
<commit_before>from suds.client import Client
def crab_factory(**kwargs):
if 'wsdl' in kwargs:
wsdl = kwargs['wsdl']
del kwargs['wsdl']
else:
wsdl = "http://crab.agiv.be/wscrab/wscrab.svc?wsdl"
if 'proxy' in kwargs:
proxy = kwargs['proxy']
del kwargs['proxy']
c = Client(
wsdl,
**kwargs
)
return c
<commit_after>from suds.client import Client
def crab_factory(**kwargs):
if 'wsdl' in kwargs:
wsdl = kwargs['wsdl']
del kwargs['wsdl']
else:
wsdl = "http://crab.agiv.be/wscrab/wscrab.svc?wsdl"
if 'proxy' in kwargs:
proxy = kwargs['proxy']
del kwargs['proxy']
c = Client(
wsdl,
**kwargs
)
return c
def capakey_factory(**kwargs):
from suds.wsse import Security
from suds_passworddigest.token import UsernameDigestToken
from datetime import datetime
if 'wsdl' in kwargs:
wsdl = kwargs['wsdl']
del kwargs['wsdl']
else:
wsdl = "http://ws.agiv.be/capakeyws/nodataset.asmx?WSDL"
if 'user' in kwargs and 'password' in kwargs:
user = kwargs['user']
password = kwargs['password']
del kwargs['user']
del kwargs['password']
else:
raise ValueError(
"You must specify a 'user' and a 'password'."
)
if 'proxy' in kwargs:
proxy = kwargs['proxy']
del kwargs['proxy']
c = Client(
wsdl,
**kwargs
)
security = Security()
token = UsernameDigestToken(user, password)
# Service can't handle microseconds.
utc = datetime.utcnow()
utc = datetime(utc.year, utc.month, utc.day, utc.hour, utc.minute, utc.second, tzinfo=utc.tzinfo)
token.setcreated(utc)
security.tokens.append(token)
c.set_options(wsse=security)
return c
def capakey_request(client, action, *args):
from crabpy.wsa import Action, MessageID, To
cm = getattr(client.service, action)
a = Action(cm.method.soap.action)
mid = MessageID()
t = To('http://ws.agiv.be/capakeyws/nodataset.asmx')
client.set_options(soapheaders=[a.xml(), t.xml(), mid.xml()])
return getattr(client.service, action)(*args)
|
<commit_msg>Add counter for cleanup tasks not following the decorator
<commit_before>from __future__ import absolute_import
from datetime import datetime, timedelta
from changes.config import queue
from changes.constants import Status
from changes.experimental.stats import RCount
from changes.models import Task
from changes.queue.task import TrackedTask, tracked_task
CHECK_TIME = timedelta(minutes=60)
@tracked_task
def cleanup_tasks():
with RCount('cleanup_tasks'):
"""
Find any tasks which haven't checked in within a reasonable time period and
requeue them if necessary.
"""
now = datetime.utcnow()
cutoff = now - CHECK_TIME
pending_tasks = Task.query.filter(
Task.status != Status.finished,
Task.date_modified < cutoff,
)
for task in pending_tasks:
task_func = TrackedTask(queue.get_task(task.task_name))
task_func.delay(
task_id=task.task_id.hex,
parent_task_id=task.parent_id.hex if task.parent_id else None,
**task.data['kwargs']
)
<commit_after>from __future__ import absolute_import
from datetime import datetime, timedelta
from changes.config import queue
from changes.constants import Status
from changes.experimental.stats import RCount, incr
from changes.models import Task
from changes.queue.task import TrackedTask, tracked_task
CHECK_TIME = timedelta(minutes=60)
@tracked_task
def cleanup_tasks():
with RCount('cleanup_tasks'):
"""
Find any tasks which haven't checked in within a reasonable time period and
requeue them if necessary.
"""
now = datetime.utcnow()
cutoff = now - CHECK_TIME
pending_tasks = Task.query.filter(
Task.status != Status.finished,
Task.date_modified < cutoff,
)
for task in pending_tasks:
incr('cleanup_unfinished')
task_func = TrackedTask(queue.get_task(task.task_name))
task_func.delay(
task_id=task.task_id.hex,
parent_task_id=task.parent_id.hex if task.parent_id else None,
**task.data['kwargs']
)
|
<commit_msg>Implement tournament selection to obtain some convergence
<commit_before>extern crate rand;
use rand::thread_rng;
use rand::distributions::Range;
use individual::Individual;
mod individual;
fn main() {
let mut rng = thread_rng();
let range = Range::new(-512.03_f64, 511.97); // range for Schwefel problem
// initialize population
let mut population: Vec<_> = (0..128).map(|_| {
Individual::new(&range, &mut rng)
}).collect();
for i in 0..10000 {
// generate mutated offspring
population = population.iter().map(|x| {
x.mutate(&range, &mut rng)
}).collect();
let best = population.iter().min().unwrap();
if i % 100 == 0 {
println!("{}th fitness: {}", i, best.fitness);
}
if best.fitness < 1000_f64 {
println!("Solution: {:?}", best.solution);
return;
}
}
println!("Failed to converge.");
}
<commit_after>extern crate rand;
use rand::{Rng, thread_rng};
use rand::distributions::Range;
use individual::Individual;
mod individual;
fn select<'a, R: Rng>(population: &'a Vec<Individual>, rng: &mut R)
-> &'a Individual {
let population: Vec<_> = (0..4).map(|_| rng.choose(population)).collect();
if let Some(selected) = population.iter().min() {
return selected.unwrap();
}
unimplemented!();
}
fn main() {
let mut rng = thread_rng();
let range = Range::new(-512.03_f64, 511.97); // range for Schwefel problem
// initialize population
let mut population: Vec<_> = (0..128).map(|_| {
Individual::new(&range, &mut rng)
}).collect();
for i in 0..10000 {
// select and mutate individuals for next population
population = (0..128).map(|_| {
select(&population, &mut rng).mutate(&range, &mut rng)
}).collect();
let best = population.iter().min().unwrap();
if i % 100 == 0 {
println!("{}th fitness: {}", i, best.fitness);
}
if best.fitness < 1000_f64 {
println!("{}th solution converged at {}: {:?}",
i, best.fitness, best.solution);
return;
}
}
println!("Failed to converge.");
}
|
<commit_msg>Make TextField also work with a QLabel view, which doesn't allow editing.
<commit_before>
class TextField:
def __init__(self, model, view):
self.model = model
self.view = view
self.model.view = self
self.view.editingFinished.connect(self.editingFinished)
def editingFinished(self):
self.model.text = self.view.text()
# model --> view
def refresh(self):
self.view.setText(self.model.text)
<commit_after>
class TextField:
def __init__(self, model, view):
self.model = model
self.view = view
self.model.view = self
# Make TextField also work for QLabel, which doesn't allow editing
if hasattr(self.view, 'editingFinished'):
self.view.editingFinished.connect(self.editingFinished)
def editingFinished(self):
self.model.text = self.view.text()
# model --> view
def refresh(self):
self.view.setText(self.model.text)
|
<commit_msg>Add missing functions in the steps method of formContext.data.process
<commit_before>export class StepMock implements Xrm.ProcessFlow.Step {
public required: boolean;
public name: string;
public attribute: string;
constructor(name: string, attribute: string, required: boolean) {
this.name = name;
this.attribute = attribute;
this.required = required;
}
public getAttribute(): string {
return this.attribute;
}
public getName(): string {
return this.name;
}
public isRequired(): boolean {
return this.required;
}
}
<commit_after>export class StepMock implements Xrm.ProcessFlow.Step {
public required: boolean;
public name: string;
public attribute: string;
constructor(name: string, attribute: string, required: boolean) {
this.name = name;
this.attribute = attribute;
this.required = required;
}
public getAttribute(): string {
return this.attribute;
}
public getName(): string {
return this.name;
}
public isRequired(): boolean {
return this.required;
}
public getProgress(): number {
throw new Error("getProgress not implemented");
}
public setProgress(stepProgress: number, message: string): string {
throw new Error("setProgress not implemented");
}
}
|
<commit_msg>aura: Fix chrome compile after the switch from Task to base::Bind.
TBR=jhawkins@chromium.org
BUG=none
TEST=none
Review URL: http://codereview.chromium.org/8322004
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@105817 0039d316-1c4b-4281-b951-d872f2087c98
<commit_before>// Copyright (c) 2011 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "chrome/browser/automation/ui_controls.h"
#include "base/logging.h"
#include "views/view.h"
namespace ui_controls {
bool SendKeyPress(gfx::NativeWindow window,
ui::KeyboardCode key,
bool control,
bool shift,
bool alt,
bool command) {
NOTIMPLEMENTED();
return true;
}
bool SendKeyPressNotifyWhenDone(gfx::NativeWindow window,
ui::KeyboardCode key,
bool control,
bool shift,
bool alt,
bool command,
Task* task) {
NOTIMPLEMENTED();
return true;
}
bool SendMouseMove(long x, long y) {
NOTIMPLEMENTED();
return true;
}
bool SendMouseMoveNotifyWhenDone(long x, long y, Task* task) {
NOTIMPLEMENTED();
return true;
}
bool SendMouseEvents(MouseButton type, int state) {
NOTIMPLEMENTED();
return true;
}
bool SendMouseEventsNotifyWhenDone(MouseButton type, int state, Task* task) {
NOTIMPLEMENTED();
return true;
}
bool SendMouseClick(MouseButton type) {
return SendMouseEvents(type, UP | DOWN);
}
void MoveMouseToCenterAndPress(views::View* view, MouseButton button,
int state, Task* task) {
NOTIMPLEMENTED();
}
} // namespace ui_controls
<commit_after>// Copyright (c) 2011 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "chrome/browser/automation/ui_controls.h"
#include "base/logging.h"
#include "views/view.h"
namespace ui_controls {
bool SendKeyPress(gfx::NativeWindow window,
ui::KeyboardCode key,
bool control,
bool shift,
bool alt,
bool command) {
NOTIMPLEMENTED();
return true;
}
bool SendKeyPressNotifyWhenDone(gfx::NativeWindow window,
ui::KeyboardCode key,
bool control,
bool shift,
bool alt,
bool command,
const base::Closure& task) {
NOTIMPLEMENTED();
return true;
}
bool SendMouseMove(long x, long y) {
NOTIMPLEMENTED();
return true;
}
bool SendMouseMoveNotifyWhenDone(long x, long y, const base::Closure& task) {
NOTIMPLEMENTED();
return true;
}
bool SendMouseEvents(MouseButton type, int state) {
NOTIMPLEMENTED();
return true;
}
bool SendMouseEventsNotifyWhenDone(MouseButton type, int state,
const base::Closure& task) {
NOTIMPLEMENTED();
return true;
}
bool SendMouseClick(MouseButton type) {
return SendMouseEvents(type, UP | DOWN);
}
void MoveMouseToCenterAndPress(views::View* view, MouseButton button,
int state, const base::Closure& task) {
NOTIMPLEMENTED();
}
} // namespace ui_controls
|
<commit_msg>Put data folder in ./ instead of ./src/mcedit when running from source (xxx work on this)
<commit_before>import os
import sys
def getUserFilesDirectory():
exe = sys.executable
if hasattr(sys, 'frozen'):
folder = os.path.dirname(exe)
else:
script = sys.argv[0]
if exe.endswith("python") or exe.endswith("python.exe"):
folder = os.path.dirname(script)
else:
folder = os.path.dirname(exe)
dataDir = os.path.join(folder, "MCEdit User Data")
if not os.path.exists(dataDir):
os.makedirs(dataDir)
return dataDir
<commit_after>import os
import sys
def getUserFilesDirectory():
exe = sys.executable
if hasattr(sys, 'frozen'):
folder = os.path.dirname(exe)
else:
script = sys.argv[0]
if exe.endswith("python") or exe.endswith("python.exe"):
folder = os.path.dirname(os.path.dirname(os.path.dirname(script))) # from src/mcedit, ../../
else:
folder = os.path.dirname(exe)
dataDir = os.path.join(folder, "MCEdit User Data")
if not os.path.exists(dataDir):
os.makedirs(dataDir)
return dataDir
|
<commit_msg>Adjust to code style rules
<commit_before>import {Observable} from 'rxjs';
export interface SyncProcess {
url: string;
cancel(): void;
observer: Observable<SyncStatus>;
}
export enum SyncStatus {
Offline = "OFFLINE",
Pushing = "PUSHING",
Pulling = "PULLING",
InSync = "IN_SYNC",
Error = "ERROR",
AuthenticationError = "AUTHENTICATION_ERROR",
AuthorizationError = "AUTHORIZATION_ERROR"
}
<commit_after>import {Observable} from 'rxjs';
export interface SyncProcess {
url: string;
cancel(): void;
observer: Observable<SyncStatus>;
}
export enum SyncStatus {
Offline = 'OFFLINE',
Pushing = 'PUSHING',
Pulling = 'PULLING',
InSync = 'IN_SYNC',
Error = 'ERROR',
AuthenticationError = 'AUTHENTICATION_ERROR',
AuthorizationError = 'AUTHORIZATION_ERROR'
}
|
<commit_msg>Mark test with JUnit4 runner
<commit_before>package com.google.auto.factory;
import static org.truth0.Truth.ASSERT;
import org.junit.Test;
import com.google.inject.Guice;
import dagger.ObjectGraph;
public class DependencyInjectionIntegrationTest {
@Test public void daggerInjectedFactory() {
FactoryGeneratedFactory factoryGeneratedFactory =
ObjectGraph.create(DaggerModule.class).get(FactoryGeneratedFactory.class);
FactoryGenerated one = factoryGeneratedFactory.create("A");
FactoryGenerated two = factoryGeneratedFactory.create("B");
ASSERT.that(one.name()).isEqualTo("A");
ASSERT.that(one.dependency()).isNotNull();
ASSERT.that(two.name()).isEqualTo("B");
ASSERT.that(two.dependency()).isNotNull();
ASSERT.that(one.dependency()).isNotEqualTo(two.dependency());
}
@Test public void guiceInjectedFactory() {
FactoryGeneratedFactory factoryGeneratedFactory =
Guice.createInjector(new GuiceModule())
.getInstance(FactoryGeneratedFactory.class);
FactoryGenerated one = factoryGeneratedFactory.create("A");
FactoryGenerated two = factoryGeneratedFactory.create("B");
ASSERT.that(one.name()).isEqualTo("A");
ASSERT.that(one.dependency()).isNotNull();
ASSERT.that(two.name()).isEqualTo("B");
ASSERT.that(two.dependency()).isNotNull();
ASSERT.that(one.dependency()).isNotEqualTo(two.dependency());
}
}
<commit_after>package com.google.auto.factory;
import static org.truth0.Truth.ASSERT;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
import com.google.inject.Guice;
import dagger.ObjectGraph;
@RunWith(JUnit4.class)
public class DependencyInjectionIntegrationTest {
@Test public void daggerInjectedFactory() {
FactoryGeneratedFactory factoryGeneratedFactory =
ObjectGraph.create(DaggerModule.class).get(FactoryGeneratedFactory.class);
FactoryGenerated one = factoryGeneratedFactory.create("A");
FactoryGenerated two = factoryGeneratedFactory.create("B");
ASSERT.that(one.name()).isEqualTo("A");
ASSERT.that(one.dependency()).isNotNull();
ASSERT.that(two.name()).isEqualTo("B");
ASSERT.that(two.dependency()).isNotNull();
ASSERT.that(one.dependency()).isNotEqualTo(two.dependency());
}
@Test public void guiceInjectedFactory() {
FactoryGeneratedFactory factoryGeneratedFactory =
Guice.createInjector(new GuiceModule())
.getInstance(FactoryGeneratedFactory.class);
FactoryGenerated one = factoryGeneratedFactory.create("A");
FactoryGenerated two = factoryGeneratedFactory.create("B");
ASSERT.that(one.name()).isEqualTo("A");
ASSERT.that(one.dependency()).isNotNull();
ASSERT.that(two.name()).isEqualTo("B");
ASSERT.that(two.dependency()).isNotNull();
ASSERT.that(one.dependency()).isNotEqualTo(two.dependency());
}
}
|
<commit_msg>Use VISION_BONNET_MODELS_PATH env var for custom models path.
Change-Id: I687ca96e4cf768617fa45d50d68dadffde750b87
<commit_before>"""Utility to load compute graphs from diffrent sources."""
import os
def load_compute_graph(name):
path = os.path.join('/opt/aiy/models', name)
with open(path, 'rb') as f:
return f.read()
<commit_after>"""Utility to load compute graphs from diffrent sources."""
import os
def load_compute_graph(name):
path = os.environ.get('VISION_BONNET_MODELS_PATH', '/opt/aiy/models')
with open(os.path.join(path, name), 'rb') as f:
return f.read()
|
<commit_msg>Make setup.py smoke test more specific again as requested in review<commit_before>
"""Tests for setup.py."""
import doctest
import os
import subprocess
import sys
from testtools import (
TestCase,
)
from testtools.matchers import (
DocTestMatches,
)
class TestCanSetup(TestCase):
def test_bdist(self):
# Single smoke test to make sure we can build a package.
path = os.path.join(os.path.dirname(__file__), '..', '..', 'setup.py')
proc = subprocess.Popen([sys.executable, path, 'bdist'],
stdin=subprocess.PIPE, stdout=subprocess.PIPE,
stderr=subprocess.STDOUT)
output, _ = proc.communicate()
self.assertEqual(0, proc.returncode)
self.assertThat(output,
DocTestMatches("...running bdist...", doctest.ELLIPSIS))
<commit_after>
"""Tests for setup.py."""
import doctest
import os
import subprocess
import sys
from testtools import (
TestCase,
)
from testtools.matchers import (
DocTestMatches,
)
class TestCanSetup(TestCase):
def test_bdist(self):
# Single smoke test to make sure we can build a package.
path = os.path.join(os.path.dirname(__file__), '..', '..', 'setup.py')
proc = subprocess.Popen([sys.executable, path, 'bdist'],
stdin=subprocess.PIPE, stdout=subprocess.PIPE,
stderr=subprocess.STDOUT, universal_newlines=True)
output, _ = proc.communicate()
self.assertEqual(0, proc.returncode)
self.assertThat(output, DocTestMatches("""...
running install_scripts
...
adding '...testr'
...""", doctest.ELLIPSIS))
|
<commit_msg>Check if Onboarding exists before creating.
<commit_before>from __future__ import unicode_literals
from django.db import migrations, models
def forwards(apps, schema_editor):
"""Create Onboarding group."""
Group = apps.get_model('auth', 'Group')
Group.objects.create(name='Onboarding')
def backwards(apps, schema_editor):
"""Delete Onboarding group."""
Group = apps.get_model('auth', 'Group')
Group.objects.filter(name='Onboarding').delete()
class Migration(migrations.Migration):
dependencies = [
('profiles', '0010_auto_20171221_0112'),
]
operations = [
migrations.RunPython(forwards, backwards)
]
<commit_after>from __future__ import unicode_literals
from django.db import migrations, models
def forwards(apps, schema_editor):
"""Create Onboarding group."""
Group = apps.get_model('auth', 'Group')
if not Group.objects.filter(name='Onboarding').exists():
Group.objects.create(name='Onboarding')
def backwards(apps, schema_editor):
"""Delete Onboarding group."""
Group = apps.get_model('auth', 'Group')
if Group.objects.filter(name='Onboarding').exists():
Group.objects.filter(name='Onboarding').delete()
class Migration(migrations.Migration):
dependencies = [
('profiles', '0010_auto_20171221_0112'),
]
operations = [
migrations.RunPython(forwards, backwards)
]
|
<commit_msg>Make return character an attribute
<commit_before>import time
from netmiko.base_connection import BaseConnection
class NetscalerSSH(BaseConnection):
""" Netscaler SSH class. """
def session_preparation(self):
"""Prepare the session after the connection has been established."""
# 0 will defer to the global delay factor
delay_factor = self.select_delay_factor(delay_factor=0)
self._test_channel_read()
self.set_base_prompt()
self.disable_paging(command="\nset cli mode -page OFF\n")
time.sleep(1 * delay_factor)
self.set_base_prompt()
time.sleep(.3 * self.global_delay_factor)
self.clear_buffer()
def send_config_set(self, config_commands=None, exit_config_mode=True, **kwargs):
""" Nothing to exit"""
return super(NetscalerSSH, self).send_config_set(config_commands=config_commands,
exit_config_mode=False, **kwargs)
def strip_prompt(self, a_string):
""" Strip 'Done' from command output """
output = super(NetscalerSSH, self).strip_prompt(a_string)
lines = output.split('\n')
if "Done" in lines[-1]:
return '\n'.join(lines[:-1])
else:
return output
<commit_after>import time
from netmiko.base_connection import BaseConnection
class NetscalerSSH(BaseConnection):
""" Netscaler SSH class. """
def session_preparation(self):
"""Prepare the session after the connection has been established."""
# 0 will defer to the global delay factor
delay_factor = self.select_delay_factor(delay_factor=0)
self._test_channel_read()
self.set_base_prompt()
cmd = "{}set cli mode -page OFF{}".format(self.RETURN, self.RETURN)
self.disable_paging(command=cmd)
time.sleep(1 * delay_factor)
self.set_base_prompt()
time.sleep(.3 * delay_factor)
self.clear_buffer()
def send_config_set(self, config_commands=None, exit_config_mode=True, **kwargs):
"""Nothing to exit"""
return super(NetscalerSSH, self).send_config_set(config_commands=config_commands,
exit_config_mode=False, **kwargs)
def strip_prompt(self, a_string):
""" Strip 'Done' from command output """
output = super(NetscalerSSH, self).strip_prompt(a_string)
lines = output.split(self.RESPONSE_RETURN)
if "Done" in lines[-1]:
return 'self.RESPONSE_RETURN'.join(lines[:-1])
else:
return output
|
<commit_msg>Use TLS if given a key and cert.
<commit_before>package main
import (
"log"
"net/http"
"github.com/ParadropLabs/node"
)
// var client *rabric.Client
func main() {
node.Log()
// Pass certificate here
s := node.CreateNode("pd.routers.aardvark")
server := &http.Server{
Handler: s,
Addr: ":8000",
}
log.Fatal(server.ListenAndServe())
}
<commit_after>package main
import (
"log"
"net/http"
"os"
"github.com/ParadropLabs/node"
)
// var client *rabric.Client
func main() {
node.Log()
// Pass certificate here
s := node.CreateNode("pd.routers.aardvark")
server := &http.Server{
Handler: s,
Addr: ":8000",
}
certFile := os.Getenv("EXIS_CERT")
keyFile := os.Getenv("EXIS_KEY")
if certFile != "" && keyFile != "" {
log.Fatal(server.ListenAndServeTLS(certFile, keyFile))
} else {
log.Fatal(server.ListenAndServe())
}
}
|
<commit_msg>Fix PARAMETER ERROR of AnimationClip.toJSON()
<commit_before>import { KeyframeTrack } from './KeyframeTrack';
import { Bone } from './../objects/Bone';
import { MorphTarget } from '../core/Geometry';
import { AnimationBlendMode } from '../constants';
export class AnimationClip {
constructor( name?: string, duration?: number, tracks?: KeyframeTrack[], blendMode?: AnimationBlendMode );
name: string;
tracks: KeyframeTrack[];
/**
* @default THREE.NormalAnimationBlendMode
*/
blendMode: AnimationBlendMode;
/**
* @default -1
*/
duration: number;
uuid: string;
results: any[];
resetDuration(): AnimationClip;
trim(): AnimationClip;
validate(): boolean;
optimize(): AnimationClip;
clone(): AnimationClip;
static CreateFromMorphTargetSequence(
name: string,
morphTargetSequence: MorphTarget[],
fps: number,
noLoop: boolean
): AnimationClip;
static findByName( clipArray: AnimationClip[], name: string ): AnimationClip;
static CreateClipsFromMorphTargetSequences(
morphTargets: MorphTarget[],
fps: number,
noLoop: boolean
): AnimationClip[];
static parse( json: any ): AnimationClip;
static parseAnimation(
animation: any,
bones: Bone[]
): AnimationClip;
static toJSON(): any;
}
<commit_after>import { KeyframeTrack } from './KeyframeTrack';
import { Bone } from './../objects/Bone';
import { MorphTarget } from '../core/Geometry';
import { AnimationBlendMode } from '../constants';
export class AnimationClip {
constructor( name?: string, duration?: number, tracks?: KeyframeTrack[], blendMode?: AnimationBlendMode );
name: string;
tracks: KeyframeTrack[];
/**
* @default THREE.NormalAnimationBlendMode
*/
blendMode: AnimationBlendMode;
/**
* @default -1
*/
duration: number;
uuid: string;
results: any[];
resetDuration(): AnimationClip;
trim(): AnimationClip;
validate(): boolean;
optimize(): AnimationClip;
clone(): AnimationClip;
static CreateFromMorphTargetSequence(
name: string,
morphTargetSequence: MorphTarget[],
fps: number,
noLoop: boolean
): AnimationClip;
static findByName( clipArray: AnimationClip[], name: string ): AnimationClip;
static CreateClipsFromMorphTargetSequences(
morphTargets: MorphTarget[],
fps: number,
noLoop: boolean
): AnimationClip[];
static parse( json: any ): AnimationClip;
static parseAnimation(
animation: any,
bones: Bone[]
): AnimationClip;
static toJSON( json: any): any;
}
|
<commit_msg>Enable terminals in the lab example
<commit_before>
import os
from jinja2 import FileSystemLoader
from notebook.base.handlers import IPythonHandler, FileFindHandler
from notebook.notebookapp import NotebookApp
from traitlets import Unicode
class ExampleHandler(IPythonHandler):
"""Handle requests between the main app page and notebook server."""
def get(self):
"""Get the main page for the application's interface."""
return self.write(self.render_template("index.html",
static=self.static_url, base_url=self.base_url))
def get_template(self, name):
loader = FileSystemLoader(os.getcwd())
return loader.load(self.settings['jinja2_env'], name)
class ExampleApp(NotebookApp):
default_url = Unicode('/example')
def init_webapp(self):
"""initialize tornado webapp and httpserver.
"""
super(ExampleApp, self).init_webapp()
default_handlers = [
(r'/example/?', ExampleHandler),
(r"/example/(.*)", FileFindHandler,
{'path': 'build'}),
]
self.web_app.add_handlers(".*$", default_handlers)
if __name__ == '__main__':
ExampleApp.launch_instance()
<commit_after>
import os
from jinja2 import FileSystemLoader
from notebook.base.handlers import IPythonHandler, FileFindHandler
from notebook.notebookapp import NotebookApp
from traitlets import Unicode
class ExampleHandler(IPythonHandler):
"""Handle requests between the main app page and notebook server."""
def get(self):
"""Get the main page for the application's interface."""
return self.write(self.render_template("index.html",
static=self.static_url, base_url=self.base_url,
terminals_available=True))
def get_template(self, name):
loader = FileSystemLoader(os.getcwd())
return loader.load(self.settings['jinja2_env'], name)
class ExampleApp(NotebookApp):
default_url = Unicode('/example')
def init_webapp(self):
"""initialize tornado webapp and httpserver.
"""
super(ExampleApp, self).init_webapp()
default_handlers = [
(r'/example/?', ExampleHandler),
(r"/example/(.*)", FileFindHandler,
{'path': 'build'}),
]
self.web_app.add_handlers(".*$", default_handlers)
if __name__ == '__main__':
ExampleApp.launch_instance()
|
<commit_msg>[STABLE] Fix logical error in `computeBestTime`
<commit_before>import { ConnectionStatus } from "./interfaces";
import * as m from "moment";
import { isString, max } from "lodash";
export function maxDate(l: m.Moment, r: m.Moment): string {
const dates = [l, r].map(y => y.toDate());
return (max(dates) || dates[0]).toJSON();
}
export function getStatus(cs: ConnectionStatus | undefined): "up" | "down" {
return (cs && cs.state) || "down";
}
/** USE CASE: We have numerous, possibly duplicate sources of information
* that represent `last_saw_mq`. One came from the API and another (possibly)
* came from the MQ directly to the browser. This function determines which of
* the two is most relevant. It is a heuristic process that gives up when
* unable to make a determination. */
export function computeBestTime(cs: ConnectionStatus | undefined,
lastSawMq: string | undefined,
now = m()): ConnectionStatus | undefined {
// Only use the `last_saw_mq` time if it is more recent than the local
// timestamp.
// don't bother guessing if info is unavailable
return isString(lastSawMq) ?
{ at: maxDate(now, m(lastSawMq)), state: getStatus(cs) } : cs;
}
<commit_after>import { ConnectionStatus } from "./interfaces";
import * as m from "moment";
import { isString, max } from "lodash";
export function maxDate(l: m.Moment, r: m.Moment): string {
const dates = [l, r].map(y => y.toDate());
return (max(dates) || dates[0]).toJSON();
}
export function getStatus(cs: ConnectionStatus | undefined): "up" | "down" {
return (cs && cs.state) || "down";
}
/** USE CASE: We have numerous, possibly duplicate sources of information
* that represent `last_saw_mq`. One came from the API and another (possibly)
* came from the MQ directly to the browser. This function determines which of
* the two is most relevant. It is a heuristic process that gives up when
* unable to make a determination. */
export function computeBestTime(cs: ConnectionStatus | undefined,
lastSawMq: string | undefined): ConnectionStatus | undefined {
// Only use the `last_saw_mq` time if it is more recent than the local
// timestamp.
// don't bother guessing if info is unavailable
return isString(lastSawMq) ?
{ at: maxDate(m(cs && cs.at ? cs.at : lastSawMq), m(lastSawMq)), state: getStatus(cs) } : cs;
}
|
<commit_msg>Fix script not working from bash
<commit_before>
import subprocess
class NvidiaCommandsLayerException(Exception):
pass
class NvidiaCommandsLayer(object):
@staticmethod
def set_fan_percentage(
value: int
) -> None:
if value < 0 or value > 100:
raise NvidiaCommandsLayerException('Cannot set a value outside 0 - 100')
result = subprocess.run(
[
'nvidia-settings',
'-a',
'"[gpu:0]/GPUFanControlState=1"',
'-a',
'"[fan:0]/GPUTargetFanSpeed={}"'.format(value)
],
stdout=subprocess.PIPE
)
if result.returncode != 0:
raise NvidiaCommandsLayerException('Could not set the fan speed')
@staticmethod
def read_temperature(
) -> int:
result = subprocess.run(
[
'nvidia-smi',
'--query-gpu=temperature.gpu',
'--format=csv,noheader,nounits'
],
stdout=subprocess.PIPE
)
if result.returncode == 0:
# the result is a string with a '\n' at the end, convert it to a decimal
return int(result.stdout[:-1])
else:
raise NvidiaCommandsLayerException('Could not read the temperature')
<commit_after>
import subprocess
class NvidiaCommandsLayerException(Exception):
pass
class NvidiaCommandsLayer(object):
@staticmethod
def set_fan_percentage(
value: int
) -> None:
if value < 0 or value > 100:
raise NvidiaCommandsLayerException('Cannot set a value outside 0 - 100')
result = subprocess.run(
'nvidia-settings '
'-a "[gpu:0]/GPUFanControlState=1" '
'-a "[fan:0]/GPUTargetFanSpeed={}"'.format(value),
stdout=subprocess.PIPE,
shell=True
)
if result.returncode != 0:
raise NvidiaCommandsLayerException('Could not set the fan speed')
@staticmethod
def read_temperature(
) -> int:
result = subprocess.run(
[
'nvidia-smi',
'--query-gpu=temperature.gpu',
'--format=csv,noheader,nounits'
],
stdout=subprocess.PIPE
)
if result.returncode == 0:
# the result is a string with a '\n' at the end, convert it to a decimal
return int(result.stdout[:-1])
else:
raise NvidiaCommandsLayerException('Could not read the temperature')
|
<commit_msg>Refactor code, move the code in main function to the other function
<commit_before>//this file contains several examples by Golang
package main
import (
"fmt"
)
func main() {
//Check if number is odd or even
fmt.Print("Enter a number: ")
var number int
fmt.Scanf("%d", &number)
if (number % 2 == 0) {
fmt.Printf("%d is even number\n", number)
} else {
fmt.Printf("%d is odd number\n", number)
}
}<commit_after>//this file contains several examples by Golang
package main
import (
"fmt"
)
func main() {
checkNumberIsEvenOrOdd()
}
func checkNumberIsEvenOrOdd() {
fmt.Print("Enter a number: ")
var number int
fmt.Scanf("%d", &number)
if (number % 2 == 0) {
fmt.Printf("%d is even number\n", number)
} else {
fmt.Printf("%d is odd number\n", number)
}
}
|
<commit_msg>Add letter to number doc comments
<commit_before>package gocipher
func LetterNumberEncrypt(text string) []int {
runes := []rune(text)
numbers := make([]int, len(runes))
for i, rune := range runes {
if rune >= 'A' && rune <= 'Z' {
numbers[i] = int(rune - 'A' + 1)
} else if rune >= 'a' && rune <= 'z' {
numbers[i] = int(rune - 'a' + 1)
}
}
return numbers
}
func LetterNumberDecrypt(numbers []int) string {
runes := make([]rune, len(numbers))
for i, number := range numbers {
runes[i] = rune(number + 'A' - 1)
}
return string(runes)
}
<commit_after>package gocipher
// LetterNumberEncrypt - Converts letters to the corresponding number.
// e.g. "ABC...XYZ" becomes []int{1, 2, 3 ... 24, 25, 26}
func LetterNumberEncrypt(text string) []int {
runes := []rune(text)
numbers := make([]int, len(runes))
for i, rune := range runes {
if rune >= 'A' && rune <= 'Z' {
numbers[i] = int(rune - 'A' + 1)
} else if rune >= 'a' && rune <= 'z' {
numbers[i] = int(rune - 'a' + 1)
}
}
return numbers
}
// LetterNumberDecrypt - Converts numbers to the corresponding letter.
// e.g. []int{1, 2, 3 ... 24, 25, 26} becomes "ABC...XYZ"
func LetterNumberDecrypt(numbers []int) string {
runes := make([]rune, len(numbers))
for i, number := range numbers {
runes[i] = rune(number + 'A' - 1)
}
return string(runes)
}
|
<commit_msg>Add getter for futures positions for BitVc
<commit_before>package org.knowm.xchange.huobi.service;
import org.knowm.xchange.Exchange;
import org.knowm.xchange.huobi.BitVc;
import org.knowm.xchange.huobi.BitVcFutures;
import si.mazi.rescu.RestProxyFactory;
public class BitVcFuturesServiceRaw {
protected final BitVcFutures bitvc;
protected final String accessKey;
protected HuobiDigest digest;
public BitVcFuturesServiceRaw(Exchange exchange) {
this.bitvc = RestProxyFactory.createProxy(BitVcFutures.class, "https://api.bitvc.com/futures");
this.accessKey = exchange.getExchangeSpecification().getApiKey();
/** BitVc Futures expect a different secret key digest name from BitVc spot and Huobi */
this.digest = new HuobiDigest(exchange.getExchangeSpecification().getSecretKey(), "secretKey");
}
protected long requestTimestamp() {
return System.currentTimeMillis() / 1000;
}
}
<commit_after>package org.knowm.xchange.huobi.service;
import org.knowm.xchange.Exchange;
import org.knowm.xchange.huobi.BitVc;
import org.knowm.xchange.huobi.BitVcFutures;
import org.knowm.xchange.huobi.dto.trade.BitVcFuturesPosition;
import org.knowm.xchange.huobi.dto.trade.BitVcFuturesPositionByContract;
import si.mazi.rescu.RestProxyFactory;
/**
 * Raw (exchange-level) service for the BitVc futures REST API.
 * Calls go through a rescu-generated proxy of the {@code BitVcFutures} interface.
 */
public class BitVcFuturesServiceRaw {

    // rescu proxy bound to the BitVc futures endpoint.
    protected final BitVcFutures bitvc;
    // API key taken from the exchange specification.
    protected final String accessKey;
    // Request signer; see constructor note on the digest parameter name.
    protected HuobiDigest digest;

    public BitVcFuturesServiceRaw(Exchange exchange) {
        this.bitvc = RestProxyFactory.createProxy(BitVcFutures.class, "https://api.bitvc.com/futures");
        this.accessKey = exchange.getExchangeSpecification().getApiKey();
        /** BitVc Futures expect a different secret key digest name from BitVc spot and Huobi */
        this.digest = new HuobiDigest(exchange.getExchangeSpecification().getSecretKey(), "secretKey");
    }

    /**
     * Fetch the current futures positions, grouped by contract.
     * NOTE(review): the literal second argument (1) is an opaque parameter of
     * {@code BitVcFutures.positions} — presumably a coin/contract type id;
     * confirm against the interface declaration.
     */
    public BitVcFuturesPositionByContract getFuturesPositions() {
        final BitVcFuturesPositionByContract positions = bitvc.positions(accessKey, 1, requestTimestamp(), digest);
        return positions;
    }

    // BitVc expects a Unix timestamp in seconds (not milliseconds).
    protected long requestTimestamp() {
        return System.currentTimeMillis() / 1000;
    }
}
|
<commit_msg>Remove duplicate import. (Thanks to MechanisM)
<commit_before>
from django.views.generic import View
from django.views.decorators.csrf import csrf_exempt
from django.http import HttpResponse
try:
from django.http import StreamingHttpResponse as HttpResponse
except ImportError:
from django.http import HttpResponse
from django.utils.decorators import method_decorator
from sse import Sse
class BaseSseView(View):
"""
This is a base class for sse streaming.
"""
def get_last_id(self):
if "HTTP_LAST_EVENT_ID" in self.request.META:
return self.request.META['HTTP_LAST_EVENT_ID']
return None
def _iterator(self):
for subiterator in self.iterator():
for bufferitem in self.sse:
yield bufferitem
@method_decorator(csrf_exempt)
def dispatch(self, request, *args, **kwargs):
self.sse = Sse()
self.request = request
self.args = args
self.kwargs = kwargs
response = HttpResponse(self._iterator(), content_type="text/event-stream")
response['Cache-Control'] = 'no-cache'
response['Software'] = 'django-sse'
return response
def iterator(self):
"""
This is a source of stream.
Must be use sentence ``yield`` for flush
content fon sse object to the client.
Example:
def iterator(self):
counter = 0
while True:
self.sse.add_message('foo', 'bar')
self.sse.add_message('bar', 'foo')
yield
"""
raise NotImplementedError
<commit_after>
from django.views.generic import View
from django.views.decorators.csrf import csrf_exempt
try:
from django.http import StreamingHttpResponse as HttpResponse
except ImportError:
from django.http import HttpResponse
from django.utils.decorators import method_decorator
from sse import Sse
class BaseSseView(View):
    """
    Base class-based view for Server-Sent Events (SSE) streaming.

    Subclasses implement :meth:`iterator`, a generator that yields each
    time the messages queued on ``self.sse`` should be flushed to the
    client.
    """

    def get_last_id(self):
        """Return the ``Last-Event-ID`` header sent by a reconnecting
        EventSource client, or ``None`` when absent."""
        if "HTTP_LAST_EVENT_ID" in self.request.META:
            return self.request.META['HTTP_LAST_EVENT_ID']
        return None

    def _iterator(self):
        # Each yield from the subclass's iterator() means "flush now":
        # iterating self.sse serializes (and drains) the queued messages.
        for subiterator in self.iterator():
            for bufferitem in self.sse:
                yield bufferitem

    @method_decorator(csrf_exempt)
    def dispatch(self, request, *args, **kwargs):
        self.sse = Sse()
        self.request = request
        self.args = args
        self.kwargs = kwargs
        # HttpResponse is StreamingHttpResponse when available (see the
        # try/except import at module level); the generator is consumed
        # lazily, keeping the connection open while iterator() runs.
        response = HttpResponse(self._iterator(), content_type="text/event-stream")
        response['Cache-Control'] = 'no-cache'
        response['Software'] = 'django-sse'
        return response

    def iterator(self):
        """
        The source of the stream. Subclasses must override this generator
        and ``yield`` every time the content queued on ``self.sse`` should
        be flushed to the client.

        Example:

            def iterator(self):
                while True:
                    self.sse.add_message('foo', 'bar')
                    self.sse.add_message('bar', 'foo')
                    yield
        """
        raise NotImplementedError
|
<commit_msg>Test that IndexError is raised when appropriate
<commit_before>from unittest import TestCase
from prudent.sequence import Sequence
class SequenceTest(TestCase):
def setUp(self):
self.seq = Sequence([1, 2, 3])
def test_getitem(self):
assert self.seq[0] == 1
self.seq[2]
assert self.seq[2] == 3
def test_len(self):
assert len(self.seq) == 0
self.seq[2]
assert len(self.seq) == 3
def test_iter(self):
for _ in range(2):
assert list(self.seq) == [1, 2, 3]
<commit_after>from unittest import TestCase
from prudent.sequence import Sequence
class SequenceTest(TestCase):
    """Tests for prudent.sequence.Sequence, a lazily-consumed sequence."""

    def setUp(self):
        self.seq = Sequence([1, 2, 3])

    def test_getitem(self):
        # Indexing returns the element at that position.
        assert self.seq[0] == 1
        assert self.seq[2] == 3

    def test_getitem_raises_indexerror(self):
        # Indexing past the underlying iterable raises IndexError.
        self.assertRaises(IndexError, lambda: self.seq[3])

    def test_len_returns_current_size(self):
        # len() reflects only what has been consumed so far: 0 up front,
        # then 3 once self.seq[2] forces consumption through index 2.
        assert len(self.seq) == 0
        self.seq[2]
        assert len(self.seq) == 3

    def test_iter_preserves_elems(self):
        # Iterating twice yields the same elements both times.
        for _ in range(2):
            assert list(self.seq) == [1, 2, 3]
|
<commit_msg>Add another condition in test case where it fails due to fake
implementation in equals method<commit_before>package com.sibisoft.faizaniftikhartdd;
import static org.junit.Assert.*;
import org.junit.Test;
public class DollarEquality {
@Test
public void testEquality()
{
assertTrue(new Dollar(5).equals(new Dollar(5)));
}
}
<commit_after>package com.sibisoft.faizaniftikhartdd;
import static org.junit.Assert.*;
import org.junit.Test;
/**
 * TDD example tests for {@code Dollar} value equality.
 */
public class DollarEquality {

    @Test
    public void testEquality()
    {
        // Equal amounts compare equal; different amounts must not
        // (guards against a stubbed equals() that always returns true).
        assertTrue(new Dollar(5).equals(new Dollar(5)));
        assertFalse(new Dollar(5).equals(new Dollar(6)));
    }
}
|
<commit_msg>Increase max fragment size from 128 KiB to 512 KiB.
This is still allowed in bitswap (which allows up to 1 MiB) and
it should make uploads and downloads much faster (less latency sensitive).
The original motivation for 128 KiB was to play well with erasure encoding
(5 MiB -> 40 * 128 KiB ==erasure encode=> 60 * 128 KiB),
but we no longer do that and it should be done at a lower level regardless.
This will reduce the number of network requests to retrieve a block by 4X
and thus greatly improve bandwidth for latency sensitive instances.
It also reduces the pressure on the blockstore by having ~ 4x fewer blocks.
Some blockstores like S3 can impose limits on the total number of blocks
and also on the number of requests per second.
<commit_before>package peergos.shared.user.fs;
/** A Fragment is a part of an EncryptedChunk which is stored directly in IPFS in raw format
*
*/
public class Fragment {
public static final int MAX_LENGTH = 1024*128;
public final byte[] data;
public Fragment(byte[] data) {
if (MAX_LENGTH < data.length)
throw new IllegalStateException("fragment size "+ data.length +" greater than max "+ MAX_LENGTH);
this.data = data;
}
}
<commit_after>package peergos.shared.user.fs;
/** A Fragment is a part of an EncryptedChunk which is stored directly in IPFS in a raw format block.
 *  The payload is immutable and capped at MAX_LENGTH bytes.
 */
public class Fragment {
    public static final int MAX_LENGTH = 512*1024; // max size allowed by bitswap protocol is 1 MiB

    public final byte[] data;

    public Fragment(byte[] data) {
        if (data.length > MAX_LENGTH) {
            throw new IllegalStateException("fragment size "+ data.length +" greater than max "+ MAX_LENGTH);
        }
        this.data = data;
    }
}
|
<commit_msg>Tag sentry event with each part of path
<commit_before>from django.views import defaults
from sentry_sdk import capture_message, push_scope
def page_not_found(request, *args, **kwargs):
with push_scope() as scope:
scope.set_tag("type", "404")
scope.set_extra("path", request.path)
capture_message("Page not found", level="error")
return defaults.page_not_found(request, *args, **kwargs)
<commit_after>from django.views import defaults
from sentry_sdk import capture_message, push_scope
def page_not_found(request, *args, **kwargs):
    """Report a 404 to Sentry — tagged with the full path and each path
    segment individually — then delegate to Django's stock 404 handler."""
    path = request.path
    segments = path.strip("/").split("/")
    with push_scope() as scope:
        scope.set_tag("path", path)
        for index, segment in enumerate(segments):
            scope.set_tag("path_{}".format(index), segment)
        capture_message("Page not found", level="error")
    return defaults.page_not_found(request, *args, **kwargs)
|
<commit_msg>Replace `PascalStr` internals with an `AsciiStr` for convenience. Also, various improvements.
<commit_before>use ascii::AsciiChar;
pub struct PascalStr {
chars: [AsciiChar]
}
<commit_after>use ascii::{AsciiChar, AsciiStr};
use std::convert::AsRef;
use ::PASCAL_STRING_BUF_SIZE;
/// A borrowed slice from a `PascalString`. Does not own its data.
// NOTE(review): `AsciiStr` is a dynamically sized type, so `PascalStr` is
// itself unsized and only usable behind a pointer (`&PascalStr`, etc.),
// mirroring how `str` wraps `[u8]` — confirm this matches the constructor
// code elsewhere in the crate.
#[derive(Eq, Hash, PartialEq, PartialOrd)]
pub struct PascalStr {
    /// The `AsciiStr`, borrowed from the original `PascalString`
    string: AsciiStr
}

impl PascalStr {
    /// Get a pointer to the first byte of the string buffer.
    #[inline]
    pub fn as_ptr(&self) -> *const AsciiChar {
        self.string.as_ptr()
    }

    /// Get a mutable pointer to the first byte of the string buffer.
    #[inline]
    pub fn as_mut_ptr(&mut self) -> *mut AsciiChar {
        self.string.as_mut_ptr()
    }

    /// Number of characters currently in the string.
    #[inline]
    pub fn len(&self) -> usize {
        self.string.len()
    }

    /// `true` when the string contains no characters.
    #[inline]
    pub fn is_empty(&self) -> bool {
        self.len() == 0
    }

    /// `true` when the string has reached the fixed buffer capacity
    /// (`PASCAL_STRING_BUF_SIZE`).
    #[inline]
    pub fn is_full(&self) -> bool {
        self.len() == PASCAL_STRING_BUF_SIZE
    }
}

// Conversions exposing the underlying ASCII data.
impl AsRef<AsciiStr> for PascalStr {
    fn as_ref(&self) -> &AsciiStr {
        &self.string
    }
}

impl AsMut<AsciiStr> for PascalStr {
    fn as_mut(&mut self) -> &mut AsciiStr {
        &mut self.string
    }
}

impl AsRef<[AsciiChar]> for PascalStr {
    fn as_ref(&self) -> &[AsciiChar] {
        self.string.as_ref()
    }
}

impl AsMut<[AsciiChar]> for PascalStr {
    fn as_mut(&mut self) -> &mut [AsciiChar] {
        self.string.as_mut()
    }
}
|
<commit_msg>Add regression test for base_fieldsets fix.
<commit_before>from django.test import TestCase
# Create your tests here.
<commit_after>import re
from django.contrib.admin.sites import AdminSite
from django_webtest import WebTest
from django.core.urlresolvers import reverse
from .usertypes.email.models import EmailUser
class TestUserAdminBaseFieldsets(WebTest):
    """
    Tests a fix applied to ensure `base_fieldsets` are not
    lost in `UserChildAdmin` after calling `get_form()` with
    no existing instance (i.e. for a new user).
    """
    csrf_checks = False

    #: Form fields whose values legitimately differ between renders
    #: (CSRF token, time widgets) and must be ignored when comparing.
    VOLATILE_FIELD_PATTERNS = (
        r'<input name="csrfmiddlewaretoken" (.*?)/>',
        r'<input class="vTimeField" (.*?)/>',
        r'<input id="initial-id_last_login_1" (.*?)/>',
    )

    def setUp(self):
        self.site = AdminSite()
        self.staff_user = EmailUser.objects.create(
            email='staff@test.com',
            is_staff=True,
            is_active=True,
            is_superuser=True,
        )
        self.staff_user.set_password('abc123')
        self.staff_user.save()

    def _get_change_form_html(self):
        """Render the admin change form for the staff user, return its HTML."""
        response = self.app.get(
            reverse('admin:polymorphic_auth_user_change',
                    args=(self.staff_user.pk,)),
            user=self.staff_user
        ).maybe_follow(user=self.staff_user)
        return response.form.text

    def _strip_volatile_fields(self, form_html):
        """Remove per-render fields so two renders can be compared verbatim."""
        for pattern in self.VOLATILE_FIELD_PATTERNS:
            form_html = re.sub(pattern, '', form_html)
        return form_html

    def test_user_base_fieldsets(self):
        # Capture the staff user's change form before any other admin use.
        form1_response = self._strip_volatile_fields(
            self._get_change_form_html())

        # Create another new user via the add form; this calls get_form()
        # with no instance, which previously clobbered base_fieldsets.
        response = self.app.get(
            reverse('admin:polymorphic_auth_user_add'),
            user=self.staff_user
        ).maybe_follow(user=self.staff_user)
        form = response.form
        form['email'] = 'test@test.com'
        form['password1'] = 'testpassword'
        form['password2'] = 'testpassword'
        form.submit(user=self.staff_user)

        # Edit our staff user again and capture the form response.
        form2_response = self._strip_volatile_fields(
            self._get_change_form_html())

        # Form output should be identical to the first.
        # This will not be the case if the base_fieldsets have been lost.
        self.assertEqual(form1_response, form2_response)
|
<commit_msg>Use base::StringPairs where appropriate from /content
Because base/strings/string_split.h defines:
typedef std::vector<std::pair<std::string, std::string> > StringPairs;
BUG=412250
Review URL: https://codereview.chromium.org/600163003
Cr-Commit-Position: 972c6d2dc6dd5efdad1377c0d224e03eb8f276f7@{#296649}
<commit_before>// Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef CONTENT_COMMON_RESOURCE_DEVTOOLS_INFO_H_
#define CONTENT_COMMON_RESOURCE_DEVTOOLS_INFO_H_
#include <string>
#include <vector>
#include "base/basictypes.h"
#include "base/memory/ref_counted.h"
#include "content/common/content_export.h"
namespace content {
struct ResourceDevToolsInfo : base::RefCounted<ResourceDevToolsInfo> {
typedef std::vector<std::pair<std::string, std::string> >
HeadersVector;
CONTENT_EXPORT ResourceDevToolsInfo();
int32 http_status_code;
std::string http_status_text;
HeadersVector request_headers;
HeadersVector response_headers;
std::string request_headers_text;
std::string response_headers_text;
private:
friend class base::RefCounted<ResourceDevToolsInfo>;
CONTENT_EXPORT ~ResourceDevToolsInfo();
};
} // namespace content
#endif // CONTENT_COMMON_RESOURCE_DEVTOOLS_INFO_H_
<commit_after>// Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef CONTENT_COMMON_RESOURCE_DEVTOOLS_INFO_H_
#define CONTENT_COMMON_RESOURCE_DEVTOOLS_INFO_H_
#include <string>
#include <vector>
#include "base/basictypes.h"
#include "base/memory/ref_counted.h"
#include "base/strings/string_split.h"
#include "content/common/content_export.h"
namespace content {
// DevTools-facing metadata captured for a single resource load.
// Reference-counted (base::RefCounted) so instances can be shared;
// destruction therefore goes through Release(), hence the private dtor.
struct ResourceDevToolsInfo : base::RefCounted<ResourceDevToolsInfo> {
  // (name, value) pairs; alias of base::StringPairs from string_split.h.
  typedef base::StringPairs HeadersVector;

  CONTENT_EXPORT ResourceDevToolsInfo();

  // HTTP status line components.
  int32 http_status_code;
  std::string http_status_text;

  // Parsed request and response headers.
  HeadersVector request_headers;
  HeadersVector response_headers;

  // Raw, unparsed header text.
  std::string request_headers_text;
  std::string response_headers_text;

 private:
  friend class base::RefCounted<ResourceDevToolsInfo>;
  CONTENT_EXPORT ~ResourceDevToolsInfo();
};
} // namespace content
#endif // CONTENT_COMMON_RESOURCE_DEVTOOLS_INFO_H_
|
<commit_msg>Add wrapper with display width CSS
<commit_before>import * as React from "react";
export interface ToolbarProps {
}
export class Toolbar extends React.Component<ToolbarProps> {
public render() {
return "TOOLBAR";
}
}<commit_after>import * as React from "react";
// No props yet; placeholder for future configuration.
interface ToolbarProps {
}

// Local component state.
interface ToolbarState {
    // True when the toolbar should render its wide layout.
    displayWide: boolean;
}
export class Toolbar extends React.Component<ToolbarProps, ToolbarState> {
    constructor(props: ToolbarProps){
        super(props);
        // Initialize state by direct assignment: calling this.setState()
        // in a constructor is invalid in React (the component is not yet
        // mounted) and leaves this.state undefined, so render() would
        // throw on this.state.displayWide.
        this.state = {
            displayWide: false
        };
    }
    public render() {
        // CSS width variant is driven purely by component state.
        const className = this.state.displayWide ? "toolbar s-wide" : "toolbar s-narrow";
        return <div className={className}>
        </div>;
    }
}
<commit_msg>Add helper for unmarshaling the request body.
<commit_before>package gowebutils
import (
"io"
"io/ioutil"
"net/http"
)
const MaxRequestSize = 1048576 * 5
func PrepareRequestBody(r *http.Request) ([]byte, error) {
body, err := ioutil.ReadAll(io.LimitReader(r.Body, MaxRequestSize))
if err != nil {
return body, err
}
err = r.Body.Close()
return body, err
}
<commit_after>package gowebutils
import (
"encoding/json"
"io"
"io/ioutil"
"net/http"
)
const MaxRequestSize = 1048576 * 5
// PrepareRequestBody reads the request body, capped at MaxRequestSize
// bytes, then closes it. It returns the bytes read so far together with
// the first error encountered (read error, else the Close error).
func PrepareRequestBody(r *http.Request) ([]byte, error) {
	limited := io.LimitReader(r.Body, MaxRequestSize)
	body, readErr := ioutil.ReadAll(limited)
	if readErr != nil {
		return body, readErr
	}
	return body, r.Body.Close()
}
// UnmarshalRequestBody reads the (size-limited) request body via
// PrepareRequestBody and JSON-decodes it into v.
func UnmarshalRequestBody(r *http.Request, v interface{}) error {
	body, err := PrepareRequestBody(r)
	if err != nil {
		return err
	}
	return json.Unmarshal(body, v)
}
|
<commit_msg>Add all tags to type
<commit_before>module HLiquid.Syntax where
import Data.Text
data Liquid
= HTML Text
| If Expression Expression Expression
| ReturnBlock Expression
| Block Expression
deriving (Eq, Show)
data Expression = Expression [Text] -- Temporary Expand to actual expressions later
deriving (Eq, Show)
<commit_after>module HLiquid.Syntax where
import Data.Text
-- | A top-level piece of a parsed template: literal HTML or a Liquid block.
data Liquid
  = HTML Text              -- ^ Raw HTML passed through unchanged
  | ReturnBlock Expression -- ^ Output block (renders its expression)
  | Block Expression       -- ^ Tag block
  deriving (Eq, Show)

-- | Liquid expressions and tags. Most constructors are payload-less
-- placeholders to be expanded later (see note on 'Expression').
data Expression
  = Expression [Text] -- Temporary Expand to actual expressions later
  | If -- unless and elseif
  | Case
  -- Loop Tags
  | For
  | Break
  | Continue
  | Cycle
  | Tablerow
  -- Layout Tag
  | Comment
  | Include
  | Form
  | Layout
  | Paginate
  | Raw
  -- Variable Tag
  | Assign
  | Capture
  | Increment
  | Decrement
  -- Filters
  | Filter
  deriving (Eq, Show)

-- | Binary operators usable in Liquid conditions.
-- NOTE(review): unlike the types above, no deriving clause here —
-- confirm whether Eq/Show instances are intended.
data Operator
  = Equal
  | NotEqual
  | Greater
  | Less
  | GreaterEq
  | LessEq
  | Or
  | And
<commit_msg>Create sentinel rounds on Session creation
<commit_before>from django.db.models.signals import (
post_save,
)
from django.dispatch import receiver
from .models import (
Performance,
Session,
)
@receiver(post_save, sender=Performance)
def performance_post_save(sender, instance=None, created=False, raw=False, **kwargs):
"""Create sentinels."""
if not raw:
if created:
s = 1
while s <= instance.round.num_songs:
song = instance.songs.create(
performance=instance,
num=s,
)
s += 1
judges = instance.round.session.judges.filter(
category__in=[
instance.round.session.judges.model.CATEGORY.music,
instance.round.session.judges.model.CATEGORY.presentation,
instance.round.session.judges.model.CATEGORY.singing,
]
)
for judge in judges:
judge.scores.create(
judge=judge,
song=song,
category=judge.category,
kind=judge.kind,
)
@receiver(post_save, sender=Session)
def session_post_save(sender, instance=None, created=False, raw=False, **kwargs):
"""Create sentinels."""
if not raw:
if created:
i = 1
while i <= instance.num_rounds:
instance.rounds.create(
num=i,
kind=(instance.num_rounds - i) + 1,
)
i += 1
<commit_after>from django.db.models.signals import (
post_save,
)
from django.dispatch import receiver
from .models import (
Performance,
Session,
)
@receiver(post_save, sender=Session)
def session_post_save(sender, instance=None, created=False, raw=False, **kwargs):
    """Create sentinels."""
    # Skip fixture loading (raw=True); only seed rounds on first save.
    if not raw:
        if created:
            # One Round per num_rounds; `kind` counts down so the last
            # round created gets kind == 1.
            i = 1
            while i <= instance.num_rounds:
                instance.rounds.create(
                    num=i,
                    kind=(instance.num_rounds - i) + 1,
                )
                i += 1
@receiver(post_save, sender=Performance)
def performance_post_save(sender, instance=None, created=False, raw=False, **kwargs):
    """Create sentinels."""
    # Skip fixture loading (raw=True); only seed on initial creation.
    if not raw:
        if created:
            # Create one Song per num_songs for this performance.
            s = 1
            while s <= instance.round.num_songs:
                song = instance.songs.create(
                    performance=instance,
                    num=s,
                )
                s += 1
            # Scoring judges only (music/presentation/singing categories).
            judges = instance.round.session.judges.filter(
                category__in=[
                    instance.round.session.judges.model.CATEGORY.music,
                    instance.round.session.judges.model.CATEGORY.presentation,
                    instance.round.session.judges.model.CATEGORY.singing,
                ]
            )
            # NOTE(review): `song` here is whatever the loop above created
            # last, so scores are attached only to the final song — confirm
            # this is intentional rather than a loop-nesting bug.
            for judge in judges:
                judge.scores.create(
                    judge=judge,
                    song=song,
                    category=judge.category,
                    kind=judge.kind,
                )
<commit_msg>Fix spec issue with Transfer::Server ProtocolDetails
<commit_before>patches = [
{
"op": "move",
"from": "/ResourceTypes/AWS::Transfer::Server/Properties/Protocols/ItemType",
"path": "/ResourceTypes/AWS::Transfer::Server/Properties/Protocols/PrimitiveItemType",
},
{
"op": "replace",
"path": "/ResourceTypes/AWS::Transfer::Server/Properties/Protocols/PrimitiveItemType",
"value": "String",
},
{
"op": "move",
"from": "/ResourceTypes/AWS::Transfer::User/Properties/SshPublicKeys/ItemType",
"path": "/ResourceTypes/AWS::Transfer::User/Properties/SshPublicKeys/PrimitiveItemType",
},
{
"op": "replace",
"path": "/ResourceTypes/AWS::Transfer::User/Properties/SshPublicKeys/PrimitiveItemType",
"value": "String",
},
]
<commit_after>patches = [
{
"op": "move",
"from": "/ResourceTypes/AWS::Transfer::Server/Properties/Protocols/ItemType",
"path": "/ResourceTypes/AWS::Transfer::Server/Properties/Protocols/PrimitiveItemType",
},
{
"op": "replace",
"path": "/ResourceTypes/AWS::Transfer::Server/Properties/Protocols/PrimitiveItemType",
"value": "String",
},
{
"op": "move",
"from": "/ResourceTypes/AWS::Transfer::User/Properties/SshPublicKeys/ItemType",
"path": "/ResourceTypes/AWS::Transfer::User/Properties/SshPublicKeys/PrimitiveItemType",
},
{
"op": "replace",
"path": "/ResourceTypes/AWS::Transfer::User/Properties/SshPublicKeys/PrimitiveItemType",
"value": "String",
},
{
"op": "move",
"from": "/PropertyTypes/AWS::Transfer::Server.ProtocolDetails/Properties/As2Transports/ItemType",
"path": "/PropertyTypes/AWS::Transfer::Server.ProtocolDetails/Properties/As2Transports/PrimitiveItemType",
},
{
"op": "replace",
"path": "/PropertyTypes/AWS::Transfer::Server.ProtocolDetails/Properties/As2Transports/PrimitiveItemType",
"value": "String",
},
]
|
<commit_msg>Use django utils force_bytes to arguments of hashlib
<commit_before>
import hashlib
from django.utils.http import urlencode, urlquote
from .. import register
@register.simple_tag()
def get_gravatar_url(user, size, rating='g', default='identicon'):
url = "http://www.gravatar.com/avatar/"
hash = hashlib.md5(user.email.strip().lower()).hexdigest()
data = urlencode({'d': urlquote(default), 's': str(size), 'r': rating})
return "".join((url, hash, '?', data))
<commit_after>
import hashlib
from django.utils.http import urlencode, urlquote
from django.utils.encoding import force_bytes
from .. import register
@register.simple_tag()
def get_gravatar_url(user, size, rating='g', default='identicon'):
    """Build the Gravatar avatar URL for ``user``.

    ``size`` is the pixel size, ``rating`` the maximum content rating and
    ``default`` the fallback image style, per the Gravatar API.
    """
    url = "http://www.gravatar.com/avatar/"
    # force_bytes already UTF-8-encodes str input, so the previous explicit
    # .encode('utf_8') before it was a redundant no-op. md5 (not a secure
    # hash) is what the Gravatar API specifies for the email digest.
    email_hash = hashlib.md5(force_bytes(user.email.strip().lower())).hexdigest()
    data = urlencode({'d': urlquote(default), 's': str(size), 'r': rating})
    return "".join((url, email_hash, '?', data))
|
<commit_msg>Prepend '/' to urls gotten from asset_url() to make them absolute. This fixes /projects/* documentation not displaying correctly.
<commit_before>class Integration:
def __init__(self, env):
"""
:type env: webassets.Environment
"""
self.env = env
def asset_url(self, bundle_name):
"""
:type bundle_name: str
"""
return self.env[bundle_name].urls()[0]
def register(self, app):
app.jinja_env.globals.update(asset_url=self.asset_url)
<commit_after>class Integration:
def __init__(self, env):
"""
:type env: webassets.Environment
"""
self.env = env
def asset_url(self, bundle_name):
"""
:type bundle_name: str
"""
urls = self.env[bundle_name].urls()
return "/{}".format(urls[0]) # /{} to make url absolute
def register(self, app):
app.jinja_env.globals.update(asset_url=self.asset_url)
|
<commit_msg>Use JSON to parse hlint output
<commit_before>
"""This module exports the Hlint plugin class."""
from SublimeLinter.lint import Linter
class Hlint(Linter):
"""Provides an interface to hlint."""
defaults = {
'selector': 'source.haskell'
}
cmd = 'hlint'
regex = (
r'^.+:(?P<line>\d+):'
'(?P<col>\d+):\s*'
'(?:(?P<error>Error)|(?P<warning>Warning)):\s*'
'(?P<message>.+)$'
)
multiline = True
tempfile_suffix = 'hs'
<commit_after>
"""This module exports the Hlint plugin class."""
import json
from SublimeLinter.lint import Linter, LintMatch
class Hlint(Linter):
    """Provides an interface to hlint."""

    # --json requests structured output; "-" makes hlint read from stdin.
    cmd = 'hlint ${args} --json -'

    defaults = {
        'selector': 'source.haskell'
    }

    def find_errors(self, output):
        # type: (str) -> Iterator[LintMatch]
        # NOTE(review): Iterator appears only in this type comment and is
        # not imported in the visible code — confirm the checker setup.
        errors = json.loads(output)
        for error in errors:
            # hlint JSON fields used here: severity, startLine/startColumn
            # (1-based), hint, from (offending code), to (replacement).
            message = "{hint}. Found: {from}".format(**error)
            if error['to']:
                message += " Perhaps: {to}".format(**error)
            yield LintMatch(
                error_type=error['severity'].lower(),
                line=error['startLine'] - 1,
                col=error['startColumn'] - 1,
                message=message
            )
|
<commit_msg>Add builtAssets to webserver-writable dirs
<commit_before>import os
import sys
import subprocess
server_writable_directories = [
"vendor/solr/apache-solr-4.0.0/example/solr/collection1/data/",
"vendor/solr/apache-solr-4.0.0/example/solr-webapp/",
"lib/dotstorm/assets/dotstorm/uploads/",
"lib/www/assets/group_logos/",
"lib/www/assets/user_icons/",
]
BASE = os.path.join(os.path.dirname(__file__), "..")
def set_permissions(user):
for path in server_writable_directories:
print user, path
if not os.path.exists(path):
os.makedirs(path)
subprocess.check_call(["chown", "-R", user, os.path.join(BASE, path)])
if __name__ == "__main__":
try:
target_user = sys.argv[1]
except IndexError:
print "Missing required parameter `target user`."
print "Usage: set_deploy_permissions.py [username]"
sys.exit(1)
set_permissions(target_user)
<commit_after>import os
import sys
import subprocess
server_writable_directories = [
"vendor/solr/apache-solr-4.0.0/example/solr/collection1/data/",
"vendor/solr/apache-solr-4.0.0/example/solr-webapp/",
"lib/dotstorm/assets/dotstorm/uploads/",
"lib/www/assets/group_logos/",
"lib/www/assets/user_icons/",
"builtAssets/",
]
BASE = os.path.join(os.path.dirname(__file__), "..")
def set_permissions(user):
    """chown -R each server-writable directory to `user`, creating any
    directory that does not exist yet. (Python 2 script.)"""
    for path in server_writable_directories:
        print user, path
        # NOTE(review): the existence check uses `path` relative to the
        # current working directory, while chown targets BASE/path —
        # confirm the script is always run from BASE.
        if not os.path.exists(path):
            os.makedirs(path)
        subprocess.check_call(["chown", "-R", user, os.path.join(BASE, path)])

if __name__ == "__main__":
    # Usage: set_deploy_permissions.py [username]
    try:
        target_user = sys.argv[1]
    except IndexError:
        print "Missing required parameter `target user`."
        print "Usage: set_deploy_permissions.py [username]"
        sys.exit(1)
    set_permissions(target_user)
|
<commit_msg>Add tests for component to SUSE package mapping
Signed-off-by: David Disseldorp <589a549dc9f982d9f46aeeb82a09ab6d87ccf1d8@suse.de>
<commit_before>from ceph_deploy.hosts import suse
class TestSuseInit(object):
def setup(self):
self.host = suse
def test_choose_init_default(self):
self.host.release = None
init_type = self.host.choose_init()
assert init_type == "sysvinit"
def test_choose_init_SLE_11(self):
self.host.release = '11'
init_type = self.host.choose_init()
assert init_type == "sysvinit"
def test_choose_init_SLE_12(self):
self.host.release = '12'
init_type = self.host.choose_init()
assert init_type == "systemd"
def test_choose_init_openSUSE_13_1(self):
self.host.release = '13.1'
init_type = self.host.choose_init()
assert init_type == "systemd"
<commit_after>from ceph_deploy.hosts import suse
from ceph_deploy.hosts.suse.install import map_components
class TestSuseInit(object):
    """choose_init() should pick sysvinit for SLE 11 and unknown releases,
    and systemd from SLE 12 / openSUSE 13.1 onwards."""

    def setup(self):
        self.host = suse

    def test_choose_init_default(self):
        # Unknown release falls back to sysvinit.
        self.host.release = None
        init_type = self.host.choose_init()
        assert init_type == "sysvinit"

    def test_choose_init_SLE_11(self):
        self.host.release = '11'
        init_type = self.host.choose_init()
        assert init_type == "sysvinit"

    def test_choose_init_SLE_12(self):
        self.host.release = '12'
        init_type = self.host.choose_init()
        assert init_type == "systemd"

    def test_choose_init_openSUSE_13_1(self):
        self.host.release = '13.1'
        init_type = self.host.choose_init()
        assert init_type == "systemd"
class TestSuseMapComponents(object):
    """map_components() translates ceph-deploy component names into the
    SUSE package names that provide them."""

    def test_valid(self):
        # ceph-osd is provided by the umbrella 'ceph' package on SUSE;
        # the other component names map to same-named packages.
        pkgs = map_components(['ceph-osd', 'ceph-common', 'ceph-radosgw'])
        assert 'ceph' in pkgs
        assert 'ceph-common' in pkgs
        assert 'ceph-radosgw' in pkgs
        assert 'ceph-osd' not in pkgs

    def test_invalid(self):
        # Unknown components are dropped rather than passed through.
        pkgs = map_components(['not-provided', 'ceph-mon'])
        assert 'not-provided' not in pkgs
        assert 'ceph' in pkgs
|
<commit_msg>Add a workaround for processing rules without www
<commit_before>import re
class HostUtils(object):
@classmethod
def host_selectors(self, all_selectors, host):
if host is None:
return None
if host in all_selectors:
selectors = all_selectors[host]
if type(selectors) is dict:
selectors = all_selectors[selectors['reference']]
return selectors
# for regex_string in all_selectors['regexs_references']:
# match_data = re.compile(regex_string).search(host)
# if match_data:
# reference_host = all_selectors['regexs_references'][regex_string]['reference']
# return all_selectors[reference_host]
<commit_after>import re
class HostUtils(object):
    """Helpers for looking up per-host selector rules."""

    @classmethod
    def host_selectors(cls, all_selectors, host):
        """Return the selector rules for ``host`` from ``all_selectors``.

        A leading ``www.`` is ignored. A dict entry is treated as an alias
        ``{'reference': other_host}`` and resolved one level deep. Returns
        ``None`` when ``host`` is ``None`` or unknown.
        """
        if host is None:
            return None
        # Strip only a *leading* "www." — the previous str.replace() also
        # mangled hosts that merely contain "www." elsewhere
        # (e.g. "awww.example.com" became "aexample.com").
        if host.startswith("www."):
            host = host[len("www."):]
        if host not in all_selectors:
            return None
        selectors = all_selectors[host]
        if isinstance(selectors, dict):
            # Alias entry: follow the reference to the real rules.
            selectors = all_selectors[selectors['reference']]
        return selectors
|
<commit_msg>Add singleton methods to access libuv version
<commit_before>
ID id_call;
VALUE mRbuv;
void Init_rbuv() {
id_call = rb_intern("call");
mRbuv = rb_define_module("Rbuv");
Init_rbuv_error();
Init_rbuv_handle();
Init_rbuv_loop();
Init_rbuv_timer();
Init_rbuv_stream();
Init_rbuv_tcp();
Init_rbuv_signal();
}
<commit_after>
/* Cached symbol id for #call, used when invoking Ruby callbacks. */
ID id_call;
/* The top-level Rbuv module object. */
VALUE mRbuv;

VALUE rbuv_version(VALUE self);
VALUE rbuv_version_string(VALUE self);

/* Extension entry point: defines the Rbuv module, its singleton version
 * accessors, and initializes every submodule/class. */
void Init_rbuv() {
  id_call = rb_intern("call");
  mRbuv = rb_define_module("Rbuv");
  rb_define_singleton_method(mRbuv, "version", rbuv_version, 0);
  rb_define_singleton_method(mRbuv, "version_string", rbuv_version_string, 0);
  Init_rbuv_error();
  Init_rbuv_handle();
  Init_rbuv_loop();
  Init_rbuv_timer();
  Init_rbuv_stream();
  Init_rbuv_tcp();
  Init_rbuv_signal();
}

/* Rbuv.version -> Integer: the packed libuv version number (uv_version()). */
VALUE rbuv_version(VALUE self) {
  return UINT2NUM(uv_version());
}

/* Rbuv.version_string -> String: the human-readable libuv version. */
VALUE rbuv_version_string(VALUE self) {
  return rb_str_new2(uv_version_string());
}
|
<commit_msg>Use 3 rows by default in test
svn changeset:16411/svn branch:6.5
<commit_before>package com.vaadin.tests.components.textarea;
import java.util.LinkedHashMap;
import com.vaadin.tests.components.abstractfield.AbstractTextFieldTest;
import com.vaadin.ui.TextArea;
public class TextAreaTest extends AbstractTextFieldTest<TextArea> {
private Command<TextArea, Boolean> wordwrapCommand = new Command<TextArea, Boolean>() {
public void execute(TextArea c, Boolean value, Object data) {
c.setWordwrap(value);
}
};
private Command<TextArea, Integer> rowsCommand = new Command<TextArea, Integer>() {
public void execute(TextArea c, Integer value, Object data) {
c.setRows(value);
}
};
@Override
protected Class<TextArea> getTestClass() {
return TextArea.class;
}
@Override
protected void createActions() {
super.createActions();
createWordwrapAction(CATEGORY_STATE);
createRowsAction(CATEGORY_STATE);
}
private void createRowsAction(String category) {
LinkedHashMap<String, Integer> options = createIntegerOptions(20);
createSelectAction("Rows", category, options, "0", rowsCommand);
}
private void createWordwrapAction(String category) {
createBooleanAction("Wordwrap", category, false, wordwrapCommand);
}
}
<commit_after>package com.vaadin.tests.components.textarea;
import java.util.LinkedHashMap;
import com.vaadin.tests.components.abstractfield.AbstractTextFieldTest;
import com.vaadin.ui.TextArea;
/**
 * Manual test UI for TextArea: adds wordwrap and row-count controls on top
 * of the shared AbstractTextFieldTest actions.
 */
public class TextAreaTest extends AbstractTextFieldTest<TextArea> {

    // Toggles word wrapping on the tested TextArea.
    private Command<TextArea, Boolean> wordwrapCommand = new Command<TextArea, Boolean>() {
        public void execute(TextArea c, Boolean value, Object data) {
            c.setWordwrap(value);
        }
    };

    // Sets the number of visible rows on the tested TextArea.
    private Command<TextArea, Integer> rowsCommand = new Command<TextArea, Integer>() {
        public void execute(TextArea c, Integer value, Object data) {
            c.setRows(value);
        }
    };

    @Override
    protected Class<TextArea> getTestClass() {
        return TextArea.class;
    }

    @Override
    protected void createActions() {
        super.createActions();
        createWordwrapAction(CATEGORY_FEATURES);
        createRowsAction(CATEGORY_FEATURES);
    }

    private void createRowsAction(String category) {
        // Offer 0..20 rows, defaulting to "3".
        LinkedHashMap<String, Integer> options = createIntegerOptions(20);
        createSelectAction("Rows", category, options, "3", rowsCommand);
    }

    private void createWordwrapAction(String category) {
        createBooleanAction("Wordwrap", category, false, wordwrapCommand);
    }
}
|
<commit_msg>Make our IFrameWidget more configurable
<commit_before>
from tw.api import Widget
class IFrameWidget(Widget):
params = ['id', 'url']
template = """
<iframe id="${id}" src="${url}" width="100%" height="100%">
<p>Your browser does not support iframes.</p>
</iframe>
"""
engine_name = 'mako'
<commit_after>
from tw.api import Widget
class IFrameWidget(Widget):
    """ToscaWidgets widget rendering an <iframe> preceded by an optional
    <h1> title; browsers without iframe support see the fallback text."""

    # Template parameters callers may override.
    params = ['id', 'url', 'title', 'height', 'width']

    template = """
    <h1>${title}</h1>
    <iframe id="${id}" src="${url}" width="${width}" height="${height}">
    <p>Your browser does not support iframes.</p>
    </iframe>
    """

    # Defaults: empty title, full-size frame.
    title = ''
    height = width = '100%'
    engine_name = 'mako'

# Shared, ready-to-render instance.
iframe_widget = IFrameWidget('iframe_widget')
|
<commit_msg>Update database close timeout value.
<commit_before>import thread
import time
from django.db import close_old_connections
class DatabaseConnectionMaintainer(object):
    """Periodically closes idle Django DB connections from a helper thread.

    A background thread wakes on a fixed interval; if no database activity
    was flagged since the last wake-up, stale connections are closed via
    django.db.close_old_connections().
    """

    def __init__(self):
        self.clients = set()
        # self.device_to_protocol = {}
        # Set to True by refresh_timeout() whenever DB activity happens.
        self.is_recent_db_change_occurred = False
        # Arm the periodic check (interval in seconds).
        self.delay_and_execute(3600, self.close_db_connection_if_needed)

    def close_db_connection_if_needed(self):
        # Only close when the DB was idle for a full interval.
        if not self.is_recent_db_change_occurred:
            close_old_connections()
            print "db connection closed"
        self.is_recent_db_change_occurred = False
        # Re-arm the timer for the next cycle.
        self.delay_and_execute(3600, self.close_db_connection_if_needed)

    def refresh_timeout(self):
        # Mark recent DB activity so the next check skips closing.
        self.is_recent_db_change_occurred = True

    def delay_and_execute(self, timeout, callback):
        # Fire-and-forget thread: sleep `timeout` seconds, then call back once.
        thread.start_new_thread(self.periodical_task, (timeout, callback))

    # noinspection PyMethodMayBeStatic
    def periodical_task(self, timeout, callback):
        time.sleep(timeout)
        callback()
<commit_after>import thread
import time
from django.db import close_old_connections
class DatabaseConnectionMaintainer(object):
    """Periodically closes idle Django DB connections from a helper thread."""

    # Idle interval (seconds) after which stale connections are closed.
    DB_TIMEOUT_SECONDS = 5*60

    def __init__(self):
        self.clients = set()
        # self.device_to_protocol = {}
        # Set to True by refresh_timeout() whenever DB activity happens.
        self.is_recent_db_change_occurred = False
        # Arm the periodic check.
        self.delay_and_execute(self.DB_TIMEOUT_SECONDS, self.close_db_connection_if_needed)

    def close_db_connection_if_needed(self):
        # Only close when the DB was idle for a full interval.
        if not self.is_recent_db_change_occurred:
            close_old_connections()
            print "db connection closed"
        self.is_recent_db_change_occurred = False
        # Re-arm the timer for the next cycle.
        self.delay_and_execute(self.DB_TIMEOUT_SECONDS, self.close_db_connection_if_needed)

    def refresh_timeout(self):
        # Mark recent DB activity so the next check skips closing.
        self.is_recent_db_change_occurred = True

    def delay_and_execute(self, timeout, callback):
        # Fire-and-forget thread: sleep `timeout` seconds, then call back once.
        thread.start_new_thread(self.periodical_task, (timeout, callback))

    # noinspection PyMethodMayBeStatic
    def periodical_task(self, timeout, callback):
        time.sleep(timeout)
        callback()
|
<commit_msg>Move the request call inside the try block
<commit_before>import requests
import os
class UnauthorizedToken(Exception):
    """Raised when the Udacity API rejects the auth token."""
    pass


class UdacityConnection:
    """Thin client for the Udacity reviewer certifications endpoint."""

    def __init__(self):
        self.certifications_url = 'https://review-api.udacity.com/api/v1/me/certifications.json'
        # Token comes from the environment; may be None if unset.
        token = os.environ.get('UDACITY_AUTH_TOKEN')
        self.headers = {'Authorization': token, 'Content-Length': '0'}

    def certifications(self):
        """Return the project ids the user is certified for.

        Raises:
            UnauthorizedToken: if an HTTPError escapes the JSON handling.
        """
        # NOTE(review): the GET itself is outside the try, so a failing
        # request is not converted to UnauthorizedToken -- confirm intent.
        raw_response = requests.get(self.certifications_url, headers=self.headers)
        try:
            response = raw_response.json()
            certifications_list = [item['project_id'] for item in response if item['status'] == 'certified']
            return certifications_list
        except requests.exceptions.HTTPError:
            raise UnauthorizedToken
<commit_after>import requests
import os
class UnauthorizedToken(Exception):
    """Raised when the Udacity API rejects the auth token."""
    pass


class UdacityConnection:
    """Thin client for the Udacity reviewer certifications endpoint."""

    def __init__(self):
        self.certifications_url = 'https://review-api.udacity.com/api/v1/me/certifications.json'
        # Token comes from the environment; may be None if unset.
        token = os.environ.get('UDACITY_AUTH_TOKEN')
        self.headers = {'Authorization': token, 'Content-Length': '0'}

    def certifications(self):
        """Return the project ids the user is certified for.

        Raises:
            UnauthorizedToken: if the request raises an HTTPError.
        """
        try:
            # NOTE(review): requests.get() only raises HTTPError when
            # raise_for_status() is called -- confirm this path can fire.
            raw_response = requests.get(self.certifications_url, headers=self.headers)
            response = raw_response.json()
            certifications_list = [item['project_id'] for item in response if item['status'] == 'certified']
            return certifications_list
        except requests.exceptions.HTTPError:
            raise UnauthorizedToken
|
<commit_msg>Remove the problematic migration entirely
- The thumbnail check code is run every time the server is started anyway!
<commit_before>
from django.db import migrations
from django.db.utils import OperationalError, ProgrammingError
from part.models import Part
from stdimage.utils import render_variations
def create_thumbnails(apps, schema_editor):
    """
    Create thumbnails for all existing Part images.
    """
    try:
        for part in Part.objects.all():
            # Render thumbnail for each existing Part
            if part.image:
                try:
                    part.image.render_variations()
                except FileNotFoundError:
                    # NOTE(review): part.image() calls the field object and
                    # will raise TypeError -- looks like it should be
                    # part.image. Confirm before relying on this branch.
                    print("Missing image:", part.image())
                    # The image is missing, so clear the field
                    part.image = None
                    part.save()
    except (OperationalError, ProgrammingError):
        # Migrations have not yet been applied - table does not exist
        print("Could not generate Part thumbnails")


class Migration(migrations.Migration):

    dependencies = [
        ('part', '0033_auto_20200404_0445'),
    ]

    operations = [
        # Forward-only data migration; no reverse handler was provided.
        migrations.RunPython(create_thumbnails),
    ]
<commit_after>
from django.db import migrations
def create_thumbnails(apps, schema_editor):
    """
    Create thumbnails for all existing Part images.

    Note: This functionality is now performed in apps.py,
    as running the thumbnail script here caused too many database level errors.

    This migration is left here to maintain the database migration history
    """
    pass


class Migration(migrations.Migration):

    dependencies = [
        ('part', '0033_auto_20200404_0445'),
    ]

    operations = [
        # No-op in both directions; kept so the migration history stays valid.
        migrations.RunPython(create_thumbnails, reverse_code=create_thumbnails),
    ]
|
<commit_msg>Fix DatabaseCreation from django 1.7
<commit_before>
import django
from django.db.backends.postgresql_psycopg2.creation import DatabaseCreation as OriginalDatabaseCreation
class DatabaseCreationMixin16(object):
    """Django <=1.6 variant: close pooled connections around test-db setup."""

    def _create_test_db(self, verbosity, autoclobber):
        self.connection.closeall()
        return super(DatabaseCreationMixin16, self)._create_test_db(verbosity, autoclobber)

    def _destroy_test_db(self, test_database_name, verbosity):
        self.connection.closeall()
        return super(DatabaseCreationMixin16, self)._destroy_test_db(test_database_name, verbosity)


class DatabaseCreationMixin17(object):
    """Django 1.7 variant.

    NOTE(review): these overrides pass a `keepdb` argument through to the
    base class -- confirm the installed Django 1.7 signatures accept it,
    otherwise these calls raise TypeError.
    """

    def _create_test_db(self, verbosity, autoclobber, keepdb=False):
        self.connection.closeall()
        return super(DatabaseCreationMixin17, self)._create_test_db(verbosity, autoclobber, keepdb)

    def _destroy_test_db(self, test_database_name, verbosity, keepdb=False):
        self.connection.closeall()
        return super(DatabaseCreationMixin17, self)._destroy_test_db(test_database_name, verbosity, keepdb)


# Pick the mixin matching the installed Django version.
if django.VERSION >= (1, 7):
    class DatabaseCreationMixin(DatabaseCreationMixin17):
        pass
else:
    class DatabaseCreationMixin(DatabaseCreationMixin16):
        pass


class DatabaseCreation(DatabaseCreationMixin, OriginalDatabaseCreation):
    """Postgres DatabaseCreation that closes pooled connections first."""
    pass
<commit_after>
import django
from django.db.backends.postgresql_psycopg2.creation import DatabaseCreation as OriginalDatabaseCreation
class DatabaseCreationMixin16(object):
    """Django <=1.6 variant: close pooled connections around test-db setup."""

    def _create_test_db(self, verbosity, autoclobber):
        self.connection.closeall()
        return super(DatabaseCreationMixin16, self)._create_test_db(verbosity, autoclobber)

    def _destroy_test_db(self, test_database_name, verbosity):
        self.connection.closeall()
        return super(DatabaseCreationMixin16, self)._destroy_test_db(test_database_name, verbosity)


class DatabaseCreationMixin17(object):
    """Django 1.7 variant; same signatures as the base class (no keepdb)."""

    def _create_test_db(self, verbosity, autoclobber):
        self.connection.closeall()
        return super(DatabaseCreationMixin17, self)._create_test_db(verbosity, autoclobber)

    def _destroy_test_db(self, test_database_name, verbosity):
        self.connection.closeall()
        return super(DatabaseCreationMixin17, self)._destroy_test_db(test_database_name, verbosity)


# Pick the mixin matching the installed Django version.
if django.VERSION >= (1, 7):
    class DatabaseCreationMixin(DatabaseCreationMixin17):
        pass
else:
    class DatabaseCreationMixin(DatabaseCreationMixin16):
        pass


class DatabaseCreation(DatabaseCreationMixin, OriginalDatabaseCreation):
    """Postgres DatabaseCreation that closes pooled connections first."""
    pass
|
<commit_msg>Add stop waiter to call buffer
<commit_before>from constants import *
import collections
import uuid
class CallBuffer():
    """Queue of pending calls plus a single parked waiter callback.

    Producers enqueue calls via create_call(); a consumer parks a callback
    with wait_for_calls() and receives calls as batches (lists of dicts).
    """

    def __init__(self):
        # Callback waiting for the next batch of calls (at most one).
        self.waiter = None
        # Bounded FIFO of calls made while no waiter was registered.
        self.queue = collections.deque(maxlen=CALL_QUEUE_MAX)
        # call id -> response callback for calls expecting a reply.
        self.call_waiters = {}

    def wait_for_calls(self, callback):
        # Replace any existing waiter, flushing it with an empty batch.
        if self.waiter:
            self.waiter([])
            self.waiter = None

        # Drain everything queued so far.
        calls = []
        while True:
            try:
                calls.append(self.queue.popleft())
            except IndexError:
                break

        if calls:
            callback(calls)
            return

        # Nothing queued: park the callback until the next create_call().
        self.waiter = callback

    def cancel_waiter(self):
        # Drop the parked waiter without notifying it.
        self.waiter = None

    def return_call(self, id, response):
        # Deliver a response to the callback registered for this call id.
        callback = self.call_waiters.pop(id, None)
        if callback:
            callback(response)

    def create_call(self, command, args, callback=None):
        call_id = uuid.uuid4().hex
        call = {
            'id': call_id,
            'command': command,
            'args': args,
        }

        if callback:
            self.call_waiters[call_id] = callback

        # Hand the call straight to a parked waiter, else queue it.
        if self.waiter:
            self.waiter([call])
            self.waiter = None
        else:
            self.queue.append(call)
<commit_after>from constants import *
import collections
import uuid
class CallBuffer():
    """Queue of pending calls plus a single parked waiter callback.

    Producers enqueue calls via create_call(); a consumer parks a callback
    with wait_for_calls() and receives calls as batches (lists of dicts).
    stop_waiter() wakes a parked waiter with None as a stop signal.
    """

    def __init__(self):
        # Callback waiting for the next batch of calls (at most one).
        self.waiter = None
        # Bounded FIFO of calls made while no waiter was registered.
        self.queue = collections.deque(maxlen=CALL_QUEUE_MAX)
        # call id -> response callback for calls expecting a reply.
        self.call_waiters = {}

    def wait_for_calls(self, callback):
        # Wake any existing waiter (with None) before installing a new one.
        self.stop_waiter()

        # Drain everything queued so far.
        calls = []
        while True:
            try:
                calls.append(self.queue.popleft())
            except IndexError:
                break

        if calls:
            callback(calls)
            return

        # Nothing queued: park the callback until the next create_call().
        self.waiter = callback

    def cancel_waiter(self):
        # Drop the parked waiter without notifying it.
        self.waiter = None

    def stop_waiter(self):
        # Wake the parked waiter with None -- the "stop" sentinel.
        if self.waiter:
            self.waiter(None)
        self.waiter = None

    def return_call(self, id, response):
        # Deliver a response to the callback registered for this call id.
        callback = self.call_waiters.pop(id, None)
        if callback:
            callback(response)

    def create_call(self, command, args, callback=None):
        call_id = uuid.uuid4().hex
        call = {
            'id': call_id,
            'command': command,
            'args': args,
        }

        if callback:
            self.call_waiters[call_id] = callback

        # Hand the call straight to a parked waiter, else queue it.
        if self.waiter:
            self.waiter([call])
            self.waiter = None
        else:
            self.queue.append(call)
|
<commit_msg>Fix hashcode generation for existing URLs
<commit_before>
import string
from django.db import models
class Blacklist(models.Model):
    """Domain that must not be shortened."""
    domain = models.CharField(max_length=255, unique=True, null=True)

    def __unicode__(self):
        return self.domain


class URL(models.Model):
    """Shortened URL: maps a base-36 hashcode to a long URL."""
    hashcode = models.CharField(max_length=10, unique=True,
                                db_index=True, null=True)
    longurl = models.CharField(max_length=1024, unique=True,
                               db_index=True, null=True)
    views = models.IntegerField(default=0)
    ip = models.GenericIPAddressField(null=True)
    data = models.DateTimeField(auto_now_add=True, null=True)

    def save(self, *args, **kwargs):
        # Bug fix: generate the hashcode only on first save. Previously it
        # was recomputed on *every* save, which changed (and could collide)
        # the short URL of an already-stored row.
        if not self.pk:
            if URL.objects.count():
                # Base-36 encode (latest id + 1) as the short code.
                last = URL.objects.latest('id').pk + 1
                alphabet = string.digits + string.ascii_lowercase
                base36 = ''
                while last != 0:
                    last, i = divmod(last, len(alphabet))
                    base36 = alphabet[i] + base36
                self.hashcode = base36
            else:
                self.hashcode = '1'
        return super(URL, self).save(*args, **kwargs)

    def short_url(self, request):
        # Absolute short URL built from the current request's scheme/host.
        return ''.join([
            request.scheme,
            '://', request.get_host(),
            '/', self.hashcode,
        ])

    def __unicode__(self):
        return ' - '.join([self.hashcode, self.longurl])
<commit_after>
import string
from django.db import models
class Blacklist(models.Model):
    """Domain that must not be shortened."""
    domain = models.CharField(max_length=255, unique=True, null=True)

    def __unicode__(self):
        return self.domain


class URL(models.Model):
    """Shortened URL: maps a base-36 hashcode to a long URL."""
    hashcode = models.CharField(max_length=10, unique=True,
                                db_index=True, null=True)
    longurl = models.CharField(max_length=1024, unique=True,
                               db_index=True, null=True)
    views = models.IntegerField(default=0)
    ip = models.GenericIPAddressField(null=True)
    data = models.DateTimeField(auto_now_add=True, null=True)

    def save(self, *args, **kwargs):
        # Generate the hashcode only on first save so an existing row's
        # short URL never changes.
        if not self.pk:
            if URL.objects.count():
                # Base-36 encode (latest id + 1) as the short code.
                last = URL.objects.latest('id').pk + 1
                alphabet = string.digits + string.ascii_lowercase
                base36 = ''
                while last != 0:
                    last, i = divmod(last, len(alphabet))
                    base36 = alphabet[i] + base36
                self.hashcode = base36
            else:
                self.hashcode = '1'
        return super(URL, self).save(*args, **kwargs)

    def short_url(self, request):
        # Absolute short URL built from the current request's scheme/host.
        return ''.join([
            request.scheme,
            '://', request.get_host(),
            '/', self.hashcode,
        ])

    def __unicode__(self):
        return ' - '.join([self.hashcode, self.longurl])
|
<commit_msg>Add a bunch of LJ tables
<commit_before>from sqlalchemy import (
LargeBinary, Boolean, Column, Integer, String, PickleType, ForeignKey)
from sqlalchemy.orm import backref, relationship
from sqlalchemy.orm.exc import NoResultFound
from myarchive.db.tables.base import Base
from myarchive.db.tables.file import TrackedFile
from myarchive.db.tables.association_tables import (
at_tweet_tag, at_tweet_file, at_twuser_file)
class LJJournal(Base):
    """Placeholder for a LJ-like journal table; not yet implemented.

    NOTE(review): the previous docstring said "raw tweet" -- apparently
    copied from the Twitter module.
    """
    pass
<commit_after>from sqlalchemy import (
Column, Integer, String, TIMESTAMP, ForeignKey)
from sqlalchemy.orm import backref, relationship
from sqlalchemy.orm.exc import NoResultFound
from myarchive.db.tables.base import Base
from myarchive.db.tables.file import TrackedFile
class LJHost(Base):
    """A LiveJournal-like service host."""
    __tablename__ = 'lj_hosts'

    id = Column(Integer, index=True, primary_key=True)
    url = Column(String)

    def __init__(self, url):
        self.url = url


class LJUser(Base):
    """A user retrieved from a LJ-like service."""
    __tablename__ = 'lj_users'

    id = Column(Integer, index=True, primary_key=True)
    username = Column(String)
    # NOTE(review): __init__ never sets host_id -- confirm it is assigned
    # elsewhere before rows are committed.
    host_id = Column(Integer, ForeignKey("lj_hosts.id"))

    def __init__(self, user_id, username):
        self.id = user_id
        self.username = username


class LJEntries(Base):
    """An entry (post) retrieved from a LJ-like service."""
    __tablename__ = 'lj_entries'

    id = Column(Integer, index=True, primary_key=True)
    # itemid is unique only to the user, possibly only to the pull...
    itemid = Column(Integer)
    eventtime = Column(TIMESTAMP)
    subject = Column(String)
    text = Column(String)
    current_music = Column(String)
    # NOTE(review): user_id is not set by __init__ either -- verify.
    user_id = Column(Integer, ForeignKey("lj_users.id"))

    def __init__(self, itemid, eventtime, subject, text, current_music):
        self.itemid = itemid
        self.eventtime = eventtime
        self.subject = subject
        self.text = text
        self.current_music = current_music

    # props["taglist"]
    # props["current_music"]


class LJComments(Base):
    """A comment retrieved from a LJ-like service."""
    __tablename__ = 'lj_comments'

    id = Column(Integer, index=True, primary_key=True)
    body = Column(String)
    date = Column(TIMESTAMP)
    # Self-referential FK: parent comment in the thread, if any.
    parent_id = Column(Integer, ForeignKey("lj_comments.id"))
    entry_id = Column(Integer, ForeignKey("lj_entries.id"))
|
<commit_msg>Remove the extra comma at the end.
Fixes #7
<commit_before>import argparse
import simplejson as json
from esridump.dumper import EsriDumper
def main():
    """Dump an Esri feature service to GeoJSON (or JSON-lines).

    NOTE(review): in FeatureCollection mode a comma is written after *every*
    feature, including the last, producing invalid JSON; the commented-out
    seek() hints at an unfinished attempt to strip it.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument("url")
    parser.add_argument("outfile", type=argparse.FileType('w'))
    parser.add_argument("--jsonlines", action='store_true', default=False)
    args = parser.parse_args()

    dumper = EsriDumper(args.url)

    if not args.jsonlines:
        args.outfile.write('{"type":"FeatureCollection","features":[\n')

    for feature in dumper.iter():
        args.outfile.write(json.dumps(feature))
        if not args.jsonlines:
            args.outfile.write(',')
        args.outfile.write('\n')

    if not args.jsonlines:
        # args.outfile.seek(-2)
        args.outfile.write(']}')


if __name__ == '__main__':
    main()
<commit_after>import argparse
import simplejson as json
from esridump.dumper import EsriDumper
def main():
    """Dump an Esri feature service to GeoJSON or JSON-lines."""
    parser = argparse.ArgumentParser()
    parser.add_argument("url")
    parser.add_argument("outfile", type=argparse.FileType('w'))
    parser.add_argument("--jsonlines", action='store_true', default=False)
    args = parser.parse_args()

    dumper = EsriDumper(args.url)

    if args.jsonlines:
        # One JSON object per line; no wrapping structure needed.
        for feature in dumper.iter():
            args.outfile.write(json.dumps(feature))
            args.outfile.write('\n')
    else:
        # FeatureCollection: write the separator *before* each subsequent
        # feature so no trailing comma is emitted after the last one.
        args.outfile.write('{"type":"FeatureCollection","features":[\n')
        feature_iter = dumper.iter()
        try:
            # NOTE(review): .next() is Python 2 only; under Python 3 this
            # must be next(feature_iter).
            feature = feature_iter.next()
            while True:
                args.outfile.write(json.dumps(feature))
                feature = feature_iter.next()
                args.outfile.write(',\n')
        except StopIteration:
            args.outfile.write('\n')
        args.outfile.write(']}')


if __name__ == '__main__':
    main()
|
<commit_msg>Use batches instead of raw sql for long migration
<commit_before>from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('osf', '0083_add_file_fields_for_target'),
]
operations = [
migrations.RunSQL(
["""
UPDATE osf_basefilenode
SET target_object_id = node_id;
UPDATE osf_basefilenode
SET target_content_type_id = (SELECT id FROM django_content_type WHERE app_label = 'osf' AND model = 'abstractnode');
"""], ["""
UPDATE osf_basefilenode
SET node_id = target_object_id;
"""]
),
]
<commit_after>from __future__ import unicode_literals
from django.db import migrations, models, connection
from django.contrib.contenttypes.models import ContentType
def set_basefilenode_target(apps, schema_editor):
    """Point every BaseFileNode's generic FK at its node, in pk-range batches.

    Batching avoids holding one long transaction/lock over the whole
    (potentially huge) basefilenode table.
    """
    BaseFileNode = apps.get_model('osf', 'basefilenode')
    AbstractNode = apps.get_model('osf', 'abstractnode')
    # NOTE(review): ContentType.objects here is the *live* model, not the
    # historical one from `apps` -- confirm that is intended in a migration.
    target_content_type_id = ContentType.objects.get_for_model(AbstractNode).id

    BATCHSIZE = 5000

    max_pk = BaseFileNode.objects.aggregate(models.Max('pk'))['pk__max']
    if max_pk is not None:
        for offset in range(0, max_pk + 1, BATCHSIZE):
            # Only touch rows not yet migrated so reruns are cheap.
            (BaseFileNode.objects
             .filter(pk__gte=offset)
             .filter(pk__lt=offset + BATCHSIZE)
             .filter(target_object_id__isnull=True)
             .filter(target_content_type_id__isnull=True)
             .update(
                 target_content_type_id=target_content_type_id,
                 target_object_id=models.F('node_id')
             )
             )


def reset_basefilenode_target_to_node(*args, **kwargs):
    # Reverse migration: copy the generic target id back into node_id.
    sql = "UPDATE osf_basefilenode SET node_id = target_object_id;"
    with connection.cursor() as cursor:
        cursor.execute(sql)


class Migration(migrations.Migration):
    # Avoid locking basefilenode
    atomic = False

    dependencies = [
        ('osf', '0083_add_file_fields_for_target'),
    ]

    operations = [
        migrations.RunPython(set_basefilenode_target, reset_basefilenode_target_to_node),
    ]
|
<commit_msg>Fix commit_on_http_success when an exception is raised
<commit_before>from django.db.transaction import is_dirty, leave_transaction_management, rollback, commit, enter_transaction_management, managed
from django.db import DEFAULT_DB_ALIAS
from django.http import HttpResponse
def commit_on_http_success(func, using=None):
    """
    Decorator that commits the DB transaction on an HTTP success response.

    If the view function returns a success response (2xx/3xx HttpResponse),
    pending changes are committed; if it raises or returns an error
    response, they are rolled back.
    """
    if using is None:
        using = DEFAULT_DB_ALIAS

    def wrapped_func(*args, **kwargs):
        # Open an explicit transaction scope for the wrapped view.
        enter_transaction_management(using=using)
        managed(True, using=using)
        try:
            res = func(*args, **kwargs)
        except:
            # View raised: discard any pending changes and re-raise.
            if is_dirty(using=using):
                rollback(using=using)
            raise
        else:
            if is_dirty(using=using):
                # Commit only on a 2xx/3xx HttpResponse; otherwise roll back.
                if not isinstance(res, HttpResponse) or res.status_code < 200 or res.status_code >= 400:
                    rollback(using=using)
                else:
                    try:
                        commit(using=using)
                    except:
                        rollback(using=using)
                        raise
        finally:
            # Bug fix: always unwind transaction management. Previously a
            # raising view skipped this call and left the transaction
            # management stack unbalanced.
            leave_transaction_management(using=using)
        return res

    return wrapped_func
<commit_after>from django.db.transaction import is_dirty, leave_transaction_management, rollback, commit, enter_transaction_management, managed
from django.db import DEFAULT_DB_ALIAS
from django.http import HttpResponse
def commit_on_http_success(func, using=None):
    """
    Decorator that commits the DB transaction on an HTTP success response.

    If the view function returns a success response (2xx/3xx HttpResponse),
    pending changes are committed; if it raises or returns an error
    response, they are rolled back.
    """
    if using is None:
        using = DEFAULT_DB_ALIAS

    def wrapped_func(*args, **kwargs):
        # Open an explicit transaction scope for the wrapped view.
        enter_transaction_management(using=using)
        managed(True, using=using)
        try:
            res = func(*args, **kwargs)
        except:
            # View raised: discard any pending changes and re-raise.
            if is_dirty(using=using):
                rollback(using=using)
            raise
        else:
            if is_dirty(using=using):
                # Commit only on a 2xx/3xx HttpResponse; otherwise roll back.
                if not isinstance(res, HttpResponse) or res.status_code < 200 or res.status_code >= 400:
                    rollback(using=using)
                else:
                    try:
                        commit(using=using)
                    except:
                        rollback(using=using)
                        raise
        finally:
            # Always unwind transaction management, even when the view raised.
            leave_transaction_management(using=using)
        return res

    return wrapped_func
|
<commit_msg>Make compatible with direct use by pymongo.
I.e. for direct passing to collection.insert()
<commit_before>
from collections import OrderedDict as odict, MutableMapping
from itertools import chain
from marrow.schema import Container, Attribute
class Document(Container):
    # Foreign (BSON) type name this container maps to -- presumably the
    # MongoDB $type name; TODO confirm against the schema utilities.
    __foreign__ = 'object'
<commit_after>
from collections import OrderedDict as odict, MutableMapping
from itertools import chain
from marrow.schema import Container, Attribute
from .util import py2, SENTINEL, adjust_attribute_sequence
class Document(Container):
    """Schema container that also implements the mutable-mapping protocol.

    Every mapping method delegates to the backing ``__data__`` store so a
    Document can be handed directly to pymongo (e.g. collection.insert()).
    """

    # Foreign (BSON) type name this container maps to -- presumably the
    # MongoDB $type name; TODO confirm against the schema utilities.
    __foreign__ = 'object'

    # Mapping Protocol

    def __getitem__(self, name):
        return self.__data__[name]

    def __setitem__(self, name, value):
        self.__data__[name] = value

    def __delitem__(self, name):
        del self.__data__[name]

    def __iter__(self):
        # Iterate keys, matching dict semantics.
        return iter(self.__data__.keys())

    def __len__(self):
        return len(self.__data__)

    # On Python 2 expose the lazy iter* variants; on 3 the plain methods
    # already return views.
    if py2:
        def keys(self):
            return self.__data__.iterkeys()

        def items(self):
            return self.__data__.iteritems()

        def values(self):
            return self.__data__.itervalues()

    else:
        def keys(self):
            return self.__data__.keys()

        def items(self):
            return self.__data__.items()

        def values(self):
            return self.__data__.values()

    def __contains__(self, key):
        return key in self.__data__

    def __eq__(self, other):
        # Compare by underlying data, so Document == plain dict works.
        return self.__data__ == other

    def __ne__(self, other):
        return self.__data__ != other

    def get(self, key, default=None):
        return self.__data__.get(key, default)

    def clear(self):
        self.__data__.clear()

    def pop(self, name, default=SENTINEL):
        # SENTINEL distinguishes "no default given" from default=None.
        if default is SENTINEL:
            return self.__data__.pop(name)

        return self.__data__.pop(name, default)

    def popitem(self):
        return self.__data__.popitem()

    def update(self, *args, **kw):
        self.__data__.update(*args, **kw)

    def setdefault(self, key, value=None):
        return self.__data__.setdefault(key, value)


MutableMapping.register(Document)  # Metaclass conflict if we subclass.
|
<commit_msg>ADD new exception -> EnvironmentNotFound!
<commit_before>"""Specific PyGrid exceptions."""
class PyGridError(Exception):
    """Root of the PyGrid exception hierarchy."""

    def __init__(self, message):
        super().__init__(message)


class AuthorizationError(PyGridError):
    """User attempted an operation they are not allowed to perform."""

    def __init__(self, message=""):
        super().__init__(message or "User is not authorized for this operation!")


class RoleNotFoundError(PyGridError):
    """A role lookup by ID failed."""

    def __init__(self):
        super().__init__("Role ID not found!")


class UserNotFoundError(PyGridError):
    """A user lookup failed."""

    def __init__(self):
        super().__init__("User not found!")


class GroupNotFoundError(PyGridError):
    """A group lookup by ID failed."""

    def __init__(self):
        super().__init__("Group ID not found!")


class InvalidRequestKeyError(PyGridError):
    """The request key supplied by the client is invalid."""

    def __init__(self):
        super().__init__("Invalid request key!")


class InvalidCredentialsError(PyGridError):
    """Login credentials did not match."""

    def __init__(self):
        super().__init__("Invalid credentials!")


class MissingRequestKeyError(PyGridError):
    """The request is missing a required key."""

    def __init__(self, message=""):
        super().__init__(message or "Missing request key!")
<commit_after>"""Specific PyGrid exceptions."""
class PyGridError(Exception):
    """Root of the PyGrid exception hierarchy."""

    def __init__(self, message):
        super().__init__(message)


class AuthorizationError(PyGridError):
    """User attempted an operation they are not allowed to perform."""

    def __init__(self, message=""):
        super().__init__(message or "User is not authorized for this operation!")


class RoleNotFoundError(PyGridError):
    """A role lookup by ID failed."""

    def __init__(self):
        super().__init__("Role ID not found!")


class UserNotFoundError(PyGridError):
    """A user lookup failed."""

    def __init__(self):
        super().__init__("User not found!")


class EnvironmentNotFoundError(PyGridError):
    """An environment lookup failed."""

    def __init__(self):
        super().__init__("Environment not found!")


class GroupNotFoundError(PyGridError):
    """A group lookup by ID failed."""

    def __init__(self):
        super().__init__("Group ID not found!")


class InvalidRequestKeyError(PyGridError):
    """The request key supplied by the client is invalid."""

    def __init__(self):
        super().__init__("Invalid request key!")


class InvalidCredentialsError(PyGridError):
    """Login credentials did not match."""

    def __init__(self):
        super().__init__("Invalid credentials!")


class MissingRequestKeyError(PyGridError):
    """The request is missing a required key."""

    def __init__(self, message=""):
        super().__init__(message or "Missing request key!")
|
<commit_msg>Add response_ok and response_error methods which return byte strings.
<commit_before>"""Echo server in socket connection: receives and sends back a message."""
import socket
if __name__ == '__main__':
    """Run from terminal, this will recieve a messages and send them back."""
    # Blocking TCP echo server bound to localhost:50000.
    server_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM,
                                  socket.IPPROTO_IP)
    server_socket.bind(('127.0.0.1', 50000))
    server_socket.listen(1)
    buffsize = 32
    try:
        # Serve forever: one connection at a time, echoing what arrives.
        while True:
            msg = ''
            done = False
            conn, addr = server_socket.accept()
            while not done:
                msg_part = conn.recv(buffsize)
                msg += msg_part
                # A short read is treated as end of message.
                # NOTE(review): a message whose length is an exact multiple
                # of buffsize will block here waiting for more data.
                if len(msg_part) < buffsize:
                    done = True
            conn.sendall(msg)
            conn.shutdown(socket.SHUT_WR)
            conn.close()
    except KeyboardInterrupt:
        print 'I successfully stopped.'
        server_socket.close()
<commit_after>"""Echo server in socket connection: receives and sends back a message."""
import socket
def response_ok():
    """Return a canned HTTP 200 response as a UTF-8 byte string."""
    pieces = (
        u"HTTP/1.1 200 OK\n",
        u"Content-Type: text/plain\n",
        u"Content-length: 18\n",
        u"\r\n",
        u"everything is okay",
    )
    return u"".join(pieces).encode('utf-8')
def reponse_error(error_code, reason):
    """Return an HTTP status line for the given code/reason as UTF-8 bytes."""
    status_line = u"HTTP/1.1 %s %s" % (error_code, reason)
    return status_line.encode('utf-8')
if __name__ == '__main__':
    """Run from terminal, this will recieve a messages and send them back."""
    # Blocking TCP echo server bound to localhost:50000.
    server_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM,
                                  socket.IPPROTO_IP)
    server_socket.bind(('127.0.0.1', 50000))
    server_socket.listen(1)
    buffsize = 32
    try:
        # Serve forever: one connection at a time, echoing what arrives.
        while True:
            msg = ''
            done = False
            conn, addr = server_socket.accept()
            while not done:
                msg_part = conn.recv(buffsize)
                msg += msg_part
                # A short read is treated as end of message.
                # NOTE(review): a message whose length is an exact multiple
                # of buffsize will block here waiting for more data.
                if len(msg_part) < buffsize:
                    done = True
            conn.sendall(msg)
            conn.shutdown(socket.SHUT_WR)
            conn.close()
    except KeyboardInterrupt:
        print 'I successfully stopped.'
        server_socket.close()
|
<commit_msg>Add depots into Import and increase mime version to 2.2
<commit_before>package fi.cosky.sdk;
/*
* This file is subject to the terms and conditions defined in
* file 'LICENSE.txt', which is part of this source code package.
*/
import java.util.List;
/**
 * Import result resource: schema version, error summary/state, and the
 * per-vehicle / per-task validation errors reported by the service.
 */
public class ImportData extends BaseData {
    /** MIME type identifying this resource representation. */
    public static final String MimeType = "application/vnd.jyu.nfleet.import";
    /** MIME version this SDK class matches. */
    public static final double MimeVersion = 2.1;

    private int VersionNumber;
    private int ErrorCount;
    private String State;
    private List<VehicleError> Vehicles;
    private List<TaskError> Tasks;

    public int getVersionNumber() {
        return VersionNumber;
    }

    public void setVersionNumber(int versionNumber) {
        VersionNumber = versionNumber;
    }

    public int getErrorCount() {
        return ErrorCount;
    }

    public void setErrorCount(int errorCount) {
        ErrorCount = errorCount;
    }

    public String getState() {
        return State;
    }

    public void setState(String state) {
        State = state;
    }

    public List<VehicleError> getVehicles() {
        return Vehicles;
    }

    public void setVehicles(List<VehicleError> vehicles) {
        Vehicles = vehicles;
    }

    public List<TaskError> getTasks() {
        return Tasks;
    }

    public void setTasks(List<TaskError> tasks) {
        Tasks = tasks;
    }
}
<commit_after>package fi.cosky.sdk;
/*
* This file is subject to the terms and conditions defined in
* file 'LICENSE.txt', which is part of this source code package.
*/
import java.util.List;
/**
 * Import result resource: schema version, error summary/state, and the
 * per-vehicle / per-task / per-depot validation errors reported by the
 * service.
 */
public class ImportData extends BaseData {
    /** MIME type identifying this resource representation. */
    public static final String MimeType = "application/vnd.jyu.nfleet.import";
    /** MIME version this SDK class matches (2.2 adds depot errors). */
    public static final double MimeVersion = 2.2;

    private int VersionNumber;
    private int ErrorCount;
    private String State;
    private List<VehicleError> Vehicles;
    private List<TaskError> Tasks;
    private List<DepotError> Depots;

    public int getVersionNumber() {
        return VersionNumber;
    }

    public void setVersionNumber(int versionNumber) {
        VersionNumber = versionNumber;
    }

    public int getErrorCount() {
        return ErrorCount;
    }

    public void setErrorCount(int errorCount) {
        ErrorCount = errorCount;
    }

    public String getState() {
        return State;
    }

    public void setState(String state) {
        State = state;
    }

    public List<VehicleError> getVehicles() {
        return Vehicles;
    }

    public void setVehicles(List<VehicleError> vehicles) {
        Vehicles = vehicles;
    }

    public List<TaskError> getTasks() {
        return Tasks;
    }

    public void setTasks(List<TaskError> tasks) {
        Tasks = tasks;
    }

    public List<DepotError> getDepots() { return Depots; }

    public void setDepots(List<DepotError> depots) { Depots = depots; }
}
|