content
stringlengths 128
6.74k
|
---|
<commit_msg>Add all versions of conversion doctests
<commit_before>import pytest
def pytest_collection_modifyitems(config, items):
try:
import pandas
except ImportError:
pandas = None
try:
import cesium
except ImportError:
cesium = None
if pandas is None:
skip_marker = pytest.mark.skip(reason="pandas not installed!")
for item in items:
if item.name in [
"tslearn.utils.from_tsfresh_dataset",
"tslearn.utils.from_sktime_dataset",
"tslearn.utils.from_pyflux_dataset",
"tslearn.utils.to_cesium_dataset",
"tslearn.utils.from_cesium_dataset",
]:
item.add_marker(skip_marker)
elif cesium is None:
skip_marker = pytest.mark.skip(reason="cesium not installed!")
for item in items:
if item.name in [
"tslearn.utils.to_cesium_dataset",
"tslearn.utils.from_cesium_dataset",
]:
item.add_marker(skip_marker)
<commit_after>import pytest
def pytest_collection_modifyitems(config, items):
try:
import pandas
except ImportError:
pandas = None
try:
import cesium
except ImportError:
cesium = None
if pandas is None:
skip_marker = pytest.mark.skip(reason="pandas not installed!")
for item in items:
if item.name in [
"tslearn.utils.from_tsfresh_dataset",
"tslearn.utils.to_tsfresh_dataset",
"tslearn.utils.from_sktime_dataset",
"tslearn.utils.to_sktime_dataset",
"tslearn.utils.from_pyflux_dataset",
"tslearn.utils.to_pyflux_dataset",
"tslearn.utils.from_cesium_dataset",
"tslearn.utils.to_cesium_dataset",
]:
item.add_marker(skip_marker)
elif cesium is None:
skip_marker = pytest.mark.skip(reason="cesium not installed!")
for item in items:
if item.name in [
"tslearn.utils.to_cesium_dataset",
"tslearn.utils.from_cesium_dataset",
]:
item.add_marker(skip_marker)
|
<commit_msg>Optimize filter condition to fail earlier
<commit_before>export function getRows<T>(grid: T[]): T[][] {
const size = Math.sqrt(grid.length);
const copy = grid.concat([]);
return getArray(size).map(() => copy.splice(0, size));
}
export function getColumns<T>(grid: T[]): T[][] {
return getRows(transpose(grid));
}
export function getDiagonals<T>(grid: T[]): T[][] {
const size = Math.sqrt(grid.length);
const lesser = size - 1;
const last = grid.length - 1;
return [
grid.filter((x, i) => Math.floor(i / size) === i % size),
grid.filter((x, i) => i > 0 && i % lesser === 0 && i !== last)
];
}
function getArray(length: number): number[] {
return Array.apply(null, { length }).map(Number.call, Number);
}
export function transpose<T>(grid: Array<T>): Array<T> {
const size = Math.sqrt(grid.length);
return grid.map((x, i) => grid[Math.floor(i / size) + ((i % size) * size)]);
}
<commit_after>export function getRows<T>(grid: T[]): T[][] {
const size = Math.sqrt(grid.length);
const copy = grid.concat([]);
return getArray(size).map(() => copy.splice(0, size));
}
export function getColumns<T>(grid: T[]): T[][] {
return getRows(transpose(grid));
}
export function getDiagonals<T>(grid: T[]): T[][] {
const size = Math.sqrt(grid.length);
const lesser = size - 1;
const last = grid.length - 1;
return [
grid.filter((x, i) => Math.floor(i / size) === i % size),
grid.filter((x, i) => i > 0 && i < last && i % lesser === 0)
];
}
function getArray(length: number): number[] {
return Array.apply(null, { length }).map(Number.call, Number);
}
export function transpose<T>(grid: Array<T>): Array<T> {
const size = Math.sqrt(grid.length);
return grid.map((x, i) => grid[Math.floor(i / size) + ((i % size) * size)]);
}
|
<commit_msg>Extend game time by static methods to get parts of a time format
Returns integer values for every time division like hours or minutes with the intention to be used for a "timer" like application
<commit_before>package de.gurkenlabs.litiengine;
public class GameTime {
private final IGameLoop gameLoop;
public GameTime(final IGameLoop loop) {
this.gameLoop = loop;
}
public long getDays() {
return this.getMilliseconds() / 1000 / 60 / 60 / 24 % 365;
}
public long getHours() {
return this.getMilliseconds() / 1000 / 60 / 60 % 24;
}
public long getMilliseconds() {
return this.gameLoop.convertToMs(this.gameLoop.getTicks());
}
public long getMinutes() {
return this.getMilliseconds() / 1000 / 60 % 60;
}
public long getSeconds() {
return this.getMilliseconds() / 1000 % 60;
}
public long getYears() {
return this.getMilliseconds() / 1000 / 60 / 60 / 24 / 365;
}
}
<commit_after>package de.gurkenlabs.litiengine;
public class GameTime {
private final IGameLoop gameLoop;
public GameTime(final IGameLoop loop) {
this.gameLoop = loop;
}
public long getDays() {
return getDays(this.getMilliseconds());
}
public long getHours() {
return getHours(this.getMilliseconds());
}
public long getMilliseconds() {
return this.gameLoop.convertToMs(this.gameLoop.getTicks());
}
public long getMinutes() {
return getMinutes(this.getMilliseconds());
}
public long getSeconds() {
return getSeconds(this.getMilliseconds());
}
public long getYears() {
return getYears(this.getMilliseconds());
}
public static long getDays(long ms) {
return ms / 1000 / 60 / 60 / 24 % 365;
}
public static long getHours(long ms) {
return ms / 1000 / 60 / 60 % 24;
}
public static long getMinutes(long ms) {
return ms / 1000 / 60 % 60;
}
public static long getSeconds(long ms) {
return ms / 1000 % 60;
}
public static long getMilliSeconds(long ms) {
return ms % 1000;
}
public static long getYears(long ms) {
return ms / 1000 / 60 / 60 / 24 / 365;
}
}
|
<commit_msg>Delete performance benchmarks, they are now hosted as a separate github repository.
<commit_before>package com.zaxxer.hikari;
import java.sql.Statement;
import java.util.ArrayList;
import org.junit.Assert;
import org.junit.Test;
import com.zaxxer.hikari.performance.StubStatement;
import com.zaxxer.hikari.util.FastStatementList;
public class TestFastStatementList
{
@Test
public void testOverflow()
{
ArrayList<Statement> verifyList = new ArrayList<Statement>();
FastStatementList list = new FastStatementList();
for (int i = 0; i < 100; i++)
{
StubStatement statement = new StubStatement();
list.add(statement);
verifyList.add(statement);
}
for (int i = 0; i < 100; i++)
{
Assert.assertNotNull("Element " + i, list.get(i));
Assert.assertSame(verifyList.get(i), list.get(i));
}
}
}
<commit_after>package com.zaxxer.hikari;
import java.sql.Statement;
import java.util.ArrayList;
import org.junit.Assert;
import org.junit.Test;
import com.zaxxer.hikari.mocks.StubStatement;
import com.zaxxer.hikari.util.FastStatementList;
public class TestFastStatementList
{
@Test
public void testOverflow()
{
ArrayList<Statement> verifyList = new ArrayList<Statement>();
FastStatementList list = new FastStatementList();
for (int i = 0; i < 100; i++)
{
StubStatement statement = new StubStatement(null);
list.add(statement);
verifyList.add(statement);
}
for (int i = 0; i < 100; i++)
{
Assert.assertNotNull("Element " + i, list.get(i));
Assert.assertSame(verifyList.get(i), list.get(i));
}
}
}
|
<commit_msg>Update metadata with dynamic config types
After settings rework, we decided to make the upgrade plug-ins define their own configuration types. This is basically the definition for these configuration types. Only the get_version function is not yet implemented.
Contributes to issue CURA-844.
<commit_before>
from . import VersionUpgrade21to22
from UM.i18n import i18nCatalog
catalog = i18nCatalog("cura")
def getMetaData():
return {
"plugin": {
"name": catalog.i18nc("@label", "Version Upgrade 2.1 to 2.2"),
"author": "Ultimaker",
"version": "1.0",
"description": catalog.i18nc("@info:whatsthis", "Upgrades configurations from Cura 2.1 to Cura 2.2."),
"api": 2
},
"version_upgrade": {
"profile": {
"from": 1,
"to": 2
},
"machine_instance": {
"from": 1,
"to": 2
}
}
}
def register(app):
return { "version_upgrade": VersionUpgrade21to22.VersionUpgrade21to22() }
<commit_after>
from . import VersionUpgrade21to22
from UM.i18n import i18nCatalog
catalog = i18nCatalog("cura")
def getMetaData():
return {
"plugin": {
"name": catalog.i18nc("@label", "Version Upgrade 2.1 to 2.2"),
"author": "Ultimaker",
"version": "1.0",
"description": catalog.i18nc("@info:whatsthis", "Upgrades configurations from Cura 2.1 to Cura 2.2."),
"api": 2
},
"version_upgrade": {
# From To Upgrade function
("profile", 1): ("instance_container", 2, VersionUpgrade21to22.upgradeProfile),
("machine_instance", 1): ("container_stack", 2, VersionUpgrade21to22.upgradeMachineInstance),
("preferences", 1): ("preferences", 2, VersionUpgrade21to22.upgradePreferences)
},
"sources": {
"profile": {
"get_version": VersionUpgrade21to22.getCfgVersion,
"location": {"./profiles"}
},
"machine_instance": {
"get_version": VersionUpgrade21to22.getCfgVersion,
"location": {"./machine_instances"}
},
"preferences": {
"get_version": VersionUpgrade21to22.getCfgVersion,
"location": {"."}
}
}
}
def register(app):
return { "version_upgrade": VersionUpgrade21to22.VersionUpgrade21to22() }
|
<commit_msg>Remove signature of deleted function
<commit_before>/*
* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/.
*
* Copyright (C) 2017, James R. Barlow (https://github.com/jbarlow83/)
*/
#pragma once
#include "pikepdf.h"
py::object fspath(py::object filename);
FILE *portable_fopen(py::object filename, const char* mode);
void portable_unlink(py::object filename);
<commit_after>/*
* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/.
*
* Copyright (C) 2017, James R. Barlow (https://github.com/jbarlow83/)
*/
#pragma once
#include "pikepdf.h"
py::object fspath(py::object filename);
FILE *portable_fopen(py::object filename, const char* mode);
|
<commit_msg>Update property to fix print ourder.
<commit_before>package poml.conv.basic;
import java.util.Map;
import poml.conv.Converter;
import poml.in.Poml;
import poml.out.Xml;
public class Property implements Converter {
@Override public String name() { return "property"; }
@Override public void convert(Poml poml, Xml xml) {
Map<String, String> kv = poml.conf.map(name(), false);
replaceKey(kv, encoding, encodings);
replaceKey(kv, compiler, compilers);
xml.println(" <properties>");
xml.printKvTags(sp4, kv);
xml.println(" </properties>");
}
private static final String encoding = "$encoding";
private static final String[] encodings = {
"project.build.sourceEncoding", "project.reporting.outputEncoding"
};
private static final String compiler = "$compiler";
private static final String[] compilers = {
"maven.compiler.source", "maven.compiler.target"
};
private void replaceKey(Map<String, String> kv, String from, String[] to) {
if (!kv.containsKey(from)) return;
String v = kv.remove(from);
for (String k: to) kv.put(k, v);
}
}
<commit_after>package poml.conv.basic;
import java.util.Map;
import poml.conv.Converter;
import poml.in.Poml;
import poml.out.Xml;
public class Property implements Converter {
@Override public String name() { return "property"; }
@Override public void convert(Poml poml, Xml xml) {
Map<String, String> kv = poml.conf.map(name(), false);
xml.println(" <properties>");
for (String k: kv.keySet()) {
if (k.startsWith("$")) replace(k, kv.get(k), xml);
else xml.printKvTag(sp4, k, kv.get(k));
}
xml.println(" </properties>");
}
private void replace(String k, String v, Xml xml) {
if ("$encoding".equals(k)) {
xml.printKvTag(sp4, "project.build.sourceEncoding", v);
xml.printKvTag(sp4, "project.reporting.outputEncoding", v);
}
else if ("$compiler".equals(k)) {
xml.printKvTag(sp4, "maven.compiler.source", v);
xml.printKvTag(sp4, "maven.compiler.target", v);
}
}
}
|
<commit_msg>Change class description to be indipendent against the EventStream implementation
<commit_before>package com.suse.saltstack.netapi.event;
/**
* Defines a client notification interface for SSE stream events.
*/
public interface EventListener {
/**
* Notify the listener of a new event stream event. Returned data is a {@link String}
* in JSON format.
* @param event Return a JSON representation of the latest stream event.
*/
void notify(String event);
/**
* Notify the listener that the backing event stream was closed. Listener may
* need to recreate the event stream or take other actions.
*/
void eventStreamClosed();
}
<commit_after>package com.suse.saltstack.netapi.event;
/**
* Defines a client notification interface for events stream.
*/
public interface EventListener {
/**
* Notify the listener of a new event stream event. Returned data is a {@link String}
* in JSON format.
* @param event Return a JSON representation of the latest stream event.
*/
void notify(String event);
/**
* Notify the listener that the backing event stream was closed. Listener may
* need to recreate the event stream or take other actions.
*/
void eventStreamClosed();
}
|
<commit_msg>Add constructors and toString method to make unit testing easier
<commit_before>package org.opennms.netmgt.model;
public class OnmsMonitoringLocationDefinition {
private String m_area;
private String m_name;
private String m_pollingPackageName;
public String getArea() {
return m_area;
}
public void setArea(String area) {
m_area = area;
}
public String getName() {
return m_name;
}
public void setName(String name) {
m_name = name;
}
public String getPollingPackageName() {
return m_pollingPackageName;
}
public void setPollingPackageName(String pollingPackageName) {
m_pollingPackageName = pollingPackageName;
}
}
<commit_after>package org.opennms.netmgt.model;
public class OnmsMonitoringLocationDefinition {
private String m_area;
private String m_name;
private String m_pollingPackageName;
public OnmsMonitoringLocationDefinition() {
}
public OnmsMonitoringLocationDefinition(String name, String pollingPackageName) {
m_name = name;
m_pollingPackageName = pollingPackageName;
}
public OnmsMonitoringLocationDefinition(String name, String pollingPackageName, String area) {
m_name = name;
m_pollingPackageName = pollingPackageName;
m_area = area;
}
public String getArea() {
return m_area;
}
public void setArea(String area) {
m_area = area;
}
public String getName() {
return m_name;
}
public void setName(String name) {
m_name = name;
}
public String getPollingPackageName() {
return m_pollingPackageName;
}
public void setPollingPackageName(String pollingPackageName) {
m_pollingPackageName = pollingPackageName;
}
@Override
public String toString() {
return "OnmsMonitoringLocationDefinition@" + Integer.toHexString(hashCode()) + ": Name \"" + m_name + "\", polling package name \"" + m_pollingPackageName + "\", area \"" + m_area + "\"";
}
}
|
<commit_msg>Add redirection from '**' to '' (root).
<commit_before>import { Routes } from '@angular/router';
import { NoContentComponent } from './no-content';
import {ProjectComponent} from "./cardsView/project/project.component";
import {RootComponent} from "./cardsView/root/root.component";
export const ROUTES: Routes = [
{ path: '', component: RootComponent },
{ path: 'project/:key', component: ProjectComponent},
{ path: '**', component: NoContentComponent }
];
<commit_after>import { Routes } from '@angular/router';
import { NoContentComponent } from './no-content';
import {ProjectComponent} from "./cardsView/project/project.component";
import {RootComponent} from "./cardsView/root/root.component";
export const ROUTES: Routes = [
{ path: '', component: RootComponent },
{ path: 'project/:key', component: ProjectComponent},
{ path: '**', redirectTo: '' }
];
|
<commit_msg>Make Trunks have opposite directions in the included lanes
<commit_before>
from road import Road
class Trunk(Road):
def __init__(self, name=None):
super(Trunk, self).__init__(name)
self.add_lane(2)
self.add_lane(-2)
def accept(self, generator):
generator.start_trunk(self)
for lane in self.lanes():
lane.accept(generator)
generator.end_trunk(self)
<commit_after>
from road import Road
class Trunk(Road):
def __init__(self, name=None):
super(Trunk, self).__init__(name)
self.add_lane(2)
self.add_lane(-2, reversed=True)
def accept(self, generator):
generator.start_trunk(self)
for lane in self.lanes():
lane.accept(generator)
generator.end_trunk(self)
|
<commit_msg>Allow Questions to be copied
<commit_before>from __future__ import unicode_literals
from django.contrib import admin
from .models import Question, Choice, Answer
class ChoiceInline(admin.TabularInline):
model = Choice
class QuestionAdmin(admin.ModelAdmin):
inlines = [
ChoiceInline,
]
admin.site.register(Question, QuestionAdmin)
admin.site.register(Answer)
<commit_after>from __future__ import unicode_literals
from django.contrib import admin
from .models import Question, Choice, Answer
class ChoiceInline(admin.TabularInline):
model = Choice
def copy_question(modeladmin, request, queryset):
for orig in queryset:
q = Question(question_text="Kopie van "+orig.question_text)
q.save()
for orig_choice in orig.choice_set.all():
c = Choice(question=q, choice_text=orig_choice.choice_text)
c.save()
copy_question.short_description = "Kopieer stemmingen"
class QuestionAdmin(admin.ModelAdmin):
inlines = [
ChoiceInline,
]
actions = [copy_question]
admin.site.register(Question, QuestionAdmin)
admin.site.register(Answer)
|
<commit_msg>Add CPU affinity to gcc build
<commit_before>
void testRDTSC ( void )
{
int64_t temp = rdtsc();
printf("%d",(int)temp);
}
#if defined(_MSC_VER)
#include <windows.h>
void SetAffinity ( int cpu )
{
SetProcessAffinityMask(GetCurrentProcess(),cpu);
SetThreadPriority(GetCurrentThread(), THREAD_PRIORITY_HIGHEST);
}
#else
void SetAffinity ( int /*cpu*/ )
{
}
#endif
<commit_after>
void testRDTSC ( void )
{
int64_t temp = rdtsc();
printf("%d",(int)temp);
}
#if defined(_MSC_VER)
#include <windows.h>
void SetAffinity ( int cpu )
{
SetProcessAffinityMask(GetCurrentProcess(),cpu);
SetThreadPriority(GetCurrentThread(), THREAD_PRIORITY_HIGHEST);
}
#else
#include <sched.h>
void SetAffinity ( int /*cpu*/ )
{
cpu_set_t mask;
CPU_ZERO(&mask);
CPU_SET(2,&mask);
if( sched_setaffinity(0,sizeof(mask),&mask) == -1)
{
printf("WARNING: Could not set CPU affinity\n");
}
}
#endif
|
<commit_msg>Remove unused select_related, defer the loading of shape for speed
<commit_before>from __future__ import absolute_import, unicode_literals
from django.views.generic import DetailView, ListView
from .models import District
class DistrictListView(ListView):
queryset = District.objects.all().select_related('county__name')
class DistrictDetailView(DetailView):
queryset = District.objects.all().prefetch_related('stats__year')
pk_url_kwarg = 'district_id'
slug_url_kwarg = 'district_slug'
<commit_after>from __future__ import absolute_import, unicode_literals
from django.views.generic import DetailView, ListView
from .models import District
class DistrictListView(ListView):
queryset = District.objects.all().defer('shape')
class DistrictDetailView(DetailView):
queryset = District.objects.all().prefetch_related('stats__year')
pk_url_kwarg = 'district_id'
slug_url_kwarg = 'district_slug'
|
<commit_msg>Fix DemoAuthenicator to require username and password not be null. (eclipse)
<commit_before>package axiom.delauth.token;
public class DemoAuthenticator implements Authenticator {
public boolean authenticate(String username, String password) {
return password.equals("123456");
}
}
<commit_after>package axiom.delauth.token;
public class DemoAuthenticator implements Authenticator {
public boolean authenticate(String username, String password) {
return !(null == username || null == password) && password.equals("123456");
}
}
|
<commit_msg>Add graphviz file output argument
<commit_before>import ast
import click
from parsing.parser import FileVisitor
@click.command()
@click.argument('code', type=click.File('rb'))
@click.option('--printed', default=False, is_flag=True, help='Pretty prints the call tree for each class in the file')
@click.option('--remove-builtins', default=False, is_flag=True, help='Removes builtin functions from call trees')
def cli(code, printed, remove_builtins):
"""
Parses a file.
codegrapher [file_name]
"""
parsed_code = ast.parse(code.read(), filename='code.py')
visitor = FileVisitor()
visitor.visit(parsed_code)
if printed:
click.echo('Classes in file:')
for class_object in visitor.classes:
if remove_builtins:
class_object.remove_builtins()
click.echo('=' * 80)
click.echo(class_object.name)
click.echo(class_object.pprint())
click.echo('')
<commit_after>import ast
import click
from graphing.graph import FunctionGrapher
from parsing.parser import FileVisitor
@click.command()
@click.argument('code', type=click.File('rb'))
@click.option('--printed', default=False, is_flag=True, help='Pretty prints the call tree for each class in the file')
@click.option('--remove-builtins', default=False, is_flag=True, help='Removes builtin functions from call trees')
@click.option('--output', help='Graphviz output file name')
def cli(code, printed, remove_builtins, output):
"""
Parses a file.
codegrapher [file_name]
"""
parsed_code = ast.parse(code.read(), filename='code.py')
visitor = FileVisitor()
visitor.visit(parsed_code)
if printed:
click.echo('Classes in file:')
for class_object in visitor.classes:
if remove_builtins:
class_object.remove_builtins()
click.echo('=' * 80)
click.echo(class_object.name)
click.echo(class_object.pprint())
click.echo('')
if output:
graph = FunctionGrapher()
class_names = set(cls.name for cls in visitor.classes)
for cls in visitor.classes:
graph.add_dict_to_graph(class_names, cls.call_tree)
graph.add_classes_to_graph(visitor.classes)
graph.name = output
graph.render()
|
<commit_msg>Add search filtering for name and booleans in resource API.
<commit_before>from django.shortcuts import render
from .models import Resource
from .serializers import ResourceSerializer
from rest_framework import generics, permissions
class ResourceList(generics.ListCreateAPIView):
queryset = Resource.objects.all()
serializer_class = ResourceSerializer
permission_classes = (permissions.IsAuthenticatedOrReadOnly,)
class ResourceDetail(generics.RetrieveUpdateDestroyAPIView):
queryset = Resource.objects.all()
serializer_class = ResourceSerializer
permission_classes = (permissions.IsAuthenticatedOrReadOnly,)
<commit_after>from django.shortcuts import render
from .models import Resource
from .serializers import ResourceSerializer
from rest_framework import generics, permissions
import django_filters
class ResourceFilter(django_filters.FilterSet):
class Meta:
model = Resource
fields = ('name', 'featured', 'accessible', 'responsive_web',)
class ResourceList(generics.ListCreateAPIView):
queryset = Resource.objects.all()
serializer_class = ResourceSerializer
permission_classes = (permissions.IsAuthenticatedOrReadOnly,)
filter_class = ResourceFilter
class ResourceDetail(generics.RetrieveUpdateDestroyAPIView):
queryset = Resource.objects.all()
serializer_class = ResourceSerializer
permission_classes = (permissions.IsAuthenticatedOrReadOnly,)
|
<commit_msg>Tweak pressure plate inventory rendering.
<commit_before>package carpentersblocks.renderer;
import net.minecraft.block.Block;
import net.minecraft.client.renderer.RenderBlocks;
import cpw.mods.fml.relauncher.Side;
import cpw.mods.fml.relauncher.SideOnly;
@SideOnly(Side.CLIENT)
public class BlockHandlerCarpentersPressurePlate extends BlockHandlerBase {
@Override
public void renderInventoryBlock(Block block, int metadata, int modelID, RenderBlocks renderBlocks)
{
renderBlocks.setRenderBounds(0.0F, 0.4375F, 0.0F, 1.0F, 0.5625F, 1.0F);
super.renderInventoryBlock(block, metadata, modelID, renderBlocks);
}
}
<commit_after>package carpentersblocks.renderer;
import net.minecraft.block.Block;
import net.minecraft.client.renderer.RenderBlocks;
import net.minecraft.client.renderer.Tessellator;
import cpw.mods.fml.relauncher.Side;
import cpw.mods.fml.relauncher.SideOnly;
@SideOnly(Side.CLIENT)
public class BlockHandlerCarpentersPressurePlate extends BlockHandlerBase {
@Override
public void renderInventoryBlock(Block block, int metadata, int modelID, RenderBlocks renderBlocks)
{
renderBlocks.setRenderBounds(0.0F, 0.0F, 0.0F, 1.0F, 0.125F, 1.0F);
Tessellator.instance.addTranslation(0.0F, 0.4365F, 0.0F);
super.renderInventoryBlock(block, metadata, modelID, renderBlocks);
Tessellator.instance.addTranslation(0.0F, -0.4365F, 0.0F);
}
}
|
<commit_msg>Read config from ENV instead of crashing
<commit_before>package main
import (
"fmt"
"log"
"net/http"
"os"
"github.com/ayoisaiah/stellar-photos-server/routes"
"github.com/joho/godotenv"
"github.com/rs/cors"
)
func main() {
err := godotenv.Load()
if err != nil {
log.Fatal("Error loading .env file")
}
port := fmt.Sprintf(":%v", os.Getenv("PORT"))
mux := routes.NewRouter()
handler := cors.Default().Handler(mux)
log.Fatal(http.ListenAndServe(port, handler))
}
<commit_after>package main
import (
"fmt"
"log"
"net/http"
"os"
"github.com/ayoisaiah/stellar-photos-server/routes"
"github.com/joho/godotenv"
"github.com/rs/cors"
)
func main() {
err := godotenv.Load()
if err != nil {
log.Println("File .env not found, reading configuration from ENV")
}
port := fmt.Sprintf(":%v", os.Getenv("PORT"))
mux := routes.NewRouter()
handler := cors.Default().Handler(mux)
log.Fatal(http.ListenAndServe(port, handler))
}
|
<commit_msg>Support collection statistics in data usage tests
<commit_before>from resdk.tests.functional.base import BaseResdkFunctionalTest
class TestDataUsage(BaseResdkFunctionalTest):
expected_fields = {
'user_id',
'username',
'full_name',
'data_size',
'data_size_normalized',
'data_count',
'data_count_normalized',
'sample_count',
'sample_count_normalized',
}
def test_normal_user(self):
usage_info = self.user_res.data_usage()
self.assertEqual(len(usage_info), 1)
self.assertEqual(set(usage_info[0].keys()), self.expected_fields)
def test_admin_user(self):
usage_info = self.res.data_usage()
self.assertGreaterEqual(len(usage_info), 2)
self.assertEqual(set(usage_info[0].keys()), self.expected_fields)
def test_ordering(self):
usage_info = self.res.data_usage(ordering=['full_name', '-data_size'])
self.assertGreaterEqual(len(usage_info), 2)
first = usage_info[0]
second = usage_info[1]
self.assertEqual(first['full_name'], second['full_name'])
self.assertGreaterEqual(first['data_size'], second['data_size'])
<commit_after>from resdk.tests.functional.base import BaseResdkFunctionalTest
class TestDataUsage(BaseResdkFunctionalTest):
expected_fields = {
'user_id',
'username',
'full_name',
'data_size',
'data_size_normalized',
'data_count',
'data_count_normalized',
'collection_count',
'collection_count_normalized',
'sample_count',
'sample_count_normalized',
}
def test_normal_user(self):
usage_info = self.user_res.data_usage()
self.assertEqual(len(usage_info), 1)
self.assertEqual(set(usage_info[0].keys()), self.expected_fields)
def test_admin_user(self):
usage_info = self.res.data_usage()
self.assertGreaterEqual(len(usage_info), 2)
self.assertEqual(set(usage_info[0].keys()), self.expected_fields)
def test_ordering(self):
usage_info = self.res.data_usage(ordering=['full_name', '-data_size'])
self.assertGreaterEqual(len(usage_info), 2)
first = usage_info[0]
second = usage_info[1]
self.assertEqual(first['full_name'], second['full_name'])
self.assertGreaterEqual(first['data_size'], second['data_size'])
|
<commit_msg>Simplify random, so it can be port as easily as possible.<commit_before>package org.chabu;
public
class Random {
private long seed;
private static final long multiplier = 0x5_DEEC_E66DL;
private static final long addend = 0x0BL;
private static final long mask = (1L << 48) - 1;
public Random(long seed) {
this.seed = initialScramble(seed);
}
private static long initialScramble(long seed) {
return (seed ^ multiplier) & mask;
}
protected int next(int bits) {
this.seed = (this.seed * multiplier + addend) & mask;
return (int)(this.seed >>> (48 - bits));
}
public void nextBytes(byte[] bytes, int offset, int length ) {
for (int i = 0; i < length; ) {
bytes[offset+i++] = (byte)nextInt();
}
}
public int nextInt() {
return next(32);
}
}
<commit_after>package org.chabu;
public class Random {
private long seed;
private static final long multiplier = 0x5_DEEC_E66DL;
private static final long addend = 0x0BL;
private static final long mask = (1L << 48) - 1;
public Random(long seed) {
this.seed = (seed ^ multiplier) & mask;
}
public void nextBytes(byte[] bytes, int offset, int length ) {
for (int i = 0; i < length; ) {
bytes[offset+i++] = (byte)nextInt();
}
}
public int nextInt() {
this.seed = (this.seed * multiplier + addend) & mask;
return (int)(this.seed >>> 16);
}
}
|
<commit_msg>Remove directories when deleting files
<commit_before>from __future__ import unicode_literals
from django.db import models
from django.conf import settings
import django.db.models.signals
import django.dispatch.dispatcher
import web.core
import re
import os
import uuid
class File(models.Model):
id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
slug = models.CharField(max_length=64, default=web.core.random_slug_default_length, editable=False)
author = models.ForeignKey(settings.AUTH_USER_MODEL)
file = models.FileField(upload_to=web.core.get_file_path)
created = models.DateTimeField(auto_now_add=True)
expiry = models.DateTimeField(default=web.core.default_expiry, blank=True)
def delete_file(self):
return self.file.delete()
def __unicode__(self):
return self.file.name
@django.dispatch.dispatcher.receiver(django.db.models.signals.post_delete, sender=File)
def file_delete(sender, instance, **kwargs):
instance.delete_file()
<commit_after>from __future__ import unicode_literals
from django.db import models
from django.conf import settings
import django.db.models.signals
import django.dispatch.dispatcher
import web.core
import re
import os
import uuid
class File(models.Model):
id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
slug = models.CharField(max_length=64, default=web.core.random_slug_default_length, editable=False)
author = models.ForeignKey(settings.AUTH_USER_MODEL)
file = models.FileField(upload_to=web.core.get_file_path)
created = models.DateTimeField(auto_now_add=True)
expiry = models.DateTimeField(default=web.core.default_expiry, blank=True)
def delete_file(self):
base_path = os.path.dirname(self.file.name)
os.unlink(self.file.name)
os.rmdir(base_path)
def __unicode__(self):
return self.file.name
@django.dispatch.dispatcher.receiver(django.db.models.signals.post_delete, sender=File)
def file_delete(sender, instance, **kwargs):
instance.delete_file()
|
<commit_msg>Stop sorting sessions by ID (column removed).<commit_before>package io.miti.beetle.cache;
import io.miti.beetle.model.Session;
import java.util.List;
public final class SessionCache {
/** The one instance of the cache object. */
private static final SessionCache cache;
/** The list of objects in the cache. */
private List<Session> list = null;
/** Instantiate the cache. */
static {
cache = new SessionCache();
}
/**
* Private default constructor.
*/
private SessionCache() {
super();
}
/**
* Get the cache.
*
* @return the data cache
*/
public static SessionCache get() {
return cache;
}
/**
* Load the cache.
*/
public void loadCache() {
if (list != null) {
return;
}
list = Session.getList("order by ID");
}
public List<Session> getList() {
return list;
}
/**
* Print the list of objects in the cache.
*/
public void printList() {
if (list == null) {
System.out.println("(List is null)");
return;
}
System.out.println("Session Objects:");
for (Session obj : list) {
System.out.println(" " + obj.toString());
}
}
}
<commit_after>package io.miti.beetle.cache;
import io.miti.beetle.model.Session;
import java.util.List;
public final class SessionCache {
  /** The one instance of the cache object. */
  private static final SessionCache cache;
  /** The list of objects in the cache. */
  private List<Session> list = null;
  /** Instantiate the cache. */
  static {
    cache = new SessionCache();
  }
  /**
   * Private default constructor.
   */
  private SessionCache() {
    super();
  }
  /**
   * Get the cache.
   *
   * @return the data cache
   */
  public static SessionCache get() {
    return cache;
  }
  /**
   * Load the cache.  This is a no-op after the first successful load.
   * No explicit sort order is requested from the store.
   */
  public void loadCache() {
    if (list != null) {
      return;
    }
    list = Session.getList();
  }
  /**
   * Get the cached sessions.
   *
   * @return the cached list, or null if loadCache() has not been called
   */
  public List<Session> getList() {
    return list;
  }
  /**
   * Print the list of objects in the cache.
   */
  public void printList() {
    if (list == null) {
      System.out.println("(List is null)");
      return;
    }
    System.out.println("Session Objects:");
    for (Session obj : list) {
      System.out.println("  " + obj.toString());
    }
  }
}
|
<commit_msg>Disable copy and assign on ScriptObject
<commit_before>
class EmacsInstance;
// Scriptable object derived from the NPAPI NPObject.  The static *Thunk
// members take an NPObject* first argument, matching the signatures an
// NPClass function table requires; by naming they appear to adapt those
// C-style callbacks to the corresponding instance methods.
class ScriptObject : public NPObject {
 public:
  // Builds a ScriptObject bound to the given plugin instance.
  static ScriptObject* create(NPP npp);
  void invalidate();
  bool hasMethod(NPIdentifier name);
  bool invoke(NPIdentifier name,
              const NPVariant *args,
              uint32_t argCount,
              NPVariant *result);
  bool enumerate(NPIdentifier **identifiers, uint32_t *identifierCount);
  // NPClass-compatible static entry points.
  static NPObject* allocateThunk(NPP npp, NPClass *aClass);
  static void deallocateThunk(NPObject *npobj);
  static void invalidateThunk(NPObject *npobj);
  static bool hasMethodThunk(NPObject *npobj, NPIdentifier name);
  static bool invokeThunk(NPObject *npobj, NPIdentifier name,
                          const NPVariant *args, uint32_t argCount,
                          NPVariant *result);
  static bool enumerateThunk(NPObject *npobj, NPIdentifier **identifiers,
                             uint32_t *identifierCount);
 private:
  // Construction/destruction are private: instances come from create() /
  // allocateThunk and are destroyed via deallocateThunk.
  ScriptObject(NPP npp);
  ~ScriptObject();
  EmacsInstance* emacsInstance();
  NPP npp_;
};
#endif // INCLUDED_SCRIPT_OBJECT_H_
<commit_after>
class EmacsInstance;
// Scriptable object derived from the NPAPI NPObject.  The static *Thunk
// members take an NPObject* first argument, matching the signatures an
// NPClass function table requires; by naming they appear to adapt those
// C-style callbacks to the corresponding instance methods.
class ScriptObject : public NPObject {
 public:
  // Builds a ScriptObject bound to the given plugin instance.
  static ScriptObject* create(NPP npp);
  void invalidate();
  bool hasMethod(NPIdentifier name);
  bool invoke(NPIdentifier name,
              const NPVariant *args,
              uint32_t argCount,
              NPVariant *result);
  bool enumerate(NPIdentifier **identifiers, uint32_t *identifierCount);
  // NPClass-compatible static entry points.
  static NPObject* allocateThunk(NPP npp, NPClass *aClass);
  static void deallocateThunk(NPObject *npobj);
  static void invalidateThunk(NPObject *npobj);
  static bool hasMethodThunk(NPObject *npobj, NPIdentifier name);
  static bool invokeThunk(NPObject *npobj, NPIdentifier name,
                          const NPVariant *args, uint32_t argCount,
                          NPVariant *result);
  static bool enumerateThunk(NPObject *npobj, NPIdentifier **identifiers,
                             uint32_t *identifierCount);
 private:
  // Construction/destruction are private: instances come from create() /
  // allocateThunk and are destroyed via deallocateThunk.
  ScriptObject(NPP npp);
  ~ScriptObject();
  EmacsInstance* emacsInstance();
  NPP npp_;
  // Not copyable or assignable.
  DISALLOW_COPY_AND_ASSIGN(ScriptObject);
};
#endif // INCLUDED_SCRIPT_OBJECT_H_
|
<commit_msg>Add cover test : core factories
<commit_before>from django.test import TestCase
from .. import factories
class CoreFactoriesTest(TestCase):
    """
    Ensure factories work as expected.
    Here we just call each one to ensure they do not trigger any random
    error without verifying any other expectation.
    Each test passes as long as instantiating the factory raises nothing.
    """
    def test_path_factory(self):
        factories.PathFactory()
    def test_topology_mixin_factory(self):
        factories.TopologyFactory()
    def test_path_aggregation_factory(self):
        factories.PathAggregationFactory()
    def test_source_management_factory(self):
        factories.PathSourceFactory()
    def test_challenge_management_factory(self):
        factories.StakeFactory()
    def test_usage_management_factory(self):
        factories.UsageFactory()
    def test_network_management_factory(self):
        factories.NetworkFactory()
    def test_path_management_factory(self):
        factories.TrailFactory()
<commit_after>from django.test import TestCase
from .. import factories
class CoreFactoriesTest(TestCase):
    """
    Ensure factories work as expected.
    Here we just call each one to ensure they do not trigger any random
    error without verifying any other expectation.
    Each test passes as long as instantiating the factory raises nothing.
    """
    def test_path_factory(self):
        factories.PathFactory()
    def test_topology_mixin_factory(self):
        factories.TopologyFactory()
    def test_path_aggregation_factory(self):
        factories.PathAggregationFactory()
    def test_source_management_factory(self):
        factories.PathSourceFactory()
    def test_challenge_management_factory(self):
        factories.StakeFactory()
    def test_usage_management_factory(self):
        factories.UsageFactory()
    def test_network_management_factory(self):
        factories.NetworkFactory()
    def test_path_management_factory(self):
        factories.TrailFactory()
    def test_path_in_bounds_existing_factory(self):
        # Presumably needs at least one Path row to pick an existing
        # geometry from -- hence the explicit create() first.
        factories.PathFactory.create()
        factories.PathInBoundsExistingGeomFactory()
    def test_path_in_bounds_not_existing_factory(self):
        # With no Path rows at all, the factory fails with IndexError.
        with self.assertRaises(IndexError):
            factories.PathInBoundsExistingGeomFactory()
|
<commit_msg>Add more proper pure BinarySearch algorithm removing linear search
<commit_before>/*
*
Just playing around with binary and linear search. No idea.
*/
#include <iostream>
using namespace std;
int foo[10] = { 2, 16, 40, 77, 88, 99, 105, 120, 150 };
int n, result = 0;
int length = 10;
// One midpoint probe followed by a linear scan upward or downward.
// NOTE(review): this mutates the global `length`, so the function only
// behaves as intended on its first call, and it returns the matched VALUE
// (or -1), not an index -- confirm both before reuse.
int sillySearch(int element) {
    //Figure out array length and make first split
    length = length / 2;
    result = foo[length];
    if (result < element)
    {
        // Scan forward from the midpoint to the end of the array.
        while (length<10) {
            result = foo[(length)];
            length++;
            if (result == element) {
                return result;
            }
        }
    }
    else
    {
        // Scan backward from the midpoint toward the start.
        while (length>0) {
            result = foo[(length)];
            length--;
            if (result == element) {
                return result;
            }
        }
    }
    return -1;
}
// Demo driver: search the global `foo` array for 40 and print the result.
// Fix: `char argv[]` (i.e. char*) is not a valid signature for main; the
// standard form is `char* argv[]`.
int main(int argc, char* argv[])
{
    //first sort the array ascending order
    int tell = sillySearch(40);
    cout << tell;
    cout << "Hello\n";
    return 0;
}
}<commit_after>/*
*
Implementing binary search
*/
#include <iostream>
using namespace std;
//get rid of fixed array
int foo[10] = { 2, 16, 40, 77, 88, 99, 105, 120, 150 };
// Iterative binary search over the global, ascending `foo` array.
// Returns the index of `element`, or -1 if it is not present.
//
// Fixes: (1) on the greater-than branch the upper bound was moved to
// curIndex + 1 instead of curIndex - 1, so searching for a value in the
// lower half (e.g. 2) looped forever; (2) the loop used `<` so a single
// remaining candidate (e.g. the first element once the range narrowed)
// was never examined; (3) dropped the no-op JavaScript-ism `| 0`.
int sillySearch(int element) {
    int minIndex = 0;
    int maxIndex = 10 - 1;  // index of the last element; ideally derive from the array length
    while (minIndex <= maxIndex) {
        int curIndex = (minIndex + maxIndex) / 2;
        int curElement = foo[curIndex];
        if (curElement < element)
        {
            minIndex = curIndex + 1;  // target, if present, lies above the midpoint
        }
        else if (curElement > element)
        {
            maxIndex = curIndex - 1;  // target, if present, lies below the midpoint
        }
        else {
            return curIndex;  // found
        }
    }
    return -1;  // not present
}
// Demo driver: search the global `foo` array for 40 and print the result.
// Fix: `char argv[]` (i.e. char*) is not a valid signature for main; the
// standard form is `char* argv[]`.
int main(int argc, char* argv[])
{
    //first sort the array ascending order
    int tell = sillySearch(40);
    cout << tell;
    cout << "Hello World Once Again!\n";
    return 0;
}
<commit_msg>Add smiley face to output
<commit_before>"""A Python calculator"""
import sys
# CLI: <script> <add|multiply|divide> <number> <number> [...]
command = sys.argv[1]
numbers = [float(a) for a in sys.argv[2:]]
# NOTE(review): these two lines require at least two operands even for
# 'add'/'multiply', which otherwise accept any count -- confirm intended.
a = float(sys.argv[2])
b = float(sys.argv[3])
if command == 'add':
    print sum(numbers)
elif command == 'multiply':
    # Running product over every operand.
    product = 1
    for num in numbers:
        product *= num
    print product
elif command == 'divide':
    # NOTE(review): division by zero silently prints the default 1.
    div = 1
    if b != 0:
        div = a/b
    print div
<commit_after>"""A Python calculator"""
import sys
# CLI: <script> <add|multiply|divide> <number> <number> [...]
command = sys.argv[1]
numbers = [float(a) for a in sys.argv[2:]]
# NOTE(review): these two lines require at least two operands even for
# 'add'/'multiply', which otherwise accept any count -- confirm intended.
a = float(sys.argv[2])
b = float(sys.argv[3])
if command == 'add':
    print sum(numbers)
elif command == 'multiply':
    # Running product over every operand.
    product = 1
    for num in numbers:
        product *= num
    print product
elif command == 'divide':
    # NOTE(review): division by zero silently prints the default 1.
    div = 1
    if b != 0:
        div = a/b
    print div
# Friendly sign-off, printed for every command.
print ':)'
|
<commit_msg>Fix the logic of Insertion sort which is not working
<commit_before>
// NOTE(review): broken insertion sort.  The inner loop's condition
// data[j] < data[i] is false immediately at j == i, so j never moves and
// the swap below just swaps each element with itself -- the array is left
// unchanged.
void insertion(int *data, const int size_of_data)
{
    for(int i = 0; i < size_of_data; ++i)
    {
        int j;
        for(j = i; j >= 0 && data[j] < data[i]; --j);
        swap(data[i], data[j]);
    }
}
<commit_after>
// Insertion sort: grow a sorted prefix, sinking each new element into
// place by adjacent swaps.
void insertion(int *data, const int size_of_data)
{
    for(int pos = 0; pos < size_of_data; ++pos)
    {
        int k = pos;
        while(k > 0 && data[k] < data[k - 1])
        {
            swap(data[k], data[k - 1]);
            --k;
        }
    }
}
|
<commit_msg>Add nodiscard to ErrorMessage and ErrorMessageOr
Test: builds
<commit_before>// Copyright (c) 2020 The Orbit Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef ORBIT_BASE_RESULT_H_
#define ORBIT_BASE_RESULT_H_
#include <string>
#include <type_traits>
#include "outcome.hpp"
// Simple value type carrying a human-readable error description.
class ErrorMessage final {
 public:
  ErrorMessage() = default;
  explicit ErrorMessage(std::string message) : message_(std::move(message)) {}
  [[nodiscard]] const std::string& message() const { return message_; }
 private:
  std::string message_;
};
template <typename T, typename E>
using Result = outcome::result<T, E, outcome::policy::terminate>;
// Result carrying either a success value of type T or an ErrorMessage.
// Implicit bool conversion is deleted so callers must query the result
// explicitly instead of relying on truthiness.
template <typename T>
class ErrorMessageOr : public Result<T, ErrorMessage> {
 public:
  using Result<T, ErrorMessage>::Result;
  operator bool() = delete;
  operator bool() const = delete;
};
// Type trait: true only for ErrorMessageOr<T> instantiations.
template <typename T>
struct IsErrorMessageOr : std::false_type {};
template <typename T>
struct IsErrorMessageOr<ErrorMessageOr<T>> : std::true_type {};
#endif // ORBIT_BASE_RESULT_H_
<commit_after>// Copyright (c) 2020 The Orbit Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef ORBIT_BASE_RESULT_H_
#define ORBIT_BASE_RESULT_H_
#include <string>
#include <type_traits>
#include "outcome.hpp"
// Simple value type carrying a human-readable error description.
// [[nodiscard]] on the class makes ignoring a returned ErrorMessage warn.
class [[nodiscard]] ErrorMessage final {
 public:
  ErrorMessage() = default;
  explicit ErrorMessage(std::string message) : message_(std::move(message)) {}
  [[nodiscard]] const std::string& message() const { return message_; }
 private:
  std::string message_;
};
template <typename T, typename E>
using Result = outcome::result<T, E, outcome::policy::terminate>;
// Result carrying either a success value of type T or an ErrorMessage.
// Implicit bool conversion is deleted so callers must query the result
// explicitly; [[nodiscard]] makes ignoring a returned value warn.
template <typename T>
class [[nodiscard]] ErrorMessageOr : public Result<T, ErrorMessage> {
 public:
  using Result<T, ErrorMessage>::Result;
  operator bool() = delete;
  operator bool() const = delete;
};
// Type trait: true only for ErrorMessageOr<T> instantiations.
template <typename T>
struct IsErrorMessageOr : std::false_type {};
template <typename T>
struct IsErrorMessageOr<ErrorMessageOr<T>> : std::true_type {};
#endif // ORBIT_BASE_RESULT_H_
|
<commit_msg>Add validator to initial user migration
<commit_before>from __future__ import unicode_literals
from django.db import models, migrations
from django.conf import settings
class Migration(migrations.Migration):
    """Create the initial UserProfile table (one-to-one with the auth user)."""
    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]
    operations = [
        migrations.CreateModel(
            name='UserProfile',
            fields=[
                ('id', models.AutoField(
                    verbose_name='ID', serialize=False, auto_created=True,
                    primary_key=True)),
                ('contact_number', models.CharField(
                    max_length=16, null=True, blank=True)),
                ('bio', models.TextField(null=True, blank=True)),
                ('homepage', models.CharField(
                    max_length=256, null=True, blank=True)),
                ('twitter_handle', models.CharField(
                    max_length=15, null=True, blank=True)),
                ('github_username', models.CharField(
                    max_length=32, null=True, blank=True)),
                ('user', models.OneToOneField(
                    to=settings.AUTH_USER_MODEL, on_delete=models.CASCADE)),
            ],
            options={
            },
            bases=(models.Model,),
        ),
    ]
<commit_after>from __future__ import unicode_literals
import django.core.validators
from django.db import models, migrations
from django.conf import settings
class Migration(migrations.Migration):
    """Create the initial UserProfile table (one-to-one with the auth user).

    twitter_handle is validated against Twitter's handle rules
    (1-15 word characters).
    """
    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]
    operations = [
        migrations.CreateModel(
            name='UserProfile',
            fields=[
                ('id', models.AutoField(
                    verbose_name='ID', serialize=False, auto_created=True,
                    primary_key=True)),
                ('contact_number', models.CharField(
                    max_length=16, null=True, blank=True)),
                ('bio', models.TextField(null=True, blank=True)),
                ('homepage', models.CharField(
                    max_length=256, null=True, blank=True)),
                ('twitter_handle', models.CharField(
                    max_length=15, null=True, blank=True,
                    validators=[
                        django.core.validators.RegexValidator(
                            '^[A-Za-z0-9_]{1,15}$',
                            'Incorrectly formatted twitter handle')
                    ])),
                ('github_username', models.CharField(
                    max_length=32, null=True, blank=True)),
                ('user', models.OneToOneField(
                    to=settings.AUTH_USER_MODEL, on_delete=models.CASCADE)),
            ],
            options={
            },
            bases=(models.Model,),
        ),
    ]
|
<commit_msg>[scs] Disable negative test in shadowcallstack.
The test checks that scs does NOT work correctly w/o runtime support.
That's a strange thing to test, and it is also flaky, because things
may just work if x18 happens to point to a writable page.
git-svn-id: c199f293c43da69278bea8e88f92242bf3aa95f7@335982 91177308-0d34-0410-b5e6-96231b3b80d8
<commit_before>// RUN: %clang_scs -D INCLUDE_RUNTIME %s -o %t
// RUN: %run %t
// RUN: %clang_scs %s -o %t
// RUN: not --crash %run %t
// Basic smoke test for the runtime
#include "libc_support.h"
#ifdef INCLUDE_RUNTIME
#include "minimal_runtime.h"
#else
#define scs_main main
#endif
// Prints a marker so the test RUN lines above can verify execution.
// Named scs_main: the preprocessor block above maps it onto main when the
// runtime is not included.
int scs_main(void) {
  scs_fputs_stdout("In main.\n");
  return 0;
}
<commit_after>// RUN: %clang_scs %s -o %t
// RUN: %run %t
// Basic smoke test for the runtime
#include "libc_support.h"
#include "minimal_runtime.h"
// Prints a marker so the test RUN lines above can verify the program ran
// under the minimal shadow-call-stack runtime.
int scs_main(void) {
  scs_fputs_stdout("In main.\n");
  return 0;
}
|
<commit_msg>Use alternate GitHub download URL
<commit_before>
from setuptools import find_packages, setup
VERSION = "1.0.0"
# Read runtime dependencies from requirements.txt so setup.py and pip stay in sync.
with open("requirements.txt", "rt") as f:
    requirements= f.read().splitlines()
# NOTE(review): keyword "dowload" looks like a typo for "download" --
# confirm before changing published package metadata.
setup(name="sacad",
      version=VERSION,
      author="desbma",
      packages=find_packages(),
      entry_points={"console_scripts": ["sacad = sacad:cl_main"]},
      package_data={"": ["LICENSE", "README.md", "requirements.txt"]},
      test_suite="tests",
      install_requires=requirements,
      description="Search and download music album covers",
      url="https://github.com/desbma/sacad",
      download_url="https://github.com/desbma/sacad/tarball/%s" % (VERSION),
      keywords=["dowload", "album", "cover", "art", "albumart", "music"],
      classifiers=["Development Status :: 4 - Beta",
                   "Environment :: Console",
                   "Intended Audience :: End Users/Desktop",
                   "License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)",
                   "Natural Language :: English",
                   "Operating System :: OS Independent",
                   "Programming Language :: Python",
                   "Programming Language :: Python :: 3",
                   "Programming Language :: Python :: 3 :: Only",
                   "Programming Language :: Python :: 3.3",
                   "Programming Language :: Python :: 3.4",
                   "Topic :: Internet :: WWW/HTTP",
                   "Topic :: Multimedia :: Graphics",
                   "Topic :: Utilities"])
<commit_after>
from setuptools import find_packages, setup
VERSION = "1.0.0"
# Read runtime dependencies from requirements.txt so setup.py and pip stay in sync.
with open("requirements.txt", "rt") as f:
    requirements= f.read().splitlines()
# NOTE(review): keyword "dowload" looks like a typo for "download" --
# confirm before changing published package metadata.
setup(name="sacad",
      version=VERSION,
      author="desbma",
      packages=find_packages(),
      entry_points={"console_scripts": ["sacad = sacad:cl_main"]},
      package_data={"": ["LICENSE", "README.md", "requirements.txt"]},
      test_suite="tests",
      install_requires=requirements,
      description="Search and download music album covers",
      url="https://github.com/desbma/sacad",
      # GitHub tag archive URL for this release.
      download_url="https://github.com/desbma/sacad/archive/%s.tar.gz" % (VERSION),
      keywords=["dowload", "album", "cover", "art", "albumart", "music"],
      classifiers=["Development Status :: 4 - Beta",
                   "Environment :: Console",
                   "Intended Audience :: End Users/Desktop",
                   "License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)",
                   "Natural Language :: English",
                   "Operating System :: OS Independent",
                   "Programming Language :: Python",
                   "Programming Language :: Python :: 3",
                   "Programming Language :: Python :: 3 :: Only",
                   "Programming Language :: Python :: 3.3",
                   "Programming Language :: Python :: 3.4",
                   "Topic :: Internet :: WWW/HTTP",
                   "Topic :: Multimedia :: Graphics",
                   "Topic :: Utilities"])
|
<commit_msg>Use a more realistic context to render pages for search
The Mezzanine page middleware adds a page and _current_page to the
context for pages, so our search index should too.
<commit_before>from haystack import indexes
from widgy.contrib.widgy_mezzanine import get_widgypage_model
from widgy.templatetags.widgy_tags import render_root
from widgy.utils import html_to_plaintext
from .signals import widgypage_pre_index
WidgyPage = get_widgypage_model()
class PageIndex(indexes.SearchIndex, indexes.Indexable):
    """Haystack index for published WidgyPage objects."""
    title = indexes.CharField(model_attr='title')
    date = indexes.DateTimeField(model_attr='publish_date')
    description = indexes.CharField(model_attr='description')
    keywords = indexes.MultiValueField()
    get_absolute_url = indexes.CharField(model_attr='get_absolute_url')
    text = indexes.CharField(document=True)
    def full_prepare(self, *args, **kwargs):
        # Emit widgypage_pre_index before Haystack prepares the document.
        widgypage_pre_index.send(sender=self)
        return super(PageIndex, self).full_prepare(*args, **kwargs)
    def get_model(self):
        return WidgyPage
    def index_queryset(self, using=None):
        # Only published pages are indexed.
        return self.get_model().objects.published()
    def prepare_text(self, obj):
        """Build the document body: title, keywords, description plus the
        plain-text rendering of the page's widgy content tree."""
        html = render_root({}, obj, 'root_node')
        content = html_to_plaintext(html)
        keywords = ' '.join(self.prepare_keywords(obj))
        return ' '.join([obj.title, keywords, obj.description,
                         content])
    def prepare_keywords(self, obj):
        # unicode(): Python 2 codebase.
        return [unicode(k) for k in obj.keywords.all()]
<commit_after>from haystack import indexes
from widgy.contrib.widgy_mezzanine import get_widgypage_model
from widgy.templatetags.widgy_tags import render_root
from widgy.utils import html_to_plaintext
from .signals import widgypage_pre_index
WidgyPage = get_widgypage_model()
class PageIndex(indexes.SearchIndex, indexes.Indexable):
    """Haystack index for published WidgyPage objects."""
    title = indexes.CharField(model_attr='title')
    date = indexes.DateTimeField(model_attr='publish_date')
    description = indexes.CharField(model_attr='description')
    keywords = indexes.MultiValueField()
    get_absolute_url = indexes.CharField(model_attr='get_absolute_url')
    text = indexes.CharField(document=True)
    def full_prepare(self, *args, **kwargs):
        # Emit widgypage_pre_index before Haystack prepares the document.
        widgypage_pre_index.send(sender=self)
        return super(PageIndex, self).full_prepare(*args, **kwargs)
    def get_model(self):
        return WidgyPage
    def index_queryset(self, using=None):
        # Only published pages are indexed.
        return self.get_model().objects.published()
    def prepare_text(self, obj):
        """Build the document body: title, keywords, description plus the
        plain-text rendering of the page's widgy content tree."""
        # Mimic Mezzanine's page middleware, which exposes the page as both
        # 'page' and '_current_page' when rendering a real request.
        context = {'_current_page': obj.page_ptr, 'page': obj.page_ptr}
        html = render_root(context, obj, 'root_node')
        content = html_to_plaintext(html)
        keywords = ' '.join(self.prepare_keywords(obj))
        return ' '.join([obj.title, keywords, obj.description,
                         content])
    def prepare_keywords(self, obj):
        # unicode(): Python 2 codebase.
        return [unicode(k) for k in obj.keywords.all()]
|
<commit_msg>Remove TODO - complete as of last commit
Season object is passed to the table update
<commit_before>import requests
from nflpool.data.dbsession import DbSessionFactory
from nflpool.data.activeplayers import ActiveNFLPlayers
import nflpool.data.secret as secret
from requests.auth import HTTPBasicAuth
from nflpool.data.seasoninfo import SeasonInfo
class ActivePlayersService:
    @classmethod
    def add_active_nflplayers(cls, season: int, team_id: int, firstname: str, lastname: str,
                              position: str, player_id: int):
        """Fetch the active-player feed from MySportsFeeds and insert one
        ActiveNFLPlayers row per player for the current season.

        NOTE(review): every parameter except cls is reassigned inside the
        method and never read from the caller -- confirm whether the
        signature can be simplified.
        """
        session = DbSessionFactory.create_session()
        # Current season is read from the single SeasonInfo row (id == '1').
        season_row = session.query(SeasonInfo).filter(SeasonInfo.id == '1').first()
        season = season_row.current_season
        response = requests.get('https://api.mysportsfeeds.com/v1.1/pull/nfl/' + season +
                                '-regular/active_players.json',
                                auth=HTTPBasicAuth(secret.msf_username, secret.msf_pw))
        player_info = response.json()
        player_list = player_info["activeplayers"]["playerentry"]
        for players in player_list:
            # Players missing any of these keys are skipped entirely.
            try:
                firstname = (players["player"]["FirstName"])
                lastname = (players["player"]["LastName"])
                player_id = (players["player"]["ID"])
                team_id = (players["team"]["ID"])
                position = (players["player"]["Position"])
            except KeyError:
                continue
            # TODO Update season='2017' below to a variable
            active_players = ActiveNFLPlayers(firstname=firstname, lastname=lastname, player_id=player_id,
                                              team_id=team_id, position=position, season=season)
            session.add(active_players)
            session.commit()
<commit_after>import requests
from nflpool.data.dbsession import DbSessionFactory
from nflpool.data.activeplayers import ActiveNFLPlayers
import nflpool.data.secret as secret
from requests.auth import HTTPBasicAuth
from nflpool.data.seasoninfo import SeasonInfo
class ActivePlayersService:
    @classmethod
    def add_active_nflplayers(cls, season: int, team_id: int, firstname: str, lastname: str,
                              position: str, player_id: int):
        """Fetch the active-player feed from MySportsFeeds and insert one
        ActiveNFLPlayers row per player for the current season.

        NOTE(review): every parameter except cls is reassigned inside the
        method and never read from the caller -- confirm whether the
        signature can be simplified.
        """
        session = DbSessionFactory.create_session()
        # Current season is read from the single SeasonInfo row (id == '1').
        season_row = session.query(SeasonInfo).filter(SeasonInfo.id == '1').first()
        season = season_row.current_season
        response = requests.get('https://api.mysportsfeeds.com/v1.1/pull/nfl/' + season +
                                '-regular/active_players.json',
                                auth=HTTPBasicAuth(secret.msf_username, secret.msf_pw))
        player_info = response.json()
        player_list = player_info["activeplayers"]["playerentry"]
        for players in player_list:
            # Players missing any of these keys are skipped entirely.
            try:
                firstname = (players["player"]["FirstName"])
                lastname = (players["player"]["LastName"])
                player_id = (players["player"]["ID"])
                team_id = (players["team"]["ID"])
                position = (players["player"]["Position"])
            except KeyError:
                continue
            active_players = ActiveNFLPlayers(firstname=firstname, lastname=lastname, player_id=player_id,
                                              team_id=team_id, position=position, season=season)
            session.add(active_players)
            session.commit()
|
<commit_msg>Clean up log message in FanStatChannel
<commit_before>package channels
import (
"log"
)
// FanState describes a fan's state; nil fields are omitted from the JSON
// payload.
type FanState struct {
	Speed *float64 `json:"speed,omitempty"` // the speed of the fan as a percentage of maximum
	Direction *string `json:"direction,omitempty"` // the direction of the fan: "forward" or "reverse"
}
// FanStatActuator is implemented by device drivers that can apply a FanState.
type FanStatActuator interface {
	SetFanState(fanState *FanState) error
}
// FanStatChannel exposes a fan actuator over the "fanstat" protocol.
type FanStatChannel struct {
	baseChannel
	actuator FanStatActuator
}
// NewFanStatChannel wires an actuator into a channel speaking the
// "fanstat" protocol.
func NewFanStatChannel(actuator FanStatActuator) *FanStatChannel {
	return &FanStatChannel{
		baseChannel: baseChannel{protocol: "fanstat"},
		actuator: actuator,
	}
}
// Set forwards the requested state to the underlying actuator.
func (c *FanStatChannel) Set(fanState *FanState) error {
	return c.actuator.SetFanState(fanState)
}
// SendState publishes the current fan state as a "state" event, logging
// the payload first.
func (c *FanStatChannel) SendState(fanState *FanState) error {
	log.Printf("SendState: %+v\n, %p", fanState, c.SendEvent)
	return c.SendEvent("state", fanState)
}
<commit_after>package channels
// FanState describes a fan's state; nil fields are omitted from the JSON
// payload.
type FanState struct {
	Speed *float64 `json:"speed,omitempty"` // the speed of the fan as a percentage of maximum
	Direction *string `json:"direction,omitempty"` // the direction of the fan: "forward" or "reverse"
}
// FanStatActuator is implemented by device drivers that can apply a FanState.
type FanStatActuator interface {
	SetFanState(fanState *FanState) error
}
// FanStatChannel exposes a fan actuator over the "fanstat" protocol.
type FanStatChannel struct {
	baseChannel
	actuator FanStatActuator
}
// NewFanStatChannel wires an actuator into a channel speaking the
// "fanstat" protocol.
func NewFanStatChannel(actuator FanStatActuator) *FanStatChannel {
	return &FanStatChannel{
		baseChannel: baseChannel{protocol: "fanstat"},
		actuator: actuator,
	}
}
// Set forwards the requested state to the underlying actuator.
func (c *FanStatChannel) Set(fanState *FanState) error {
	return c.actuator.SetFanState(fanState)
}
// SendState publishes the current fan state as a "state" event.
// Debug logging of the payload is retained below but disabled.
func (c *FanStatChannel) SendState(fanState *FanState) error {
	//log.Printf("SendState: %+v\n, %p", fanState, c.SendEvent)
	return c.SendEvent("state", fanState)
}
|
<commit_msg>Use file path as Europeana series name.
<commit_before>from __future__ import print_function
import sys, os
from re import sub
import zipfile, json
# from pyspark import SparkContext
# from pyspark.sql import SQLContext
# from pyspark.sql import Row
# from pyspark.sql.types import StringType
def getSeries(fname):
    """Read the .metadata.json entry from a zip and return the parsed dict.

    NOTE(review): returns None implicitly when the archive has no
    .metadata.json member, and the local `series` variable is computed but
    never used -- confirm both are intentional.
    """
    with zipfile.ZipFile(fname, 'r') as zf:
        names = zf.namelist()
        mfile = [f for f in names if f.endswith('.metadata.json')]
        series = fname
        if len(mfile) > 0:
            m = json.loads(zf.read(mfile[0]))
            series = m['identifier'][0]
            return m
if __name__ == "__main__":
    if len(sys.argv) < 2:
        print("Usage: europeana.py <input> <output>", file=sys.stderr)
        exit(-1)
    # sc = SparkContext(appName="Europeana Import")
    # sqlContext = SQLContext(sc)
    # Walk the input tree and collect every *.zip file.
    x = [os.path.join(d[0], f) for d in os.walk(sys.argv[1]) for f in d[2] if f.endswith('zip')]
    for f in x:
        print(json.dumps(getSeries(f)))
    # sc.parallelize(x, 200).flatMap(getSeries).toDF().write.save(sys.argv[2])
    # sc.stop()
<commit_after>from __future__ import print_function
import sys, os
from re import sub
import zipfile, json
# from pyspark import SparkContext
# from pyspark.sql import SQLContext
# from pyspark.sql import Row
# from pyspark.sql.types import StringType
def getSeries(fname):
    """Derive a series id from the zip's file path and pair it with the
    archive's title/language metadata.

    NOTE(review): returns None implicitly when the archive has no
    .metadata.json member -- callers then print "null"; confirm intended.
    """
    with zipfile.ZipFile(fname, 'r') as zf:
        names = zf.namelist()
        mfile = [f for f in names if f.endswith('.metadata.json')]
        # Series id: strip non-ASCII bytes, drop everything up to
        # 'newspapers-by-country/' and remove the .zip suffix.
        series = 'europeana/' + sub('^.*newspapers-by-country/', '',
            sub('[\x80-\xff]', '', fname).replace('.zip', ''))
        if len(mfile) > 0:
            m = json.loads(zf.read(mfile[0]))
            return {'series': series, 'title': m['title'][0], 'lang': m['language']}
if __name__ == "__main__":
    if len(sys.argv) < 2:
        print("Usage: europeana.py <input> <output>", file=sys.stderr)
        exit(-1)
    # sc = SparkContext(appName="Europeana Import")
    # sqlContext = SQLContext(sc)
    # Walk the input tree and collect every *.zip file.
    x = [os.path.join(d[0], f) for d in os.walk(sys.argv[1]) for f in d[2] if f.endswith('zip')]
    for f in x:
        print(json.dumps(getSeries(f)))
    # sc.parallelize(x, 200).flatMap(getSeries).toDF().write.save(sys.argv[2])
    # sc.stop()
|
<commit_msg>Improve error message and imports
<commit_before>from typing import IO, Text
from re import compile, MULTILINE
from logging import getLogger
from io import StringIO
import yaml
NBSP = "\u00A0"
pattern = compile(r"^\s*[\u00A0]+\s*", MULTILINE)
logger = getLogger(__name__)
def _replace_nbsp(origdata):
    """Replace non-breaking spaces in leading whitespace with plain spaces.

    Uses the module-level ``pattern`` (runs of NBSP within leading
    whitespace) and logs one warning summarising how many were replaced.
    """
    counter = 0
    def _replacer_func(matchobj):
        # Count substitutions so the warning below can report a total.
        nonlocal counter
        counter += 1
        string = matchobj.group(0)
        rc = string.replace(NBSP, " ")
        return rc
    data = pattern.sub(_replacer_func, origdata)
    if counter:
        plural = "s were" if counter > 1 else " was"
        logger.warn(
            f"Note: {counter} non-breaking space character{plural} detected in cumulusci.yml.\n"
            "Perhaps you cut and pasted it from a Web page.\n"
            "Future versions of CumulusCI may disallow these characters.\n"
        )
    return data
def cci_safe_load(f_config: IO[Text]):
    "Load a file, convert NBSP->space and parse it in YAML."
    # Normalise non-breaking spaces before handing the text to the parser.
    data = _replace_nbsp(f_config.read())
    rc = yaml.safe_load(StringIO(data))
    return rc
<commit_after>from typing import IO, Text
import re
from logging import getLogger
from io import StringIO
import yaml
NBSP = "\u00A0"
pattern = re.compile(r"^\s*[\u00A0]+\s*", re.MULTILINE)
logger = getLogger(__name__)
def _replace_nbsp(origdata):
    """Replace non-breaking spaces in leading whitespace with plain spaces.

    Uses the module-level ``pattern`` (runs of NBSP within leading
    whitespace) and logs one warning summarising how many lines were
    touched.
    """
    counter = 0
    def _replacer_func(matchobj):
        # Count substitutions so the warning below can report a total.
        nonlocal counter
        counter += 1
        string = matchobj.group(0)
        rc = string.replace(NBSP, " ")
        return rc
    data = pattern.sub(_replacer_func, origdata)
    if counter:
        plural = "s were" if counter > 1 else " was"
        # Logger.warn is a deprecated alias of Logger.warning; use the
        # supported name.
        logger.warning(
            f"Note: {counter} lines with non-breaking space character{plural} detected in cumulusci.yml.\n"
            "Perhaps you cut and pasted from a Web page?\n"
            "Future versions of CumulusCI may disallow these characters.\n"
        )
    return data
def cci_safe_load(f_config: IO[Text]):
    "Load a file, convert NBSP->space and parse it in YAML."
    cleaned = _replace_nbsp(f_config.read())
    return yaml.safe_load(StringIO(cleaned))
|
<commit_msg>Remove getDefaultProps and fix constructor.
<commit_before>import * as React from 'react';
import * as d3 from 'd3';
export class Diagram extends React.Component<{}, {}> {
static displayName = 'Diagram';
static propTypes = {
title: React.PropTypes.string,
};
constructor () {
super();
}
getDefaultProps () {
return {
title: "Unknown diagram",
};
}
render () {
return (
<div>
Like Foo
</div>
);
}
}
<commit_after>import * as React from 'react';
import * as d3 from 'd3';
export class Diagram extends React.Component<{}, {}> {
static displayName = 'Diagram';
static propTypes = {
title: React.PropTypes.string,
};
constructor (props) {
super(props);
}
render () {
return (
<div>
Like Foo
</div>
);
}
}
|
<commit_msg>Fix long filenames overflowing progress card.
<commit_before>import * as React from 'react';
import MUI from 'material-ui';
import Theme from './MurphyTheme';
interface Props {
progress: number;
message: string;
error: boolean;
}
export default class ProgressCard extends React.Component<Props, {}> {
constructor(props: Props) {
super(props);
}
static childContextTypes: any = {
muiTheme: React.PropTypes.object
}
private get styles() {
const padding: number = Theme.spacing.desktopGutter;
return {
progressMessage: {
fontSize: 14,
marginTop: padding / 2
},
error: {
color: 'red'
}
};
}
render() {
return (
<div>
<MUI.LinearProgress mode="determinate" value={this.props.progress * 100} color={this.props.error ? '#F44336' : undefined} />
<div style={this.styles.progressMessage}>{this.props.message}</div>
</div>
);
}
}
<commit_after>import * as React from 'react';
import MUI from 'material-ui';
import Theme from './MurphyTheme';
interface Props {
progress: number;
message: string;
error: boolean;
}
export default class ProgressCard extends React.Component<Props, {}> {
constructor(props: Props) {
super(props);
}
static childContextTypes: any = {
muiTheme: React.PropTypes.object
}
private get styles() {
const padding: number = Theme.spacing.desktopGutter;
return {
progressMessage: {
fontSize: 14,
marginTop: padding / 2,
wordWrap: 'break-word'
},
error: {
color: 'red'
}
};
}
render() {
return (
<div>
<MUI.LinearProgress mode="determinate" value={this.props.progress * 100} color={this.props.error ? '#F44336' : undefined} />
<div style={this.styles.progressMessage}>{this.props.message}</div>
</div>
);
}
}
|
<commit_msg>Update command docs for conda index
<commit_before>from __future__ import absolute_import, division, print_function
import argparse
import os
from locale import getpreferredencoding
from os.path import abspath
from conda.compat import PY3
from conda_build.index import update_index
def main():
p = argparse.ArgumentParser(
description="Update package index metadata files in given directories")
p.add_argument('dir',
help='Directory that contains an index to be updated.',
nargs='*',
default=[os.getcwd()])
p.add_argument('-c', "--check-md5",
action="store_true",
help="Use MD5 values instead of file modification times for\
determining if a package's metadata needs to be \
updated.")
p.add_argument('-f', "--force",
action="store_true",
help="force reading all files")
p.add_argument('-q', "--quiet",
action="store_true")
args = p.parse_args()
dir_paths = [abspath(path) for path in args.dir]
# Don't use byte strings in Python 2
if not PY3:
dir_paths = [path.decode(getpreferredencoding()) for path in dir_paths]
for path in dir_paths:
update_index(path, verbose=(not args.quiet), force=args.force)
if __name__ == '__main__':
main()
<commit_after>from __future__ import absolute_import, division, print_function
import os
from locale import getpreferredencoding
from os.path import abspath
from conda.compat import PY3
from conda.cli.conda_argparse import ArgumentParser
from conda_build.index import update_index
def main():
p = ArgumentParser(
description="Update package index metadata files in given directories.")
p.add_argument(
'dir',
help='Directory that contains an index to be updated.',
nargs='*',
default=[os.getcwd()],
)
p.add_argument(
'-c', "--check-md5",
action="store_true",
help="""Use MD5 values instead of file modification times for determining if a
package's metadata needs to be updated.""",
)
p.add_argument(
'-f', "--force",
action="store_true",
help="Force reading all files.",
)
p.add_argument(
'-q', "--quiet",
action="store_true",
help="Don't show any output.",
)
args = p.parse_args()
dir_paths = [abspath(path) for path in args.dir]
# Don't use byte strings in Python 2
if not PY3:
dir_paths = [path.decode(getpreferredencoding()) for path in dir_paths]
for path in dir_paths:
update_index(path, verbose=(not args.quiet), force=args.force)
if __name__ == '__main__':
main()
|
<commit_msg>Fix NPE at IDE restart
<commit_before>// Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.ide;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.components.ServiceManager;
import com.intellij.openapi.util.SystemInfo;
public abstract class RemoteDesktopService {
private static volatile RemoteDesktopService ourInstance;
public static RemoteDesktopService getInstance() {
RemoteDesktopService service = ourInstance;
if (service == null) {
ourInstance = service = ServiceManager.getService(RemoteDesktopService.class);
}
return service;
}
public static boolean isRemoteSession() {
if (!SystemInfo.isWindows) return false;
return ApplicationManager.getApplication() != null && getInstance().isRemoteDesktopConnected();
}
public abstract boolean isRemoteDesktopConnected();
}
<commit_after>// Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.ide;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.components.ServiceManager;
import com.intellij.openapi.util.SystemInfo;
public abstract class RemoteDesktopService {
private static volatile RemoteDesktopService ourInstance;
public static RemoteDesktopService getInstance() {
RemoteDesktopService service = ourInstance;
if (service == null) {
ourInstance = service = ServiceManager.getService(RemoteDesktopService.class);
}
return service;
}
public static boolean isRemoteSession() {
if (!SystemInfo.isWindows) return false;
if (ApplicationManager.getApplication() == null) return false;
RemoteDesktopService instance = getInstance();
return instance != null && instance.isRemoteDesktopConnected();
}
public abstract boolean isRemoteDesktopConnected();
}
|
<commit_msg>Add some braces around an if guard
<commit_before>package com.yammer.tenacity.core.properties;
import com.netflix.config.ConfigurationManager;
import com.netflix.config.DynamicConfiguration;
import com.netflix.config.FixedDelayPollingScheduler;
import com.netflix.config.PolledConfigurationSource;
import com.netflix.config.sources.URLConfigurationSource;
import com.yammer.tenacity.core.config.BreakerboxConfiguration;
import org.apache.commons.configuration.Configuration;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class ArchaiusPropertyRegister {
private static class TenacityPollingScheduler extends FixedDelayPollingScheduler {
private static final Logger LOGGER = LoggerFactory.getLogger(TenacityPollingScheduler.class);
public TenacityPollingScheduler(int initialDelayMillis, int delayMillis, boolean ignoreDeletesFromSource) {
super(initialDelayMillis, delayMillis, ignoreDeletesFromSource);
}
@Override
protected synchronized void initialLoad(PolledConfigurationSource source, Configuration config) {
try {
super.initialLoad(source, config);
} catch (Exception err) {
LOGGER.warn("Initial dynamic configuration load failed", err);
}
}
}
public void register(BreakerboxConfiguration breakerboxConfiguration) {
if(breakerboxConfiguration.getUrls().isEmpty()) return;
ConfigurationManager.install(
new DynamicConfiguration(
new URLConfigurationSource(breakerboxConfiguration.getUrls().split(",")),
new TenacityPollingScheduler(
(int)breakerboxConfiguration.getInitialDelay().toMilliseconds(),
(int)breakerboxConfiguration.getDelay().toMilliseconds(),
true)));
}
}
<commit_after>package com.yammer.tenacity.core.properties;
import com.netflix.config.ConfigurationManager;
import com.netflix.config.DynamicConfiguration;
import com.netflix.config.FixedDelayPollingScheduler;
import com.netflix.config.PolledConfigurationSource;
import com.netflix.config.sources.URLConfigurationSource;
import com.yammer.tenacity.core.config.BreakerboxConfiguration;
import org.apache.commons.configuration.Configuration;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class ArchaiusPropertyRegister {
private static class TenacityPollingScheduler extends FixedDelayPollingScheduler {
private static final Logger LOGGER = LoggerFactory.getLogger(TenacityPollingScheduler.class);
public TenacityPollingScheduler(int initialDelayMillis, int delayMillis, boolean ignoreDeletesFromSource) {
super(initialDelayMillis, delayMillis, ignoreDeletesFromSource);
}
@Override
protected synchronized void initialLoad(PolledConfigurationSource source, Configuration config) {
try {
super.initialLoad(source, config);
} catch (Exception err) {
LOGGER.warn("Initial dynamic configuration load failed", err);
}
}
}
public void register(BreakerboxConfiguration breakerboxConfiguration) {
if (breakerboxConfiguration.getUrls().isEmpty()) {
return;
}
ConfigurationManager.install(
new DynamicConfiguration(
new URLConfigurationSource(breakerboxConfiguration.getUrls().split(",")),
new TenacityPollingScheduler(
(int)breakerboxConfiguration.getInitialDelay().toMilliseconds(),
(int)breakerboxConfiguration.getDelay().toMilliseconds(),
true)));
}
}
|
<commit_msg>Load blog data from command line
<commit_before>from blogtrans.ui.MainWindow import *
import sys, traceback
import getopt
import wx
# Importers / Exporters
from blogtrans.wretch.WretchImporter import WretchImporter
from blogtrans.mt import *
from blogtrans.blogger.BloggerExporter import *
from blogtrans.blogger.BloggerImporter import *
def trap_error(func) :
def f() :
try:
func()
except Exception, inst :
type, value, tb = sys.exc_info()
print "\n".join(traceback.format_exception(type, value, tb))
raw_input()
return f
@trap_error
def main() :
long_opts = [ "import-wretch=", "import-blogger=" ]
opts, args = getopt.getopt(sys.argv[1:], "n", long_opts)
no_window = False
for o, a in opts :
if o=="-n" :
no_window = True
if o=="--import-wretch" :
blogdata = WretchImporter(a).parse()
print "%d articles, %d comments" % ( blogdata.article_count(), blogdata.comment_count() )
if o=="--import-blogger" :
blogdata = BloggerImporter(a).parse()
print "%d articles, %d comments" % ( blogdata.article_count(), blogdata.comment_count() )
if not no_window :
app = wx.PySimpleApp()
frame=MainWindow()
app.MainLoop()
if __name__ == "__main__" :
main()
<commit_after>from blogtrans.ui.MainWindow import *
import sys, traceback
import getopt
import wx
# Importers / Exporters
from blogtrans.wretch.WretchImporter import WretchImporter
from blogtrans.mt import *
from blogtrans.blogger.BloggerExporter import *
from blogtrans.blogger.BloggerImporter import *
def trap_error(func) :
def f() :
try:
func()
except Exception, inst :
type, value, tb = sys.exc_info()
print "\n".join(traceback.format_exception(type, value, tb))
raw_input()
return f
@trap_error
def main() :
long_opts = [ "import-wretch=", "import-blogger=" ]
opts, args = getopt.getopt(sys.argv[1:], "n", long_opts)
blogdata = None
no_window = False
for o, a in opts :
if o=="-n" :
no_window = True
if o=="--import-wretch" :
blogdata = WretchImporter(a).parse()
print "%d articles, %d comments" % ( blogdata.article_count(), blogdata.comment_count() )
if o=="--import-blogger" :
blogdata = BloggerImporter(a).parse()
print "%d articles, %d comments" % ( blogdata.article_count(), blogdata.comment_count() )
if not no_window :
app = wx.PySimpleApp()
frame = MainWindow()
if blogdata!=None:
frame.setBlogData(blogdata)
app.MainLoop()
if __name__ == "__main__" :
main()
|
<commit_msg>Add a requirement for serving the assets in all tests
<commit_before>import os
from unittest import TestCase
import re
from app import generate_profiles
class TestGenerateProfiles(TestCase):
gen = generate_profiles.GenerateProfiles
network_environment = "%s/misc/network-environment" % gen.bootcfg_path
@classmethod
def setUpClass(cls):
cls.gen = generate_profiles.GenerateProfiles()
if os.path.isfile("%s" % cls.network_environment):
os.remove("%s" % cls.network_environment)
@classmethod
def tearDownClass(cls):
if os.path.isfile("%s" % cls.network_environment):
os.remove("%s" % cls.network_environment)
def test_00_ip_address(self):
self.assertFalse(os.path.isfile("%s" % self.network_environment))
ip = self.gen.ip_address
match = re.match(r"^\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}$", ip)
self.assertIsNotNone(match)
self.assertTrue(os.path.isfile("%s" % self.network_environment))
<commit_after>import os
import subprocess
from unittest import TestCase
import re
from app import generate_profiles
class TestGenerateProfiles(TestCase):
gen = generate_profiles.GenerateProfiles
network_environment = "%s/misc/network-environment" % gen.bootcfg_path
@classmethod
def setUpClass(cls):
subprocess.check_output(["make", "-C", cls.gen.project_path])
cls.gen = generate_profiles.GenerateProfiles()
if os.path.isfile("%s" % cls.network_environment):
os.remove("%s" % cls.network_environment)
@classmethod
def tearDownClass(cls):
if os.path.isfile("%s" % cls.network_environment):
os.remove("%s" % cls.network_environment)
def test_00_ip_address(self):
self.assertFalse(os.path.isfile("%s" % self.network_environment))
ip = self.gen.ip_address
match = re.match(r"^\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}$", ip)
self.assertIsNotNone(match)
self.assertTrue(os.path.isfile("%s" % self.network_environment))
|
<commit_msg>Use proper filters avoiding deprecated warnings.
<commit_before>from .key_value_filter import KeyValueFilter
from .key_value_regex_filter import KeyValueRegexFilter
from .no_filter import NoFilter
from .pythonexp_filter import PythonexpFilter
__all__ = ['KeyValueFilter', 'KeyValueRegexFilter', 'NoFilter', 'PythonexpFilter']
<commit_after>from .key_value_filters import KeyValueFilter, KeyValueRegexFilter
from .no_filter import NoFilter
from .pythonexp_filter import PythonexpFilter
__all__ = ['KeyValueFilter', 'KeyValueRegexFilter', 'NoFilter', 'PythonexpFilter']
|
<commit_msg>Fix a bug and log to the console
Fix a bug where a command called without any arguments caused an out-of-range index
Log to the console whenever someone calls a command
<commit_before>commands = {}
def add_cmd(name, alias=None, owner=False, admin=False):
def real_command(func):
commands[name] = func
if alias:
commands[alias] = func
return real_command
def call_command(bot, event, irc):
command = ' '.join(event.arguments).split(' ')
args = command[1]
name = command[0][1:]
try:
commands[name](bot, event, irc, args)
except KeyError:
irc.reply(event, 'Invalid command {}'.format(name))
except:
irc.reply(event, 'Oops, an error occured')
<commit_after>commands = {}
def add_cmd(name, alias=None, owner=False, admin=False):
def real_command(func):
commands[name] = func
if alias:
commands[alias] = func
return real_command
def call_command(bot, event, irc):
    """Parse an incoming IRC event and dispatch it to the matching command.

    The first word of the message (minus its one-character prefix) is the
    command name; any remaining words are passed as the argument list, or
    None when the command was called with no arguments.
    """
    command = ' '.join(event.arguments).split(' ')
    args = command[1:] if len(command) > 1 else None
    name = command[0][1:]  # strip the command prefix character (e.g. '!')
    try:
        commands[name](bot, event, irc, args)
    except KeyError:
        irc.reply(event, 'Invalid command {}'.format(name))
    except:
        irc.reply(event, 'Oops, an error occured')
    else:
        # A message addressed to the bot's own nickname is a private message.
        privmsg = event.target == bot.config['nickname']
        target = "a private message" if privmsg else event.target
        # BUG FIX: the original format string "{] called {} in {}" contained a
        # malformed replacement field ("{]"), which made str.format raise
        # ValueError here on every successful command dispatch.
        print("{} called {} in {}".format(event.source.nick, name, target))
|
<commit_msg>Remove mixins from namespace after monkey patching
After their job is done, they can go home ;-)
<commit_before>
from nix.core import File, FileMode, Block, DataType, Section, Property, Value, \
Source, DataArray, RangeDimension, SetDimension, SampledDimension, \
DimensionType, Feature, LinkType, Tag, MultiTag
from nix.block import BlockMixin
from nix.file import FileMixin
from nix.section import SectionMixin
from nix.property import PropertyMixin, ValueMixin
from nix.source import SourceMixin
from nix.data_array import DataArrayMixin
from nix.tag import TagMixin
from nix.multi_tag import MultiTagMixin
from nix.entity_with_sources import DataArrySourcesMixin, MultiTagSourcesMixin, \
TagSourcesMixin
__all__ = ("File", "FileMode", "Block", "DataType", "Section", "Property",
"Value", "Source", "DataArray", "RangeDimension", "SetDimension",
"SampledDimension", "DimensionType", "Feature", "LinkType",
"Tag", "MultiTag")
__author__ = "Christian Kellner"
<commit_after>
from nix.core import File, FileMode, Block, DataType, Section, Property, Value, \
Source, DataArray, RangeDimension, SetDimension, SampledDimension, \
DimensionType, Feature, LinkType, Tag, MultiTag
from nix.block import BlockMixin
from nix.file import FileMixin
from nix.section import SectionMixin
from nix.property import PropertyMixin, ValueMixin
from nix.source import SourceMixin
from nix.data_array import DataArrayMixin
from nix.tag import TagMixin
from nix.multi_tag import MultiTagMixin
from nix.entity_with_sources import DataArrySourcesMixin, MultiTagSourcesMixin, \
TagSourcesMixin
__all__ = ("File", "FileMode", "Block", "DataType", "Section", "Property",
"Value", "Source", "DataArray", "RangeDimension", "SetDimension",
"SampledDimension", "DimensionType", "Feature", "LinkType",
"Tag", "MultiTag")
del BlockMixin, FileMixin, SectionMixin, PropertyMixin, ValueMixin, SourceMixin, DataArrayMixin, TagMixin
del MultiTagMixin, DataArrySourcesMixin, MultiTagSourcesMixin, TagSourcesMixin
__author__ = 'Christian Kellner, Adrian Stoewer, Andrey Sobolev, Jan Grewe, Balint Morvai'
|
<commit_msg>Add a method to get the previous GUI.
<commit_before>package io.musician101.musicianlibrary.java.minecraft.gui.chest;
import java.util.List;
public abstract class ChestGUI<C, G extends ChestGUI<C, G, I, J, P, S>, I, J, P, S> {
protected final List<GUIButton<C, G, I, J, P, S>> buttons;
protected final I inventory;
protected final int page;
protected final P player;
protected final J plugin;
protected final G prevGUI;
protected ChestGUI(I inventory, P player, int page, List<GUIButton<C, G, I, J, P, S>> buttons, G prevGUI, J plugin, boolean manualOpen) {
this.inventory = inventory;
this.player = player;
this.page = page;
this.buttons = buttons;
this.prevGUI = prevGUI;
this.plugin = plugin;
if (!manualOpen) {
open();
}
}
public abstract void close();
public abstract void open();
}
<commit_after>package io.musician101.musicianlibrary.java.minecraft.gui.chest;
import java.util.List;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
public abstract class ChestGUI<C, G extends ChestGUI<C, G, I, J, P, S>, I, J, P, S> {
@Nonnull
protected final List<GUIButton<C, G, I, J, P, S>> buttons;
@Nonnull
protected final I inventory;
protected final int page;
@Nonnull
protected final P player;
@Nonnull
protected final J plugin;
@Nullable
protected final G prevGUI;
protected ChestGUI(@Nonnull I inventory, @Nonnull P player, int page, @Nonnull List<GUIButton<C, G, I, J, P, S>> buttons, @Nullable G prevGUI, @Nonnull J plugin, boolean manualOpen) {
this.inventory = inventory;
this.player = player;
this.page = page;
this.buttons = buttons;
this.prevGUI = prevGUI;
this.plugin = plugin;
if (!manualOpen) {
open();
}
}
public abstract void close();
@Nullable
public G getPreviousGUI() {
return prevGUI;
}
public abstract void open();
}
|
<commit_msg>Fix up template crap to not use same name locally hurr
<commit_before>import types
from fabric.api import env, run
from fabric.contrib import files
class Integration(object):
def setup(self):
env.host_string = "127.0.0.1"
def tildify(path):
home = run("echo ~", quiet=True).stdout.strip()
return path.replace('~', home)
def expect(path):
assert files.exists(tildify(path))
def expect_contains(path, value):
assert files.contains(tildify(path), value)
def escape(path):
return path.replace(' ', r'\ ')
class TestTildeExpansion(Integration):
def test_append(self):
for target in ('~/append_test', '~/append_test with spaces'):
files.append(target, ['line'])
expect(target)
def test_exists(self):
for target in ('~/exists_test', '~/exists test with space'):
run("touch %s" % escape(target))
expect(target)
def test_sed(self):
for target in ('~/sed_test', '~/sed test with space'):
run("echo 'before' > %s" % escape(target))
files.sed(target, 'before', 'after')
expect_contains(target, 'after')
def test_upload_template(self):
for target in ('~/upload_template_test', '~/upload template test with space'):
files.upload_template(target, target)
expect(target)
<commit_after>import types
from fabric.api import env, run, local
from fabric.contrib import files
class Integration(object):
def setup(self):
env.host_string = "127.0.0.1"
def tildify(path):
home = run("echo ~", quiet=True).stdout.strip()
return path.replace('~', home)
def expect(path):
assert files.exists(tildify(path))
def expect_contains(path, value):
assert files.contains(tildify(path), value)
def escape(path):
return path.replace(' ', r'\ ')
class TestTildeExpansion(Integration):
def test_append(self):
for target in ('~/append_test', '~/append_test with spaces'):
files.append(target, ['line'])
expect(target)
def test_exists(self):
for target in ('~/exists_test', '~/exists test with space'):
run("touch %s" % escape(target))
expect(target)
def test_sed(self):
for target in ('~/sed_test', '~/sed test with space'):
run("echo 'before' > %s" % escape(target))
files.sed(target, 'before', 'after')
expect_contains(target, 'after')
def test_upload_template(self):
for i, target in enumerate((
'~/upload_template_test',
'~/upload template test with space'
)):
src = "source%s" % i
local("touch %s" % src)
files.upload_template(src, target)
expect(target)
|
<commit_msg>Update module init file; begin version stamping here.
<commit_before>
__author__ = 'Instana Inc.'
__copyright__ = 'Copyright 2016 Instana Inc.'
__credits__ = ['Pavlo Baron']
__license__ = 'MIT'
__version__ = '0.0.1'
__maintainer__ = 'Pavlo Baron'
__email__ = 'pavlo.baron@instana.com'
__all__ = ['sensor', 'tracer']
<commit_after>
__author__ = 'Instana Inc.'
__copyright__ = 'Copyright 2017 Instana Inc.'
__credits__ = ['Pavlo Baron', 'Peter Giacomo Lombardo']
__license__ = 'MIT'
__version__ = '0.6.6'
__maintainer__ = 'Peter Giacomo Lombardo'
__email__ = 'peter.lombardo@instana.com'
__all__ = ['sensor', 'tracer']
|
<commit_msg>Add equal method to model with uuid
Change-Id: Iefa05d50f2f591399d5423debd699f88074a574e
<commit_before>
import abc
import uuid
from restalchemy.dm import properties
from restalchemy.dm import types
class Model(properties.PropertyBasedObject):
__metaclass__ = abc.ABCMeta
def __init__(self, **kwargs):
super(Model, self).__init__(properties.AbstractProperty, **kwargs)
@classmethod
def restore(cls, **kwargs):
return super(Model, cls).restore(properties.AbstractProperty, **kwargs)
@abc.abstractmethod
def get_id(self):
pass
class ModelWithUUID(Model):
uuid = properties.property(types.UUID, read_only=True,
default=lambda: str(uuid.uuid4()))
def get_id(self):
return self.uuid
<commit_after>
import abc
import uuid
from restalchemy.dm import properties
from restalchemy.dm import types
class Model(properties.PropertyBasedObject):
__metaclass__ = abc.ABCMeta
def __init__(self, **kwargs):
super(Model, self).__init__(properties.AbstractProperty, **kwargs)
@classmethod
def restore(cls, **kwargs):
return super(Model, cls).restore(properties.AbstractProperty, **kwargs)
@abc.abstractmethod
def get_id(self):
pass
class ModelWithUUID(Model):
uuid = properties.property(types.UUID, read_only=True,
default=lambda: str(uuid.uuid4()))
def get_id(self):
return self.uuid
def __eq__(self, other):
if isinstance(other, type(self)):
return self.get_id() == other.get_id()
return False
|
<commit_msg>Add test for register/unregister in event class map.
<commit_before>
import {expect} from "chai";
import {EventClassMap} from "../../../main/Apha/EventStore/EventClassMap";
import {Event, EventType} from "../../../main/Apha/Message/Event";
import {UnknownEventException} from "../../../main/Apha/EventStore/UnknownEventException";
describe("EventClassMap", () => {
describe("getTypeByClassName", () => {
it("retrieves type by event class name", () => {
const events = new Set<EventType>();
events.add(EventClassMapEvent);
const classMap = new EventClassMap(events);
const classType = classMap.getTypeByClassName("EventClassMapEvent");
expect(classType).to.equal(EventClassMapEvent);
});
it("throws exception if class cannot be found", () => {
const classMap = new EventClassMap();
expect(() => {
classMap.getTypeByClassName("foo");
}).to.throw(UnknownEventException);
});
});
});
class EventClassMapEvent extends Event {}
<commit_after>
import {expect} from "chai";
import {EventClassMap} from "../../../main/Apha/EventStore/EventClassMap";
import {Event, EventType} from "../../../main/Apha/Message/Event";
import {UnknownEventException} from "../../../main/Apha/EventStore/UnknownEventException";
describe("EventClassMap", () => {
describe("getTypeByClassName", () => {
it("should retrieve type by event class name", () => {
const events = new Set<EventType>();
events.add(EventClassMapEvent);
const classMap = new EventClassMap(events);
const classType = classMap.getTypeByClassName("EventClassMapEvent");
expect(classType).to.equal(EventClassMapEvent);
});
it("should throw exception if class cannot be found", () => {
const classMap = new EventClassMap();
expect(() => {
classMap.getTypeByClassName("foo");
}).to.throw(UnknownEventException);
});
});
describe("register", () => {
it("should register an event in the map", () => {
const classMap = new EventClassMap();
classMap.register(EventClassMapEvent);
expect(classMap.getTypeByClassName("EventClassMapEvent")).to.equal(EventClassMapEvent);
});
});
describe("unregister", () => {
it("should unregister an event from the map", () => {
const classMap = new EventClassMap();
classMap.register(EventClassMapEvent);
classMap.unregister(EventClassMapEvent);
expect(() => {
classMap.getTypeByClassName("EventClassMapEvent");
}).to.throw(UnknownEventException);
});
it("should be idempotent", () => {
const classMap = new EventClassMap();
expect(() => {
classMap.unregister(EventClassMapEvent);
}).to.not.throw();
});
});
});
class EventClassMapEvent extends Event {}
|
<commit_msg>Set icon before calling show() to avoid warning.
<commit_before>import trayjenkins
from PySide import QtGui
from pyjenkins.Event import Event
from trayjenkins.status.interfaces import IView
class TrayIconView(IView):
def __init__(self, parentWidget, delayInSecons):
"""
@type parentWidget: QtGui.QWidget
"""
self._statusRefreshEvent= Event()
self._delayInSeconds= delayInSecons
self._trayIcon= QtGui.QSystemTrayIcon(parentWidget)
self._trayIcon.show()
self._icons= {}
self._icons[trayjenkins.status.FAILING]= QtGui.QIcon('images/status/failing.png')
self._icons[trayjenkins.status.OK]= QtGui.QIcon('images/status/ok.png')
self._icons[trayjenkins.status.UNKNOWN]= QtGui.QIcon('images/status/unknown.png')
self.setStatus(trayjenkins.status.UNKNOWN)
def statusRefreshEvent(self):
"""
Event arguments: <none>
@rtype: pyjenkins.interfaces.IEvent
"""
return self._statusRefreshEvent
def setStatus(self, status):
"""
@type status: str
"""
self._trayIcon.setIcon(self._icons[status])
self._trayIcon.setToolTip(status.capitalize())
self._trayIcon.showMessage(unicode("Jenkins status change"),
unicode("Status: %s" % status.capitalize()),
QtGui.QSystemTrayIcon.Information,# icon,
self._delayInSeconds * 1000)
<commit_after>import trayjenkins
from PySide import QtGui
from pyjenkins.Event import Event
from trayjenkins.status.interfaces import IView
class TrayIconView(IView):
def __init__(self, parentWidget, delayInSecons):
"""
@type parentWidget: QtGui.QWidget
"""
self._statusRefreshEvent= Event()
self._delayInSeconds= delayInSecons
self._trayIcon= QtGui.QSystemTrayIcon(parentWidget)
self._icons= {}
self._icons[trayjenkins.status.FAILING]= QtGui.QIcon('images/status/failing.png')
self._icons[trayjenkins.status.OK]= QtGui.QIcon('images/status/ok.png')
self._icons[trayjenkins.status.UNKNOWN]= QtGui.QIcon('images/status/unknown.png')
self.setStatus(trayjenkins.status.UNKNOWN)
self._trayIcon.show()
def statusRefreshEvent(self):
"""
Event arguments: <none>
@rtype: pyjenkins.interfaces.IEvent
"""
return self._statusRefreshEvent
def setStatus(self, status):
"""
@type status: str
"""
self._trayIcon.setIcon(self._icons[status])
self._trayIcon.setToolTip(status.capitalize())
self._trayIcon.showMessage(unicode("Jenkins status change"),
unicode("Status: %s" % status.capitalize()),
QtGui.QSystemTrayIcon.Information,# icon,
self._delayInSeconds * 1000)
|
<commit_msg>Fix move to return only int, draw functions cannot handle floats as coordinates
<commit_before>from math import cos, sin, pi, hypot
def rotate(polygon, angle):
rotated_points = []
cos_result = cos(angle)
sin_result = sin(angle)
for point in polygon:
x = point[0] * cos_result - point[1] * sin_result
y = point[0] * sin_result + point[1] * cos_result
rotated_points.append((x, y))
return rotated_points
def move(point, direction, amount):
return [point[0] + amount * cos(direction),
point[1] + amount * sin(direction)]
def distance(point1, point2):
return hypot(point1[0] - point2[0], point1[1] - point2[1])
<commit_after>from math import cos, sin, pi, hypot
def rotate(polygon, angle):
rotated_points = []
cos_result = cos(angle)
sin_result = sin(angle)
for point in polygon:
x = point[0] * cos_result - point[1] * sin_result
y = point[0] * sin_result + point[1] * cos_result
rotated_points.append((x, y))
return rotated_points
def move(point, direction, amount):
return [int(point[0] + amount * cos(direction)),
int(point[1] + amount * sin(direction))]
def distance(point1, point2):
return hypot(point1[0] - point2[0], point1[1] - point2[1])
|
<commit_msg>Send email to client working
<commit_before>__author__ = 'Shahariar Rabby'
# # Sendy
# ### Importing Send mail file
# In[6]:
from Sendmail import *
# ** Take user email, text plan massage, HTML file **
# In[7]:
TO_EMAIL = raw_input("Enter reciver email : ") #Taking Reciver email as input
subject = raw_input("Enter Mail Subject : ") #taking mail subject
text = raw_input("Enter Plain message(or html format) : ") #Taking plane massage as input
filename = raw_input('Enter file name with location(if any) : ')
try:
file = open(filename,'r') #reading HTML format message
html = file.read()
except:
html = text
# **Calling send mail and sending mail **
# In[8]:
Send_Mail(login(),TO_EMAIL=TO_EMAIL,text=text,html=html,subject=subject)
<commit_after>__author__ = 'Shahariar Rabby'
# This will read details and send email to the client
# # Sendy
# ### Importing Send mail file
# In[6]:
from Sendmail import *
# ** Take user email, text plan massage, HTML file **
# In[7]:
TO_EMAIL = raw_input("Enter reciver email : ") #Taking Reciver email as input
subject = raw_input("Enter Mail Subject : ") #taking mail subject
text = raw_input("Enter Plain message(or html format) : ") #Taking plane massage as input
filename = raw_input('Enter file name with location(if any) : ')
try:
file = open(filename,'r') #reading HTML format message
html = file.read()
except:
html = text
# **Calling send mail and sending mail **
# In[8]:
Send_Mail(login(),TO_EMAIL=TO_EMAIL,text=text,html=html,subject=subject)
|
<commit_msg>Disable Sentinel client with redis-py < 2.9
<commit_before>
from .default import DefaultClient
from .sharded import ShardClient
from .herd import HerdClient
from .experimental import SimpleFailoverClient
from .sentinel import SentinelClient
__all__ = ['DefaultClient', 'ShardClient',
'HerdClient', 'SimpleFailoverClient',
'SentinelClient']
<commit_after>
import warnings
from .default import DefaultClient
from .sharded import ShardClient
from .herd import HerdClient
from .experimental import SimpleFailoverClient
__all__ = ['DefaultClient', 'ShardClient',
'HerdClient', 'SimpleFailoverClient',]
try:
from .sentinel import SentinelClient
__all__.append("SentinelClient")
except ImportError:
warnings.warn("sentinel client is unsuported with redis-py<2.9",
RuntimeWarning)
|
<commit_msg>Implement test showing that we can also use other iter methods
<commit_before>struct Counter {
count: u32
}
impl Counter {
fn new() -> Counter {
Counter { count: 0 }
}
}
impl Iterator for Counter {
type Item = u32;
fn next(&mut self) -> Option<Self::Item> {
self.count += 1;
if self.count < 6 {
Some(self.count)
} else {
None
}
}
}
#[test]
fn calling_counter_next() {
let mut counter = Counter::new();
assert_eq!(counter.next(), Some(1));
assert_eq!(counter.next(), Some(2));
assert_eq!(counter.next(), Some(3));
assert_eq!(counter.next(), Some(4));
assert_eq!(counter.next(), Some(5));
assert_eq!(counter.next(), None);
}<commit_after>struct Counter {
count: u32
}
impl Counter {
fn new() -> Counter {
Counter { count: 0 }
}
}
impl Iterator for Counter {
type Item = u32;
fn next(&mut self) -> Option<Self::Item> {
self.count += 1;
if self.count < 6 {
Some(self.count)
} else {
None
}
}
}
#[test]
fn calling_counter_next() {
let mut counter = Counter::new();
assert_eq!(counter.next(), Some(1));
assert_eq!(counter.next(), Some(2));
assert_eq!(counter.next(), Some(3));
assert_eq!(counter.next(), Some(4));
assert_eq!(counter.next(), Some(5));
assert_eq!(counter.next(), None);
}
#[test]
fn using_other_iterator_trait_methods() {
let sum: u32 = Counter::new().zip(Counter::new().skip(1))
.map(|(a, b)| a * b)
.filter(|x| x % 3 == 0)
.sum();
assert_eq!(sum, 18);
} |
<commit_msg>Add request encoding to WMTS source<commit_before>/*******************************************************************************
* Copyright 2014, 2017 gwt-ol3
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*******************************************************************************/
package ol.source;
import jsinterop.annotations.JsPackage;
import jsinterop.annotations.JsProperty;
import jsinterop.annotations.JsType;
/**
* WMTS options.
*
* @author Tino Desjardins
*
*/
@JsType(isNative = true, namespace = JsPackage.GLOBAL, name = "Object")
public class WmtsOptions extends TileImageOptions {
/**
* Sets the layername.
*
* @param layer layername
*/
@JsProperty
public native void setLayer(String layer);
@JsProperty
public native void setStyle(String style);
@JsProperty
public native void setFormat(String format);
@JsProperty
public native void setVersion(String version);
@JsProperty
public native void setMatrixSet(String matrixSet);
}
<commit_after>/*******************************************************************************
* Copyright 2014, 2019 gwt-ol
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*******************************************************************************/
package ol.source;
import jsinterop.annotations.JsPackage;
import jsinterop.annotations.JsProperty;
import jsinterop.annotations.JsType;
/**
* WMTS options.
*
* @author Tino Desjardins
*
*/
@JsType(isNative = true, namespace = JsPackage.GLOBAL, name = "Object")
public class WmtsOptions extends TileImageOptions {
/**
* Sets the layername.
*
* @param layer layername
*/
@JsProperty
public native void setLayer(String layer);
/**
* @param requestEncoding Request encoding.
*/
@JsProperty
public native void setRequestEncoding(String requestEncoding);
@JsProperty
public native void setStyle(String style);
@JsProperty
public native void setFormat(String format);
@JsProperty
public native void setVersion(String version);
@JsProperty
public native void setMatrixSet(String matrixSet);
}
|
<commit_msg>Add script's path to the python path<commit_before>
from __future__ import absolute_import
import sys
from .disco import run_script
from . import shuffle
import tempfile
def main():
script = sys.argv[1]
if len(sys.argv) == 3:
data_root = sys.argv[2]
else:
data_root = tempfile.mkdtemp()
run_script(script, data_root)
if __name__ == "__main__":
main()
<commit_after>
from __future__ import absolute_import
import sys
import os
from .disco import run_script
from . import shuffle
import tempfile
def main():
script = sys.argv[1]
script_dir = os.path.abspath(os.path.dirname(script))
if script_dir not in [os.path.abspath(p) for p in sys.path]:
sys.path.append(script_dir)
if len(sys.argv) == 3:
data_root = sys.argv[2]
else:
data_root = tempfile.mkdtemp()
run_script(script, data_root)
if __name__ == "__main__":
main()
|
<commit_msg>:fire: Clean exports from typer, remove unneeded Click components
and add Exit exception
<commit_before>"""Typer, build great CLIs. Easy to code. Based on Python type hints."""
__version__ = "0.0.4"
from click.exceptions import ( # noqa
Abort,
BadArgumentUsage,
BadOptionUsage,
BadParameter,
ClickException,
FileError,
MissingParameter,
NoSuchOption,
UsageError,
)
from click.termui import ( # noqa
clear,
confirm,
echo_via_pager,
edit,
get_terminal_size,
getchar,
launch,
pause,
progressbar,
prompt,
secho,
style,
unstyle,
)
from click.utils import ( # noqa
echo,
format_filename,
get_app_dir,
get_binary_stream,
get_os_args,
get_text_stream,
open_file,
)
from .main import Typer, run # noqa
from .models import BinaryFileRead, BinaryFileWrite, Context, TextFile # noqa
from .params import Argument, Option # noqa
<commit_after>"""Typer, build great CLIs. Easy to code. Based on Python type hints."""
__version__ = "0.0.4"
from click.exceptions import ( # noqa
Abort,
Exit,
)
from click.termui import ( # noqa
clear,
confirm,
echo_via_pager,
edit,
get_terminal_size,
getchar,
launch,
pause,
progressbar,
prompt,
secho,
style,
unstyle,
)
from click.utils import ( # noqa
echo,
format_filename,
get_app_dir,
get_binary_stream,
get_text_stream,
open_file,
)
from .main import Typer, run # noqa
from .models import BinaryFileRead, BinaryFileWrite, Context, TextFile # noqa
from .params import Argument, Option # noqa
|
<commit_msg>Update Fraction to Recurring Decimal
<commit_before>package problems;
public class FractionToRecurringDecimal {
public String fractionToDecimal(int numerator, int denominator) {
if (denominator == 0)
return "NULL";
boolean sign = false;
if (numerator < 0) {
numerator = -numerator;
sign = !sign;
}
if (denominator < 0) {
denominator = -denominator;
sign = !sign;
}
StringBuilder sb = new StringBuilder();
if (sign)
sb.append('-');
if (numerator >= denominator) {
sb.append(numerator / denominator);
numerator %= denominator;
}
if (numerator == 0)
return sb.toString();
while ()
}
public static void main(String[] args) {
FractionToRecurringDecimal fd = new FractionToRecurringDecimal();
int num = 2;
int de = 3;
System.out.println(fd.fractionToDecimal(num, de));
}
}
<commit_after>package problems;
import java.util.HashMap;
import java.util.Map;
/**
 * Convert numerator/denominator to its decimal string representation,
 * wrapping any repeating fractional part in parentheses
 * (e.g. 2/3 -> "0.(6)").
 */
public class FractionToRecurringDecimal {
    public String fractionToDecimal(int numerator, int denominator) {
        if (denominator == 0)
            return "NULL";
        // Negate via long so Integer.MIN_VALUE does not overflow.
        long num = numerator > 0 ? numerator : -(long)numerator;
        long den = denominator > 0 ? denominator : -(long)denominator;
        StringBuilder sb = new StringBuilder();
        // The floating-point division is used only for its sign, never its value.
        if (1.0 * numerator / denominator < 0)
            sb.append('-');
        sb.append(num / den);
        num %= den;
        if (num == 0)
            return sb.toString();
        sb.append('.');
        // i tracks the output index where the next fractional digit will land.
        int i = sb.length();
        // Maps each remainder to the index where its digit was emitted; a
        // repeated remainder means the digits cycle from that index onward.
        Map<Long, Integer> map = new HashMap<Long, Integer>();
        while (num > 0) {
            if (map.containsKey(num)) {
                int t = map.get(num);
                sb.insert(t, '(');
                sb.append(')');
                return sb.toString();
            }
            map.put(num, i++);
            num *= 10;
            sb.append(num / den);
            num %= den;
        }
        return sb.toString();
    }
    public static void main(String[] args) {
        FractionToRecurringDecimal fd = new FractionToRecurringDecimal();
        // -1 / Integer.MIN_VALUE exercises the overflow-prone corner case.
        int num = -1;
        int de = -2147483648;
        System.out.println(fd.fractionToDecimal(num, de));
    }
}
|
<commit_msg>Add some output on startup
<commit_before>package main
import (
"github.com/HearthSim/stove/bnet"
"github.com/HearthSim/stove/pegasus"
)
func main() {
serv := bnet.NewServer()
serv.RegisterGameServer("WTCG", pegasus.NewServer(serv))
serv.ListenAndServe("localhost:1119")
}
<commit_after>package main
import (
"fmt"
"github.com/HearthSim/stove/bnet"
"github.com/HearthSim/stove/pegasus"
)
// Default bind address for the front-end listener.
const (
	CONN_HOST = "localhost"
	CONN_PORT = 1119
)

// main registers the pegasus game server with the bnet front end under
// product code "WTCG", prints the bind address, and blocks serving
// connections.
func main() {
	serv := bnet.NewServer()
	serv.RegisterGameServer("WTCG", pegasus.NewServer(serv))
	addr := fmt.Sprintf("%s:%d", CONN_HOST, CONN_PORT)
	fmt.Printf("Listening on %s ...\n", addr)
	serv.ListenAndServe(addr)
}
|
<commit_msg>Update to the serialization module - only construct object mapper once.
<commit_before>package org.rcsb.mmtf.serialization;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import org.msgpack.jackson.dataformat.MessagePackFactory;
import org.rcsb.mmtf.dataholders.MmtfStructure;
import com.fasterxml.jackson.annotation.JsonInclude.Include;
import com.fasterxml.jackson.databind.ObjectMapper;
/**
* A message pack implementation of the {@link MmtfStructure} serializer / deserializer.
* @author Anthony Bradley
*
*/
public class MessagePackSerialization implements MmtfStructureSerializationInterface {
@Override
public MmtfStructure deserialize(InputStream inputStream){
MmtfStructure mmtfBean = null;
try {
mmtfBean = new ObjectMapper(new MessagePackFactory()).readValue(inputStream, MmtfStructure.class);
} catch (IOException e) {
e.printStackTrace();
}
return mmtfBean;
}
@Override
public void serialize(MmtfStructure mmtfStructure, OutputStream outputStream) {
ObjectMapper objectMapper = new ObjectMapper(new MessagePackFactory());
objectMapper.setSerializationInclusion(Include.NON_NULL);
try {
objectMapper.writeValue(outputStream, mmtfStructure);
} catch (IOException e) {
e.printStackTrace();
}
}
}
<commit_after>package org.rcsb.mmtf.serialization;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import org.msgpack.jackson.dataformat.MessagePackFactory;
import org.rcsb.mmtf.dataholders.MmtfStructure;
import com.fasterxml.jackson.annotation.JsonInclude.Include;
import com.fasterxml.jackson.databind.ObjectMapper;
/**
* A message pack implementation of the {@link MmtfStructure} serializer / deserializer.
* @author Anthony Bradley
*
*/
public class MessagePackSerialization implements MmtfStructureSerializationInterface {

	// Built once in the constructor and reused for every call; ObjectMapper
	// construction is expensive relative to a single read/write.
	private ObjectMapper objectMapper;

	/**
	 * Constructor for the {@link MessagePackSerialization} class.
	 * Generates {@link ObjectMapper} and sets to include non-null.
	 */
	public MessagePackSerialization() {
		objectMapper = new ObjectMapper(new MessagePackFactory());
		objectMapper.setSerializationInclusion(Include.NON_NULL);
	}

	/**
	 * Read an {@link MmtfStructure} from a message-pack stream.
	 * On I/O failure the stack trace is printed and null is returned.
	 */
	@Override
	public MmtfStructure deserialize(InputStream inputStream){
		MmtfStructure mmtfBean = null;
		try {
			mmtfBean = objectMapper.readValue(inputStream, MmtfStructure.class);
		} catch (IOException e) {
			e.printStackTrace();
		}
		return mmtfBean;
	}

	/**
	 * Write an {@link MmtfStructure} to a message-pack stream; null fields
	 * are omitted. On I/O failure the stack trace is printed and the
	 * stream may be left partially written.
	 */
	@Override
	public void serialize(MmtfStructure mmtfStructure, OutputStream outputStream) {
		try {
			objectMapper.writeValue(outputStream, mmtfStructure);
		} catch (IOException e) {
			e.printStackTrace();
		}
	}
}
|
<commit_msg>Remove db if it already exists
<commit_before>import sqlite3
import os
import pandas as pd
TABLES = [['Natures', 'nature'],
['Experience'],
]
PATH = os.path.dirname(__file__)+"/"
CONNECTION = sqlite3.connect(PATH + 'serpyrior.db')
# insert a little jimmy drop tables here
for table in TABLES:
table_name = table[0]
print(table_name)
try:
table_index = table[1]
write_index = False
except IndexError:
table_index = None
write_index = True
df = pd.read_csv(PATH + table_name + '.csv')
df.to_sql(table_name, CONNECTION, index=write_index, index_label=table_index)
CONNECTION.commit()
CONNECTION.close()
# cur = conn.cursor()
# cur.execute("CREATE TABLE IF NOT EXISTS natures()")
# filename.encode('utf-8')
# with open(filename) as f:
# reader = csv.reader(f)
# for field in reader:
# cur.execute("INSERT INTO natures VALUES (?,?,?,?,?,?,?);", field)
#
# conn.commit()
#
# df = pd.read_sql_query("SELECT * FROM natures", conn, index_col='nature')
#
# print(df.head(25))
# conn.close()
<commit_after>import sqlite3
import os
import pandas as pd
TABLES = [['Natures', 'nature'],
['Experience'],
]
PATH = os.path.dirname(__file__)+"/"
try: # Little Bobby Tables
os.remove(PATH + 'serpyrior.db')
except FileNotFoundError:
pass
CONNECTION = sqlite3.connect(PATH + 'serpyrior.db')
for table in TABLES:
table_name = table[0]
print(table_name)
try:
table_index = table[1]
write_index = False
except IndexError:
table_index = None
write_index = True
df = pd.read_csv(PATH + table_name + '.csv')
df.to_sql(table_name, CONNECTION, index=write_index, index_label=table_index)
CONNECTION.commit()
CONNECTION.close()
|
<commit_msg>Handle updated boto exception format.
See https://github.com/boto/boto/issues/625
<commit_before>import json
class Error(Exception):
"""This is an ambiguous error that occured."""
pass
class SyncUnallowedError(Error): pass
class DuplicateBatchItemError(Error): pass
class IncompleteSolventError(Error): pass
class ExceededBatchRequestsError(Error): pass
class ItemNotFoundError(Error): pass
class DynamoDBError(Error): pass
class ProvisionedThroughputError(DynamoDBError): pass
class UnprocessedItemError(DynamoDBError): pass
def parse_error(raw_error):
"""Parse the error we get out of Boto into something we can code around"""
if isinstance(raw_error, Error):
return raw_error
error_data = json.loads(raw_error.data)
if 'ProvisionedThroughputExceededException' in error_data['__type']:
return ProvisionedThroughputError(error_data['message'])
else:
return DynamoDBError(error_data['message'], error_data['__type'])
__all__ = ["Error", "SyncUnallowedError", "DuplicateBatchItemError", "DynamoDBError", "ProvisionedThroughputError", "ItemNotFoundError"]
<commit_after>import json
class Error(Exception):
"""This is an ambiguous error that occured."""
pass
class SyncUnallowedError(Error): pass
class DuplicateBatchItemError(Error): pass
class IncompleteSolventError(Error): pass
class ExceededBatchRequestsError(Error): pass
class ItemNotFoundError(Error): pass
class DynamoDBError(Error): pass
class ProvisionedThroughputError(DynamoDBError): pass
class UnprocessedItemError(DynamoDBError): pass
def parse_error(raw_error):
"""Parse the error we get out of Boto into something we can code around"""
if isinstance(raw_error, Error):
return raw_error
if 'ProvisionedThroughputExceededException' in raw_error.error_code:
return ProvisionedThroughputError(raw_error.error_message)
else:
return DynamoDBError(raw_error.error_message, raw_error.error_code)
__all__ = ["Error", "SyncUnallowedError", "DuplicateBatchItemError", "DynamoDBError", "ProvisionedThroughputError", "ItemNotFoundError"]
|
<commit_msg>Set Textarea width and height
<commit_before>from django import forms
from django.conf import settings
from django.forms import ModelForm
from ansible.models import Playbook
class AnsibleForm1(ModelForm):
class Meta:
model = Playbook
fields = ['repository', 'username']
class AnsibleForm2(ModelForm):
class Meta:
model = Playbook
fields = ['inventory', 'user']
class LoginForm(forms.Form):
username = forms.CharField(label='Username', max_length=100)
password = forms.CharField(label='Password', max_length=100)
class PlaybookEditForm(forms.Form):
playbook = forms.CharField(widget=forms.Textarea)
<commit_after>from django import forms
from django.conf import settings
from django.forms import ModelForm
from ansible.models import Playbook
class AnsibleForm1(ModelForm):
    """Playbook model form exposing the repository and username fields."""
    class Meta:
        model = Playbook
        fields = ['repository', 'username']

class AnsibleForm2(ModelForm):
    """Playbook model form exposing the inventory and user fields."""
    class Meta:
        model = Playbook
        fields = ['inventory', 'user']

class LoginForm(forms.Form):
    """Plain username/password login form."""
    username = forms.CharField(label='Username', max_length=100)
    # NOTE(review): rendered as a plain text input; consider
    # widget=forms.PasswordInput so the password is masked.
    password = forms.CharField(label='Password', max_length=100)

class PlaybookEditForm(forms.Form):
    """Free-text editor for a playbook's contents."""
    # 30x80 textarea so a whole playbook is editable without scrolling.
    playbook = forms.CharField(widget=forms.Textarea(attrs={'rows':30,'cols':80}))
|
<commit_msg>Make sure CGO_ENABLED is 0
Signed-off-by: Ronald G. Minnich <f569d0af9a3481c4007b95c7180d6c41009f2513@gmail.com>
<commit_before>package main
import (
"log"
"os"
"os/exec"
"path"
)
func buildinit() {
e := os.Environ()
for i := range e {
if e[i][0:6] == "GOPATH" {
e[i] = e[i] + ":" + path.Join(config.Uroot, "src/bb/bbsh")
}
}
cmd := exec.Command("go", "build", "-o", "init", ".")
cmd.Stderr = os.Stderr
cmd.Stdout = os.Stdout
cmd.Dir = path.Join(config.Uroot, "src/bb/bbsh")
cmd.Env = e
err := cmd.Run()
if err != nil {
log.Fatalf("%v\n", err)
os.Exit(1)
}
}
<commit_after>package main
import (
	"log"
	"os"
	"os/exec"
	"path"
	"strings"
)
// buildinit compiles the bbsh init binary with CGO disabled, extending
// GOPATH so the bbsh sources under the u-root tree resolve.
func buildinit() {
	e := os.Environ()
	for i := range e {
		// strings.HasPrefix avoids the panic e[i][0:6] caused on an
		// environment entry shorter than six bytes, and anchoring on
		// "GOPATH=" avoids matching variables that merely begin with
		// the letters GOPATH.
		if strings.HasPrefix(e[i], "GOPATH=") {
			e[i] = e[i] + ":" + path.Join(config.Uroot, "src/bb/bbsh")
		}
	}
	e = append(e, "CGO_ENABLED=0")
	cmd := exec.Command("go", "build", "-o", "init", ".")
	cmd.Stderr = os.Stderr
	cmd.Stdout = os.Stdout
	cmd.Dir = path.Join(config.Uroot, "src/bb/bbsh")
	cmd.Env = e
	if err := cmd.Run(); err != nil {
		// log.Fatalf exits with status 1 itself; the old trailing
		// os.Exit(1) was unreachable dead code.
		log.Fatalf("%v\n", err)
	}
}
|
<commit_msg>Add a test for block bucket
<commit_before>module Network.BitTorrent.Exchange.BlockSpec (spec) where
import Control.Applicative
import Test.Hspec
import Test.QuickCheck
import Network.BitTorrent.Exchange.Block
instance Arbitrary a => Arbitrary (Block a) where
arbitrary = Block <$> arbitrary <*> arbitrary <*> arbitrary
instance Arbitrary BlockIx where
arbitrary = BlockIx <$> arbitrary <*> arbitrary <*> arbitrary
spec :: Spec
spec = return ()<commit_after>module Network.BitTorrent.Exchange.BlockSpec (spec) where
import Control.Applicative
import Test.Hspec
import Test.QuickCheck
import Network.BitTorrent.Exchange.Block
instance Arbitrary a => Arbitrary (Block a) where
arbitrary = Block <$> arbitrary <*> arbitrary <*> arbitrary
instance Arbitrary BlockIx where
arbitrary = BlockIx <$> arbitrary <*> arbitrary <*> arbitrary
spec :: Spec
spec = do
describe "bucket" $ do
it "render to piece when it is full" $ property $ \ bkt ->
if full bkt then isJust (toPiece bkt) |
<commit_msg>Remove spaces around `=` operator
<commit_before>from setuptools import setup, find_packages
setup(
name = 'AtomSeeker',
version = '0.0.1',
description = 'Analyzer for MP4/MOV format file',
packages = find_packages(),
author = 'Katsuki Kobayashi',
author_email = 'rare@tirasweel.org',
license = 'BSD 2-Clause License',
entry_points = """
[console_scripts]
atomseek = atomseeker.cmdline:main
""",
)
<commit_after>from setuptools import setup, find_packages
setup(
name='AtomSeeker',
version='0.0.1',
description='Analyzer for MP4/MOV format file',
packages=find_packages(),
author='Katsuki Kobayashi',
author_email='rare@tirasweel.org',
license='BSD 2-Clause License',
entry_points="""
[console_scripts]
atomseek = atomseeker.cmdline:main
""",
)
|
<commit_msg>Make KeychainItem _decode method static
<commit_before>from Crypto.Cipher import AES
from base64 import b64decode
import json
from openpassword.pkcs_utils import strip_byte_padding
from openpassword.openssl_utils import derive_openssl_key
class KeychainItem:
def __init__(self, item):
self.encrypted = b64decode(item["encrypted"])
def decrypt(self, decryption_key):
key = self._derive_decryption_key(decryption_key)
data = self._decrypt(self.encrypted[16:], key)
data = strip_byte_padding(data)
return json.loads(data.decode('utf8'))
def _derive_decryption_key(self, decryption_key):
return derive_openssl_key(decryption_key, self.encrypted[8:16])
def _decrypt(self, data, key_iv):
key = key_iv[0:16]
iv = key_iv[16:]
cipher = AES.new(key, AES.MODE_CBC, iv)
return cipher.decrypt(data)
<commit_after>from Crypto.Cipher import AES
from base64 import b64decode
import json
from openpassword.pkcs_utils import strip_byte_padding
from openpassword.openssl_utils import derive_openssl_key
class KeychainItem:
    """A single encrypted keychain item.

    The base64-decoded ``encrypted`` payload is laid out OpenSSL-style:
    bytes 8-16 are the key-derivation salt and bytes 16 onward are the
    AES-CBC ciphertext (the leading 8 bytes are not used here).
    """

    def __init__(self, item):
        # item: dict with a base64-encoded "encrypted" field.
        self.encrypted = b64decode(item["encrypted"])

    def decrypt(self, decryption_key):
        """Decrypt this item with *decryption_key* and return the parsed JSON payload."""
        key = self._derive_decryption_key(decryption_key)
        data = self._decrypt(self.encrypted[16:], key)
        # Ciphertext is padded to the cipher block size; strip it before parsing.
        data = strip_byte_padding(data)
        return json.loads(data.decode('utf8'))

    def _derive_decryption_key(self, decryption_key):
        # Bytes 8-16 of the payload are the OpenSSL key-derivation salt.
        return derive_openssl_key(decryption_key, self.encrypted[8:16])

    @staticmethod
    def _decrypt(data, key_iv):
        # key_iv packs a 16-byte AES key followed by the 16-byte CBC IV.
        key = key_iv[0:16]
        iv = key_iv[16:]
        cipher = AES.new(key, AES.MODE_CBC, iv)
        return cipher.decrypt(data)
|
<commit_msg>Add exemplar generators for HashDigest, FakerGenerator
<commit_before>from .context import tohu
from tohu.v5.primitive_generators import *
EXEMPLAR_PRIMITIVE_GENERATORS = [
Constant("quux"),
Integer(100, 200),
]
EXEMPLAR_DERIVED_GENERATORS = []
EXEMPLAR_GENERATORS = EXEMPLAR_PRIMITIVE_GENERATORS + EXEMPLAR_DERIVED_GENERATORS<commit_after>from .context import tohu
from tohu.v5.primitive_generators import *
EXEMPLAR_PRIMITIVE_GENERATORS = [
Constant("quux"),
Integer(100, 200),
HashDigest(length=8),
FakerGenerator(method="name"),
]
EXEMPLAR_DERIVED_GENERATORS = []
EXEMPLAR_GENERATORS = EXEMPLAR_PRIMITIVE_GENERATORS + EXEMPLAR_DERIVED_GENERATORS |
<commit_msg>model_row: Handle methods and non-field attributes
<commit_before>from django import template
from django.db import models
from django.utils.safestring import mark_safe
register = template.Library()
@register.filter
def model_row(instance, fields):
for name in fields.split(','):
f = instance._meta.get_field(name)
if isinstance(f, models.ForeignKey):
fk = getattr(instance, f.name)
if hasattr(fk, 'get_absolute_url'):
value = mark_safe(u'<a href="%s">%s</a>' % (
fk.get_absolute_url(),
fk))
else:
value = unicode(fk)
elif f.choices:
value = getattr(instance, 'get_%s_display' % f.name)()
else:
value = unicode(getattr(instance, f.name))
yield (f.verbose_name, value)
<commit_after>from django import template
from django.db import models
from django.utils.safestring import mark_safe
register = template.Library()
@register.filter
def model_row(instance, fields):
    """Yield (label, display value) pairs for a comma-separated list of
    field or attribute names on a model instance.

    Names that are real model fields use the field's verbose name as the
    label; foreign keys render as links when the target defines
    ``get_absolute_url`` and choice fields use their display value.
    Names that are not model fields fall back to plain attributes, and
    zero-argument methods are called.
    """
    for name in fields.split(','):
        try:
            f = instance._meta.get_field(name)
        except models.FieldDoesNotExist:
            # Not a model field: use the attribute (or method result) directly.
            attr = getattr(instance, name)
            if callable(attr):
                yield (name, attr())
            else:
                # Fix: previously the raw attribute was also yielded after the
                # method had been called, emitting a duplicate row whose value
                # was the bound method object itself.
                yield (name, attr)
            continue
        if isinstance(f, models.ForeignKey):
            fk = getattr(instance, f.name)
            if hasattr(fk, 'get_absolute_url'):
                value = mark_safe(u'<a href="%s">%s</a>' % (
                    fk.get_absolute_url(),
                    fk))
            else:
                value = unicode(fk)
        elif f.choices:
            value = getattr(instance, 'get_%s_display' % f.name)()
        else:
            value = unicode(getattr(instance, f.name))
        yield (f.verbose_name, value)
|
<commit_msg>Remove dirty lies from doctstring
This was a leftover from wherever I originally copied this config from.
<commit_before>
from testtube.helpers import Flake8, Helper, Nosetests
class ScreenClearer(Helper):
command = 'clear'
def success(self, *args):
pass
class Isort(Helper):
command = 'isort'
def get_args(self):
return ['--check']
class UnitTests(Nosetests):
"""Run test cases in the tests/ directory."""
def get_args(self, *args, **kwargs):
return ['-x', '--with-doctest', '--doctest-options=+ELLIPSIS',
'--doctest-extension=rst']
clear = ScreenClearer(all_files=True)
lint_style = Flake8(all_files=True)
unit_tests = UnitTests(all_files=True)
PATTERNS = (
(r'.*\.(py|rst)$', [clear, unit_tests], {'fail_fast': True}),
(r'.*\.py$', [lint_style], {'fail_fast': True}),
)
<commit_after>
from testtube.helpers import Flake8, Helper, Nosetests
class ScreenClearer(Helper):
command = 'clear'
def success(self, *args):
pass
class Isort(Helper):
command = 'isort'
def get_args(self):
return ['--check']
class UnitTests(Nosetests):
def get_args(self, *args, **kwargs):
return ['-x', '--with-doctest', '--doctest-options=+ELLIPSIS',
'--doctest-extension=rst']
clear = ScreenClearer(all_files=True)
lint_style = Flake8(all_files=True)
unit_tests = UnitTests(all_files=True)
PATTERNS = (
(r'.*\.(py|rst)$', [clear, unit_tests], {'fail_fast': True}),
(r'.*\.py$', [lint_style], {'fail_fast': True}),
)
|
<commit_msg>Add a few more tests for variety
<commit_before>from nose.tools import ok_, eq_
from pennathletics.athletes import get_roster, get_player
class TestAthletics():
def test_roster(self):
ok_(get_roster("m-baskbl", 2015) != [])
def test_player_empty(self):
ok_(get_player("m-baskbl", 2014) != [])
def test_player_number(self):
eq_(get_player("m-baskbl", 2013, jersey=1)[0].height, "6'2\"")
<commit_after>from nose.tools import ok_, eq_
from pennathletics.athletes import get_roster, get_player
class TestAthletics():
    """Network-backed smoke tests for pennathletics roster/player scraping."""
    def test_roster(self):
        # The 2015 men's basketball roster should not be empty.
        ok_(get_roster("m-baskbl", 2015) != [])
    def test_player_empty(self):
        ok_(get_player("m-baskbl", 2014) != [])
    def test_player_number(self):
        # Jersey #1 on the 2013 team is listed at 6'2".
        eq_(get_player("m-baskbl", 2013, jersey=1)[0].height, "6'2\"")
    def test_player_hometown(self):
        # Filtering by hometown should locate a single known player.
        player = get_player("m-baskbl", 2012, homeTown="Belfast, Ireland")[0]
        eq_(player.weight, '210 lbs')
    def test_player_softball(self):
        # 19 players on the 2013 softball team
        eq_(len(get_roster("w-softbl", 2013)), 19)
|
<commit_msg>Support enum based properties type.
<commit_before>package com.tinkerpop.frames.annotations;
import com.tinkerpop.blueprints.Direction;
import com.tinkerpop.blueprints.Edge;
import com.tinkerpop.blueprints.Element;
import com.tinkerpop.blueprints.Vertex;
import com.tinkerpop.frames.ClassUtilities;
import com.tinkerpop.frames.FramedGraph;
import com.tinkerpop.frames.Property;
import java.lang.reflect.Method;
public class PropertyAnnotationHandler implements AnnotationHandler<Property> {
@Override
public Class<Property> getAnnotationType() {
return Property.class;
}
@Override
public Object processElement(final Property annotation, final Method method, final Object[] arguments, final FramedGraph framedGraph, final Element element, final Direction direction) {
if (ClassUtilities.isGetMethod(method)) {
return element.getProperty(annotation.value());
} else if (ClassUtilities.isSetMethod(method)) {
Object value = arguments[0];
if (null == value) {
element.removeProperty(annotation.value());
} else {
element.setProperty(annotation.value(), value);
}
return null;
} else if (ClassUtilities.isRemoveMethod(method)) {
element.removeProperty(annotation.value());
return null;
}
return null;
}
}
<commit_after>package com.tinkerpop.frames.annotations;
import java.lang.reflect.Method;
import com.tinkerpop.blueprints.Direction;
import com.tinkerpop.blueprints.Element;
import com.tinkerpop.frames.ClassUtilities;
import com.tinkerpop.frames.FramedGraph;
import com.tinkerpop.frames.Property;
public class PropertyAnnotationHandler implements AnnotationHandler<Property> {

    @Override
    public Class<Property> getAnnotationType() {
        return Property.class;
    }

    /**
     * Dispatch a framed get/set/remove method to the element property named
     * by the annotation. Enum values are persisted by {@code name()} and
     * reconstructed from the method's declared return type on read.
     */
    @Override
    public Object processElement(final Property annotation, final Method method, final Object[] arguments, final FramedGraph framedGraph, final Element element, final Direction direction) {
        if (ClassUtilities.isGetMethod(method)) {
            Object value = element.getProperty(annotation.value());
            // Stored as a plain string (the enum's name); convert back.
            if (method.getReturnType().isEnum())
                return getValueAsEnum(method, value);
            else
                return value;
        } else if (ClassUtilities.isSetMethod(method)) {
            Object value = arguments[0];
            if (null == value) {
                // Setting null removes the property entirely.
                element.removeProperty(annotation.value());
            } else {
                if (value.getClass().isEnum()) {
                    // Persist enums by name so the stored value is a string.
                    element.setProperty(annotation.value(), ((Enum<?>) value).name());
                } else {
                    element.setProperty(annotation.value(), value);
                }
            }
            return null;
        } else if (ClassUtilities.isRemoveMethod(method)) {
            element.removeProperty(annotation.value());
            return null;
        }
        return null;
    }

    // NOTE(review): raw Enum type and unchecked cast; also, a missing
    // property (getProperty == null) makes value.toString() throw a
    // NullPointerException here — confirm callers guarantee presence.
    private Enum getValueAsEnum(final Method method, final Object value) {
        Class<Enum> en = (Class<Enum>) method.getReturnType();
        return Enum.valueOf(en, value.toString());
    }
}
|
<commit_msg>[python] Improve tests relevant to None comparison
<commit_before>import types
def test_none_singleton(py2):
if py2:
assert isinstance(None, types.NoneType)
else:
# https://stackoverflow.com/questions/21706609
assert type(None)() is None
class Negator(object):
def __eq__(self, other):
return not other # doesn't make sense
def test_none_identity():
none = None
thing = Negator()
assert none is None # singleton
assert thing is not None
def test_none_equality():
none = None
thing = Negator()
assert none == None
assert not (none != None)
assert thing == None # dependes on __eq__
assert thing != None
<commit_after>import types
def test_singleton(py2):
if py2:
assert isinstance(None, types.NoneType)
else:
# https://stackoverflow.com/questions/21706609
assert type(None)() is None
class Negator(object):
    """Object whose equality is the logical negation of the other operand.

    Exists to demonstrate that ``== None`` delegates to ``__eq__`` while
    ``is None`` checks identity and cannot be fooled.
    """

    def __eq__(self, other):
        # Equal to anything falsy, unequal to anything truthy.
        return not other

    def __ne__(self, other):
        # Python 2 does not derive != from ==, so spell out the inverse.
        return bool(other)
def test_comparison__use_identity():
none = None
thing = Negator()
assert none is None # singleton
assert thing is not None
def test_comparison__donot_use_equality():
none = None
thing = Negator()
assert none == None
assert not (none != None)
# weird? the result dependes on thing.__eq__()
assert thing == None
assert not (thing != None)
|
<commit_msg>Make sure the param passed is correct
<commit_before>package ActiveObject
import (
"sync"
"testing"
"time"
"github.com/stretchr/testify/assert"
)
func TestActiveObject(t *testing.T) {
var activeObject IActiveObject
var wait sync.WaitGroup
wait.Add(1)
activeObject = NewActiveObjectWithInterval(time.Millisecond * 50)
counter := 0
activeObject.SetWorkerFunction(func(param interface{}) {
counter++
if counter > 3 {
wait.Done()
}
})
activeObject.Run(10)
wait.Wait()
activeObject.ForceStop()
time.Sleep(time.Millisecond * 1000)
assert.Equal(t, counter, 4, "counter is wrong")
}
<commit_after>package ActiveObject
import (
"sync"
"testing"
"time"
"github.com/stretchr/testify/assert"
)
// TestActiveObject verifies that the worker function fires on the
// configured interval with the parameter passed to Run, and that no
// further ticks occur after ForceStop.
func TestActiveObject(t *testing.T) {
	var activeObject IActiveObject
	var wait sync.WaitGroup
	wait.Add(1)
	activeObject = NewActiveObjectWithInterval(time.Millisecond * 50)
	counter := 0
	activeObject.SetWorkerFunction(func(param interface{}) {
		// NOTE(review): Run below is called with 10 but this asserts 20 —
		// one of the two values looks wrong; confirm the intended param.
		assert.Equal(t, param, 20, "param is incorrect")
		counter++
		if counter > 3 {
			wait.Done()
		}
	})
	activeObject.Run(10)
	wait.Wait()
	activeObject.ForceStop()
	// Give any in-flight tick time to land before checking the final count.
	time.Sleep(time.Millisecond * 1000)
	assert.Equal(t, counter, 4, "counter is wrong")
}
|
<commit_msg>Add minor fixes to movie lister miniapp
<commit_before>
import movies.finders
import movies.listers
import movies.models
import dependency_injector.containers as containers
import dependency_injector.providers as providers
class MoviesModule(containers.DeclarativeContainer):
"""IoC container of movies module component providers."""
models_factory = providers.Factory(movies.models.Movie)
finder = providers.AbstractFactory(movies.finders.MovieFinder,
movie_model=models_factory.delegate())
lister = providers.Factory(movies.listers.MovieLister,
movie_finder=finder)
<commit_after>
import movies.finders
import movies.listers
import movies.models
import dependency_injector.containers as containers
import dependency_injector.providers as providers
class MoviesModule(containers.DeclarativeContainer):
    """IoC container of movies module component providers."""

    # Factory producing Movie model instances.
    movie = providers.Factory(movies.models.Movie)

    # Abstract: a concrete MovieFinder implementation must be bound before
    # use; it receives the movie factory as its model provider.
    finder = providers.AbstractFactory(movies.finders.MovieFinder,
                                       movie_model=movie.provider)

    lister = providers.Factory(movies.listers.MovieLister,
                               movie_finder=finder)
|
<commit_msg>BLD: Fix check for existing pools.
<commit_before>import os
import photomosaic.flickr
import photomosaic as pm
if not os.path.isfile('~/pools/cats/pool.json'):
FLICKR_API_KEY = os.environ['FLICKR_API_KEY']
pm.set_options(flickr_api_key=FLICKR_API_KEY)
photomosaic.flickr.from_search('cats', '~/pools/cats/')
pool = pm.make_pool('~/pools/cats/*.jpg')
pm.export_pool(pool, '~/pools/cats/pool.json') # save color analysis for future reuse
<commit_after>import os
import photomosaic.flickr
import photomosaic as pm
if not os.path.isfile(os.path.expanduser('~/pools/cats/pool.json')):
FLICKR_API_KEY = os.environ['FLICKR_API_KEY']
pm.set_options(flickr_api_key=FLICKR_API_KEY)
photomosaic.flickr.from_search('cats', '~/pools/cats/')
pool = pm.make_pool('~/pools/cats/*.jpg')
pm.export_pool(pool, '~/pools/cats/pool.json') # save color analysis for future reuse
|
<commit_msg>Add more complete crypto tests
<commit_before>package crypt
import (
"io/ioutil"
"os"
"testing"
)
const (
passphrase = "testingpassphrase"
filecontents = "woweezowee"
filetestpath = "/tmp/encryptdecrypttest"
)
func TestEncryptDecrypt(t *testing.T) {
//Write a temp file and make sure we get back what we expect
if err := ioutil.WriteFile(filetestpath, []byte(filecontents), os.FileMode(0644)); err != nil {
t.Errorf("Error writing test file to %s: %v", filetestpath, err)
}
EncryptFile(filetestpath, passphrase)
CheckEncryption(filetestpath)
DecryptFile(filetestpath, passphrase)
data, err := ioutil.ReadFile(filetestpath)
if err != nil {
t.Errorf("[ERR] Unable to read testfile: %v", err)
}
if string(data) != filecontents {
t.Errorf("Encrypt Decrypt returned bad results!\n Expected: %v \n Got: %v", filecontents, data)
}
}
<commit_after>package crypt
import (
"io/ioutil"
"os"
"testing"
)
const (
passphrase = "testingpassphrase"
filecontents = "woweezowee"
filetestpath = "/tmp/encryptdecrypttest"
)
func TestEncryptDecrypt(t *testing.T) {
var isencrypted bool
var err error
//Write a temp file and make sure we get back what we expect
if err := ioutil.WriteFile(filetestpath, []byte(filecontents), os.FileMode(0644)); err != nil {
t.Errorf("Error writing test file to %s: %v", filetestpath, err)
}
// at this point its just a regular file and it should not be encrypted
isencrypted, err = CheckEncryption(filetestpath)
if isencrypted == true {
t.Error("File detected as encrypted before it was encrypted!")
}
// encrypt the file in place
EncryptFile(filetestpath, passphrase)
// now the file should be encrypted
isencrypted, err = CheckEncryption(filetestpath)
if isencrypted == false {
t.Error("File detected as not encrypted right after it was encrypted!")
}
// decrypt the file
DecryptFile(filetestpath, passphrase)
// read it back
data, err := ioutil.ReadFile(filetestpath)
if err != nil {
t.Errorf("[ERR] Unable to read testfile: %v", err)
}
// the source and the data we read from the file should again match as strings
if string(data) != filecontents {
t.Errorf("Encrypt Decrypt returned bad results!\n Expected: %v \n Got: %v", filecontents, data)
}
}
|
<commit_msg>Replace re_path with path wherever possible
<commit_before>from django.urls import re_path
from controlled_vocabularies.views import (
vocabulary_list, verbose_vocabularies, about,
all_vocabularies, term_list, vocabulary_file
)
urlpatterns = [
# Search View
re_path(r'^$', vocabulary_list, name="vocabulary_list"),
re_path(r'^all-verbose/?$', verbose_vocabularies, name="verbose_vocabularies"),
re_path(r'^all-verbose\.(?P<file_format>py|json)/?$', verbose_vocabularies,
name="verbose_vocabularies"),
re_path(r'^about/', about, name="about"),
re_path(r'^all/?$', all_vocabularies, name="all_vocabularies"),
re_path(r'^all\.(?P<file_format>py|json)/?$', all_vocabularies, name="all_vocabularies"),
re_path(r'^(?P<vocabulary_name>[\w-]+)/$', term_list, name="term_list"),
re_path(r'^(?P<list_name>[\w-]+)/(?P<file_format>\w+)/$', vocabulary_file,
name="vocabulary_file"),
]
<commit_after>from django.urls import path, re_path
from controlled_vocabularies.views import (
vocabulary_list, verbose_vocabularies, about,
all_vocabularies, term_list, vocabulary_file
)
urlpatterns = [
    # Search View
    path('', vocabulary_list, name="vocabulary_list"),
    path('all-verbose/', verbose_vocabularies, name="verbose_vocabularies"),
    # Routes with a .py/.json suffix still need a regex; path() converters
    # cannot express the optional extension.
    re_path(r'^all-verbose\.(?P<file_format>py|json)/?$', verbose_vocabularies,
            name="verbose_vocabularies"),
    path('about/', about, name="about"),
    path('all/', all_vocabularies, name="all_vocabularies"),
    re_path(r'^all\.(?P<file_format>py|json)/?$', all_vocabularies, name="all_vocabularies"),
    # Slug catch-alls come last so fixed paths like 'about/' and 'all/'
    # above are matched before being swallowed as vocabulary names.
    path('<slug:list_name>/<slug:file_format>/', vocabulary_file,
         name="vocabulary_file"),
    path('<slug:vocabulary_name>/', term_list, name="term_list"),
]
|
<commit_msg>Clean up the docs to remove implementation details.
<commit_before>package butterknife.internal;
import android.view.View;
/**
* A {@link View.OnClickListener} that enables debouncing of multiple clicks posted in a row.
*
* Once a click is fired, a post is enqueued to the main thread looper queue and no further click
* is allowed until that post is dequeued.
*
* A click on one button disables all buttons.
*
*/
public abstract class DebouncingOnClickListener implements View.OnClickListener {
/**
* This is static because we want to disable clicks for all click listeners.
*/
private static boolean enabled = true;
private static final Runnable ENABLE_AGAIN = new Runnable() {
@Override public void run() {
enabled = true;
}
};
@Override public final void onClick(View v) {
if (enabled) {
enabled = false;
v.post(ENABLE_AGAIN);
doClick(v);
}
}
public abstract void doClick(View v);
}
<commit_after>package butterknife.internal;
import android.view.View;
/**
* A {@linkplain View.OnClickListener click listener} that debounces multiple clicks posted in the
* same frame. A click on one button disables all buttons for that frame.
*/
public abstract class DebouncingOnClickListener implements View.OnClickListener {
  // Static on purpose: a click on any debouncing listener disables all of
  // them until the re-enable runnable runs from the message queue.
  private static boolean enabled = true;

  private static final Runnable ENABLE_AGAIN = new Runnable() {
    @Override public void run() {
      enabled = true;
    }
  };

  /** Fires {@link #doClick} at most once until the posted re-enable runs. */
  @Override public final void onClick(View v) {
    if (enabled) {
      enabled = false;
      // Posted to the view's handler, so duplicate clicks delivered before
      // the queue drains are swallowed.
      v.post(ENABLE_AGAIN);
      doClick(v);
    }
  }

  /** Handle a debounced click. */
  public abstract void doClick(View v);
}
|
<commit_msg>Remove unused text and circle
<commit_before>import * as React from 'react'
import { Component } from 'react'
import { StyleSheet } from 'react-native'
import { Text } from 'react-native'
import { View } from 'react-native'
import { Card } from './Card'
import { Circle } from './Circle'
import { Draggable } from './Draggable'
import { Suit } from './Suit'
export default class App extends Component<{}, void> {
private styles = StyleSheet.create({
container: {
alignItems: 'center',
backgroundColor: '#3b3',
flex: 1,
justifyContent: 'center'
}
})
public render() {
return (
<View style={this.styles.container}>
<Text>Open up App.js to start working on your app!</Text>
<Text>2 + 2 = {this.sum(2, 2)}</Text>
<Circle
color={'#ffb'}
size={40}
/>
<Draggable
startPosition={
{
left: 50,
top: 80
}
}
>
<Card
suit={Suit.Spades}
value={10}
/>
</Draggable>
</View>
)
}
private sum(a: number, b: number) {
return a + b
}
}<commit_after>import * as React from 'react'
import { Component } from 'react'
import { StyleSheet } from 'react-native'
import { Text } from 'react-native'
import { View } from 'react-native'
import { Card } from './Card'
import { Draggable } from './Draggable'
import { Suit } from './Suit'
export default class App extends Component<{}, void> {
private styles = StyleSheet.create({
container: {
alignItems: 'center',
backgroundColor: '#3b3',
flex: 1,
justifyContent: 'center'
}
})
public render() {
return (
<View style={this.styles.container}>
<Text>Open up App.js to start working on your app!</Text>
<Draggable
startPosition={
{
left: 50,
top: 80
}
}
>
<Card
suit={Suit.Spades}
value={10}
/>
</Draggable>
</View>
)
}
} |
<commit_msg>Handle only clinical, no genomic, submission
<commit_before>import json
import requests
import argparse
parser = argparse.ArgumentParser(
description="Upload submission from submit.cancergenetrust.org")
parser.add_argument('file', nargs='?', default="submission.json",
help="Path to json file to submit")
args = parser.parse_args()
with open(args.file) as f:
submission = json.loads(f.read())
submission["clinical"]["CGT Public ID"] = submission["patientId"]
r = requests.post("http://localhost:5000/v0/submissions?publish=true",
files=[("files[]",
("foundationone.json",
json.dumps(submission["genomic"], sort_keys=True)))],
data=submission["clinical"])
print(r.text)
assert(r.status_code == requests.codes.ok)
<commit_after>import json
import requests
import argparse
parser = argparse.ArgumentParser(
description="Upload submission from submit.cancergenetrust.org")
parser.add_argument('file', nargs='?', default="submission.json",
help="Path to json file to submit")
args = parser.parse_args()
with open(args.file) as f:
submission = json.loads(f.read())
submission["clinical"]["CGT Public ID"] = submission["patientId"]
if submission["genomic"]:
print("Submitting clinical and genomic data")
r = requests.post("http://localhost:5000/v0/submissions?publish=true",
files=[("files[]",
("foundationone.json",
json.dumps(submission["genomic"], sort_keys=True)))],
data=submission["clinical"])
else:
print("No genomic data, submitting only clinical")
r = requests.post("http://localhost:5000/v0/submissions?publish=true",
data=submission["clinical"])
print(r.text)
assert(r.status_code == requests.codes.ok)
|
<commit_msg>Fix issue with startup of admin-web
<commit_before>package io.fundrequest.platform.admin;
import io.fundrequest.common.infrastructure.IgnoreDuringComponentScan;
import io.fundrequest.core.FundRequestCore;
import io.fundrequest.platform.github.FundRequestGithub;
import io.fundrequest.platform.keycloak.FundRequestKeycloak;
import io.fundrequest.platform.profile.ProfileApplication;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.SpringBootConfiguration;
import org.springframework.boot.autoconfigure.AutoConfigurationExcludeFilter;
import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
import org.springframework.boot.context.TypeExcludeFilter;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.context.annotation.FilterType;
@SpringBootConfiguration
@EnableAutoConfiguration
@ComponentScan(
basePackageClasses = {
AdminApplication.class,
FundRequestKeycloak.class,
FundRequestGithub.class,
FundRequestCore.class,
ProfileApplication.class,
},
excludeFilters = {
@ComponentScan.Filter(type = FilterType.CUSTOM, classes = TypeExcludeFilter.class),
@ComponentScan.Filter(type = FilterType.CUSTOM, classes = AutoConfigurationExcludeFilter.class),
@ComponentScan.Filter(IgnoreDuringComponentScan.class)})
public class AdminApplication {
public static void main(String[] args) {
SpringApplication.run(AdminApplication.class, args);
}
}
<commit_after>package io.fundrequest.platform.admin;
import io.fundrequest.common.FundRequestCommon;
import io.fundrequest.common.infrastructure.IgnoreDuringComponentScan;
import io.fundrequest.core.FundRequestCore;
import io.fundrequest.platform.github.FundRequestGithub;
import io.fundrequest.platform.keycloak.FundRequestKeycloak;
import io.fundrequest.platform.profile.ProfileApplication;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.SpringBootConfiguration;
import org.springframework.boot.autoconfigure.AutoConfigurationExcludeFilter;
import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
import org.springframework.boot.context.TypeExcludeFilter;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.context.annotation.FilterType;
@SpringBootConfiguration
@EnableAutoConfiguration
@ComponentScan(
basePackageClasses = {
AdminApplication.class,
FundRequestKeycloak.class,
FundRequestGithub.class,
FundRequestCommon.class,
FundRequestCore.class,
ProfileApplication.class,
},
excludeFilters = {
@ComponentScan.Filter(type = FilterType.CUSTOM, classes = TypeExcludeFilter.class),
@ComponentScan.Filter(type = FilterType.CUSTOM, classes = AutoConfigurationExcludeFilter.class),
@ComponentScan.Filter(IgnoreDuringComponentScan.class)})
public class AdminApplication {
public static void main(String[] args) {
SpringApplication.run(AdminApplication.class, args);
}
}
|
<commit_msg>Add Daily forecast class initialization and parsing
<commit_before>package com.nikolak.weatherapp.ForecastIO;
import android.util.Log;
import org.json.JSONException;
import org.json.JSONObject;
public class Forecast {
public Currently currently = new Currently();
public Minutely minutely = new Minutely();
public Hourly hourly = new Hourly();
public Daily daily = new Daily();
private ForecastAPI forecastAPI = new ForecastAPI();
public Boolean updateForecast(String lat, String lon) throws JSONException {
JSONObject response = forecastAPI.getDefault(lat, lon);
if (response==null) {
return false;
}
JSONObject currentlyJObject = response.getJSONObject("currently");
this.currently.ConstructFromJson(currentlyJObject);
try{
JSONObject minutelyJObject = response.getJSONObject("minutely");
if (minutelyJObject!=null){
minutely.constructFromJson(minutelyJObject);
} else{
minutely = null;
}
} catch (JSONException e){
Log.d("Json", "Minutely not available");
}
JSONObject hourlyJObject = response.getJSONObject("hourly");
this.hourly.constructFromJson(hourlyJObject);
return true;
}
}
<commit_after>package com.nikolak.weatherapp.ForecastIO;
import android.util.Log;
import org.json.JSONException;
import org.json.JSONObject;
public class Forecast {
public Currently currently = new Currently();
public Minutely minutely = new Minutely();
public Hourly hourly = new Hourly();
public Daily daily = new Daily();
private ForecastAPI forecastAPI = new ForecastAPI();
public Boolean updateForecast(String lat, String lon) throws JSONException {
JSONObject response = forecastAPI.getDefault(lat, lon);
if (response == null) {
return false;
}
JSONObject currentlyJObject = response.getJSONObject("currently");
this.currently.ConstructFromJson(currentlyJObject);
try {
JSONObject minutelyJObject = response.getJSONObject("minutely");
if (minutelyJObject != null) {
minutely.constructFromJson(minutelyJObject);
} else {
minutely = null;
}
} catch (JSONException e) {
Log.d("Json", "Minutely not available");
}
JSONObject hourlyJObject = response.getJSONObject("hourly");
this.hourly.constructFromJson(hourlyJObject);
JSONObject dailyObject = response.getJSONObject("daily");
this.daily.constructFromJson(dailyObject);
return true;
}
}
|
<commit_msg>Fix up grep to use a correct module scope
<commit_before>{-# LANGUAGE RecordWildCards #-}
module Grep(runGrep) where
import Language.Haskell.HLint2
import HSE.All
import Control.Monad
runGrep :: String -> Bool -> ParseFlags -> [FilePath] -> IO ()
runGrep pattern exact flags files = do
let exp = fromParseResult $ parseExp pattern
forM_ files $ \file -> do
Right m <- parseModuleEx flags file Nothing
let rule = hintRules [HintRule Warning "grep" (scopeCreate m) exp exp Nothing []]
forM_ (applyHints [] rule [m]) $ \Idea{..} -> do
putStr $ unlines $ showSrcLoc (getPointLoc ideaSpan) : map (" "++) (lines ideaFrom)
<commit_after>{-# LANGUAGE RecordWildCards #-}
module Grep(runGrep) where
import Language.Haskell.HLint2
import HSE.All
import Control.Monad
runGrep :: String -> Bool -> ParseFlags -> [FilePath] -> IO ()
runGrep pattern exact flags files = do
let exp = fromParseResult $ parseExp pattern
let scope = scopeCreate $ Module an Nothing [] [] []
let rule = hintRules [HintRule Warning "grep" scope exp (Tuple an Boxed []) Nothing []]
forM_ files $ \file -> do
Right m <- parseModuleEx flags file Nothing
forM_ (applyHints [] rule [m]) $ \Idea{..} -> do
putStr $ unlines $ showSrcLoc (getPointLoc ideaSpan) : map (" "++) (lines ideaFrom)
|
<commit_msg>Add exitcode and segv check for timing util
<commit_before>
import os
import sys
import subprocess
def main():
count = int(sys.argv[1])
time_min = None
for i in xrange(count):
cmd = [
'time',
'-f', '%U',
'--quiet',
sys.argv[2], # cmd
sys.argv[3] # testcase
]
#print(repr(cmd))
p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdout, stderr = p.communicate()
retval = p.wait()
#print(i, retval, stdout, stderr)
if retval != 0:
print 'n/a'
return
time = float(stderr)
#print(i, time)
if time_min is None:
time_min = time
else:
time_min = min(time_min, time)
# /usr/bin/time has only two digits of resolution
print('%.02f' % time_min)
if __name__ == '__main__':
main()
<commit_after>
import os
import sys
import subprocess
def main():
count = int(sys.argv[1])
time_min = None
for i in xrange(count):
cmd = [
'time',
'-f', '%U',
'--quiet',
sys.argv[2], # cmd
sys.argv[3] # testcase
]
#print(repr(cmd))
p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdout, stderr = p.communicate()
retval = p.wait()
#print(i, retval, stdout, stderr)
if retval == 139:
print 'segv'
sys.exit(1)
elif retval != 0:
print 'n/a'
sys.exit(1)
time = float(stderr)
#print(i, time)
if time_min is None:
time_min = time
else:
time_min = min(time_min, time)
# /usr/bin/time has only two digits of resolution
print('%.02f' % time_min)
sys.exit(0)
if __name__ == '__main__':
main()
|
<commit_msg>Reduce float multiplication in strength
<commit_before>//=======================================================================
// Copyright Baptiste Wicht 2011.
// Distributed under the Boost Software License, Version 1.0.
// (See accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)
//=======================================================================
#include "tac/ReduceInStrength.hpp"
#include "tac/OptimizerUtils.hpp"
using namespace eddic;
void tac::ReduceInStrength::operator()(std::shared_ptr<tac::Quadruple>& quadruple){
switch(quadruple->op){
case tac::Operator::MUL:
if(*quadruple->arg1 == 2){
replaceRight(*this, quadruple, *quadruple->arg2, tac::Operator::ADD, *quadruple->arg2);
} else if(*quadruple->arg2 == 2){
replaceRight(*this, quadruple, *quadruple->arg1, tac::Operator::ADD, *quadruple->arg1);
}
break;
default:
break;
}
}
<commit_after>//=======================================================================
// Copyright Baptiste Wicht 2011.
// Distributed under the Boost Software License, Version 1.0.
// (See accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)
//=======================================================================
#include "tac/ReduceInStrength.hpp"
#include "tac/OptimizerUtils.hpp"
using namespace eddic;
void tac::ReduceInStrength::operator()(std::shared_ptr<tac::Quadruple>& quadruple){
switch(quadruple->op){
case tac::Operator::MUL:
if(*quadruple->arg1 == 2){
replaceRight(*this, quadruple, *quadruple->arg2, tac::Operator::ADD, *quadruple->arg2);
} else if(*quadruple->arg2 == 2){
replaceRight(*this, quadruple, *quadruple->arg1, tac::Operator::ADD, *quadruple->arg1);
}
break;
case tac::Operator::FMUL:
if(*quadruple->arg1 == 2.0){
replaceRight(*this, quadruple, *quadruple->arg2, tac::Operator::FADD, *quadruple->arg2);
} else if(*quadruple->arg2 == 2.0){
replaceRight(*this, quadruple, *quadruple->arg1, tac::Operator::FADD, *quadruple->arg1);
}
break;
default:
break;
}
}
|
<commit_msg>[WFLY-4853] Add in overrides for methods otherwise TransactionAttribute changes don't get applied.
<commit_before>package org.jboss.as.test.clustering.twoclusters.bean.forwarding;
import org.jboss.as.test.clustering.twoclusters.bean.stateful.RemoteStatefulSB;
import org.jboss.ejb3.annotation.Clustered;
import javax.ejb.Stateful;
import javax.ejb.TransactionAttribute;
import javax.ejb.TransactionAttributeType;
@Stateful
@Clustered
@TransactionAttribute(TransactionAttributeType.REQUIRED) // this is the default anyway
public class ForwardingStatefulSBImpl extends AbstractForwardingStatefulSBImpl implements RemoteStatefulSB {
}
<commit_after>package org.jboss.as.test.clustering.twoclusters.bean.forwarding;
import org.jboss.as.test.clustering.twoclusters.bean.stateful.RemoteStatefulSB;
import org.jboss.ejb3.annotation.Clustered;
import javax.ejb.Stateful;
import javax.ejb.TransactionAttribute;
import javax.ejb.TransactionAttributeType;
@Stateful
@Clustered
@TransactionAttribute(TransactionAttributeType.REQUIRED) // this is the default anyway
public class ForwardingStatefulSBImpl extends AbstractForwardingStatefulSBImpl implements RemoteStatefulSB {
// we need to override these methods so that the TransactionAttribute gets processed on this class!
@Override
public int getSerial()
{
return super.getSerial();
}
@Override
public int getSerialAndIncrement()
{
return super.getSerialAndIncrement();
}
@Override
public byte[] getCargo()
{
return super.getCargo();
}
}
|
<commit_msg>Update BrowserifyCompiler for new Pipeline settings.
<commit_before>import re
from django.conf import settings
from django.utils.encoding import smart_bytes
from pipeline.compilers import CompilerBase
from pipeline.exceptions import CompilerError
class BrowserifyCompiler(CompilerBase):
output_extension = 'browserified.js'
def match_file(self, path):
# Allow for cache busting hashes between ".browserify" and ".js"
return re.search(r'\.browserify(\.[a-fA-F0-9]+)?\.js$', path) is not None
def compile_file(self, infile, outfile, outdated=False, force=False):
command = "%s %s %s > %s" % (
getattr(settings, 'PIPELINE_BROWSERIFY_BINARY', '/usr/bin/env browserify'),
getattr(settings, 'PIPELINE_BROWSERIFY_ARGUMENTS', ''),
infile,
outfile
)
return self.execute_command(command)
def execute_command(self, command, content=None, cwd=None):
"""This is like the one in SubProcessCompiler, except it checks the exit code."""
import subprocess
pipe = subprocess.Popen(command, shell=True, cwd=cwd,
stdout=subprocess.PIPE, stdin=subprocess.PIPE,
stderr=subprocess.PIPE)
if content:
content = smart_bytes(content)
stdout, stderr = pipe.communicate(content)
if self.verbose:
print(stderr)
if pipe.returncode != 0:
raise CompilerError(stderr)
return stdout
<commit_after>import re
from django.conf import settings
from django.utils.encoding import smart_bytes
from pipeline.compilers import CompilerBase
from pipeline.exceptions import CompilerError
class BrowserifyCompiler(CompilerBase):
output_extension = 'browserified.js'
def match_file(self, path):
# Allow for cache busting hashes between ".browserify" and ".js"
return re.search(r'\.browserify(\.[a-fA-F0-9]+)?\.js$', path) is not None
def compile_file(self, infile, outfile, outdated=False, force=False):
pipeline_settings = getattr(settings, 'PIPELINE', {})
command = "%s %s %s > %s" % (
pipeline_settings.get('BROWSERIFY_BINARY', '/usr/bin/env browserify'),
pipeline_settings.get('BROWSERIFY_ARGUMENTS', ''),
infile,
outfile
)
return self.execute_command(command)
def execute_command(self, command, content=None, cwd=None):
"""This is like the one in SubProcessCompiler, except it checks the exit code."""
import subprocess
pipe = subprocess.Popen(command, shell=True, cwd=cwd,
stdout=subprocess.PIPE, stdin=subprocess.PIPE,
stderr=subprocess.PIPE)
if content:
content = smart_bytes(content)
stdout, stderr = pipe.communicate(content)
if self.verbose:
print(stderr)
if pipe.returncode != 0:
raise CompilerError(stderr)
return stdout
|
<commit_msg>Add test translating protobuf object to JSON directly
<commit_before>//#include "../pb2json.h"
#include <pb2json.h>
using namespace std;
int main(int argc,char *argv[])
{
// Test 1: read binary PB from a file and convert it to JSON
ifstream fin("dump",ios::binary);
fin.seekg(0,ios_base::end);
size_t len = fin.tellg();
fin.seekg(0,ios_base::beg);
char *buf = new char [len];
fin.read(buf,len);
google::protobuf::Message *p = new Person();
char *json = pb2json(p,buf,len);
cout<<json<<endl;
free(json);
delete p;
// Test 2: convert PB to JSON directly
Person p2;
char *json2 = pb2json(p2);
cout<<json2<<endl;
free(json2);
return 0;
}
<commit_after>//#include "../pb2json.h"
#include <pb2json.h>
using namespace std;
int main(int argc,char *argv[])
{
// Test 1: read binary PB from a file and convert it to JSON
ifstream fin("dump",ios::binary);
fin.seekg(0,ios_base::end);
size_t len = fin.tellg();
fin.seekg(0,ios_base::beg);
char *buf = new char [len];
fin.read(buf,len);
google::protobuf::Message *p = new Person();
char *json = pb2json(p,buf,len);
cout<<json<<endl;
free(json);
delete p;
// Test 2: convert PB to JSON directly
Person p2;
p2.set_name("Shafreeck Sea");
p2.set_id(2);
p2.set_email("renenglish@gmail.com");
Person_PhoneNumber *pn1 = p2.add_phone();
pn1->set_number("1234567");
pn1->set_type(Person::HOME);
char *json2 = pb2json(p2);
cout<<json2<<endl;
free(json2);
return 0;
}
|
<commit_msg>Add unit test for register
<commit_before>from django.core.urlresolvers import resolve
from xpserver_web.views import main
def test_root_resolves_to_hello_world():
found = resolve('/')
assert found.func == main
<commit_after>from django.core.urlresolvers import resolve
from xpserver_web.views import main, register
def test_root_resolves_to_main():
found = resolve('/')
assert found.func == main
def test_register_resolves_to_main():
found = resolve('/register/')
assert found.func == register
|
<commit_msg>Update URLs to Django 1.8 style
django.conf.urls.patterns() is deprecated since 1.8.
We should not use patterns(), so this patch updates URLs to
1.8 style.
Change-Id: I6f2b6f44d843ca5e0cdb5db9828df94fa4df5f88
Closes-Bug: #1539354
<commit_before>
from django.conf import urls
from zaqar_ui.content.queues import views
urlpatterns = urls.patterns(
'zaqar_ui.content.queues',
urls.url(r'^$', views.IndexView.as_view(), name='index'),
)
<commit_after>
from django.conf import urls
from zaqar_ui.content.queues import views
urlpatterns = [
urls.url(r'^$', views.IndexView.as_view(), name='index'),
]
|
<commit_msg>Add formula comment to hyperbolic NW class.
<commit_before>package cs437.som.neighborhood;
import cs437.som.NeightborhoodWidthFunction;
/**
* Hyperbolic neighborhood width strategy for self-organizing map.
*/
public class HyperbolicNeighborhoodWidthFunction implements NeightborhoodWidthFunction {
private double expectedIterations;
public void setExpectedIterations(int expectedIterations) {
this.expectedIterations = expectedIterations;
}
public double neighborhoodWidth(int iteration) {
return expectedIterations / (expectedIterations + iteration);
}
@Override
public String toString() {
return "HyperbolicNeighborhoodWidthFunction";
}
}
<commit_after>package cs437.som.neighborhood;
import cs437.som.NeightborhoodWidthFunction;
/**
* Hyperbolic neighborhood width strategy for self-organizing map.
*
* The exact behavior follows the formula:
* w_i / (t + t_max)
* where
* w_i is the initial width of the neighborhood
* t is the current iteration
* t_max is the maximum expected iteration
*/
public class HyperbolicNeighborhoodWidthFunction implements NeightborhoodWidthFunction {
private double expectedIterations = 0.0;
public void setExpectedIterations(int expectedIterations) {
this.expectedIterations = expectedIterations;
}
public double neighborhoodWidth(int iteration) {
return expectedIterations / (expectedIterations + iteration);
}
@Override
public String toString() {
return "HyperbolicNeighborhoodWidthFunction";
}
}
|
<commit_msg>Add get_data method to data importer
Signed-off-by: Matheus Fernandes <9de26933bf4170c6685be29c190c23bc8c34d55f@gmail.com>
<commit_before>from django.conf import settings
from urllib.parse import urljoin, urlencode
class ComputerDataImporter(object):
def __init__(self):
pass
def build_api_url(self, **kwargs):
api_url = urljoin(settings.LOMADEE_API_URL, settings.LOMADEE_APP_TOKEN)
# Specific path to 'Computer' category
url = urljoin(api_url, 'offer/_category/6424')
kwargs['sourceId'] = settings.LOMADEE_SOURCE_ID
return '{}?{}'.format(url, urlencode(kwargs))
<commit_after>from django.conf import settings
from urllib.parse import urljoin, urlencode
import requests
class ComputerDataImporter(object):
def __init__(self):
pass
def build_api_url(self, **kwargs):
api_url = urljoin(settings.LOMADEE_API_URL, settings.LOMADEE_APP_TOKEN)
# Specific path to 'Computer' category
url = urljoin('{}/'.format(api_url), 'offer/_category/6424')
kwargs['sourceId'] = settings.LOMADEE_SOURCE_ID
kwargs['size'] = 100
return '{}?{}'.format(url, urlencode(kwargs))
def get_data(self, url=None):
if not url:
url = self.build_api_url()
data = requests.get(url).json()
if data['requestInfo']['status'] != 'OK':
return False
final_data = []
final_data.extend(data['offers'])
pagination = data['pagination']
# Get only 3 pages. To get all pages use:
# if pagination['page'] < pagination['totalPage']
if pagination['page'] < 3:
next_page_data = self.get_data(
self.build_api_url(page=pagination['page'] + 1)
)
final_data.extend(next_page_data)
return final_data
|
<commit_msg>Fix migration when no existing team.user field at fresh start
<commit_before>from __future__ import unicode_literals
from django.db import migrations, models
from organization.network.models import Team
def generate_slugs(apps, schema_editor):
teams = Team.objects.all()
for team in teams:
team.save()
class Migration(migrations.Migration):
dependencies = [
('organization-network', '0117_merge_20181204_1801'),
]
operations = [
migrations.AddField(
model_name='team',
name='slug',
field=models.CharField(blank=True, help_text='Leave blank to have the URL auto-generated from the name.', max_length=2000, null=True, verbose_name='URL'),
),
migrations.RunPython(generate_slugs),
]
<commit_after>from __future__ import unicode_literals
from django.db import migrations, models
from organization.network.models import Team
# def generate_slugs(apps, schema_editor):
# teams = Team.objects.all()
# for team in teams:
# team.save()
class Migration(migrations.Migration):
dependencies = [
('organization-network', '0117_merge_20181204_1801'),
]
operations = [
migrations.AddField(
model_name='team',
name='slug',
field=models.CharField(blank=True, help_text='Leave blank to have the URL auto-generated from the name.', max_length=2000, null=True, verbose_name='URL'),
),
# migrations.RunPython(generate_slugs),
]
|
<commit_msg>Fix gettext wrong argument error in py34
Closes-Bug: #1550202
Change-Id: I468bef7a8c0a9fa93576744e7869dfa5f2569fa0
<commit_before>
import gettext
gettext.install('tacker', unicode=1)
<commit_after>
import gettext
import six
if six.PY2:
gettext.install('tacker', unicode=1)
else:
gettext.install('tacker')
|
<commit_msg>Allow to use not with general conditions
<commit_before>/*
* (C) YANDEX LLC, 2014-2015
*
* The Source Code called "YoctoDB" available at
* https://bitbucket.org/yandex/yoctodb is subject to the terms of the
* Mozilla Public License, v. 2.0 (hereinafter referred to as the "License").
*
* A copy of the License is also available at http://mozilla.org/MPL/2.0/.
*/
package com.yandex.yoctodb.query.simple;
import com.yandex.yoctodb.query.Condition;
import com.yandex.yoctodb.query.QueryContext;
import com.yandex.yoctodb.query.TermCondition;
import com.yandex.yoctodb.util.mutable.BitSet;
import net.jcip.annotations.Immutable;
import org.jetbrains.annotations.NotNull;
/**
* Condition negation
*
* @author incubos
*/
@Immutable
public final class SimpleNotCondition implements Condition {
@NotNull
private final TermCondition delegate;
public SimpleNotCondition(
@NotNull
final TermCondition delegate) {
this.delegate = delegate;
}
@Override
public boolean set(
@NotNull
final QueryContext ctx,
@NotNull
final BitSet to) {
delegate.set(ctx, to);
return to.inverse();
}
}
<commit_after>/*
* (C) YANDEX LLC, 2014-2015
*
* The Source Code called "YoctoDB" available at
* https://bitbucket.org/yandex/yoctodb is subject to the terms of the
* Mozilla Public License, v. 2.0 (hereinafter referred to as the "License").
*
* A copy of the License is also available at http://mozilla.org/MPL/2.0/.
*/
package com.yandex.yoctodb.query.simple;
import com.yandex.yoctodb.query.Condition;
import com.yandex.yoctodb.query.QueryContext;
import com.yandex.yoctodb.util.mutable.BitSet;
import net.jcip.annotations.Immutable;
import org.jetbrains.annotations.NotNull;
/**
* Condition negation
*
* @author incubos
*/
@Immutable
public final class SimpleNotCondition implements Condition {
@NotNull
private final Condition delegate;
public SimpleNotCondition(
@NotNull
final Condition delegate) {
this.delegate = delegate;
}
@Override
public boolean set(
@NotNull
final QueryContext ctx,
@NotNull
final BitSet to) {
delegate.set(ctx, to);
return to.inverse();
}
}
|
<commit_msg>Use app request context directly rather than hacking with a test client
<commit_before>import json
from flask import request as current_request, Response
from werkzeug.exceptions import BadRequest
class Aggregator(object):
def __init__(self, app=None, endpoint=None):
self.url_map = {}
self.endpoint = endpoint or "/aggregator"
if app:
self.init_app(app)
def init_app(self, app):
self.client = app.test_client()
app.add_url_rule(self.endpoint, view_func=self.post, methods=["POST"])
def post(self):
try:
requests = json.loads(current_request.data)
if not isinstance(requests, list):
raise TypeError
except (ValueError, TypeError):
raise BadRequest("Can't get requests list.")
def __generate():
data = None
for request in requests:
yield data + ',' if data else '{'
data = '"{}": {}'.format(request, self.client.get(request).data)
yield data + '}'
return Response(__generate(), mimetype='application/json')
<commit_after>import json
from flask import request, Request, Response
from werkzeug.exceptions import BadRequest
from werkzeug.test import EnvironBuilder
class Aggregator(object):
def __init__(self, app=None, endpoint=None):
self.url_map = {}
self.endpoint = endpoint or "/aggregator"
if app:
self.init_app(app)
def init_app(self, app):
self.app = app
self.app.add_url_rule(self.endpoint, view_func=self.post, methods=["POST"])
def get_response(self, route):
query_string = ""
if '?' in route:
route, query_string = route.split('?', 1)
builder = EnvironBuilder(path=route, query_string=query_string)
self.app.request_context(builder.get_environ()).push()
return self.app.dispatch_request()
def post(self):
try:
data = request.data.decode('utf-8')
routes = json.loads(data)
if not isinstance(routes, list):
raise TypeError
except (ValueError, TypeError) as e:
raise BadRequest("Can't get requests list.")
def __generate():
data = None
for route in routes:
yield data + ', ' if data else '{'
response = self.get_response(route)
json_response = json.dumps(response)
data = '"{}": {}'.format(route, json_response)
yield data + '}'
return Response(__generate(), mimetype='application/json')
|