code | repo_name | path | language | license | size
stringlengths 5–1M | stringlengths 5–109 | stringlengths 6–208 | stringclasses 1 (value) | stringclasses 15 (values) | int64 5–1M
---|---|---|---|---|---|
package com.github.mjreid.flinkwrapper
import play.api.libs.json.{JsPath, Reads}
import play.api.libs.functional.syntax._
case class NodeInput(
num: Long,
id: String,
shipStrategy: Option[String],
localStrategy: Option[String],
caching: Option[String],
exchange: String
)
object NodeInput {
implicit val reads: Reads[NodeInput] = (
(JsPath \\ "num").read[Long] and
(JsPath \\ "id").read[String] and
(JsPath \\ "ship_strategy").readNullable[String] and
(JsPath \\ "local_strategy").readNullable[String] and
(JsPath \\ "caching").readNullable[String] and
(JsPath \\ "exchange").read[String]
)(NodeInput.apply _)
}
| mjreid/flink-rest-scala-wrapper | api/src/main/scala/com/github/mjreid/flinkwrapper/NodeInput.scala | Scala | apache-2.0 | 661 |
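A minimal usage sketch for the Reads above (a hedged example, assuming play-json on the classpath; the sample JSON simply mirrors the snake_case field names the codec expects):

import play.api.libs.json.Json
import com.github.mjreid.flinkwrapper.NodeInput

val json = Json.parse("""{"num": 1, "id": "n1", "ship_strategy": "HASH", "exchange": "pipelined"}""")
json.validate[NodeInput] // JsSuccess(NodeInput(1, "n1", Some("HASH"), None, None, "pipelined"))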
/***********************************************************************
* Copyright (c) 2013-2018 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.memory.cqengine.datastore
import java.awt.RenderingHints.Key
import java.io.Serializable
import java.util
import org.geotools.data.DataAccessFactory.Param
import org.geotools.data.{DataStore, DataStoreFactorySpi}
class GeoCQEngineDataStoreFactory extends DataStoreFactorySpi {
override def createDataStore(params: util.Map[String, Serializable]): DataStore =
if (GeoCQEngineDataStoreFactory.getUseGeoIndex(params))
GeoCQEngineDataStore.engine
else
GeoCQEngineDataStore.engineNoGeoIndex
override def createNewDataStore(params: util.Map[String, Serializable]): DataStore = createDataStore(params)
override def getDisplayName: String = "GeoCQEngine DataStore"
override def getDescription: String = "GeoCQEngine DataStore"
override def canProcess(params: util.Map[String, Serializable]): Boolean = {
params.containsKey("cqengine")
}
override def isAvailable: Boolean = true
override def getParametersInfo: Array[Param] =
GeoCQEngineDataStoreFactory.params
override def getImplementationHints: util.Map[Key, _] = null
}
object GeoCQEngineDataStoreFactory {
val UseGeoIndexKey = "useGeoIndex"
val UseGeoIndexDefault = true
val UseGeoIndexParam = new Param(
UseGeoIndexKey,
classOf[java.lang.Boolean],
"Enable an index on the default geometry",
false,
UseGeoIndexDefault
)
val params = Array(
UseGeoIndexParam
)
def getUseGeoIndex(params: util.Map[String, Serializable]): Boolean = {
if (params.containsKey(UseGeoIndexKey))
UseGeoIndexParam.lookUp(params).asInstanceOf[Boolean]
else
UseGeoIndexDefault
}
}
| ddseapy/geomesa | geomesa-memory/geomesa-cqengine-datastore/src/main/scala/org/locationtech/geomesa/memory/cqengine/datastore/GeoCQEngineDataStoreFactory.scala | Scala | apache-2.0 | 2,115 |
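A sketch of selecting an engine through the factory above (hedged; the parameter map is hypothetical, but "cqengine" is the key canProcess checks and "useGeoIndex" is UseGeoIndexKey):

import java.io.Serializable
import java.util

val params = new util.HashMap[String, Serializable]()
params.put("cqengine", "true")
params.put("useGeoIndex", java.lang.Boolean.FALSE)
val factory = new GeoCQEngineDataStoreFactory
assert(factory.canProcess(params))
val ds = factory.createDataStore(params) // engineNoGeoIndex, because useGeoIndex is false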
package dailyprogrammer
import org.scalatest.{FreeSpec, Matchers}
class Challenge227Spec extends FreeSpec with Matchers {
"Sample 1" in {
val lines = Array(
"xxxxxxxx",
"x x"
)
Challenge227.solve(2, 8, lines) shouldBe 1
}
"Sample 2" in {
val lines = Array(
"xxxx xxxx",
" x ",
" xx "
)
Challenge227.solve(3, 9, lines) shouldBe 3
}
"Challenge 1" in {
val lines = Array(
"xxxx xxxx",
" xxx ",
"x x x",
"xxxxxxxxx"
)
Challenge227.solve(4, 9, lines) shouldBe 1
}
"Challenge 2" in {
val lines = Array(
"xx x xx x ",
"x x xx x ",
"xx xx x ",
"xxxxxxxxx x",
" xx",
"xxxxxxxxxxx",
" x x x x x ",
" x x x x "
)
Challenge227.solve(8, 11, lines) shouldBe 9
}
"Challenge 1000" in {
val stream = getClass.getResourceAsStream("/1000x1000.txt")
val lines = scala.io.Source.fromInputStream(stream).getLines().drop(1).toArray
val start = System.nanoTime()
Challenge227.solve(1000, 1000, lines) shouldBe 80020
val elapsed = System.nanoTime() - start
println(elapsed)
}
}
| alexandrnikitin/algorithm-sandbox | scala/src/test/scala/dailyprogrammer/Challenge227Spec.scala | Scala | mit | 1,191 |
package lower_tir
import org.apache.commons.lang._
object LowerString {
def apply(s: String) = {
// escapeJava handles control characters (e.g. newlines); single quotes still need escaping by hand
val escapedControlCharacters = StringEscapeUtils.escapeJava(s)
val escapedQuotes = escapedControlCharacters.replaceAll("'", "\\\\'")
"'" + escapedQuotes + "'"
}
}
| j-c-w/mlc | src/main/scala/lower_tir/LowerString.scala | Scala | gpl-3.0 | 278 |
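For illustration, the quoting behaviour of the lowering above (hedged; escaping follows commons-lang escapeJava semantics):

LowerString("it's")  // 'it\'s' (the embedded quote is escaped by the replaceAll)
LowerString("a\nb")  // 'a\nb' (the newline becomes the two characters backslash and n, via escapeJava)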
package scodec.protocols.mpeg
package transport
import scodec.Codec
import scodec.bits.BitVector
import scodec.codecs._
/**
* Partial modelling of the adaptation field.
* The field extension, if present, is ignored upon decoding.
*/
case class AdaptationField(
flags: AdaptationFieldFlags,
pcr: Option[BitVector],
opcr: Option[BitVector],
spliceCountdown: Option[Int],
transportPrivateData: Option[BitVector]
)
object AdaptationField {
private val transportPrivateData: Codec[BitVector] =
variableSizeBits(uint8, bits)
implicit val codec: Codec[AdaptationField] = "adaptation_field" | {
variableSizeBytes(uint8,
("adaptation_flags" | Codec[AdaptationFieldFlags] ) >>:~ { flags =>
("pcr" | conditional(flags.pcrFlag, bits(48)) ) ::
("opcr" | conditional(flags.opcrFlag, bits(48)) ) ::
("splice_countdown" | conditional(flags.splicingPointFlag, int8) ) ::
("transport_private_data" | conditional(flags.transportPrivateDataFlag, transportPrivateData))
})}.as[AdaptationField]
}
| jrudnick/scodec-protocols | src/main/scala/scodec/protocols/mpeg/transport/AdaptationField.scala | Scala | bsd-3-clause | 1,113 |
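A decoding sketch for the codec above (hedged; assumes scodec on the classpath and that raw holds the adaptation-field bits sliced out of a 188-byte TS packet):

import scodec.Codec
import scodec.bits.BitVector

def parseAdaptationField(raw: BitVector) =
  Codec[AdaptationField].decode(raw) // Attempt[DecodeResult[AdaptationField]]; pcr/opcr are Some(...) only when flagged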
import stainless.lang._
case class Inner(var int: Int)
case class Outer(var inner: Inner)
trait FreshCopy {
val o: Outer = Outer(Inner(123))
def f() = {
require(o.inner.int == 123)
val fresh = freshCopy(o)
o.inner.int = 456
assert(fresh.inner.int == 123)
assert(o.inner.int == 456)
val fresh2 = freshCopy(o)
o.inner = Inner(789)
assert(fresh.inner.int == 123)
assert(fresh2.inner.int == 456)
assert(o.inner.int == 789)
}
}
| epfl-lara/stainless | frontends/benchmarks/imperative/valid/FreshCopy.scala | Scala | apache-2.0 | 475 |
package com.softwaremill.clippy
import org.scalatest.{FlatSpec, Matchers}
class CompilationErrorParserTest extends FlatSpec with Matchers {
it should "parse akka's route error message" in {
val e =
"""type mismatch;
| found : akka.http.scaladsl.server.StandardRoute
| required: akka.stream.scaladsl.Flow[akka.http.scaladsl.model.HttpRequest,akka.http.scaladsl.model.HttpResponse,Any]""".stripMargin
CompilationErrorParser.parse(e) should be(
Some(
TypeMismatchError(
ExactT("akka.http.scaladsl.server.StandardRoute"),
None,
ExactT(
"akka.stream.scaladsl.Flow[akka.http.scaladsl.model.HttpRequest,akka.http.scaladsl.model.HttpResponse,Any]"
),
None,
None
)
)
)
}
it should "parse an error message with [error] prefix" in {
val e =
"""[error] /Users/adamw/projects/clippy/tests/src/main/scala/com/softwaremill/clippy/Working.scala:16: type mismatch;
|[error] found : akka.http.scaladsl.server.StandardRoute
|[error] required: akka.stream.scaladsl.Flow[akka.http.scaladsl.model.HttpRequest,akka.http.scaladsl.model.HttpResponse,Any]""".stripMargin
CompilationErrorParser.parse(e) should be(
Some(
TypeMismatchError(
ExactT("akka.http.scaladsl.server.StandardRoute"),
None,
ExactT(
"akka.stream.scaladsl.Flow[akka.http.scaladsl.model.HttpRequest,akka.http.scaladsl.model.HttpResponse,Any]"
),
None,
None
)
)
)
}
it should "parse a type mismatch error with a single expands to section" in {
val e =
"""type mismatch;
|found : japgolly.scalajs.react.CompState.ReadCallbackWriteCallbackOps[com.softwaremill.clippy.Contribute.Step2.State]#This[com.softwaremill.clippy.FormField]
|required: japgolly.scalajs.react.CompState.AccessRD[?]
| (which expands to) japgolly.scalajs.react.CompState.ReadDirectWriteCallbackOps[?]""".stripMargin
CompilationErrorParser.parse(e) should be(
Some(
TypeMismatchError(
ExactT(
"japgolly.scalajs.react.CompState.ReadCallbackWriteCallbackOps[com.softwaremill.clippy.Contribute.Step2.State]#This[com.softwaremill.clippy.FormField]"
),
None,
ExactT("japgolly.scalajs.react.CompState.AccessRD[?]"),
Some(ExactT("japgolly.scalajs.react.CompState.ReadDirectWriteCallbackOps[?]")),
None
)
)
)
}
it should "parse a type mismatch error with two expands to sections" in {
val e =
"""type mismatch;
|found : japgolly.scalajs.react.CompState.ReadCallbackWriteCallbackOps[com.softwaremill.clippy.Contribute.Step2.State]#This[com.softwaremill.clippy.FormField]
| (which expands to) japgolly.scalajs.react.CompState.ReadCallbackWriteCallbackOps[com.softwaremill.clippy.FormField]
|required: japgolly.scalajs.react.CompState.AccessRD[?]
| (which expands to) japgolly.scalajs.react.CompState.ReadDirectWriteCallbackOps[?]""".stripMargin
CompilationErrorParser.parse(e) should be(
Some(
TypeMismatchError(
ExactT(
"japgolly.scalajs.react.CompState.ReadCallbackWriteCallbackOps[com.softwaremill.clippy.Contribute.Step2.State]#This[com.softwaremill.clippy.FormField]"
),
Some(
ExactT("japgolly.scalajs.react.CompState.ReadCallbackWriteCallbackOps[com.softwaremill.clippy.FormField]")
),
ExactT("japgolly.scalajs.react.CompState.AccessRD[?]"),
Some(ExactT("japgolly.scalajs.react.CompState.ReadDirectWriteCallbackOps[?]")),
None
)
)
)
}
it should "parse macwire's wire not found error message" in {
val e = "not found: value wire"
CompilationErrorParser.parse(e) should be(Some(NotFoundError(ExactT("value wire"))))
}
it should "parse not a member of message" in {
val e = "value call is not a member of scala.concurrent.Future[Unit]"
CompilationErrorParser.parse(e) should be(
Some(NotAMemberError(ExactT("value call"), ExactT("scala.concurrent.Future[Unit]")))
)
}
it should "parse not a member of message with extra text" in {
val e =
"[error] /Users/adamw/projects/clippy/ui-client/src/main/scala/com/softwaremill/clippy/Listing.scala:33: value call is not a member of scala.concurrent.Future[Unit]"
CompilationErrorParser.parse(e) should be(
Some(NotAMemberError(ExactT("value call"), ExactT("scala.concurrent.Future[Unit]")))
)
}
it should "parse an implicit not found" in {
val e =
"could not find implicit value for parameter marshaller: spray.httpx.marshalling.ToResponseMarshaller[scala.concurrent.Future[String]]"
CompilationErrorParser.parse(e) should be(
Some(
ImplicitNotFoundError(
ExactT("marshaller"),
ExactT("spray.httpx.marshalling.ToResponseMarshaller[scala.concurrent.Future[String]]")
)
)
)
}
it should "parse a diverging implicit error " in {
val e =
"diverging implicit expansion for type io.circe.Decoder.Secondary[this.Out] starting with method decodeCaseClass in trait GenericInstances"
CompilationErrorParser.parse(e) should be(
Some(
DivergingImplicitExpansionError(
ExactT("io.circe.Decoder.Secondary[this.Out]"),
ExactT("decodeCaseClass"),
ExactT("trait GenericInstances")
)
)
)
}
it should "parse a diverging implicit error with extra text" in {
val e =
"""
|[error] /home/src/main/scala/Routes.scala:19: diverging implicit expansion for type io.circe.Decoder.Secondary[this.Out]
|[error] starting with method decodeCaseClass in trait GenericInstances
""".stripMargin
CompilationErrorParser.parse(e) should be(
Some(
DivergingImplicitExpansionError(
ExactT("io.circe.Decoder.Secondary[this.Out]"),
ExactT("decodeCaseClass"),
ExactT("trait GenericInstances")
)
)
)
}
it should "parse a type arguments do not conform to any overloaded bounds error" in {
val e =
"""
|[error] clippy/Working.scala:32: type arguments [org.softwaremill.clippy.User] conform to the bounds of none of the overloaded alternatives of
|value apply: [E <: slick.lifted.AbstractTable[_]]=> slick.lifted.TableQuery[E] <and> [E <: slick.lifted.AbstractTable[_]](cons: slick.lifted.Tag => E)slick.lifted.TableQuery[E]
|protected val users = TableQuery[User]
""".stripMargin
CompilationErrorParser.parse(e) should be(
Some(
TypeArgumentsDoNotConformToOverloadedBoundsError(
ExactT("org.softwaremill.clippy.User"),
ExactT("value apply"),
Set(
ExactT("[E <: slick.lifted.AbstractTable[_]]=> slick.lifted.TableQuery[E]"),
ExactT("[E <: slick.lifted.AbstractTable[_]](cons: slick.lifted.Tag => E)slick.lifted.TableQuery[E]")
)
)
)
)
}
it should "parse a no implicit defined for" in {
val e =
"""
|[error] /Users/clippy/model/src/main/scala/com/softwaremill/clippy/CompilationErrorParser.scala:18: No implicit Ordering defined for java.time.LocalDate.
|[error] Seq(java.time.LocalDate.MIN, java.time.LocalDate.MAX).sorted
""".stripMargin
CompilationErrorParser.parse(e) should be(
Some(TypeclassNotFoundError(ExactT("Ordering"), ExactT("java.time.LocalDate")))
)
}
it should "parse an error with notes" in {
val e =
"""
|type mismatch;
| found : org.softwaremill.clippy.ImplicitResolutionDiamond.C
| required: Array[String]
|Note that implicit conversions are not applicable because they are ambiguous:
| both method toMessage in object B of type (b: org.softwaremill.clippy.ImplicitResolutionDiamond.B)Array[String]
| and method toMessage in object A of type (a: org.softwaremill.clippy.ImplicitResolutionDiamond.A)Array[String]
| are possible conversion functions from org.softwaremill.clippy.ImplicitResolutionDiamond.C to Array[String]
""".stripMargin
CompilationErrorParser.parse(e) should be(
Some(
TypeMismatchError(
ExactT("org.softwaremill.clippy.ImplicitResolutionDiamond.C"),
None,
ExactT("Array[String]"),
None,
Some(
"""Note that implicit conversions are not applicable because they are ambiguous:
| both method toMessage in object B of type (b: org.softwaremill.clippy.ImplicitResolutionDiamond.B)Array[String]
| and method toMessage in object A of type (a: org.softwaremill.clippy.ImplicitResolutionDiamond.A)Array[String]
| are possible conversion functions from org.softwaremill.clippy.ImplicitResolutionDiamond.C to Array[String]""".stripMargin
)
)
)
)
}
it should "parse an error with expands to & notes" in {
val e =
"""
|type mismatch;
| found : org.softwaremill.clippy.ImplicitResolutionDiamond.C
| required: Array[String]
| (which expands to) japgolly.scalajs.react.CompState.ReadDirectWriteCallbackOps[?]
|Note that implicit conversions are not applicable because they are ambiguous:
| both method toMessage in object B of type (b: org.softwaremill.clippy.ImplicitResolutionDiamond.B)Array[String]
| and method toMessage in object A of type (a: org.softwaremill.clippy.ImplicitResolutionDiamond.A)Array[String]
| are possible conversion functions from org.softwaremill.clippy.ImplicitResolutionDiamond.C to Array[String]
""".stripMargin
CompilationErrorParser.parse(e) should be(
Some(
TypeMismatchError(
ExactT("org.softwaremill.clippy.ImplicitResolutionDiamond.C"),
None,
ExactT("Array[String]"),
Some(ExactT("japgolly.scalajs.react.CompState.ReadDirectWriteCallbackOps[?]")),
Some(
"""Note that implicit conversions are not applicable because they are ambiguous:
| both method toMessage in object B of type (b: org.softwaremill.clippy.ImplicitResolutionDiamond.B)Array[String]
| and method toMessage in object A of type (a: org.softwaremill.clippy.ImplicitResolutionDiamond.A)Array[String]
| are possible conversion functions from org.softwaremill.clippy.ImplicitResolutionDiamond.C to Array[String]""".stripMargin
)
)
)
)
}
}
| softwaremill/scala-clippy | model/src/test/scala/com/softwaremill/clippy/CompilationErrorParserTest.scala | Scala | apache-2.0 | 10,669 |
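The parser under test can also be exercised directly; a minimal sketch based only on messages already used above:

CompilationErrorParser.parse("value call is not a member of scala.concurrent.Future[Unit]")
// Some(NotAMemberError(ExactT("value call"), ExactT("scala.concurrent.Future[Unit]")))
CompilationErrorParser.parse("some unrelated compiler output") // expected to be None: no pattern matches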
package scroll.tests.parameterized
import scroll.internal.dispatch.DispatchQuery
import scroll.internal.dispatch.DispatchQuery._
import scroll.internal.errors.SCROLLErrors.RoleNotFound
import scroll.tests.mocks._
class CompartmentRoleFeaturesTest extends AbstractParameterizedSCROLLTest {
test("Dropping role and invoking methods") {
forAll(PARAMS) { (c: Boolean, cc: Boolean) =>
val someCore = new CoreA()
new CompartmentUnderTest(c, cc) {
val someRole = new RoleA()
someCore play someRole
someCore play new RoleB()
someCore drop someRole
val resA: Int = (+someCore).a()
resA shouldBe -1
(+someCore).isPlaying[RoleA] shouldBe false
(+someCore).isPlaying[RoleB] shouldBe true
val resB: String = (+someCore).b()
resB shouldBe "b"
}
}
}
test("Dropping role and invoking methods with alias methods") {
forAll(PARAMS) { (c: Boolean, cc: Boolean) =>
val someCore = new CoreA()
new CompartmentUnderTest(c, cc) {
val someRole = new RoleA()
someCore <+> someRole
someCore <+> new RoleB()
someCore <-> someRole
val resA: Int = (+someCore).a()
resA shouldBe -1
(+someCore).isPlaying[RoleA] shouldBe false
(+someCore).isPlaying[RoleB] shouldBe true
val resB: String = (+someCore).b()
resB shouldBe "b"
}
}
}
test("Removing a player using the compartment") {
forAll(PARAMS) { (c: Boolean, cc: Boolean) =>
val someCore = new CoreA()
new CompartmentUnderTest(c, cc) {
val someRole = new RoleA()
someCore play someRole
rolePlaying.removePlayer(someCore)
(+someCore).isPlaying[RoleA] shouldBe false
(+someCore).s() match {
case Right(_) => fail("Player should have no access anymore!")
case Left(err) if err.isInstanceOf[RoleNotFound] => // this is fine
case Left(err) => fail("This exception is not expected: ", err)
}
}
}
}
test("Calling allPlayers using the compartment") {
forAll(PARAMS) { (c: Boolean, cc: Boolean) =>
val someCore = new CoreA()
new CompartmentUnderTest(c, cc) {
val roleA = new RoleA()
val roleB = new RoleB()
someCore play roleA
someCore play roleB
val expected = Seq(someCore, roleA, roleB)
rolePlaying.allPlayers shouldBe expected
}
}
}
test("Transferring a role") {
forAll(PARAMS) { (c: Boolean, cc: Boolean) =>
val someCoreA = new CoreA()
val someCoreB = new CoreB()
new CompartmentUnderTest(c, cc) {
val someRole = new RoleA()
someCoreA play someRole
someCoreA transfer someRole to someCoreB
val res: Int = (+someCoreB).a()
res shouldBe 0
(+someCoreA).isPlaying[RoleA] shouldBe false
(+someCoreB).isPlaying[RoleA] shouldBe true
}
}
}
test("Role playing and testing isPlaying") {
forAll(PARAMS) { (c: Boolean, cc: Boolean) =>
val someCoreA = new CoreA()
val someCoreB = new CoreB()
new CompartmentUnderTest(c, cc) {
val someRoleA = new RoleA()
val someRoleB = new RoleB()
someCoreA play someRoleA
someCoreA.isPlaying[RoleB] shouldBe false
someCoreB.isPlaying[RoleA] shouldBe false
someCoreB.isPlaying[RoleB] shouldBe false
someCoreA.isPlaying[RoleA] shouldBe true
}
}
}
test("Handling applyDynamic") {
forAll(PARAMS) { (c: Boolean, cc: Boolean) =>
val someCoreA = new CoreA()
new CompartmentUnderTest(c, cc) {
val someRole = new RoleA()
someCoreA play someRole
val expected = 0
val actual: Int = (+someCoreA).a()
expected shouldBe actual
val r = (+someCoreA).c()
r match {
case Left(_) => // correct
case Right(_) => fail("A call to the role with a method that does not exist should fail")
}
}
}
}
test("Handling applyDynamicNamed") {
forAll(PARAMS) { (c: Boolean, cc: Boolean) =>
val someCoreA = new CoreA()
new CompartmentUnderTest(c, cc) {
val someRole = new RoleA()
someCoreA play someRole
val expected = someRole.b("some", param = "out")
val actual: String = (+someCoreA).b("some", param = "out")
expected shouldBe actual
}
}
}
test("Handling selectDynamic") {
forAll(PARAMS) { (c: Boolean, cc: Boolean) =>
val someCoreA = new CoreA()
new CompartmentUnderTest(c, cc) {
val someRole = new RoleA()
someCoreA play someRole
val expectedA = someRole.valueA
val actualA: String = (+someCoreA).valueA
val expectedB = someRole.valueB
val actualB: Int = (+someCoreA).valueB
expectedA shouldBe actualA
expectedB shouldBe actualB
val r = (+someCoreA).valueD
r match {
case Left(_) => // correct
case Right(_) => fail("A call to the role with a method that does not exist should fail")
}
}
}
}
test("Handling updateDynamic") {
forAll(PARAMS) { (c: Boolean, cc: Boolean) =>
val someCoreA = new CoreA()
new CompartmentUnderTest(c, cc) {
val someRole = new RoleA()
someCoreA play someRole
val expectedA = "newValue"
(+someCoreA).valueA = expectedA
val actualA: String = (+someCoreA).valueA
val expectedB = -1
(+someCoreA).valueB = expectedB
val actualB: Int = (+someCoreA).valueB
expectedA shouldBe actualA
expectedB shouldBe actualB
}
}
}
test("Playing a role multiple times (same instance)") {
forAll(PARAMS) { (c: Boolean, cc: Boolean) =>
val someCoreA = new CoreA()
new CompartmentUnderTest(c, cc) {
val someRole = new RoleA()
someCoreA play someRole
someCoreA play someRole
val expected = "updated"
(+someCoreA).update(expected)
val actual1: String = someRole.valueC
val actual2: String = (+someCoreA).valueC
expected shouldBe actual1
expected shouldBe actual2
}
}
}
test("Playing a role multiple times (different instances) from one player") {
forAll(PARAMS) { (c: Boolean, cc: Boolean) =>
val someCoreA = new CoreA()
new CompartmentUnderTest(c, cc) {
val someRole1 = new RoleA()
val someRole2 = new RoleA()
someCoreA play someRole1
someCoreA play someRole2
val expected = "updated"
(+someCoreA).update(expected)
val actual1a: String = someRole1.valueC
val actual1b: String = someRole2.valueC
val actual2: String = (+someCoreA).valueC
(expected == actual1a || expected == actual1b) shouldBe true
expected shouldBe actual2
}
}
}
test("Playing a role multiple times (different instances, but using dispatch to select one)") {
forAll(PARAMS) { (c: Boolean, cc: Boolean) =>
val someCoreA = new CoreA()
new CompartmentUnderTest(c, cc) {
val someRole1 = new RoleA()
val someRole2 = new RoleA()
someRole1.valueB = 1
someRole2.valueB = 2
someCoreA play someRole1
someCoreA play someRole2
given DispatchQuery =
From(_.isInstanceOf[CoreA])
.To(_.isInstanceOf[RoleA])
.Through(anything)
.Bypassing {
case r: RoleA =>
1 == r.valueB // so we ignore someRole1 here while dispatching the call to update
case _ => false
}
(+someCoreA).update("updated")
val actual1: String = someRole1.valueC
val actual2: String = someRole2.valueC
val actual3: String = (+someCoreA).valueC
"valueC" shouldBe actual1
"updated" shouldBe actual2
"updated" shouldBe actual3
}
}
}
test("Calling multi-argument method in roles") {
forAll(PARAMS) { (c: Boolean, cc: Boolean) =>
val someCoreA = new CoreA()
new CompartmentUnderTest(c, cc) {
val someRole = new RoleD()
someCoreA play someRole
val expected1 = "updated"
val expected2 = 1
(+someCoreA).update(expected1, expected2)
val actual1 = someRole.valueA
val actual2 = someRole.valueB
val actual3: String = (+someCoreA).valueA
val actual4: Int = (+someCoreA).valueB
expected1 shouldBe actual1
expected2 shouldBe actual2
expected1 shouldBe actual3
expected2 shouldBe actual4
}
}
}
/** Test case for the primitive types: Int, Double, Float, Long, Short, Byte, Char, Boolean. */
test("Calling method on a role with different primitive types") {
forAll(PARAMS) { (c: Boolean, cc: Boolean) =>
val someCoreA = new CoreA()
new CompartmentUnderTest(c, cc) {
val someRole = new RoleE()
someCoreA play someRole
val expectedInt: Int = 0
val expectedDouble: Double = 0
val expectedFloat: Float = 0
val expectedLong: Long = 0
val expectedShort: Short = 0
val expectedByte: Byte = 0
val expectedChar: Char = 'B'
val expectedBoolean: Boolean = true
(+someCoreA).updateInt(expectedInt)
(+someCoreA).updateDouble(expectedDouble)
(+someCoreA).updateFloat(expectedFloat)
(+someCoreA).updateLong(expectedLong)
(+someCoreA).updateShort(expectedShort)
(+someCoreA).updateByte(expectedByte)
(+someCoreA).updateChar(expectedChar)
(+someCoreA).updateBoolean(expectedBoolean)
val actualIntR = someRole.valueInt
val actualDoubleR = someRole.valueDouble
val actualFloatR = someRole.valueFloat
val actualLongR = someRole.valueLong
val actualShortR = someRole.valueShort
val actualByteR = someRole.valueByte
val actualCharR = someRole.valueChar
val actualBooleanR = someRole.valueBoolean
actualIntR shouldBe expectedInt
actualDoubleR shouldBe expectedDouble
actualFloatR shouldBe expectedFloat
actualLongR shouldBe expectedLong
actualShortR shouldBe expectedShort
actualByteR shouldBe expectedByte
actualCharR shouldBe expectedChar
actualBooleanR shouldBe expectedBoolean
val actualIntP: Int = (+someCoreA).valueInt
val actualDoubleP: Double = (+someCoreA).valueDouble
val actualFloatP: Float = (+someCoreA).valueFloat
val actualLongP: Long = (+someCoreA).valueLong
val actualShortP: Short = (+someCoreA).valueShort
val actualByteP: Byte = (+someCoreA).valueByte
val actualCharP: Char = (+someCoreA).valueChar
val actualBooleanP: Boolean = (+someCoreA).valueBoolean
actualIntP shouldBe expectedInt
actualDoubleP shouldBe expectedDouble
actualFloatP shouldBe expectedFloat
actualLongP shouldBe expectedLong
actualShortP shouldBe expectedShort
actualByteP shouldBe expectedByte
actualCharP shouldBe expectedChar
actualBooleanP shouldBe expectedBoolean
}
}
}
test("Playing a role multiple times (same instance) from different players") {
forAll(PARAMS) { (c: Boolean, cc: Boolean) =>
val someCoreA = new CoreA()
val someCoreB = new CoreB()
new CompartmentUnderTest(c, cc) {
val someRole = new RoleA()
someCoreA play someRole
someCoreB play someRole
val expected = "updated"
(+someCoreA).update(expected)
(+someCoreB).update(expected)
val actual1: String = someRole.valueC
val actual2: String = (+someCoreA).valueC
val actual3: String = (+someCoreB).valueC
expected shouldBe actual1
expected shouldBe actual2
expected shouldBe actual3
val player = someRole.player match {
case Left(_) => fail("Player should be defined here!")
case Right(p) => p
}
(player == someCoreA || player == someCoreB) shouldBe true
{
given DispatchQuery =
From(anything)
.To(c => c.isInstanceOf[CoreA] || c.isInstanceOf[CoreB])
.Through(anything)
.Bypassing(_.isInstanceOf[CoreB])
val player2 = someRole.player match {
case Left(_) => fail("Player should be defined here!")
case Right(p) => p
}
player2 shouldBe someCoreA
}
}
}
}
test("Cyclic role-playing relationship") {
forAll(PARAMS) { (c: Boolean, cc: Boolean) =>
whenever(cc) {
val someCoreA = new CoreA()
new CompartmentUnderTest(c, true) {
val someRoleA = new RoleA()
val someRoleB = new RoleB()
val someRoleC = new RoleC()
someCoreA play someRoleA
someRoleA play someRoleB
someRoleB play someRoleC
a[RuntimeException] should be thrownBy {
someRoleC play someRoleA
}
}
}
}
}
test("Compartment plays a role that is part of themselves") {
forAll(PARAMS) { (c: Boolean, cc: Boolean) =>
class ACompartment(c: Boolean, cc: Boolean) extends CompartmentUnderTest(c, cc) {
class ARole
}
new ACompartment(c, cc) {
this play new ARole
this.isPlaying[ARole] shouldBe true
}
}
}
test("Deep roles") {
forAll(PARAMS) { (c: Boolean, cc: Boolean) =>
val someCoreA = new CoreA()
new CompartmentUnderTest(c, cc) {
val someRoleA = new RoleA()
val someRoleB = new RoleB()
val someRoleC = new RoleC()
val someRoleD = new RoleD()
val someRoleE = new RoleE()
val expectedVal = 10
someCoreA play someRoleA
someRoleA play someRoleB
someRoleB play someRoleC
someRoleC play someRoleD
someRoleD play someRoleE
(+someCoreA).valueInt = expectedVal
val actualVal1: Int = (+someCoreA).valueInt
val actualVal2: Int = (+someRoleB).valueInt
val actualVal3: Int = (+someRoleC).valueInt
val actualVal4: Int = (+someRoleD).valueInt
val actualVal5: Int = (+someRoleE).valueInt
val actualVal6: Int = someRoleE.valueInt
actualVal1 shouldBe expectedVal
actualVal2 shouldBe expectedVal
actualVal3 shouldBe expectedVal
actualVal4 shouldBe expectedVal
actualVal5 shouldBe expectedVal
actualVal6 shouldBe expectedVal
val expected = Seq(someRoleD, someRoleC, someRoleB, someRoleA, someCoreA)
someRoleE.predecessors() shouldBe expected
}
}
}
test("Deep roles (chained directly)") {
forAll(PARAMS) { (c: Boolean, cc: Boolean) =>
val someCoreA = new CoreA()
new CompartmentUnderTest(c, cc) {
val someRoleA = new RoleA()
val someRoleB = new RoleB()
val someRoleC = new RoleC()
val someRoleD = new RoleD()
val someRoleE = new RoleE()
val expectedVal = 10
someCoreA playing someRoleA playing someRoleB playing someRoleC playing someRoleD playing someRoleE
(+someCoreA).valueInt = expectedVal
val actualVal1: Int = (+someCoreA).valueInt
val actualVal2: Int = (+someRoleB).valueInt
val actualVal3: Int = (+someRoleC).valueInt
val actualVal4: Int = (+someRoleD).valueInt
val actualVal5: Int = (+someRoleE).valueInt
val actualVal6: Int = someRoleE.valueInt
actualVal1 shouldBe expectedVal
actualVal2 shouldBe expectedVal
actualVal3 shouldBe expectedVal
actualVal4 shouldBe expectedVal
actualVal5 shouldBe expectedVal
actualVal6 shouldBe expectedVal
val expected = Seq(someCoreA)
someRoleE.predecessors() shouldBe expected
}
}
}
test("Deep roles (chained directly with alias method)") {
forAll(PARAMS) { (c: Boolean, cc: Boolean) =>
val someCoreA = new CoreA()
new CompartmentUnderTest(c, cc) {
val someRoleA = new RoleA()
val someRoleB = new RoleB()
val someRoleC = new RoleC()
val someRoleD = new RoleD()
val someRoleE = new RoleE()
val expectedVal = 10
someCoreA <=> someRoleA <=> someRoleB <=> someRoleC <=> someRoleD <=> someRoleE
(+someCoreA).valueInt = expectedVal
val actualVal1: Int = (+someCoreA).valueInt
val actualVal2: Int = (+someRoleB).valueInt
val actualVal3: Int = (+someRoleC).valueInt
val actualVal4: Int = (+someRoleD).valueInt
val actualVal5: Int = (+someRoleE).valueInt
val actualVal6: Int = someRoleE.valueInt
actualVal1 shouldBe expectedVal
actualVal2 shouldBe expectedVal
actualVal3 shouldBe expectedVal
actualVal4 shouldBe expectedVal
actualVal5 shouldBe expectedVal
actualVal6 shouldBe expectedVal
val expected = Seq(someCoreA)
someRoleE.predecessors() shouldBe expected
}
}
}
test("Handling null arguments for applyDynamic") {
forAll(PARAMS) { (c: Boolean, cc: Boolean) =>
val someCoreA = new CoreA()
new CompartmentUnderTest(c, cc) {
val someRoleA = new RoleA()
val expected: String = "valueC"
val p = someCoreA play someRoleA
var actual: String = p.valueC
actual shouldBe expected
p.update(null)
actual = p.valueC
actual shouldBe null
}
}
}
test("Dropping roles when using deep roles") {
class Core() {
def a(): String = "a"
}
class RoleWithB() {
def b(): String = "b"
}
class RoleWithC() {
def c(): String = "c"
}
forAll(PARAMS) { (c: Boolean, cc: Boolean) =>
val someCore = new Core()
val roleWithB = new RoleWithB()
val roleWithC = new RoleWithC()
new CompartmentUnderTest(c, cc) {
someCore play roleWithB
var actual: String = (+someCore).a()
actual shouldBe "a"
actual = (+roleWithB).a()
actual shouldBe "a"
actual = (+someCore).b()
actual shouldBe "b"
actual = (+roleWithB).b()
actual shouldBe "b"
roleWithB play roleWithC
actual = (+someCore).a()
actual shouldBe "a"
actual = (+roleWithB).a()
actual shouldBe "a"
actual = (+roleWithC).a()
actual shouldBe "a"
actual = (+someCore).b()
actual shouldBe "b"
actual = (+roleWithB).b()
actual shouldBe "b"
actual = (+roleWithC).b()
actual shouldBe "b"
actual = (+someCore).c()
actual shouldBe "c"
actual = (+roleWithB).c()
actual shouldBe "c"
actual = (+roleWithC).c()
actual shouldBe "c"
someCore.drop(roleWithB)
actual = (+someCore).a()
actual shouldBe "a"
(+roleWithB).a() match {
case Right(_) => fail("Player should have no access anymore!")
case Left(err) if err.isInstanceOf[RoleNotFound] => // this is fine
case Left(err) => fail("This exception is not expected: ", err)
}
(+roleWithC).a() match {
case Right(_) => fail("Player should have no access anymore!")
case Left(err) if err.isInstanceOf[RoleNotFound] => // this is fine
case Left(err) => fail("This exception is not expected: ", err)
}
(+someCore).b() match {
case Right(_) => fail("Player should have no access anymore!")
case Left(err) if err.isInstanceOf[RoleNotFound] => // this is fine
case Left(err) => fail("This exception is not expected: ", err)
}
actual = (+roleWithB).b()
actual shouldBe "b"
actual = (+roleWithC).b()
actual shouldBe "b"
(+someCore).c() match {
case Right(_) => fail("Player should have no access anymore!")
case Left(err) if err.isInstanceOf[RoleNotFound] => // this is fine
case Left(err) => fail("This exception is not expected: ", err)
}
actual = (+roleWithB).c()
actual shouldBe "c"
actual = (+roleWithC).c()
actual shouldBe "c"
someCore.play(roleWithB)
actual = (+someCore).a()
actual shouldBe "a"
actual = (+roleWithB).a()
actual shouldBe "a"
actual = (+roleWithC).a()
actual shouldBe "a"
actual = (+someCore).b()
actual shouldBe "b"
actual = (+roleWithB).b()
actual shouldBe "b"
actual = (+roleWithC).b()
actual shouldBe "b"
actual = (+someCore).c()
actual shouldBe "c"
actual = (+roleWithB).c()
actual shouldBe "c"
actual = (+roleWithC).c()
actual shouldBe "c"
roleWithB.remove()
actual = (+someCore).a()
actual shouldBe "a"
(+roleWithB).a() match {
case Right(_) => fail("Player should have no access anymore!")
case Left(err) if err.isInstanceOf[RoleNotFound] => // this is fine
case Left(err) => fail("This exception is not expected: ", err)
}
(+roleWithC).a() match {
case Right(_) => fail("Player should have no access anymore!")
case Left(err) if err.isInstanceOf[RoleNotFound] => // this is fine
case Left(err) => fail("This exception is not expected: ", err)
}
(+someCore).b() match {
case Right(_) => fail("Player should have no access anymore!")
case Left(err) if err.isInstanceOf[RoleNotFound] => // this is fine
case Left(err) => fail("This exception is not expected: ", err)
}
actual = roleWithB.b()
actual shouldBe "b"
(+roleWithC).b() match {
case Right(_) => fail("Player should have no access anymore!")
case Left(err) if err.isInstanceOf[RoleNotFound] => // this is fine
case Left(err) => fail("This exception is not expected: ", err)
}
(+someCore).c() match {
case Right(_) => fail("Player should have no access anymore!")
case Left(err) if err.isInstanceOf[RoleNotFound] => // this is fine
case Left(err) => fail("This exception is not expected: ", err)
}
(+roleWithB).c() match {
case Right(_) => fail("Player should have no access anymore!")
case Left(err) if err.isInstanceOf[RoleNotFound] => // this is fine
case Left(err) => fail("This exception is not expected: ", err)
}
actual = (+roleWithC).c()
actual shouldBe "c"
}
}
}
}
| max-leuthaeuser/SCROLL | tests/src/test/scala/scroll/tests/parameterized/CompartmentRoleFeaturesTest.scala | Scala | lgpl-3.0 | 22,887 |
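The core SCROLL idioms exercised throughout this spec, in one minimal sketch (hedged; CompartmentUnderTest, CoreA and RoleA come from the test support code above):

new CompartmentUnderTest(false, false) {
  val core = new CoreA()
  val role = new RoleA()
  core play role
  val viaRole: Int = (+core).a() // dynamic dispatch through the role
  core drop role                 // detaches the role again
}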
package com.naughtyzombie.scalacourse.week1
/**
* Created by pram on 16/09/15.
*/
object SquareRoot {
def sqrt(x: Double) = {
def abs(x: Double) = if (x < 0) -x else x
def sqrtIter(guess: Double): Double =
if (isGoodEnough(guess)) guess
else sqrtIter(improve(guess))
def isGoodEnough(guess: Double) =
abs(guess * guess - x) / x < 1.0e-3
def improve(guess: Double) =
(guess + x / guess) / 2
sqrtIter(1.0)
} //> sqrt: (x: Double)Double
def main(args: Array[String]) {
println(sqrt(2)) //> res0: Double = 1.4142156862745097
println(sqrt(4)) //> res1: Double = 2.000609756097561
println(sqrt(1.0e-6)) //> res2: Double = 0.0010000001533016628
println(sqrt(1.0e60))
}
}
| pram/scalawork | src/main/scala/com/naughtyzombie/scalacourse/week1/SquareRoot.scala | Scala | mit | 892 |
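For x = 2 the Newton iteration above needs three improve steps: 1.0 → 1.5 → 17/12 ≈ 1.41667 (rejected, since |1.41667² − 2| / 2 ≈ 3.5e-3 exceeds 1.0e-3) → 577/408 ≈ 1.4142156862745097, whose relative error of roughly 3e-6 passes isGoodEnough, matching the worksheet output above.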
package com.zeminer.examples.google
/*
ZeMiner.com Code Examples
Copyright (c) 2017 ZeMiner.com
Released under the MIT License
See LICENSE on this project's root folder for details.
ZeMiner.com : AdSense(TM) Dataset processing example code.
This program uses the AdSense(TM) Datasets located at https://ZeMiner.com/lists/adsense
The AdSense trademark is owned by Google Inc. ZeMiner.com is in no way associated with Google or any of its subsidiaries.
ZeMiner.com datasets are built using publicly available websites and data sources.
*/
import com.zeminer.Spark
import java.io.File
object AdSenseStats {
def usage(): Unit = {
println("Usage: sbt \\"runMain com.zeminer.examples.google.AdSenseStats <DATAFILE>\\"")
}
def main(args: Array[String]): Unit = {
if (args.size != 1) {
usage()
System.exit(1)
}
val filename = args(0)
val f = new File(filename)
if (!f.exists()) {
println(s"FATAL: File $filename not found.")
System.exit(1)
}
/* Load our dataset. */
val data = Spark.getContext.textFile(filename)
/* First Spark Job: Count number of lines contained in dataset. */
val lineCount = data.count()
println(s"LINE COUNT: $lineCount")
/* Find AdSense advertisers with URLs matching the word "loans" */
val loanAdvertisers = data.flatMap(_.toLowerCase.split("\\,"))
.filter(_.startsWith("http"))
.filter(_.contains("loans"))
loanAdvertisers.foreach(println)
}
} | ZeMiner/zeminer-examples | src/main/scala/com/zeminer/examples/google/AdSense.scala | Scala | mit | 1,630 |
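The com.zeminer.Spark helper is imported but not shown in this record; a plausible minimal shape (an assumption for illustration, not the project's actual code) is:

import org.apache.spark.{SparkConf, SparkContext}

object Spark {
  // Hypothetical: one lazily created local context, matching the Spark.getContext calls above
  lazy val getContext: SparkContext =
    new SparkContext(new SparkConf().setAppName("zeminer-examples").setMaster("local[*]"))
}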
package rest.client.support
import authentication.entities.{AuthToken, BearerToken}
import spray.json._
import support.JsonApiSupport
trait JsonSupport extends JsonApiSupport {
implicit object AuthTokenFormat extends RootJsonFormat[AuthToken] {
override def read(json: JsValue): AuthToken = json match {
case jsObject: JsObject =>
jsObject.getFields("bearer") match {
case Seq(JsString(bearer)) => BearerToken(bearer)
}
case _ => deserializationError("An error occurred while deserializing the entity.")
}
override def write(obj: AuthToken): JsValue =
serializationError("Operation not supported.")
}
}
| lymr/fun-chat | fun-chat-client/src/main/scala/rest/client/support/JsonSupport.scala | Scala | mit | 703 |
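A reading sketch for the format above (hedged: JsonApiSupport is defined elsewhere in the project, and AuthTokenFormat must be in implicit scope, e.g. inside a class mixing in JsonSupport):

import spray.json._

// requires the implicit AuthTokenFormat, e.g. via a JsonSupport mixin
val token: AuthToken = """{ "bearer": "abc123" }""".parseJson.convertTo[AuthToken]
// token == BearerToken("abc123")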
/*
* Copyright (C) Lightbend Inc. <https://www.lightbend.com>
*/
package play.filters.csp
import com.typesafe.config.ConfigFactory
import javax.inject.Inject
import play.api.inject.guice.GuiceApplicationBuilder
import play.api.libs.json.JsArray
import play.api.libs.json.JsNumber
import play.api.libs.json.JsObject
import play.api.libs.json.JsString
import play.api.libs.json.Json
import play.api.mvc.AbstractController
import play.api.mvc.ControllerComponents
import play.api.test.FakeRequest
import play.api.test.PlaySpecification
import play.api.Application
import play.api.Configuration
import play.api.http.Status
import scala.reflect.ClassTag
class ScalaCSPReportSpec extends PlaySpecification {
sequential
def toConfiguration(rawConfig: String) = {
val typesafeConfig = ConfigFactory.parseString(rawConfig)
Configuration(typesafeConfig)
}
private def inject[T: ClassTag](implicit app: Application) = app.injector.instanceOf[T]
private def myAction(implicit app: Application) = inject[ScalaCSPReportSpec.MyAction]
def withApplication[T]()(block: Application => T): T = {
val app = new GuiceApplicationBuilder()
.configure(Map("play.http.errorHandler" -> "play.api.http.JsonHttpErrorHandler"))
.appRoutes(implicit app => { case _ => myAction.cspReport })
.build()
running(app)(block(app))
}
"Scala CSP report" should {
"work with a chrome style csp-report" in withApplication() { implicit app =>
val chromeJson = Json.parse(
"""{
| "csp-report": {
| "document-uri": "http://45.55.25.245:8123/csp?os=OS%20X&device=&browser_version=43.0&browser=chrome&os_version=Lion",
| "referrer": "",
| "violated-directive": "child-src https://45.55.25.245:8123/",
| "effective-directive": "frame-src",
| "original-policy": "default-src https://45.55.25.245:8123/; child-src https://45.55.25.245:8123/; connect-src https://45.55.25.245:8123/; font-src https://45.55.25.245:8123/; img-src https://45.55.25.245:8123/; media-src https://45.55.25.245:8123/; object-src https://45.55.25.245:8123/; script-src https://45.55.25.245:8123/; style-src https://45.55.25.245:8123/; form-action https://45.55.25.245:8123/; frame-ancestors 'none'; plugin-types 'none'; report-uri http://45.55.25.245:8123/csp-report?os=OS%20X&device=&browser_version=43.0&browser=chrome&os_version=Lion",
| "blocked-uri": "http://google.com",
| "status-code": 200
| }
|}
""".stripMargin
)
val request = FakeRequest("POST", "/report-to").withJsonBody(chromeJson)
val Some(result) = route(app, request)
status(result) must_=== Status.OK
contentType(result) must beSome("application/json")
contentAsJson(result) must be_==(Json.obj("violation" -> "child-src https://45.55.25.245:8123/"))
}
"work with a firefox style csp-report" in withApplication() { implicit app =>
val firefoxJson = Json.parse(
"""{
|"csp-report": {
| "blocked-uri": "data:image/gif;base64,R0lGODlhEAAQAMQAAORHHOVSKudfOulrSOp3WOyDZu6QdvCchPGolfO0o/XBs/fNwfjZ0frl3/zy7////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACH5BAkAABAALAAAAAAQABAAAAVVICSOZGlCQAosJ6mu7fiyZeKqNKToQGDsM8hBADgUXoGAiqhSvp5QAnQKGIgUhwFUYLCVDFCrKUE1lBavAViFIDlTImbKC5Gm2hB0SlBCBMQiB0UjIQA7",
| "document-uri": "http://45.55.25.245:8123/csp?os=OS%20X&device=&browser_version=37.0&browser=firefox&os_version=Yosemite",
| "original-policy": "default-src https://45.55.25.245:8123/; connect-src https://45.55.25.245:8123/; font-src https://45.55.25.245:8123/; img-src https://45.55.25.245:8123/; media-src https://45.55.25.245:8123/; object-src https://45.55.25.245:8123/; script-src https://45.55.25.245:8123/; style-src https://45.55.25.245:8123/; form-action https://45.55.25.245:8123/; frame-ancestors 'none'; report-uri http://45.55.25.245:8123/csp-report?os=OS%20X&device=&browser_version=37.0&browser=firefox&os_version=Yosemite",
| "referrer": "",
| "violated-directive": "img-src https://45.55.25.245:8123/"
| }
|}
""".stripMargin
)
val request = FakeRequest("POST", "/report-to").withJsonBody(firefoxJson)
val Some(result) = route(app, request)
status(result) must_=== Status.OK
contentType(result) must beSome("application/json")
contentAsJson(result) must be_==(Json.obj("violation" -> "img-src https://45.55.25.245:8123/"))
}
"work with a webkit style csp-report" in withApplication() { implicit app =>
val webkitJson = Json.parse(
"""{
|"csp-report": {
| "document-uri": "http://45.55.25.245:8123/csp?os=OS%20X&device=&browser_version=23.0&browser=chrome&os_version=Lion",
| "violated-directive": "default-src https://45.55.25.245:8123/",
| "original-policy": "default-src https://45.55.25.245:8123/; child-src https://45.55.25.245:8123/; connect-src https://45.55.25.245:8123/; font-src https://45.55.25.245:8123/; img-src https://45.55.25.245:8123/; media-src https://45.55.25.245:8123/; object-src https://45.55.25.245:8123/; script-src https://45.55.25.245:8123/; style-src https://45.55.25.245:8123/; form-action https://45.55.25.245:8123/; frame-ancestors 'none'; plugin-types 'none'; report-uri http://45.55.25.245:8123/csp-report?os=OS%20X&device=&browser_version=23.0&browser=chrome&os_version=Lion",
| "blocked-uri": "http://google.com"
| }
|}
""".stripMargin
)
val request = FakeRequest("POST", "/report-to").withJsonBody(webkitJson)
val Some(result) = route(app, request)
status(result) must_=== Status.OK
contentType(result) must beSome("application/json")
contentAsJson(result) must be_==(Json.obj("violation" -> "default-src https://45.55.25.245:8123/"))
}
"work with a old webkit style csp-report" in withApplication() { implicit app =>
val request = FakeRequest("POST", "/report-to").withFormUrlEncodedBody(
"document-url" -> "http://45.55.25.245:8123/csp?os=OS%2520X&device=&browser_version=3.6&browser=firefox&os_version=Yosemite",
"violated-directive" -> "object-src https://45.55.25.245:8123/"
)
val Some(result) = route(app, request)
status(result) must_=== Status.OK
contentType(result) must beSome("application/json")
contentAsJson(result) must be_==(Json.obj("violation" -> "object-src https://45.55.25.245:8123/"))
}
"work with inline script violation" in withApplication() { implicit app =>
val inlineScriptJson = Json.parse(
"""{
|"csp-report": {
| "blocked-uri": "inline",
| "column-number": 153,
| "document-uri": "http://45.55.25.245:8123/csp?os=OS%20X&device=&browser_version=37.0&browser=firefox&os_version=Yosemite",
| "line-number": 1,
| "original-policy": "script-src 'self'; report-uri http://45.55.25.245:8123/csp/report-to;",
| "referrer": "",
| "source-file": "http://45.55.25.245:8123/csp?os=OS%20X&device=&browser_version=37.0&browser=firefox&os_version=Yosemite",
| "violated-directive": "script-src"
| }
|}
""".stripMargin
)
val request = FakeRequest("POST", "/report-to").withJsonBody(inlineScriptJson)
val Some(result) = route(app, request)
status(result) must_=== Status.OK
contentType(result) must beSome("application/json")
contentAsJson(result) must be_==(Json.obj("violation" -> "script-src"))
}
"work with inline script violation when String implementation is used" in withApplication() { implicit app =>
val inlineScriptJson = Json.parse(
"""{
|"csp-report": {
| "blocked-uri": "inline",
| "column-number": "153",
| "document-uri": "http://45.55.25.245:8123/csp?os=OS%20X&device=&browser_version=37.0&browser=firefox&os_version=Yosemite",
| "line-number": "1",
| "original-policy": "script-src 'self'; report-uri http://45.55.25.245:8123/csp/report-to;",
| "referrer": "",
| "source-file": "http://45.55.25.245:8123/csp?os=OS%20X&device=&browser_version=37.0&browser=firefox&os_version=Yosemite",
| "violated-directive": "script-src"
| }
|}
""".stripMargin
)
val request = FakeRequest("POST", "/report-to").withJsonBody(inlineScriptJson)
val Some(result) = route(app, request)
status(result) must_=== Status.OK
contentType(result) must beSome("application/json")
contentAsJson(result) must be_==(Json.obj("violation" -> "script-src"))
}
"fail when receiving an unsupported media type (text/plain) in content type header" in withApplication() {
implicit app =>
val request = FakeRequest("POST", "/report-to").withTextBody("foo")
val Some(result) = route(app, request)
status(result) must_=== Status.UNSUPPORTED_MEDIA_TYPE
contentType(result) must beSome("application/problem+json")
val fullJson = contentAsJson(result).asInstanceOf[JsObject]
// The value of "requestId" is not constant, it changes, so we just check for its existence
fullJson.fields.count(_._1 == "requestId") must_=== 1
// Lets remove "requestId" now
fullJson - "requestId" must be_==(
JsObject(
Seq(
"title" -> JsString("Unsupported Media Type"),
"status" -> JsNumber(Status.UNSUPPORTED_MEDIA_TYPE),
"detail" -> JsString(
"Content type must be one of application/x-www-form-urlencoded,text/json,application/json,application/csp-report but was Some(text/plain)"
),
)
)
)
}
"fail when receiving invalid csp-report JSON" in withApplication() { implicit app =>
val invalidCspReportJson = Json.parse(
"""{
| "csp-report": {
| "foo": "bar"
| }
|}
""".stripMargin
)
val request = FakeRequest("POST", "/report-to").withJsonBody(invalidCspReportJson)
val Some(result) = route(app, request)
status(result) must_=== Status.BAD_REQUEST
contentType(result) must beSome("application/problem+json")
val fullJson = contentAsJson(result).asInstanceOf[JsObject]
// The value of "requestId" is not constant, it changes, so we just check for its existence
fullJson.fields.count(_._1 == "requestId") must_=== 1
// Lets remove "requestId" now
fullJson - "requestId" must be_==(
JsObject(
Seq(
"title" -> JsString("Bad Request"),
"status" -> JsNumber(Status.BAD_REQUEST),
"detail" -> JsString("Could not parse CSP"),
"errors" -> JsObject(
Seq(
"obj.document-uri" ->
JsArray(
Seq(
JsObject(Seq("msg" -> JsArray(Seq(JsString("error.path.missing"))), "args" -> JsArray(Seq.empty)))
)
),
"obj.violated-directive" ->
JsArray(
Seq(
JsObject(Seq("msg" -> JsArray(Seq(JsString("error.path.missing"))), "args" -> JsArray(Seq.empty)))
)
),
)
)
)
)
)
}
}
}
object ScalaCSPReportSpec {
class MyAction @Inject() (cspReportAction: CSPReportActionBuilder, cc: ControllerComponents)
extends AbstractController(cc) {
def cspReport = cspReportAction { implicit request =>
val json = Json.toJson(Map("violation" -> request.body.violatedDirective))
Ok(json)
}
}
}
| mkurz/playframework | web/play-filters-helpers/src/test/scala/play/filters/csp/ScalaCSPReportSpec.scala | Scala | apache-2.0 | 11,976 |
package sri.mobile
import sri.core._
import sri.mobile.apis._
import sri.mobile.apis.android.{BackAndroid, IntentAndroid, ToastAndroid}
import sri.mobile.apis.ios._
import sri.mobile.components.android.{TimerPickerAndroid, DatePickerAndroid}
import sri.mobile.modules.NativeModules
import sri.universal.ReactUniversal
import sri.universal.apis.{Alert, Clipboard, LayoutAnimation, NetInfo}
import scala.scalajs.js
import scala.scalajs.js.annotation.JSName
@js.native
trait ReactNative extends ReactUniversal {
//components
val ActivityIndicatorIOS: ReactClass = js.native
val DatePickerIOS: ReactClass = js.native
val MapView: ReactClass = js.native
val NavigatorIOS: ReactClass = js.native
val SliderIOS: ReactClass = js.native
val SwitchIOS: ReactClass = js.native
val TabBarItemIOS: ReactClass = js.native
val WebView: ReactClass = js.native
val TabBarIOS: ReactClass = js.native
val SegmentedControlIOS: ReactClass = js.native
val DrawerLayoutAndroid: ReactClass = js.native
val ProgressBarAndroid: ReactClass = js.native
val ProgressViewIOS: ReactClass = js.native
val PickerIOS: ReactClass = js.native
val SwitchAndroid: ReactClass = js.native
val ToolbarAndroid: ReactClass = js.native
val TouchableNativeFeedback: ReactClass = js.native
val ViewPagerAndroid: ReactClass = js.native
val DatePickerAndroid: DatePickerAndroid = js.native
val TimerPickerAndroid: TimerPickerAndroid = js.native
val StatusBar: ReactClass = js.native
// apis
val Alert: Alert = js.native
val Clipboard: Clipboard = js.native
val AlertIOS: AlertIOS = js.native
val StyleSheet: StyleSheet = js.native
val AppStateIOS: AppStateIOS = js.native
val AppState: AppState = js.native
val CameraRoll: CameraRoll = js.native
val InteractionManager: InteractionManager = js.native
val LinkingIOS: LinkingIOS = js.native
val NetInfo: NetInfo = js.native
val LayoutAnimation: LayoutAnimation = js.native
val PushNotificationIOS: PushNotificationIOS = js.native
val PanResponder: PanResponder = js.native
val StatusBarIOS: js.Dynamic = js.native
val VibrationIOS: VibrationIOS = js.native
val Dimensions: Dimensions = js.native
val Animated: js.Dynamic = js.native
val IntentAndroid: IntentAndroid = js.native
val Linking: Linking = js.native
val ToastAndroid: ToastAndroid = js.native
val BackAndroid: BackAndroid = js.native
val UIManager: UIManager = js.native
// native stuff
val NativeModules: NativeModules = js.native
}
@js.native
trait ReactPlatForm extends js.Object {
val OS: String = js.native
val Version: Double = js.native
}
@js.native
@JSName("ReactNative")
object ReactNative extends ReactNative
| hamazy/sri | mobile/src/main/scala/sri/mobile/ReactNative.scala | Scala | apache-2.0 | 2,693 |
package cs4r.labs.learningscala.adventofcode
object AdventOfCode7 extends App {
val puzzleInput = """lf AND lq -> ls
|iu RSHIFT 1 -> jn
|bo OR bu -> bv
|gj RSHIFT 1 -> hc
|et RSHIFT 2 -> eu
|bv AND bx -> by
|is OR it -> iu
|b OR n -> o
|gf OR ge -> gg
|NOT kt -> ku
|ea AND eb -> ed
|kl OR kr -> ks
|hi AND hk -> hl
|au AND av -> ax
|lf RSHIFT 2 -> lg
|dd RSHIFT 3 -> df
|eu AND fa -> fc
|df AND dg -> di
|ip LSHIFT 15 -> it
|NOT el -> em
|et OR fe -> ff
|fj LSHIFT 15 -> fn
|t OR s -> u
|ly OR lz -> ma
|ko AND kq -> kr
|NOT fx -> fy
|et RSHIFT 1 -> fm
|eu OR fa -> fb
|dd RSHIFT 2 -> de
|NOT go -> gp
|kb AND kd -> ke
|hg OR hh -> hi
|jm LSHIFT 1 -> kg
|NOT cn -> co
|jp RSHIFT 2 -> jq
|jp RSHIFT 5 -> js
|1 AND io -> ip
|eo LSHIFT 15 -> es
|1 AND jj -> jk
|g AND i -> j
|ci RSHIFT 3 -> ck
|gn AND gp -> gq
|fs AND fu -> fv
|lj AND ll -> lm
|jk LSHIFT 15 -> jo
|iu RSHIFT 3 -> iw
|NOT ii -> ij
|1 AND cc -> cd
|bn RSHIFT 3 -> bp
|NOT gw -> gx
|NOT ft -> fu
|jn OR jo -> jp
|iv OR jb -> jc
|hv OR hu -> hw
|19138 -> b
|gj RSHIFT 5 -> gm
|hq AND hs -> ht
|dy RSHIFT 1 -> er
|ao OR an -> ap
|ld OR le -> lf
|bk LSHIFT 1 -> ce
|bz AND cb -> cc
|bi LSHIFT 15 -> bm
|il AND in -> io
|af AND ah -> ai
|as RSHIFT 1 -> bl
|lf RSHIFT 3 -> lh
|er OR es -> et
|NOT ax -> ay
|ci RSHIFT 1 -> db
|et AND fe -> fg
|lg OR lm -> ln
|k AND m -> n
|hz RSHIFT 2 -> ia
|kh LSHIFT 1 -> lb
|NOT ey -> ez
|NOT di -> dj
|dz OR ef -> eg
|lx -> a
|NOT iz -> ja
|gz LSHIFT 15 -> hd
|ce OR cd -> cf
|fq AND fr -> ft
|at AND az -> bb
|ha OR gz -> hb
|fp AND fv -> fx
|NOT gb -> gc
|ia AND ig -> ii
|gl OR gm -> gn
|0 -> c
|NOT ca -> cb
|bn RSHIFT 1 -> cg
|c LSHIFT 1 -> t
|iw OR ix -> iy
|kg OR kf -> kh
|dy OR ej -> ek
|km AND kn -> kp
|NOT fc -> fd
|hz RSHIFT 3 -> ib
|NOT dq -> dr
|NOT fg -> fh
|dy RSHIFT 2 -> dz
|kk RSHIFT 2 -> kl
|1 AND fi -> fj
|NOT hr -> hs
|jp RSHIFT 1 -> ki
|bl OR bm -> bn
|1 AND gy -> gz
|gr AND gt -> gu
|db OR dc -> dd
|de OR dk -> dl
|as RSHIFT 5 -> av
|lf RSHIFT 5 -> li
|hm AND ho -> hp
|cg OR ch -> ci
|gj AND gu -> gw
|ge LSHIFT 15 -> gi
|e OR f -> g
|fp OR fv -> fw
|fb AND fd -> fe
|cd LSHIFT 15 -> ch
|b RSHIFT 1 -> v
|at OR az -> ba
|bn RSHIFT 2 -> bo
|lh AND li -> lk
|dl AND dn -> do
|eg AND ei -> ej
|ex AND ez -> fa
|NOT kp -> kq
|NOT lk -> ll
|x AND ai -> ak
|jp OR ka -> kb
|NOT jd -> je
|iy AND ja -> jb
|jp RSHIFT 3 -> jr
|fo OR fz -> ga
|df OR dg -> dh
|gj RSHIFT 2 -> gk
|gj OR gu -> gv
|NOT jh -> ji
|ap LSHIFT 1 -> bj
|NOT ls -> lt
|ir LSHIFT 1 -> jl
|bn AND by -> ca
|lv LSHIFT 15 -> lz
|ba AND bc -> bd
|cy LSHIFT 15 -> dc
|ln AND lp -> lq
|x RSHIFT 1 -> aq
|gk OR gq -> gr
|NOT kx -> ky
|jg AND ji -> jj
|bn OR by -> bz
|fl LSHIFT 1 -> gf
|bp OR bq -> br
|he OR hp -> hq
|et RSHIFT 5 -> ew
|iu RSHIFT 2 -> iv
|gl AND gm -> go
|x OR ai -> aj
|hc OR hd -> he
|lg AND lm -> lo
|lh OR li -> lj
|da LSHIFT 1 -> du
|fo RSHIFT 2 -> fp
|gk AND gq -> gs
|bj OR bi -> bk
|lf OR lq -> lr
|cj AND cp -> cr
|hu LSHIFT 15 -> hy
|1 AND bh -> bi
|fo RSHIFT 3 -> fq
|NOT lo -> lp
|hw LSHIFT 1 -> iq
|dd RSHIFT 1 -> dw
|dt LSHIFT 15 -> dx
|dy AND ej -> el
|an LSHIFT 15 -> ar
|aq OR ar -> as
|1 AND r -> s
|fw AND fy -> fz
|NOT im -> in
|et RSHIFT 3 -> ev
|1 AND ds -> dt
|ec AND ee -> ef
|NOT ak -> al
|jl OR jk -> jm
|1 AND en -> eo
|lb OR la -> lc
|iu AND jf -> jh
|iu RSHIFT 5 -> ix
|bo AND bu -> bw
|cz OR cy -> da
|iv AND jb -> jd
|iw AND ix -> iz
|lf RSHIFT 1 -> ly
|iu OR jf -> jg
|NOT dm -> dn
|lw OR lv -> lx
|gg LSHIFT 1 -> ha
|lr AND lt -> lu
|fm OR fn -> fo
|he RSHIFT 3 -> hg
|aj AND al -> am
|1 AND kz -> la
|dy RSHIFT 5 -> eb
|jc AND je -> jf
|cm AND co -> cp
|gv AND gx -> gy
|ev OR ew -> ex
|jp AND ka -> kc
|fk OR fj -> fl
|dy RSHIFT 3 -> ea
|NOT bs -> bt
|NOT ag -> ah
|dz AND ef -> eh
|cf LSHIFT 1 -> cz
|NOT cv -> cw
|1 AND cx -> cy
|de AND dk -> dm
|ck AND cl -> cn
|x RSHIFT 5 -> aa
|dv LSHIFT 1 -> ep
|he RSHIFT 2 -> hf
|NOT bw -> bx
|ck OR cl -> cm
|bp AND bq -> bs
|as OR bd -> be
|he AND hp -> hr
|ev AND ew -> ey
|1 AND lu -> lv
|kk RSHIFT 3 -> km
|b AND n -> p
|NOT kc -> kd
|lc LSHIFT 1 -> lw
|km OR kn -> ko
|id AND if -> ig
|ih AND ij -> ik
|jr AND js -> ju
|ci RSHIFT 5 -> cl
|hz RSHIFT 1 -> is
|1 AND ke -> kf
|NOT gs -> gt
|aw AND ay -> az
|x RSHIFT 2 -> y
|ab AND ad -> ae
|ff AND fh -> fi
|ci AND ct -> cv
|eq LSHIFT 1 -> fk
|gj RSHIFT 3 -> gl
|u LSHIFT 1 -> ao
|NOT bb -> bc
|NOT hj -> hk
|kw AND ky -> kz
|as AND bd -> bf
|dw OR dx -> dy
|br AND bt -> bu
|kk AND kv -> kx
|ep OR eo -> eq
|he RSHIFT 1 -> hx
|ki OR kj -> kk
|NOT ju -> jv
|ek AND em -> en
|kk RSHIFT 5 -> kn
|NOT eh -> ei
|hx OR hy -> hz
|ea OR eb -> ec
|s LSHIFT 15 -> w
|fo RSHIFT 1 -> gh
|kk OR kv -> kw
|bn RSHIFT 5 -> bq
|NOT ed -> ee
|1 AND ht -> hu
|cu AND cw -> cx
|b RSHIFT 5 -> f
|kl AND kr -> kt
|iq OR ip -> ir
|ci RSHIFT 2 -> cj
|cj OR cp -> cq
|o AND q -> r
|dd RSHIFT 5 -> dg
|b RSHIFT 2 -> d
|ks AND ku -> kv
|b RSHIFT 3 -> e
|d OR j -> k
|NOT p -> q
|NOT cr -> cs
|du OR dt -> dv
|kf LSHIFT 15 -> kj
|NOT ac -> ad
|fo RSHIFT 5 -> fr
|hz OR ik -> il
|jx AND jz -> ka
|gh OR gi -> gj
|kk RSHIFT 1 -> ld
|hz RSHIFT 5 -> ic
|as RSHIFT 2 -> at
|NOT jy -> jz
|1 AND am -> an
|ci OR ct -> cu
|hg AND hh -> hj
|jq OR jw -> jx
|v OR w -> x
|la LSHIFT 15 -> le
|dh AND dj -> dk
|dp AND dr -> ds
|jq AND jw -> jy
|au OR av -> aw
|NOT bf -> bg
|z OR aa -> ab
|ga AND gc -> gd
|hz AND ik -> im
|jt AND jv -> jw
|z AND aa -> ac
|jr OR js -> jt
|hb LSHIFT 1 -> hv
|hf OR hl -> hm
|ib OR ic -> id
|fq OR fr -> fs
|cq AND cs -> ct
|ia OR ig -> ih
|dd OR do -> dp
|d AND j -> l
|ib AND ic -> ie
|as RSHIFT 3 -> au
|be AND bg -> bh
|dd AND do -> dq
|NOT l -> m
|1 AND gd -> ge
|y AND ae -> ag
|fo AND fz -> gb
|NOT ie -> if
|e AND f -> h
|x RSHIFT 3 -> z
|y OR ae -> af
|hf AND hl -> hn
|NOT h -> i
|NOT hn -> ho
|he RSHIFT 5 -> hh""".stripMargin
sealed trait Expr
case class Resolved(value: Int) extends Expr
case class Variable(variable: String) extends Expr
case class And(left: String, right: String) extends Expr
case class LShift(expr: String, by: String) extends Expr
case class Not(expr: String) extends Expr
case class Or(left: String, right: String) extends Expr
case class RShift(expr: String, by: String) extends Expr
var number = "[0-9]+"
val variables = puzzleInput.split("\n").map(l => {
val assignment = "([a-z]+|[0-9]+) -> ([a-z]+)".r
val and = "([a-z]+|[0-9]+) AND ([a-z]+|[0-9]+) -> ([a-z]+)".r
val lshift = "([a-z]+) LSHIFT ([0-9]+) -> ([a-z]+)".r
val not = "NOT ([a-z]+) -> ([a-z]+)".r
val or = "([a-z]+) OR ([a-z]+) -> ([a-z]+)".r
val rshift = "([a-z]+) RSHIFT ([0-9]+) -> ([a-z]+)".r
var variable = "[a-z]+"
l match {
case assignment(n, variable) =>
if(n.matches(number)) (variable -> Resolved(n.toInt))
else variable -> Variable(n)
case and(l, r, variable) => variable -> And(l, r)
case lshift(input, n, variable) => variable -> LShift(input, n)
case not(input, variable) => variable -> Not(input)
case or(l, r, variable) => variable -> Or(l, r)
case rshift(input, n, variable) => variable -> RShift(input, n)
case _ => throw new RuntimeException(s"Unparsable instruction: $l")
}
}).toMap
// Memoize resolved wires; the circuit shares subexpressions, so uncached recursion blows up exponentially
var cache = Map[String, Int]()
def getVariableValue(v: String): Int = {
cache.getOrElse(v, {
val res = if(v.matches(number)) v.toInt
else variables(v) match {
case Resolved(x) => x
case Variable(v) => getVariableValue(v)
case And(x, y) => getVariableValue(x) & getVariableValue(y)
case Or(x, y) => getVariableValue(x) | getVariableValue(y)
case Not(x) => ~getVariableValue(x)
case LShift(x, y) => getVariableValue(x) << getVariableValue(y)
case RShift(x, y) => getVariableValue(x) >> getVariableValue(y)
}
cache += v -> res
res
})
}
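  // Worked example (a sketch, using the puzzle's sample wires rather than this input):
  // given "123 -> x", "456 -> y" and "x AND y -> d", getVariableValue("d") would resolve
  // x and y recursively, memoize both in `cache`, and return 123 & 456 == 72.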
val partA = getVariableValue("a")
println(partA)
  // Part 2: seed wire b with part 1's signal for a, then recompute everything else.
  cache = Map("b" -> getVariableValue("a"))
  val partB = getVariableValue("a")
println(partB)
}
| Cs4r/AdventOfCode | src/main/scala/cs4r/labs/learningscala/adventofcode/AdventOfCode7.scala | Scala | gpl-3.0 | 15,376 |
/*
* Copyright 2011-2018 GatlingCorp (http://gatling.io)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.gatling.http.action.async.sse
trait SseStream {
def close(): Unit
}
| wiacekm/gatling | gatling-http/src/main/scala/io/gatling/http/action/async/sse/SseStream.scala | Scala | apache-2.0 | 700 |
import scala.reflect.runtime.universe._
import scala.reflect.runtime.{currentMirror => cm}
class C
object C
object Test extends dotty.runtime.LegacyApp {
type T = C
println(showRaw(symbolOf[C].companion, printKinds = true))
println(showRaw(symbolOf[C].companion.companion, printKinds = true))
println(showRaw(symbolOf[C.type].companion, printKinds = true))
println(showRaw(symbolOf[T].companion, printKinds = true))
println(showRaw(cm.staticPackage("scala").moduleClass.companion, printKinds = true))
println(showRaw(cm.staticPackage("scala").companion, printKinds = true))
}
| yusuke2255/dotty | tests/pending/run/reflection-companion.scala | Scala | bsd-3-clause | 593 |
package name.abhijitsarkar.scala.scalaimpatient.types
import name.abhijitsarkar.scala.scalaimpatient.UnitSpec
class BugSpec extends UnitSpec {
"Making the bug move 4 units followed by 6 units" should "yield current position 10" in {
val bugsy = new Bug()
bugsy.move(4).move(6).currentPosition should be(10)
}
"Making the bug move 4 units and then turning around" should "yield current position 0" in {
val bugsy = new Bug()
bugsy.move(4).turn().currentPosition should be(0)
}
"Bug" should "support fluent style calls to move, show and turn" in {
val bugsy = new Bug()
    bugsy move 4 and Show and Then
    bugsy move 6 and Show turn Around move 5 and Show
}
} | abhijitsarkar/scala-impatient | src/test/scala/name/abhijitsarkar/scala/scalaimpatient/types/BugSpec.scala | Scala | gpl-3.0 | 700 |
package com.twitter.finagle.memcached.migration
import com.fasterxml.jackson.databind.ObjectMapper
import com.fasterxml.jackson.module.scala.DefaultScalaModule
import com.twitter.common.zookeeper.ZooKeeperClient
import com.twitter.finagle.Memcached
import com.twitter.finagle.cacheresolver.ZookeeperStateMonitor
import com.twitter.finagle.memcached._
import com.twitter.finagle.stats.{ClientStatsReceiver, NullStatsReceiver, StatsReceiver}
import com.twitter.finagle.zookeeper.DefaultZkClientFactory
import com.twitter.io.Buf
import com.twitter.util.{Future, Time}
/**
* migration config data
*/
private[memcached] object MigrationConstants {
case class MigrationConfig(
state: String,
readRepairBack: Boolean,
readRepairFront: Boolean)
val jsonMapper = new ObjectMapper().registerModule(DefaultScalaModule)
object MigrationState extends Enumeration {
type t = Value
val Pending, Warming, Verifying, Done = Value
}
object PoolNames extends Enumeration {
type t = Value
val OldPool, NewPool = Value
}
}
/**
 * Migration client. This client manages two cache clients representing the source and
 * destination cache pools. Depending on the migration state, this client may send dark traffic
 * to the destination pool to warm up the cache, or send light traffic to the destination pool
 * and fall back to the original pool for cache misses. The state transitions are controlled by
 * an operator setting the corresponding metadata in ZooKeeper.
 */
class MigrationClient(
oldClient: Client,
newClient: Client,
protected val zkPath: String,
protected val zkClient: ZooKeeperClient,
protected val statsReceiver: StatsReceiver = NullStatsReceiver
) extends ProxyClient with ZookeeperStateMonitor {
import MigrationConstants._
// private type def of FrontendClient to be actually a proxy client
class FrontendClient(client: Client) extends ProxyClient {
override def proxyClient = client
}
@volatile var proxyClient = new FrontendClient(oldClient)
override def applyZKData(data: Array[Byte]): Unit = synchronized {
val config = jsonMapper.readValue(data, classOf[MigrationConfig])
val migrationState = MigrationState.withName(config.state)
migrationState match {
case MigrationState.Pending =>
proxyClient = new FrontendClient(oldClient)
case MigrationState.Warming if (config.readRepairBack) =>
proxyClient = new FrontendClient(oldClient) with DarkRead with ReadWarmup with DarkWrite {
val backendClient = newClient
}
case MigrationState.Warming =>
proxyClient = new FrontendClient(oldClient) with DarkRead with DarkWrite {
val backendClient = newClient
}
case MigrationState.Verifying if (config.readRepairFront) =>
proxyClient = new FrontendClient(newClient) with FallbackRead with ReadRepair {
val backendClient = oldClient
}
case MigrationState.Verifying =>
proxyClient = new FrontendClient(newClient) with FallbackRead {
val backendClient = oldClient
}
case MigrationState.Done =>
proxyClient = new FrontendClient(newClient)
}
}
}
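// A hedged sketch (not in the original file) of the ZooKeeper payload that applyZKData
// consumes. The field names mirror MigrationConstants.MigrationConfig and the state names
// MigrationConstants.MigrationState; the concrete values here are made up:
//   {"state": "Warming", "readRepairBack": true, "readRepairFront": false}
// jsonMapper reads this as MigrationConfig("Warming", true, false), which the match above
// turns into a FrontendClient(oldClient) with DarkRead with ReadWarmup with DarkWrite.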
/**
* DarkRead client.
* requires backendClient;
 * override Get and Gets to send dark reads to the backend pool;
* backend read results are not blocking or exposed.
*/
trait DarkRead extends Client {
protected val backendClient: Client
abstract override def getResult(keys: Iterable[String]) = {
val frontResult = super.getResult(keys)
val backResult = backendClient.getResult(keys)
chooseGetResult(frontResult, backResult)
}
// DarkRead always choose the front result and ignore the backend one
protected def chooseGetResult(frontResult: Future[GetResult], backResult: Future[GetResult]): Future[GetResult] = {
frontResult
}
abstract override def getsResult(keys: Iterable[String]) = {
val frontResult = super.getsResult(keys)
val backResult = backendClient.getsResult(keys)
chooseGetsResult(frontResult, backResult)
}
// DarkRead always choose the front result and ignore the backend one
protected def chooseGetsResult(frontResult: Future[GetsResult], backResult: Future[GetsResult]): Future[GetsResult] = {
frontResult
}
abstract override def release() {
super.release()
backendClient.release()
}
}
/**
* ReadWarmup client.
* can only be mixed into DarkRead client;
 * before returning the frontend result, use it to warm up keys the backend missed;
 * backend warm-up is not blocking or exposed.
*/
trait ReadWarmup { self: DarkRead =>
override protected def chooseGetResult(frontResult: Future[GetResult], backResult: Future[GetResult]): Future[GetResult] = {
// when readRepairDark, hit on front should repair miss on back in the background
Future.join(frontResult, backResult) onSuccess {
case (frontR, backR) => backR.misses foreach {
case key if frontR.hits.contains(key) => backendClient.set(key, frontR.hits.get(key).get.value)
case _ =>
}
}
frontResult
}
override protected def chooseGetsResult(frontResult: Future[GetsResult], backResult: Future[GetsResult]): Future[GetsResult] = {
Future.join(frontResult, backResult) onSuccess {
case (frontR, backR) => backR.misses foreach {
case key if frontR.hits.contains(key) => backendClient.set(key, frontR.hits.get(key).get.value)
case _ =>
}
}
frontResult
}
}
/**
* DarkWrite client.
* requires backendClient;
 * override all write operations (except cas) to send dark writes to the backend pool;
* backend write results are not blocking or exposed.
*/
trait DarkWrite extends Client {
protected val backendClient: Client
abstract override def set(key: String, flags: Int, expiry: Time, value: Buf) = {
val result = super.set(key, flags, expiry, value)
backendClient.set(key, flags, expiry, value)
result
}
abstract override def add(key: String, flags: Int, expiry: Time, value: Buf) = {
val result = super.add(key, flags, expiry, value)
backendClient.add(key, flags, expiry, value)
result
}
abstract override def append(key: String, flags: Int, expiry: Time, value: Buf) = {
val result = super.append(key, flags, expiry, value)
backendClient.append(key, flags, expiry, value)
result
}
abstract override def prepend(key: String, flags: Int, expiry: Time, value: Buf) = {
val result = super.prepend(key, flags, expiry, value)
backendClient.prepend(key, flags, expiry, value)
result
}
abstract override def replace(key: String, flags: Int, expiry: Time, value: Buf) = {
val result = super.replace(key, flags, expiry, value)
backendClient.replace(key, flags, expiry, value)
result
}
abstract override def incr(key: String, delta: Long) = {
val result = super.incr(key, delta)
backendClient.incr(key, delta)
result
}
abstract override def decr(key: String, delta: Long) = {
val result = super.decr(key, delta)
backendClient.decr(key, delta)
result
}
// cas operation does not migrate
abstract override def checkAndSet(key: String, flags: Int, expiry: Time, value: Buf, casUnique: Buf) =
super.checkAndSet(key, flags, expiry, value, casUnique)
abstract override def delete(key: String) = {
val result = super.delete(key)
backendClient.delete(key)
result
}
abstract override def release() {
super.release()
backendClient.release()
}
}
/**
* FallbackRead client.
* requires backendClient;
 * override Get to read the backend pool on frontend pool misses;
 * Gets remains the same, as falling back to the backend pool for cas_unique is useless.
*/
trait FallbackRead extends Client {
protected val backendClient: Client
abstract override def getResult(keys: Iterable[String]) = {
val frontResult = super.getResult(keys)
frontResult flatMap {
case frontR if (frontR.misses.nonEmpty || frontR.failures.nonEmpty) =>
backendClient.getResult(frontR.misses ++ frontR.failures.keySet) map { backR =>
combineGetResult(frontR, backR)
}
case frontR => Future.value(frontR)
}
}
protected def combineGetResult(frontR: GetResult, backR: GetResult): GetResult = {
// when fallback, merge the front hits with back result
GetResult.merged(Seq(GetResult(frontR.hits), backR))
}
// Gets remains the same
abstract override def getsResult(keys: Iterable[String]) = {
super.getsResult(keys)
}
abstract override def release() {
super.release()
backendClient.release()
}
}
/**
* ReadRepair client.
* can only be mixed into FallbackRead client;
 * before combining the frontend and backend results, use the backend result to repair keys the frontend missed;
 * frontend repair is not blocking or exposed.
*/
trait ReadRepair { self: FallbackRead =>
override def combineGetResult(frontR: GetResult, backR: GetResult): GetResult = {
// when readrepair, use back hit to repair front miss
backR.hits foreach {
case (k, v) => set(k, v.value)
}
GetResult.merged(Seq(GetResult(frontR.hits), backR))
}
}
object MigrationClient {
def newMigrationClient(zkHosts: String, zkPath: String) = {
val zkClient = DefaultZkClientFactory.get(DefaultZkClientFactory.hostSet(zkHosts))._1
val oldPoolPath = zkPath+"/" + MigrationConstants.PoolNames.OldPool.toString
val newPoolPath = zkPath+"/" + MigrationConstants.PoolNames.NewPool.toString
// verify the format of the path (zkPath, zkClient)
assert(zkClient.get().exists(zkPath, false) != null)
assert(zkClient.get().exists(oldPoolPath, false) != null)
assert(zkClient.get().exists(newPoolPath, false) != null)
// create client for old and new pool
val oldClient = Memcached.client
.configured(Memcached.param.EjectFailedHost(false))
.newRichClient("twcache!"+zkHosts+"!"+oldPoolPath)
val newClient = Memcached.client
.configured(Memcached.param.EjectFailedHost(false))
.newRichClient("twcache!"+zkHosts+"!"+newPoolPath)
val migrationStatsReceiver = ClientStatsReceiver.scope("migrationclient")
// create MigrationClient, by oldClient newClient, (zkPath, zkClient)
new MigrationClient(oldClient, newClient, zkPath, zkClient, migrationStatsReceiver)
}
}
| lukiano/finagle | finagle-memcached/src/main/scala/com/twitter/finagle/memcached/migration/MigrationClient.scala | Scala | apache-2.0 | 10,345 |
package com.alexitc.coinalerts.modules
import com.alexitc.coinalerts.config._
import com.google.inject.AbstractModule
class ConfigModule extends AbstractModule {
override def configure(): Unit = {
bind(classOf[FixedPriceAlertConfig]).to(classOf[PlayFixedPriceAlertConfig])
bind(classOf[FixedPriceAlertsTaskConfig]).to(classOf[PlayFixedPriceAlertsTaskConfig])
bind(classOf[ExchangeCurrencySeederTaskConfig]).to(classOf[PlayExchangeCurrencySeederTaskConfig])
bind(classOf[AppConfig]).to(classOf[PlayAppConfig])
bind(classOf[JWTConfig]).to(classOf[PlayJWTConfig])
bind(classOf[MailgunConfig]).to(classOf[PlayMailgunConfig])
bind(classOf[ReCaptchaConfig]).to(classOf[PlayReCaptchaSecretKey])
}
}
| AlexITC/crypto-coin-alerts | alerts-server/app/com/alexitc/coinalerts/modules/ConfigModule.scala | Scala | gpl-3.0 | 727 |
package systems.opalia.commons.identifier
import java.nio.ByteBuffer
import java.util.UUID
import scala.util.Try
import systems.opalia.commons.application.SystemProperty
import systems.opalia.commons.codec.Hex
class UniversallyUniqueId private(protected val data: Vector[Byte])
extends Identifier[Byte] {
protected val string =
Hex.encode(data.slice(0, 4)) + "-" +
Hex.encode(data.slice(4, 6)) + "-" +
Hex.encode(data.slice(6, 8)) + "-" +
Hex.encode(data.slice(8, 10)) + "-" +
Hex.encode(data.slice(10, 16))
}
object UniversallyUniqueId
extends IdentifierCompanion[Byte, UniversallyUniqueId] {
def isValid(that: String): Boolean = {
val parts =
that.split("-")
if (parts.length != 5)
false
else
parts
.zip(List(4, 2, 2, 2, 6))
.forall(part => Hex.isValid(part._1) && part._1.length == part._2 * 2)
}
def isValid(that: Seq[Byte]): Boolean =
that.length == length
def length: Int =
16
def getNew: UniversallyUniqueId = {
val x =
UUID.randomUUID()
val bytes =
ByteBuffer.allocate(length)
.putLong(x.getMostSignificantBits) // 8 bytes
.putLong(x.getLeastSignificantBits) // 8 bytes
new UniversallyUniqueId(bytes.array.toVector)
}
def getFrom(that: String): UniversallyUniqueId =
getFromOpt(that)
.getOrElse(throw new IllegalArgumentException(s"Cannot generate UUID from: $that"))
def getFrom(that: Seq[Byte]): UniversallyUniqueId =
getFromOpt(that)
.getOrElse(throw new IllegalArgumentException(s"Cannot generate UUID from: $that"))
def getFromOpt(that: String): Option[UniversallyUniqueId] =
if (UniversallyUniqueId.isValid(that))
Try(UUID.fromString(that)).toOption.map((x) => {
val bytes =
ByteBuffer.allocate(length)
.putLong(x.getMostSignificantBits) // 8 bytes
.putLong(x.getLeastSignificantBits) // 8 bytes
new UniversallyUniqueId(bytes.array.toVector)
})
else
None
def getFromOpt(that: Seq[Byte]): Option[UniversallyUniqueId] =
if (UniversallyUniqueId.isValid(that))
Some(new UniversallyUniqueId(that.toVector))
else
None
def getFromName(that: String): UniversallyUniqueId =
getFromName(that.getBytes(SystemProperty.defaultCharset))
def getFromName(that: Seq[Byte]): UniversallyUniqueId = {
val x =
UUID.nameUUIDFromBytes(that.toArray)
val bytes =
ByteBuffer.allocate(length)
.putLong(x.getMostSignificantBits) // 8 bytes
.putLong(x.getLeastSignificantBits) // 8 bytes
new UniversallyUniqueId(bytes.array.toVector)
}
}
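// Hedged usage sketch (not in the original source):
//   val id = UniversallyUniqueId.getNew
//   id.toString                                 // "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" (8-4-4-4-12 hex)
//   UniversallyUniqueId.isValid(id.toString)    // true
//   UniversallyUniqueId.getFromOpt(id.toString) // Some(id), wrapping the same 16 bytes
// (that toString yields the hyphenated form is an assumption about the Identifier trait,
// which presumably exposes the protected `string` field)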
| OpaliaSystems/commons | src/main/scala/systems/opalia/commons/identifier/UniversallyUniqueId.scala | Scala | apache-2.0 | 2,662 |
// scalac: -Xsource:3.0
//
object Foo {
List(1,2,3).toSet()
class A[T](val x: T)
new A
import java.text.SimpleDateFormat
val sdf = new SimpleDateFormat("yyyyMMdd-HH0000")
sdf.format()
}
| martijnhoekstra/scala | test/files/neg/t8035-removed.scala | Scala | apache-2.0 | 200 |
package it.unibo.drescue.localModel
import scalafx.beans.property.StringProperty
/**
 * A class representing an entry in the rescue-teams list of the "manage rescues" view
*
* @param teamID_ the team ID
* @param teamName_ the team name
* @param phoneNumber_ the team phone number
* @param availability_ the team availability
* @param cpID_ the civil protection ID
* @param alertID_ the alert ID
*/
class EnrolledTeamInfo(teamID_ : String,
teamName_ : String,
phoneNumber_ : String,
availability_ : Boolean,
cpID_ : String,
alertID_ : String) {
val teamID = new StringProperty(this, "teamID", teamID_)
val teamName = new StringProperty(this, "teamName", teamName_)
val phoneNumber = new StringProperty(this, "phoneNumber", phoneNumber_)
val availability = new StringProperty(this, "availability", availability_.toString)
val cpID = new StringProperty(this, "cpID", cpID_)
val alertID = new StringProperty(this, "alertID", alertID_)
}
| SofiaRosetti/S3-16-d-rescue | civilprotection/src/main/scala/it/unibo/drescue/localModel/EnrolledTeamInfo.scala | Scala | gpl-3.0 | 1,096 |
/*
* Copyright (C) 2010 Romain Reuillon
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.openmole.core.serializer.converter
import collection.mutable
import com.thoughtworks.xstream.XStream
import collection.mutable.ListBuffer
trait Factory {
private val pool = new mutable.Stack[T]
private val _instantiated = ListBuffer.empty[T]
type T <: {
def xStream: XStream
def clean
}
def make: T
def initialize(t: T): T = t
def instantiated = synchronized(_instantiated.toList)
def borrow: T = synchronized {
if (!pool.isEmpty) pool.pop
else {
val t = initialize(make)
_instantiated += t
t
}
}
def release(serial: T) = synchronized {
try serial.clean
finally pool.push(serial)
}
def exec[A](f: T ⇒ A): A = {
val o = borrow
try f(o)
finally release(o)
}
}
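// Hedged sketch (not part of the original file): a minimal concrete factory. `Serializer`
// and its no-op `clean` are made up for illustration; Factory only requires the structural
// type (an `xStream` member plus a `clean` method):
//   object SerializerFactory extends Factory {
//     class Serializer { val xStream = new XStream; def clean = () }
//     type T = Serializer
//     def make = new Serializer
//   }
//   // exec borrows a pooled instance, applies the function, and releases (cleans) it
//   // even if the function throws:
//   SerializerFactory.exec(s => s.xStream.toXML(List(1, 2, 3)))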
| ISCPIF/PSEExperiments | openmole-src/openmole/core/org.openmole.core.serializer/src/main/scala/org/openmole/core/serializer/converter/Factory.scala | Scala | agpl-3.0 | 1,482 |
package demo
package pages
import demo.components.LeftNavPage
import demo.routes.{LeftRoute, ReactPopoverRouteModule}
import japgolly.scalajs.react._
import japgolly.scalajs.react.vdom.html_<^._
import japgolly.scalajs.react.extra.router.RouterCtl
object ReactPopoverPage {
case class Backend($ : BackendScope[Props, _]) {
def render(P: Props): VdomElement = {
LeftNavPage(ReactPopoverRouteModule.menu, P.selectedPage, P.ctrl)
}
}
val component = ScalaComponent
.builder[Props]("ReactPopOverPage")
.renderBackend[Backend]
.build
case class Props(selectedPage: LeftRoute, ctrl: RouterCtl[LeftRoute])
def apply(selectedPage: LeftRoute, ctrl: RouterCtl[LeftRoute]) =
component(Props(selectedPage, ctrl))
}
| rleibman/scalajs-react-components | demo/src/main/scala/demo/pages/ReactPopoverPage.scala | Scala | apache-2.0 | 749 |
package service
import play.api.db.slick.Config.driver.simple._
import scala.slick.lifted.Tag
import models.Post
import models.Comment
import javax.inject.Inject
import models.daos.UserDAO
import org.joda.time.DateTime
import java.util.UUID
trait PostService {
def findAll: List[Post]
def findAllPublished(inTitle: Option[String]): List[Post]
def findAllPublished(maxResults: Int, startIndex: Int): List[Post]
def countAllPublished: Int
def findAllUnpublished: List[Post]
def findSinglePost(date: DateTime, title: String): List[Post]
def find(postId: UUID): Post
def insert(post: Post)
def update(post: Post)
def delete(postId: UUID)
def deleteAll
def addComment(postId: UUID, comment: Comment)
}
| sne11ius/playlog | app/service/PostService.scala | Scala | gpl-3.0 | 757 |
package net.resonious.sburb.game
import java.io.File
import java.io.FileInputStream
import java.io.FileNotFoundException
import java.io.FileOutputStream
import java.io.IOException
import java.io.ObjectInputStream
import java.io.ObjectOutputStream
import java.lang.reflect.Field
import java.util.Scanner
import java.io.FilenameFilter
import scala.collection.mutable.ArrayBuffer
import scala.collection.mutable.HashMap
import scala.collection.mutable.HashSet
import scala.util.Random
import scala.util.{Success, Failure}
import scala.concurrent.Future
import scala.concurrent.ExecutionContext.Implicits.global
import scala.annotation.meta.param
import SburbGame._
import net.minecraft.entity.player.EntityPlayer
import net.minecraft.server.MinecraftServer
import net.resonious.sburb.Sburb
import net.resonious.sburb.abstracts.SburbException
import net.resonious.sburb.abstracts.Vector3
import net.resonious.sburb.game.grist.Grist
import net.resonious.sburb.Structure
import net.minecraft.world.World
import net.minecraft.item.ItemStack
import net.resonious.sburb.items.SburbDisc
import net.resonious.sburb.commands.SburbCommand.PlayerWithChat
import com.xcompwiz.mystcraft.api.impl.InternalAPI
class AlreadyHasServerException(plr: SburbGame.PlayerEntry) extends SburbException(
plr.name + " already has a server player: " + plr.server
) {
def playerEntry = plr
}
class AlreadyHasClientException(plr: SburbGame.PlayerEntry) extends SburbException(
plr.name + " already has a client player: " + plr.client
) {
def playerEntry = plr
}
class DifferentGameException(c: SburbGame.PlayerEntry, s: SburbGame.PlayerEntry) extends SburbException(
c.name + " and " + s.name + " are in different Sburb games."
)
class NotInGameException(p1: SburbProperties, p2: SburbProperties) extends SburbException(
{
var msgs = new ArrayBuffer[String]
val addToMsg = (p: SburbProperties) =>
msgs += p.player.getDisplayName+" is not in an SBURB game"
if (!p1.hasGame)
addToMsg(p1)
if (!p2.hasGame)
addToMsg(p2)
msgs mkString " | "
}
)
class HouseDoesNotExistException(houseName: String) extends SburbException(
"There is no house file "+houseName+".sst in the houses directory"
)
object SburbGame {
val rand = new Random
final val builtInHouseNames = Array("amber", "kyle", "neokyle", "r1", "ryan", "travis")
// Because writing `+ ".sburb"` is simply too much...
implicit class SburbFileString(str: String) {
def sburb() = str + ".sburb"
}
// Load an SBURB game from a file
def load(param: Any) = {
val fileName = param match {
case f: File => f.getPath
case gid: String => gid.sburb
case _ => throw new IllegalArgumentException("Can't load sburb game from " + param)
}
try {
var fileIn = new FileInputStream(fileName)
var in = new ObjectInputStream(fileIn)
in.readObject().asInstanceOf[SburbGame].onLoad()
} catch {
case e: FileNotFoundException => {
Sburb logWarning "Couldn't find "+fileName+"! Clearing SBURB data."
new SburbGame
}
case e: Exception => {
Sburb logError "Corrupt sburb file: "+e.getMessage
new File(fileName).delete()
null
}
}
}
// This is OLD SHIT.
def readHouseData(games: Iterable[SburbGame]): Unit = {
throw new SburbException("NO MORE HOUSES.DAT!")
}
// The place to throw people who aren't playing SBURB.
// I don't think this is even used.
var defaultSpawn = new Vector3[Int]
// Every house name.
// This either...
val allHouseNames = new ArrayBuffer[String]
// This structure contains all sburb-related state that used to be a part of SburbProperties
@SerialVersionUID(-4970160673116206907L)
class PlayerEntry(n:String = "", h:PlayerHouse = null) extends Serializable {
@transient private var _game: SburbGame = null
def game = _game
def game_=(g:SburbGame) =
if (_game != null)
throw new SburbException("Cannot assign game to PlayerEntry!")
else _game = g
def name = n
var server, client = ""
var mediumId = 0
var mediumColor: String = null
var mediumPointOfInterest: Vector3[Int] = null
var mediumCatacombsThemes: ArrayBuffer[Int] = new ArrayBuffer[Int]
// Accessed by Medium.scala
var lastPortalSpot: Vector3[Int] = null
// Assigned by teleporting to medium, accessed in SburbProperties#onJoin.
// This is in case the player crashes while house is generating.
var spawnPointDirty = false
var houseCurrentlyBeingMoved = false
// Used by the homestuck command to know where it's at.
var houseCurrentlyBeingGenerated = false
// Also for homestuck command, so that relogging players can get their disc
var needsSburbDisc = false
val grist = new HashMap[Grist.Value, Long]
val house: PlayerHouse = h
final def hasServer = !server.isEmpty
final def hasClient = !client.isEmpty
final def serverEntry = game entryOf server
final def clientEntry = game entryOf client
final def tryServerEntry = game tryEntryOf server
final def tryClientEntry = game tryEntryOf client
final def eachServer(f: (PlayerEntry) => Unit, stopAtName: String, doStop: Boolean): Unit = {
if (name.equalsIgnoreCase(stopAtName) && doStop) return
tryServerEntry match {
case Some(s) => {
f(s)
s.eachServer(f, stopAtName, true)
}
case None => return
}
}
final def eachServer(f: (PlayerEntry) => Unit): Unit = {
eachServer(f, name, false)
}
final def eachClient(f: (PlayerEntry) => Unit, stopAtName: String, doStop: Boolean): Unit = {
if (name.equalsIgnoreCase(stopAtName) && doStop) return
tryClientEntry match {
case Some(s) => {
f(s)
s.eachClient(f, stopAtName, true)
}
case None => return
}
}
final def eachClient(f: (PlayerEntry) => Unit): Unit = {
eachClient(f, name, false)
}
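    // Traversal sketch: with a client cycle A -> B -> C -> A (each entry's `client`
    // naming the next), A.eachClient(f) applies f to B, C, and then A itself; the
    // stopAtName guard fires only when the recursion re-enters A, so it prevents an
    // infinite loop rather than excluding the starting entry. eachServer is symmetric.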
// Gets the entities of the client / server players if they are online.
def serverPlayer =
if (hasServer)
Sburb.playerOfName(server)
else null
def clientPlayer =
if (hasClient)
Sburb.playerOfName(client)
else null
// This should be called before Sburb data is cleared so the house can be returned.
// ===
// At this point I think it doesn't matter TOO much if there are duplicate houses
def beforeClear() = {
// SburbGame.availableHouses += house
// Sburb log "Returned house "+house.name
}
private def str(s: String) = if (s.isEmpty) "---" else s
override def toString() = house.name+": "+str(server)+" -> "+str(name)+" -> "+str(client)
}
class PlayerHouse(_name:String, @(transient @param) world: World) extends Serializable {
var _spawn: Vector3[Int] = new Vector3[Int]
var minX: Int = 0
var maxX: Int = 0
var minZ: Int = 0
var maxZ: Int = 0
var minY: Int = 0
var centerY: Int = 0
// Flag for SburbServerMode to know when to refetch spawn and dimension
var wasMoved = false
@transient lazy val rand = new Random
def genTestCoord: Int = {
// NOTE Random#nextInt gives from 0 to max, and we want some negatives too.
val r = rand.nextInt(10000)
      r - 5000 // shift into [-5000, 4999] so negatives actually occur
}
final def placeAt(struct: Structure, world: World, point: Vector3[Int]) = {
minX = point.x - struct.centerOffset.x
maxX = point.x + struct.centerOffset.x
minZ = point.z - struct.centerOffset.z
maxZ = point.z + struct.centerOffset.z
centerY = struct.centerOffset.y
minY = point.y - struct.centerOffset.y
_spawn.x = minX + struct.spawnPoint.x
_spawn.y = minY + struct.spawnPoint.y
_spawn.z = minZ + struct.spawnPoint.z
// TODO this is super lame, but quicker than fixing the structure file...
if (_name == "kyle") _spawn.y += 1
// Some houses are small and not comfortable for the server player to move
// around within the boundary.
val minSize = 20
val halfMinSize = minSize / 2
val xDif = maxX - minX
val halfXDif = xDif / 2
if (xDif < minSize) {
maxX += halfMinSize - halfXDif
minX -= halfMinSize - halfXDif
}
val zDif = maxZ - minZ
val halfZDif = zDif / 2
if (zDif < minSize) {
maxZ += halfMinSize - halfZDif
minZ -= halfMinSize - halfZDif
}
Sburb log "Placing structure..."
struct.placeAt(world, point, false)
Sburb log "Done."
}
final def placeIntoWorld(struct: Structure, world: World, callback: (Vector3[Int]) => Unit, takingTooLong: (Int) => Unit): Unit =
placeIntoWorld(struct, world, 1, callback, takingTooLong)
final def placeIntoWorld(struct: Structure, world: World, callback: (Vector3[Int]) => Unit): Unit =
placeIntoWorld(struct, world, 1, callback, null)
final def placeIntoWorld(
struct: Structure,
world: World,
tryCount: Int,
callback: (Vector3[Int]) => Unit,
takingTooLong: (Int) => Unit
): Unit = {
// Right here... Place the house.
val testPoint = new Vector3[Int](genTestCoord, 150, genTestCoord)
val radius = 500
Sburb log "Checking ["+testPoint.x+", "+testPoint.y+", "+testPoint.z+"] with a "+radius+" block radius for house spawn point."
val acceptWater = InternalAPI.dimension.isMystcraftAge(world.provider.dimensionId)
struct.findReasonableSpawnPoint(world, testPoint, radius, acceptWater) onceDone {
case Some(point) => {
Sburb log "Found a spot! ["+point.x+", "+point.y+", "+point.z+"]"
placeAt(struct, world, point)
if (callback != null) callback(point)
}
case None => {
Sburb log "Try #"+tryCount+" - couldn't find any good spot."
if (takingTooLong != null) takingTooLong(tryCount)
if (tryCount >= 2) {
if (_whenFailedToPlace == null)
throw new SburbException("Tried 3 TIMES TO PLACE A DAMN HOUSE. BRUH.")
else {
_whenFailedToPlace(tryCount)
}
}
else
placeIntoWorld(struct, world, tryCount + 1, callback, takingTooLong)
}
}
}
def load() = {
try {
val struct = Structure.load("houses/"+_name+".sst")
placeIntoWorld(struct, world, {
point =>
// Sburb log "PLACED HOUSE"
if (_onceLoaded != null) _onceLoaded(point)
// else Sburb log "AND HAD NO CALLBACK!!!!!"
},
{ i => if (_whenTakingAwhile != null) _whenTakingAwhile(i) })
} catch {
case e: IOException => {
throw new HouseDoesNotExistException(_name)
}
}
}
@transient var _onceLoaded: (Vector3[Int]) => Unit = null
def onceLoaded(callback: (Vector3[Int]) => Unit) = _onceLoaded = callback
// This is kind of a hack so that you can optionally inform the player that
// the house is in fact still being generated...
@transient var _whenTakingAwhile: (Int) => Unit = null
def whenTakingAwhile(callback: (Int) => Unit) = _whenTakingAwhile = callback
// Just as hacky as the previous callback, this gets called when the thing
// fails to place entirely.
@transient var _whenFailedToPlace: (Int) => Unit = null
def whenFailedToPlace(callback: (Int) => Unit) = _whenFailedToPlace = callback
// So this now means the file name.
var name = _name
def spawn: Vector3[Int] = _spawn
@transient lazy val maxFields = getClass.getDeclaredFields filter {
_.getName contains "max" }
@transient lazy val minFields = getClass.getDeclaredFields filter {
_.getName contains "min" }
// Returns a string indicating which coordinate is out of bounds, and in
// which direction. i.e. "x>" if pos.x is greater than maxX
def outOfBounds(pos: Vector3[Double]): List[Symbol] = {
def findField(fields: Array[Field], symb: Symbol) = {
(fields find { f=>
f.getName endsWith symb.toString.toUpperCase()(1)+""
}).get
}
pos foreach { (s:Symbol, v:Double) =>
if (s != 'y) {
val max = findField(maxFields, s)
val min = findField(minFields, s)
max setAccessible true
min setAccessible true
if (v > max.getInt(this))
return s :: '> :: Nil
if (v < min.getInt(this))
return s :: '< :: Nil
}
}
Nil
}
}
}
class SburbGame(gid: String = "") extends Serializable {
@transient val rand = new Random
@transient var currentlySaving = false
var players = new HashMap[String, PlayerEntry]
def takenHouseNames = players.values map { _.house.name }
def onLoad() = {
players foreach { kv =>
val plr = kv._2
plr.game = this
checkPlayerGrist(plr)
}
this
}
def mediumColors() = players.values
.map(_.mediumColor)
.filter(_ != null)
// Makes sure the player's grist has all the correct grist types.
def checkPlayerGrist(plr: PlayerEntry) = {
Sburb log "Grist for "+plr.name+": "+plr.grist.toString
Grist.values filterNot plr.grist.keySet foreach {
plr.grist(_) = 0L
}
}
// If this is a new game; assign it a new game ID
var gameId = if(gid.isEmpty) {
var str = ""
rand.alphanumeric take 10 foreach { str += _ }
str
} else gid
def randomHouseName(): String = {
val houseFiles =
(new File("houses")).listFiles(new FilenameFilter {
override def accept(_dir: File, name: String): Boolean = name contains ".sst"
}) match {
case null => Array[String]()
case files => files.map(_.getName.replace(".sst", ""))
}
val houseNames: Array[String] = houseFiles ++ SburbGame.builtInHouseNames
houseNames.filterNot(n => takenHouseNames.exists(_ == n)) match {
// If all houses are taken, then we have no choice...
case Array() => { houseNames(SburbGame.rand.nextInt(houseNames.length)) }
// Otherwise don't produce duplicates.
case availableHouses => { availableHouses(SburbGame.rand.nextInt(availableHouses.length)) }
}
}
// Assign a client-server relationship. Players will be created if they don't exist.
// Also doesn't care whether or not these names are real players, so don't call this.
private def assignNames(client: String, server: String, force: Boolean = false) {
def assure(s: String) = {
entryOf(s) // Used to add players if not existent
}
val clientPlr = assure(client)
val serverPlr = assure(server)
if (!force) {
if (clientPlr.hasServer) throw new AlreadyHasServerException(clientPlr)
if (serverPlr.hasClient) throw new AlreadyHasClientException(serverPlr)
}
if (client equalsIgnoreCase server)
Sburb logWarning "Assigning "+client+" as his/her own server...."
clientPlr.server = server
serverPlr.client = client
save()
}
// This'll assign real players as client->server if it's a valid combo
def assign(client: EntityPlayer, server: EntityPlayer, force: Boolean = false) {
val clientProps = SburbProperties of client
val serverProps = SburbProperties of server
if (clientProps.hasGame && serverProps.hasGame) {
if (clientProps.gameId != serverProps.gameId)
// Client/server can never be assigned to players of
// separate games.
// TODO perhaps fuse the games, though?
// But... We don't actually have support for multiple games atm.
throw new DifferentGameException(entryOf(client), entryOf(server))
} else {
// Since houses must be assigned first, both players must be playing
// in order to become client / server
throw new NotInGameException(clientProps, serverProps)
}
assignNames(clientProps.playerName, serverProps.playerName, force)
}
// Add a new player with no associations to the game
def newPlayer(plr: Any, wantedHouse: Any, logError: Boolean = true):Boolean = {
var entityPlr: EntityPlayer = null
val name = plr match {
case ep: EntityPlayer => {
entityPlr = ep
entityPlr.getGameProfile.getName
}
case props: SburbProperties => {
entityPlr = props.player
entityPlr.getGameProfile.getName
}
case str: String => {
entityPlr = Sburb.playerOfName(str)
str
}
}
if (players contains name) {
if(logError) Sburb logError "Game "+gameId+" already has an entry for "+name
return false
}
if (entityPlr == null) {
if(logError) Sburb logError "Player "+name+" is not logged in"
return false
}
val house = wantedHouse match {
case s: String => new PlayerHouse(s, entityPlr.worldObj)
case h: PlayerHouse => h
}
house.load()
val newEntry = new PlayerEntry(name, house)
newEntry.game = this
players.put(name, newEntry)
checkPlayerGrist(newEntry)
if (entityPlr != null) {
(SburbProperties of entityPlr).game = this
}
true
}
// Get player entry of the given player, or throw exception if it isn't there
final def entryOf(plr: Any) = {
val aname = plr match {
case entityPlr: EntityPlayer => entityPlr.getGameProfile.getName
case str: String => str
}
// For testing once again...
val name = if (("Player\\\\d+".r findAllIn aname).length > 0) "Player" else aname
// TODO throw or return null?
try {
players(name)
} catch {
case e: NoSuchElementException => throw new SburbException("There is no entry for "+name+" in game "+gameId)
}
}
final def tryEntryOf(plr: Any): Option[PlayerEntry] = {
try Some(entryOf(plr)) catch {
case e: SburbException => None
}
}
// Of course, save to the appropriate file
def save() = if (!currentlySaving) {
currentlySaving = true
val gameData = this
val saving = Future {
var fileOut = new FileOutputStream(gameData.gameId.sburb)
var out = new ObjectOutputStream(fileOut)
out.writeObject(gameData)
out.close()
}
saving onComplete {
case Success(_) => gameData.currentlySaving = false
case Failure(e) => {
Sburb logError "ERROR WHILE SAVING: "+e.getMessage
gameData.currentlySaving = false
}
}
}
}
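// Hedged usage sketch (not in the original file); `clientPlr` and `serverPlr` stand in for
// two online EntityPlayer instances and are assumptions for illustration:
//   val game = new SburbGame()                         // fresh game with a random 10-char id
//   game.newPlayer(clientPlr, game.randomHouseName())  // loads and places the player's house
//   game.newPlayer(serverPlr, game.randomHouseName())
//   game.assign(clientPlr, serverPlr)                  // links client -> server and calls save()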
| Resonious/mcsburb | src/main/scala/net/resonious/sburb/game/SburbGame.scala | Scala | mit | 18,424 |
// Copyright 2011-2012 James Michael Callahan
// See LICENSE-2.0 file for licensing information.
package org.scalagfx.math2
import scala.math.{ abs, sin, cos }
import java.nio.{ FloatBuffer, DoubleBuffer }
/** An immutable coordinate frame defined by three basis vectors and an origin. */
trait FrameOps[@specialized(Double) Elem, Vec <: Vector, Pos <: Position, Repr <: Frame] {
/** A method that should be called from every well-designed equals method that is open
* to be overridden in a subclass.
*/
def canEqual(that: Any): Boolean
/** The component-wise comparison of whether the given coordinate frame in within a given
* epsilon of this coordinate frame.
*/
def equiv(that: Repr, epsilon: Double): Boolean
/** The component-wise comparison of whether the given coordinate frame is within a type
* specific epsilon of this coordinate frame.
*/
def equiv(that: Repr): Boolean
/** Transform a point in THIS coordinate frame to the identity (world space) coordinate
* frame.
*
* Equivalent to post-multiplying a column vector by the basis matrix and offset by
* origin.
*/
def xform(p: Pos): Pos
/** Transform a point in THIS coordinate frame to the identity (world space) coordinate
* frame.
*
* Equivalent to post-multiplying a column vector by the basis matrix and offset by
* origin.
*/
def *(p: Pos): Pos
/** Transform a direction in THIS coordinate frame to the identity (world space) coordinate
* frame.
*
* Equivalent to post-multiplying a column vector by the basis matrix.
*/
def xform(v: Vec): Vec
/** Transform a direction in THIS coordinate frame to the identity (world space) coordinate
* frame.
*
* Equivalent to post-multiplying a column vector by the basis matrix.
*/
def *(v: Vec): Vec
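  /* An illustrative sketch (Vec and Pos are abstract here, so this is the familiar 3-D
   * reading rather than part of the API): with basis vectors bx, by, bz and origin o,
   *   xform(p) = p.x*bx + p.y*by + p.z*bz + o    (points pick up the origin offset)
   *   xform(v) = v.x*bx + v.y*by + v.z*bz        (directions ignore the origin)
   * i.e. multiplication by the homogeneous matrix [B | o] with w = 1 for points and
   * w = 0 for vectors.
   */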
/** Concatenate (multiply) a coordinate frame (on the right) with this coordinate frame. */
def concat(that: Repr): Repr
/** Concatenate (multiply) a coordinate frame (on the right) with this coordinate frame. */
def *(that: Repr): Repr
/** Find the inverse (if possible) of this coordinate frame.
*
* The inverse being that coordinate frame that transforms points from the world (identity)
* coordinate frame to this one.
*/
def inverse(): Option[Repr]
/** Create a coordinate frame in which two virtual matrix rows have been exchanged.
*
* @param i1 The index of the row to swap.
* @param i2 The index of the row to swap.
*/
def rowOpI(i1: Int, i2: Int): Repr
/** Create a coordinate frame in which a given virtual matrix row is scaled by a
* constant factor.
*
   * @param i The index of the row to scale.
* @param scale The scaling factor.
*/
def rowOpII(i: Int, scale: Elem): Repr
/** Create a coordinate frame in which a multiple of one virtual matrix row is summed
* with another row.
*
* @param i1 The index of the row to change.
* @param i2 The index of the row to scale and sum.
* @param scale The scaling factor.
*/
def rowOpIII(i1: Int, i2: Int, scale: Elem): Repr
/** Tests whether the given predicate holds true for all components of this coordinate
* frame.
*/
def forall(p: (Elem) => Boolean): Boolean
/** Tests whether the given predicate holds true for all of the corresponding components
* of this and the given coordinate frame.
*/
def forall(that: Repr)(p: (Elem, Elem) => Boolean): Boolean
/** Tests whether the given predicate holds true for any components of this coordinate
* frame.
*/
def forany(p: (Elem) => Boolean): Boolean
/** Tests whether the given predicate holds true for any of the corresponding components
* of this and the given coordinate frame.
*/
def forany(that: Repr)(p: (Elem, Elem) => Boolean): Boolean
/** Applies a function to all components of this coordinate frame.
*
* @param f The function that is applied for its side-effect to every component.
*/
def foreach(f: (Elem) => Unit): Unit
/** Builds a new coordinate frame by applying a function to each component of this
* coordinate frame.
*/
def map(f: (Elem) => Elem): Repr
/** Convert to a nested list (basis vectors followed by origin) of the corresponding 4x4
* matrix.
*/
def toList: List[List[Elem]]
/** Convert to a nested array (basis vectors followed by origin) of the corresponding 4x4
* matrix.
*/
def toArray: Array[Array[Elem]]
  /** Add the component values (as Floats, basis vectors followed by origin) of the corresponding 4x4
    * matrix to the given native buffer, starting at the buffer's current position.
    */
  def putNative(buf: FloatBuffer)
  /** Add the component values (as Floats, basis vectors followed by origin) of the corresponding 4x4
    * matrix to the given native buffer, starting at the buffer's current position.
    */
  def >>>(buf: FloatBuffer)
  /** Add the component values (as Doubles, basis vectors followed by origin) of the corresponding 4x4
    * matrix to the given native buffer, starting at the buffer's current position.
    */
  def putNative(buf: DoubleBuffer)
  /** Add the component values (as Doubles, basis vectors followed by origin) of the corresponding 4x4
    * matrix to the given native buffer, starting at the buffer's current position.
    */
  def >>>(buf: DoubleBuffer)
}
| JimCallahan/Graphics | src/org/scalagfx/math2/FrameOps.scala | Scala | apache-2.0 | 5,350 |
package sample.usecase
import org.springframework.cache.annotation.Cacheable
import org.springframework.stereotype.Service
import sample.model.master._
/**
 * In-house (back-office) use cases for the service master domain.
 */
@Service
class MasterAdminService extends ServiceSupport {
  /** Retrieves a staff member. */
@Cacheable(Array("MasterAdminService.getStaff"))
def getStaff(id: String): Option[Staff] =
tx(implicit session => Staff.get(id))
  /** Retrieves a staff member's authorities. */
@Cacheable(Array("MasterAdminService.findStaffAuthority"))
def findStaffAuthority(staffId: String): List[StaffAuthority] =
tx(implicit session => StaffAuthority.findByStaffId(staffId))
  def registerHoliday(p: RegHoliday): Unit =
    audit.audit("休日情報を登録する", // audit label: "register holiday information"
      tx(implicit session => Holiday.register(p)))
}
| jkazama/sample-boot-scala | src/main/scala/sample/usecase/MasterAdminService.scala | Scala | mit | 863 |
/*
* Copyright (C) 2015 Romain Reuillon
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.openmole.tool.crypto
import java.io.{ File, FileInputStream, FileOutputStream }
import java.math.BigInteger
import java.security.{ KeyPairGenerator, SecureRandom }
import java.util.Date
import org.bouncycastle.asn1.pkcs.PrivateKeyInfo
import org.bouncycastle.asn1.x500.X500Name
import org.bouncycastle.asn1.x509.{ Extension, GeneralName, GeneralNames, SubjectPublicKeyInfo }
import org.bouncycastle.cert.X509v3CertificateBuilder
import org.bouncycastle.cert.jcajce.JcaX509CertificateConverter
import org.bouncycastle.crypto.util.PrivateKeyFactory
import org.bouncycastle.operator.bc.BcRSAContentSignerBuilder
import org.bouncycastle.operator.{ DefaultDigestAlgorithmIdentifierFinder, DefaultSignatureAlgorithmIdentifierFinder }
import org.bouncycastle.jce.provider.BouncyCastleProvider
object Certificate {
//Security.addProvider(new BouncyCastleProvider())
def loadOrGenerate(file: File, ksPassword: String, hostName: Option[String] = Some("OpenMOLE")) = {
val ks = java.security.KeyStore.getInstance(java.security.KeyStore.getDefaultType)
if (file.exists()) {
val fis = new FileInputStream(file)
try ks.load(fis, ksPassword.toCharArray)
finally fis.close
}
else {
ks.load(null, "".toCharArray)
val kpg = KeyPairGenerator.getInstance("RSA")
kpg.initialize(2048, new SecureRandom())
val kp = kpg.generateKeyPair()
val serialNumber = BigInteger.valueOf(System.currentTimeMillis())
val issuerDN = new X500Name(s"CN=${hostName getOrElse "cn"}, O=o, L=L, ST=il, C= c")
val subjectDN = new X500Name(s"CN=${hostName getOrElse "cn"}, O=o, L=L, ST=il, C= c")
val noBefore = new Date(System.currentTimeMillis() - 1000l * 60 * 60 * 24)
val noAfter = new Date(System.currentTimeMillis() + 1000l * 60 * 60 * 24 * 365 * 1000)
val subjectPublicInfo = SubjectPublicKeyInfo.getInstance(kp.getPublic.getEncoded)
val certificateBuilder = new X509v3CertificateBuilder(issuerDN, serialNumber, noBefore, noAfter, subjectDN, subjectPublicInfo)
val altNameExtension = Extension.subjectAlternativeName
val subjectAltName = new GeneralNames(new GeneralName(GeneralName.rfc822Name, "127.0.0.1"))
certificateBuilder.addExtension(altNameExtension, false, subjectAltName)
val sigAlgId = new DefaultSignatureAlgorithmIdentifierFinder().find("SHA256withRSA")
val digAlgId = new DefaultDigestAlgorithmIdentifierFinder().find(sigAlgId)
val privateKeyInfo = PrivateKeyInfo.getInstance(kp.getPrivate.getEncoded)
val signer = new BcRSAContentSignerBuilder(sigAlgId, digAlgId).build(PrivateKeyFactory.createKey(privateKeyInfo))
val holder = certificateBuilder.build(signer)
val cert = new JcaX509CertificateConverter().setProvider(new BouncyCastleProvider()).getCertificate(holder)
ks.setKeyEntry(hostName getOrElse "", kp.getPrivate, ksPassword.toCharArray, Array[java.security.cert.Certificate](cert))
val fos = new FileOutputStream(file)
try ks.store(fos, ksPassword.toCharArray)
finally fos.close
}
ks
}
}
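// Hedged usage sketch (not part of the original file); the file name and password are made up:
//   val ks = Certificate.loadOrGenerate(new File("openmole.jks"), "changeit")
//   ks.getKey("OpenMOLE", "changeit".toCharArray) // the generated RSA-2048 private key
// The first call creates the keystore with a self-signed SHA256withRSA certificate whose
// validity runs roughly 1000 years (see noAfter); subsequent calls simply reload the file.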
| openmole/openmole | openmole/third-parties/org.openmole.tool.crypto/src/main/scala/org/openmole/tool/crypto/Certificate.scala | Scala | agpl-3.0 | 3,790 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.mxnet
object CheckUtils {
def reldiff(a: NDArray, b: NDArray): Float = {
val diff = NDArray.sum(NDArray.abs(a - b)).toScalar
val norm = NDArray.sum(NDArray.abs(a)).toScalar
if (diff < Float.MinPositiveValue) diff else diff / norm
}
def reldiff(a: Array[Float], b: Array[Float]): Float = {
val diff =
(a zip b).map { case (aElem, bElem) => Math.abs(aElem - bElem) }.sum
val norm: Float = a.reduce(Math.abs(_) + Math.abs(_))
if (diff < Float.MinPositiveValue) diff else diff / norm
}
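  // Worked example (sketch): reldiff(Array(1f, 2f), Array(1f, 2.2f)) gives
  // diff = |1 - 1| + |2 - 2.2| = 0.2 and norm = |1| + |2| = 3, so the result is ~0.0667.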
}
| dmlc/mxnet | scala-package/core/src/test/scala/org/apache/mxnet/CheckUtils.scala | Scala | apache-2.0 | 1,345 |
package com.twitter.conversions
import scala.collection.{SortedMap, immutable, mutable}
/**
* Implicits for converting [[Map]]s.
*
* @example
* {{{
* import com.twitter.conversions.MapOps._
*
* Map(1 -> "a").mapKeys { _.toString }
* Map(1 -> "a").invert
* Map(1 -> "a", 2 -> "b").filterValues { _ == "b" }
* Map(2 -> "b", 1 -> "a").toSortedMap
* }}}
*/
object MapOps {
implicit class RichMap[K, V](val self: Map[K, V]) extends AnyVal {
def mapKeys[T](func: K => T): Map[T, V] = MapOps.mapKeys(self, func)
def invert: Map[V, Seq[K]] = MapOps.invert(self)
def invertSingleValue: Map[V, K] = MapOps.invertSingleValue(self)
def filterValues(func: V => Boolean): Map[K, V] =
MapOps.filterValues(self, func)
def filterNotValues(func: V => Boolean): Map[K, V] =
MapOps.filterNotValues(self, func)
def filterNotKeys(func: K => Boolean): Map[K, V] =
MapOps.filterNotKeys(self, func)
def toSortedMap(implicit ordering: Ordering[K]): SortedMap[K, V] =
MapOps.toSortedMap(self)
}
/**
* Transforms the keys of the map according to the given `func`.
*
* @param inputMap the map to transform the keys
* @param func the function literal which will be applied to the
* keys of the map
* @return the map with transformed keys and original values
*/
def mapKeys[K, V, T](inputMap: Map[K, V], func: K => T): Map[T, V] = {
for ((k, v) <- inputMap) yield {
func(k) -> v
}
}
/**
* Inverts the map so that the input map's values are the distinct keys and
* the corresponding keys, represented in a sequence, as the values.
*
* @param inputMap the map to invert
* @return the inverted map
*/
def invert[K, V](inputMap: Map[K, V]): Map[V, Seq[K]] = {
val invertedMapWithBuilderValues = mutable.Map.empty[V, mutable.Builder[K, Seq[K]]]
for ((k, v) <- inputMap) {
val valueBuilder = invertedMapWithBuilderValues.getOrElseUpdate(v, Seq.newBuilder[K])
valueBuilder += k
}
val invertedMap = immutable.Map.newBuilder[V, Seq[K]]
for ((k, valueBuilder) <- invertedMapWithBuilderValues) {
invertedMap += (k -> valueBuilder.result())
}
invertedMap.result()
}
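  // e.g. invert(Map(1 -> "a", 2 -> "a", 3 -> "b")) == Map("a" -> Seq(1, 2), "b" -> Seq(3))
  // (each Seq preserves the input map's iteration order)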
/**
* Inverts the map so that every input map value becomes the key and the
* input map key becomes the value.
*
* @param inputMap the map to invert
* @return the inverted map
*/
def invertSingleValue[K, V](inputMap: Map[K, V]): Map[V, K] = {
inputMap map { _.swap }
}
/**
* Filters the pairs in the map which values satisfy the predicate
* represented by `func`.
*
* @param inputMap the map which will be filtered
* @param func the predicate that needs to be satisfied to
* select a key-value pair
* @return the filtered map
*/
def filterValues[K, V](inputMap: Map[K, V], func: V => Boolean): Map[K, V] = {
inputMap filter {
case (_, value) =>
func(value)
}
}
/**
* Filters the pairs in the map which values do NOT satisfy the
* predicate represented by `func`.
*
* @param inputMap the map which will be filtered
* @param func the predicate that needs to be satisfied to NOT
* select a key-value pair
* @return the filtered map
*/
def filterNotValues[K, V](inputMap: Map[K, V], func: V => Boolean): Map[K, V] =
filterValues(inputMap, (v: V) => !func(v))
/**
* Filters the pairs in the map which keys do NOT satisfy the
* predicate represented by `func`.
*
* @param inputMap the map which will be filtered
* @param func the predicate that needs to be satisfied to NOT
* select a key-value pair
* @return the filtered map
*/
def filterNotKeys[K, V](inputMap: Map[K, V], func: K => Boolean): Map[K, V] = {
// use filter instead of filterKeys(deprecated since 2.13.0) for cross-building.
inputMap.filter { case (key, _) => !func(key) }
}
/**
* Sorts the map by the keys and returns a SortedMap.
*
* @param inputMap the map to sort
* @param ordering the order in which to sort the map
* @return a SortedMap
*/
def toSortedMap[K, V](inputMap: Map[K, V])(implicit ordering: Ordering[K]): SortedMap[K, V] = {
SortedMap[K, V](inputMap.toSeq: _*)
}
}
| twitter/util | util-core/src/main/scala/com/twitter/conversions/MapOps.scala | Scala | apache-2.0 | 4,289 |
/*
* Copyright 2012 Simon Kelly
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.google.safebrowsing2.parsers
import scala.util.parsing.combinator._
import scala.util.parsing.input.{ Position, Reader }
import scala.util.parsing.input.CharArrayReader.EofCh
import scala.annotation.tailrec
import java.lang.Float.intBitsToFloat
import java.lang.Double.longBitsToDouble
import util.Helpers._
import java.io.EOFException
/**
* @see http://www.scala-lang.org/node/4693
*/
trait ParsersUtil extends Parsers {
lazy val anyElem: Parser[Elem] = elem("anyElem", _ => true)
def elemExcept(xs: Elem*): Parser[Elem] = elem("elemExcept", x => !(xs contains x))
def elemOf(xs: Elem*): Parser[Elem] = elem("elemOf", xs contains _)
def take(n: Int): Parser[Seq[Elem]] = repN(n, anyElem)
def takeUntil(cond: Parser[Elem]): Parser[Seq[Elem]] = takeUntil(cond, anyElem)
def takeUntil(cond: Parser[Elem], p: Parser[Elem]): Parser[Seq[Elem]] = rep(not(cond) ~> p)
def takeWhile(p: Parser[Elem]): Parser[Seq[Elem]] = rep(p)
}
case class ByteOffsetPosition(offset: Int) extends Position {
final val line = 1
def column = offset + 1
def lineContents: String = ""
}
class ByteReader(val bytes: Array[Byte], override val offset: Int) extends Reader[Byte] {
def this(reader: Reader[_]) = this(reader.source.toString.getBytes, 0)
def this(bytes: Seq[Byte]) = this(bytes.toArray, 0)
def this(str: String) = this(str.getBytes, 0)
def first: Byte = {
if (offset < bytes.length) {
bytes(offset)
} else {
throw new EOFException
}
}
def rest: ByteReader = if (atEnd) this else new ByteReader(bytes, offset + 1)
def pos: Position = ByteOffsetPosition(offset)
def atEnd = offset >= bytes.length
def byteAt(n: Int) = bytes(n)
def length = bytes.length - offset
override def drop(n: Int): ByteReader = new ByteReader(bytes, offset + n)
def take(n: Int): Seq[Byte] = bytes drop offset take n
override def toString = "ByteReader(%d / %d)".format(offset, bytes.length)
}
trait BinaryParsers extends Parsers with ParsersUtil {
type Elem = Byte
protected implicit def readerToByteReader(x: Input): ByteReader = x match {
case br: ByteReader => br
case _ => new ByteReader(x)
}
override def acceptIf(p: Elem => Boolean)(err: Elem => String): Parser[Elem] = Parser { in =>
try {
if (p(in.first)) {
Success(in.first, in.rest)
} else {
Failure(err(in.first), in)
}
} catch {
case e: EOFException => Failure("EOF unexpected", in)
}
}
override def acceptMatch[U](expected: String, f: PartialFunction[Elem, U]): Parser[U] = Parser{ in =>
try {
if (f.isDefinedAt(in.first)) {
Success(f(in.first), in.rest)
} else {
Failure(expected + " expected", in)
}
} catch {
case e: EOFException => Failure("EOF unexpected: " + expected + " expected", in)
}
}
def toInt(bytes: Seq[Byte]): Int = bytes.foldLeft(0)((x, b) => (x << 8) + (b & 0xFF))
def toLong(bytes: Seq[Byte]): Long = bytes.foldLeft(0L)((x, b) => (x << 8) + (b & 0xFF))
def toString(bytes: Seq[Byte]): String = new String(bytes.toArray)
def toHex(bytes: Seq[Byte]): String = bytes2Hex(bytes)
def asciiInt(byte: Seq[Byte]): Int = toString(byte).toInt
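  // Worked example (sketch): toInt(Seq(0x01.toByte, 0x02.toByte)) == 258; the fold is
  // big-endian, so u2/u4/u8 parse 2-, 4- and 8-byte network-byte-order integers.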
lazy val byte: Parser[Byte] = anyElem
lazy val u1: Parser[Int] = byte ^^ (_ & 0xFF)
lazy val u2: Parser[Int] = bytes(2) ^^ toInt
lazy val u4: Parser[Int] = bytes(4) ^^ toInt
lazy val u4f: Parser[Float] = u4 ^^ intBitsToFloat
lazy val u8: Parser[Long] = bytes(8) ^^ toLong
lazy val u8d: Parser[Double] = u8 ^^ longBitsToDouble
def bytes(n: Int): Parser[Seq[Byte]] = Parser { in =>
if (n <= in.length) Success(in take n, in drop n)
else Failure("Requested %d bytes but only %d remain".format(n, in.length), in)
}
override def phrase[T](p: Parser[T]): Parser[T] =
super.phrase(p <~ opt(elem(EofCh)))
/** Parse all of character sequence `in` with parser `p`. */
def parseAll[T](p: Parser[T], in: Input): ParseResult[T] =
parse(phrase(p), in)
def parse[T](p: Parser[T], in: Input): ParseResult[T] = p(in)
def parseAll[T](p: Parser[T], in: String): ParseResult[T] = parseAll(p, new ByteReader(in))
} | snopoke/google-safebrowsing2 | src/main/scala/net/google/safebrowsing2/parsers/BinaryParsers.scala | Scala | apache-2.0 | 4,751 |
package phenan.prj
import phenan.prj.exception.InvalidTypeException
trait JMembers {
this: JClassLoader with Syntax with JModules with JErasedTypes with Application =>
trait JMember {
def modifier: JModifier
def declaring: JModule
def isPrivate: Boolean = modifier.check(JModifier.accPrivate)
def isProtected: Boolean = modifier.check(JModifier.accProtected)
def isPublic: Boolean = modifier.check(JModifier.accPublic)
}
case class JField(fieldDef: JFieldDef, fieldType: JType, declaring: JModule) extends JMember {
def modifier: JModifier = fieldDef.mod
def name: String = fieldDef.name
def declaringClass: JClass = fieldDef.declaringClass
}
trait JProcedure extends JMember {
def methodDef: JMethodDef
def env: MetaArgs
def modifier: JModifier = methodDef.mod
def declaringClass: JClass = methodDef.declaringClass
lazy val metaParameters: Map[String, FormalMetaParameter] = methodDef.signature.metaParams.map(param => param.name -> param).toMap
lazy val returnType: JGenericType = JGenericType(methodDef.signature.returnType, env)
lazy val returnBounds: List[JGenericType] = methodDef.signature.returnBounds.map(sig => JGenericType(sig, env))
lazy val parameterTypes: List[JParameter] = methodDef.signature.parameters.map(sig => JParameter(sig, env))
lazy val exceptionTypes: List[JGenericType] = methodDef.signature.throwTypes.map(sig => JGenericType(sig, env))
lazy val activates: List[JGenericType] = methodDef.signature.activates.map(sig => JGenericType(sig, env))
lazy val deactivates: List[JGenericType] = methodDef.signature.deactivates.map(sig => JGenericType(sig, env))
lazy val requires: List[JGenericType] = methodDef.signature.requires.map(sig => JGenericType(sig, env))
lazy val syntax: Option[JSyntax] = methodDef.syntax.map {
case JExpressionSyntaxDef(p, s) => JExpressionSyntax(p, translatePattern(s, Nil, parameterTypes))
case JLiteralSyntaxDef(p, s) => JLiteralSyntax(p, translatePattern(s, Nil, parameterTypes))
case JStatementSyntaxDef(_, _) => ???
}
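    // translatePattern (below) zips the declared syntax elements with the formal
    // parameters: each hole consumes one parameter, operator names and
    // predicates consume none, and arity mismatches are reported through
    // errorAndReturn rather than thrown.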
private def translatePattern(pattern: List[JSyntaxElementDef], result: List[JSyntaxElement], restParameters: List[JParameter]): List[JSyntaxElement] = pattern match {
case (hole: JHoleDef) :: rest => restParameters match {
case param :: ps => translatePattern(rest, result :+ translateHole(hole, param), ps)
case Nil => errorAndReturn("corresponding parameter cannot be found", result)
}
case JOperatorNameDef(name) :: rest => translatePattern(rest, result :+ JOperatorName(name), restParameters)
case (mv: JMetaValueRefDef) :: rest => translateMetaValueRef(mv) match {
case Some(e) => translatePattern(rest, result :+ e, restParameters)
case None => errorAndReturn("meta parameter " + mv.name + " cannot be found", result)
}
case (pred: JPredicateDef) :: rest => translatePattern(rest, result :+ translatePredicate(pred), restParameters)
case Nil if restParameters.isEmpty => result
case Nil => errorAndReturn("corresponding operand cannot be found", result)
}
private def translateHole(elem: JHoleDef, param: JParameter): JSyntaxElement = elem match {
case JOperandDef(p) => JOperand(param, p)
case JOptionalOperandDef(p) => JOptionalOperand(param, p)
case JRepetition0Def(p) => JRepetition0(param, p)
case JRepetition1Def(p) => JRepetition1(param, p)
case JRegexNameDef(name) => JRegexName(name)
}
private def translateMetaValueRef(mv: JMetaValueRefDef): Option[JSyntaxElement] = {
if (env.contains(mv.name)) Some(JMetaName(mv.name, env(mv.name), mv.priority))
else if (metaParameters.contains(mv.name)) {
val mp = metaParameters(mv.name)
Some(JMetaOperand(mv.name, JParameter(mp.metaType, env), mv.priority))
}
else None
}
private def translatePredicate(elem: JPredicateDef): JSyntaxElement = elem match {
case JAndPredicateDef(sig, p) => JAndPredicate(JParameter(sig, env), p)
case JNotPredicateDef(sig, p) => JNotPredicate(JParameter(sig, env), p)
}
}
class JMethod(val methodDef: JMethodDef, val env: MetaArgs, val declaring: JModule) extends JProcedure {
def name: String = methodDef.name
def erasedReturnType: JErasedType = methodDef.erasedReturnType
def erasedParameterTypes: List[JErasedType] = methodDef.erasedParameterTypes
def overrides(that: JMethod): Boolean = {
this.name == that.name && this.erasedReturnType.isSubclassOf(that.erasedReturnType) && this.erasedParameterTypes == that.erasedParameterTypes
}
}
class JConstructor(val methodDef: JMethodDef, val env: MetaArgs, val declaring: JObjectType) extends JProcedure
case class JParameter(signature: JParameterSignature, env: MetaArgs) {
lazy val contexts: List[JGenericType] = signature.contexts.map(sig => JGenericType(sig, env))
lazy val withoutCts: List[JGenericType] = signature.without.map(sig => JGenericType(sig, env))
lazy val genericType: JGenericType = JGenericType(signature.typeSig, env)
lazy val actualGenericType: JGenericType = JGenericType(signature.actualTypeSignature, env)
lazy val scopes: List[JGenericType] = signature.scopes.map(sig => JGenericType(sig, env))
def varArgs: Boolean = signature.varArgs
def defaultArg: Option[String] = signature.defaultArg
}
object JParameter {
def apply(sig: JTypeSignature, env: MetaArgs): JParameter = {
JParameter(JParameterSignature(Nil, Nil, sig, false, None, Nil), env)
}
}
} | csg-tokyo/proteaj2 | src/main/scala/phenan/prj/JMembers.scala | Scala | mit | 5,609 |
/* Copyright 2009-2015 EPFL, Lausanne
*
* Author: Ravi
* Date: 20.11.2013
**/
import leon.lang._
import leon.collection._
object Heaps {
sealed abstract class Heap {
val rank : BigInt = this match {
case Leaf() => 0
case Node(_, l, r) =>
1 + max(l.rank, r.rank)
}
def content : Set[BigInt] = this match {
case Leaf() => Set[BigInt]()
case Node(v,l,r) => l.content ++ Set(v) ++ r.content
}
}
case class Leaf() extends Heap
case class Node(value:BigInt, left: Heap, right: Heap) extends Heap
def max(i1 : BigInt, i2 : BigInt) = if (i1 >= i2) i1 else i2
def hasHeapProperty(h : Heap) : Boolean = h match {
case Leaf() => true
case Node(v, l, r) =>
( l match {
case Leaf() => true
case n@Node(v2,_,_) => v >= v2 && hasHeapProperty(n)
}) &&
( r match {
case Leaf() => true
case n@Node(v2,_,_) => v >= v2 && hasHeapProperty(n)
})
}
def hasLeftistProperty(h: Heap) : Boolean = h match {
case Leaf() => true
case Node(_,l,r) =>
hasLeftistProperty(l) &&
hasLeftistProperty(r) &&
l.rank >= r.rank
}
def heapSize(t: Heap): BigInt = { t match {
case Leaf() => BigInt(0)
case Node(v, l, r) => heapSize(l) + 1 + heapSize(r)
}} ensuring(_ >= 0)
private def merge(h1: Heap, h2: Heap) : Heap = {
require(
hasLeftistProperty(h1) && hasLeftistProperty(h2) &&
hasHeapProperty(h1) && hasHeapProperty(h2)
)
(h1,h2) match {
case (Leaf(), _) => h2
case (_, Leaf()) => h1
case (Node(v1, l1, r1), Node(v2, l2, r2)) =>
if(v1 >= v2)
makeN(v1, l1, merge(r1, h2))
else
makeN(v2, l1, merge(h1, r2)) // FIXME: l1 instead of l2
}
} ensuring { res =>
hasLeftistProperty(res) && hasHeapProperty(res) &&
heapSize(h1) + heapSize(h2) == heapSize(res) &&
h1.content ++ h2.content == res.content
}
private def makeN(value: BigInt, left: Heap, right: Heap) : Heap = {
require(
hasLeftistProperty(left) && hasLeftistProperty(right)
)
if(left.rank >= right.rank)
Node(value, left, right)
else
Node(value, right, left)
} ensuring { res =>
hasLeftistProperty(res) }
def insert(element: BigInt, heap: Heap) : Heap = {
require(hasLeftistProperty(heap) && hasHeapProperty(heap))
merge(Node(element, Leaf(), Leaf()), heap)
} ensuring { res =>
hasLeftistProperty(res) && hasHeapProperty(res) &&
heapSize(res) == heapSize(heap) + 1 &&
res.content == heap.content ++ Set(element)
}
def findMax(h: Heap) : Option[BigInt] = {
h match {
case Node(m,_,_) => Some(m)
case Leaf() => None()
}
}
def removeMax(h: Heap) : Heap = {
require(hasLeftistProperty(h) && hasHeapProperty(h))
h match {
case Node(_,l,r) => merge(l, r)
case l => l
}
} ensuring { res =>
hasLeftistProperty(res) && hasHeapProperty(res)
}
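  // Usage sketch (illustrative, not part of the original benchmark): repeated
  // insertion from the empty heap keeps both invariants, e.g.
  //   findMax(insert(3, insert(1, Leaf()))) == Some(3)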
}
| regb/leon | testcases/synt2016/repair/Heap/Heap6.scala | Scala | gpl-3.0 | 2,977 |
/*
Copyright 2016 ScalABM
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package org.economicsl.agora.markets.auctions
import org.economicsl.agora.markets.tradables.orders.Persistent
import org.economicsl.agora.markets.tradables.orders.ask.AskOrder
import org.economicsl.agora.markets.tradables.orders.bid.BidOrder
/** Trait defining a partial interface for a two-sided, posted price auction.
*
* @tparam A a sub-type of `AskOrder with Persistent`.
* @tparam B a sub-type of `BidOrder with Persistent`.
*/
trait TwoSidedAuctionLike[A <: AskOrder with Persistent, B <: BidOrder with Persistent] {
def cancel(order: A): Option[A]
def cancel(order: B): Option[B]
def place(order: A): Unit
def place(order: B): Unit
}
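// Minimal in-memory sketch of an implementor (illustrative; the Set-based
// order books are assumptions, not part of this trait):
//   class InMemoryAuction[A <: AskOrder with Persistent, B <: BidOrder with Persistent]
//       extends TwoSidedAuctionLike[A, B] {
//     private[this] var asks = Set.empty[A]
//     private[this] var bids = Set.empty[B]
//     def cancel(order: A): Option[A] = if (asks(order)) { asks -= order; Some(order) } else None
//     def cancel(order: B): Option[B] = if (bids(order)) { bids -= order; Some(order) } else None
//     def place(order: A): Unit = asks += order
//     def place(order: B): Unit = bids += order
//   }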
| EconomicSL/agora | src/main/scala/org/economicsl/agora/markets/auctions/TwoSidedAuctionLike.scala | Scala | apache-2.0 | 1,218 |
/*
* Copyright 2016 The BigDL Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.bigdl.dllib.optim.parameters
import com.intel.analytics.bigdl.dllib.utils.Engine
import com.intel.analytics.bigdl.dllib.tensor.Tensor
import com.intel.analytics.bigdl.dllib.tensor.TensorNumericMath.TensorNumeric
private[bigdl] object Util {
  /** Get the sum of squares of a tensor's elements in parallel, which performs
   * better when the tensor is large.
   * @param parameters the tensor to reduce
   * @param parallelism the number of parallel tasks to split the work into
   * @return the sum of squares of the tensor's elements
   */
def getSumsquareInParallel[T](parameters: Tensor[T], parallelism: Int)
(implicit ev: TensorNumeric[T]): Double = {
val gradLength = parameters.nElement()
val taskSize = gradLength / parallelism
val extraTask = gradLength % parallelism
val parallelNum = if (taskSize == 0) extraTask else parallelism
val squares = new Array[Double](parallelNum)
Engine.default.invokeAndWait((0 until parallelNum).map(tid => () => {
val offset = tid * taskSize + math.min(tid, extraTask)
val length = taskSize + (if (tid < extraTask) 1 else 0)
squares(tid) = ev.toType[Double](
parameters.narrow(1, offset + 1, length).sumSquare())
}))
var sum = 0.0
var i = 0
while (i < parallelNum) {
sum += squares(i)
i += 1
}
sum
}
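  // Usage sketch (illustrative): squared L2 norm of a gradient tensor split
  // across four tasks.
  //   val normSq = Util.getSumsquareInParallel(grad, parallelism = 4)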
}
| intel-analytics/BigDL | scala/dllib/src/main/scala/com/intel/analytics/bigdl/dllib/optim/parameters/Util.scala | Scala | apache-2.0 | 1,895 |
package fpinscala.testing
import java.util.concurrent.Executors
import fpinscala.state.RNG
import fpinscala.state.RNG.SimpleRNG
import org.scalatest.prop.PropertyChecks
import org.scalatest.{FlatSpec, Matchers}
class GenSpec extends FlatSpec with Matchers with PropertyChecks {
// Exercise 4
"Gen" should "generate integers from the given range" in {
forAll { seed: Long =>
val result = Gen.choose(-10, 10).sample.run(SimpleRNG(seed))._1
result should be >= -10
result should be < 10
}
}
// Exercise 5
it should "generate constant value (unit)" in {
forAll { seed: Long =>
Gen.unit(1).sample.run(SimpleRNG(seed))._1 shouldBe 1
}
}
it should "generate boolean values" in {
Gen.boolean.sample.run(SimpleRNG(0))._1 shouldBe true
Gen.boolean.sample.run(SimpleRNG(2))._1 shouldBe false
}
it should "generate a random list of size n" in {
forAll { (seed: Long, n: Byte) =>
whenever(n > 0) {
Gen
.listOfN(n.toInt, Gen.boolean)
.sample
.run(SimpleRNG(seed))
._1 should have size n
}
}
}
// Exercise 6
it should "implement flatMap" in {
Gen
.choose(0, 99)
.flatMap(i => Gen.unit(i % 2 == 0))
.sample
.run(SimpleRNG(0))
._1 shouldBe true
}
it should "generate a list from Gen using flatMap" in {
forAll { (seed: Long, n: Byte) =>
whenever(n > 0) {
Gen.boolean
.listOfN(n.toInt)
.sample
.run(SimpleRNG(seed))
._1 should have size n
}
}
}
// Exercise 7
it should "combine two generators by pulling values from each with equal likelihood" in {
val (result, _) = (0 until 100).foldLeft((0, SimpleRNG(0): RNG)) {
(acc, _) =>
Gen.union(Gen.unit(1), Gen.unit(-1)).sample.run(acc._2)
}
result shouldBe (0 +- 3)
}
// Exercise 8
it should "combine two generators by pulling values from each with equal likelihood using weighted" in {
val (result, _) = (0 until 100).foldLeft((0, SimpleRNG(0): RNG)) {
(acc, _) =>
Gen.weighted((Gen.unit(1), 1.0), (Gen.unit(-1), 1.0)).sample.run(acc._2)
}
result shouldBe (0 +- 3)
}
it should "combine two generators by pulling values from each with likelihood according to weights" in {
val (result, _) = (0 until 100).foldLeft((0.0, SimpleRNG(0): RNG)) {
(acc, _) =>
Gen
.weighted((Gen.unit(0.25), 4.0), (Gen.unit(-1.0), 1.0))
.sample
.run(acc._2)
}
result shouldBe (0.0 +- 3)
}
// Exercise 11
"SGen" should "implement flatMap" in {
Gen
.choose(0, 99)
.unsized
.flatMap(i => Gen.unit(i % 2 == 0))
.forSize(1)
.sample
.run(SimpleRNG(0))
._1 shouldBe true
}
it should "zip two SGen together" in {
(Gen.unit(1).unsized ** Gen.unit(2).unsized)
.forSize(1)
.sample
.run(SimpleRNG(0))
._1 shouldBe (1, 2)
}
// Exercise 12
it should "generate a sized list of elements" in {
forAll { (seed: Long, sizeSeed: Int) =>
whenever(sizeSeed != Int.MinValue) {
val s = (math.abs(sizeSeed) % 100) + 1
Gen
.listOf(Gen.boolean)
.forSize(s)
.sample
.run(SimpleRNG(seed))
._1 should have size s
}
}
}
// Exercise 13
it should "generate a sized, non empty list of elements" in {
forAll { (seed: Long, sizeSeed: Int) =>
whenever(sizeSeed != Int.MinValue) {
val s = math.abs(sizeSeed) % 100
Gen
.listOf1(Gen.boolean)
.forSize(s)
.sample
.run(SimpleRNG(seed))
._1 should have size math.max(1, s)
}
}
}
// Exercise 16
it should "generate Par[Int]" in {
val executor = Executors.newCachedThreadPool()
forAll { seed: Long =>
Gen.parInt.sample
.run(SimpleRNG(seed))
._1(executor)
.get() shouldBe an[Integer]
}
}
// Exercise 19
it should "generate a function A => B that uses a mapping from n elements" in {
Gen
.funN[Int, Boolean](2, Gen.boolean)
.sample
.run(SimpleRNG(1))
._1(0) shouldBe true
}
}
| goboss/fpinscala | exercises/src/test/scala/fpinscala/testing/GenSpec.scala | Scala | mit | 4,307 |
package org.jetbrains.plugins.scala
package findUsages.vals
import com.intellij.openapi.project.{IndexNotReadyException, Project}
import com.intellij.psi.search.searches.ReferencesSearch
import com.intellij.psi.search.searches.ReferencesSearch.SearchParameters
import com.intellij.psi.search.{PsiSearchHelper, SearchScope, TextOccurenceProcessor, UsageSearchContext}
import com.intellij.psi.{PsiElement, PsiReference}
import com.intellij.util.{Processor, QueryExecutor}
import org.jetbrains.plugins.scala.extensions.inReadAction
import org.jetbrains.plugins.scala.lang.psi.ScalaPsiUtil
import org.jetbrains.plugins.scala.lang.psi.api.base.ScStableCodeReferenceElement
import org.jetbrains.plugins.scala.lang.psi.api.base.patterns.{ScBindingPattern, ScConstructorPattern}
import org.jetbrains.plugins.scala.lang.psi.api.expr.{ScMethodCall, ScNewTemplateDefinition}
import org.jetbrains.plugins.scala.lang.psi.api.statements.ScFunctionDefinition
import org.jetbrains.plugins.scala.lang.resolve.{ResolvableReferenceElement, ResolvableReferenceExpression, ScalaResolveResult}
/**
* Nikolay.Tropin
* 8/29/13
*/
class ApplyUnapplyForBindingSearcher extends QueryExecutor[PsiReference, ReferencesSearch.SearchParameters] {
def execute(queryParameters: SearchParameters, consumer: Processor[PsiReference]): Boolean = {
val scope = inReadAction(queryParameters.getEffectiveSearchScope)
val element = queryParameters.getElementToSearch
element match {
case _ if inReadAction(!element.isValid) => true
case binding: ScBindingPattern =>
val processor = createProcessor(consumer, binding, checkApply = true, checkUnapply = true)
processBinding(processor, scope, binding, queryParameters.getProject)
case inAnonClassWithBinding((binding, checkApply, checkUnapply)) =>
val processor = createProcessor(consumer, binding, checkApply, checkUnapply)
processBinding(processor, scope, binding, queryParameters.getProject)
case _ => true
}
}
private def createProcessor(consumer: Processor[PsiReference], binding: ScBindingPattern, checkApply: Boolean, checkUnapply: Boolean) =
new TextOccurenceProcessor {
def execute(element: PsiElement, offsetInElement: Int): Boolean = {
val references = inReadAction(element.getReferences)
val IsApply = new Apply(binding)
val IsUnapply = new Unapply(binding)
for (ref <- references if ref.getRangeInElement.contains(offsetInElement)) {
inReadAction {
ref match {
case IsApply(reference) if checkApply => if (!consumer.process(reference)) return false
case IsUnapply(reference) if checkUnapply => if (!consumer.process(reference)) return false
case _ =>
}
}
}
true
}
}
private def processBinding(processor: TextOccurenceProcessor, scope: SearchScope, binding: ScBindingPattern, project: Project): Boolean = {
val helper: PsiSearchHelper = PsiSearchHelper.SERVICE.getInstance(project)
try {
helper.processElementsWithWord(processor, scope, binding.name, UsageSearchContext.IN_CODE, true)
}
catch {
      case _: IndexNotReadyException => true
}
}
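  // The extractors below recognise a reference as an unapply/unapplySeq
  // (respectively apply) usage of the tracked binding, whether the binding
  // resolves directly or through the inner resolve result of a synthetic call.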
private class Unapply(binding: ScBindingPattern) {
def unapply(ref: PsiReference): Option[ResolvableReferenceElement] = {
(ref, ref.getElement.getContext) match {
case (sref: ScStableCodeReferenceElement, x: ScConstructorPattern) =>
sref.bind() match {
case Some(resolve@ScalaResolveResult(fun: ScFunctionDefinition, _))
if Set("unapply", "unapplySeq").contains(fun.name) =>
resolve.innerResolveResult match {
case Some(ScalaResolveResult(`binding`, _)) => Some(sref)
case _ => None
}
case Some(resolve@ScalaResolveResult(`binding`, _)) =>
resolve.innerResolveResult match {
case Some(ScalaResolveResult(fun: ScFunctionDefinition, _))
if Set("unapply", "unapplySeq").contains(fun.name) => Some(sref)
case _ => None
}
case _ => None
}
case _ => None
}
}
}
private class Apply(binding: ScBindingPattern) {
def unapply(ref: PsiReference): Option[ResolvableReferenceElement] = {
(ref, ref.getElement.getContext) match {
case (sref: ResolvableReferenceExpression, x: ScMethodCall) if x.getInvokedExpr == ref.getElement =>
sref.bind() match {
case Some(ScalaResolveResult(fun: ScFunctionDefinition, _))
if fun.name == "apply" && sref.isReferenceTo(binding) => Some(sref)
case Some(resolve@ScalaResolveResult(`binding`, _)) =>
resolve.innerResolveResult match {
case Some(ScalaResolveResult(fun: ScFunctionDefinition, _)) if fun.name == "apply" => Some(sref)
case _ => None
}
case _ => None
}
case _ => None
}
}
}
private object inAnonClassWithBinding {
def unapply(fun: ScFunctionDefinition): Option[(ScBindingPattern, Boolean, Boolean)] =
inReadAction {
val (checkApply, checkUnapply) = fun.name match {
case "apply" => (true, false)
case "unapply" | "unapplySeq" => (false, true)
case _ => (false, false)
}
if (checkApply || checkUnapply) {
fun.containingClass match {
          case anon: ScNewTemplateDefinition => ScalaPsiUtil.findInstanceBinding(anon).map((_, checkApply, checkUnapply))
case _ => None
}
}
else None
}
}
}
| double-y/translation-idea-plugin | src/org/jetbrains/plugins/scala/findUsages/vals/ApplyUnapplyForBindingSearcher.scala | Scala | apache-2.0 | 5,714 |
/*
* Odessa State environmental University
* Copyright (C) 2014
*/
package ua.edu.odeku.ceem.mapRadar.tools.adminBorder.imports
import java.awt.Component
import java.awt.event.{ActionEvent, ActionListener}
import java.io.File
import java.util.ResourceBundle
import javax.swing.filechooser.FileFilter
import javax.swing.{JFileChooser, JTextField}
import ua.edu.odeku.ceem.mapRadar.utils.thread.StopProcess
/**
 * This class is responsible for importing the data.
*
* Created by Aleo on 22.03.14.
*/
class ImporterAdminBorder(val tool: ImportAdminBorderTool) {
private val form = tool.form.asInstanceOf[ImportAdminBorderForm]
private var importer: Importer = null
private val chooserCountryButtonListener = new ChooserFileButtonListener(this, form.fileCountry)
private val chooserProvincesButtonListener = new ChooserFileButtonListener(this, form.fileProvinces)
form.chooserCountryButton.addActionListener(chooserCountryButtonListener)
form.chooserProvincesButton.addActionListener(chooserProvincesButtonListener)
form.importButton.addActionListener(new ActionListener {
override def actionPerformed(e: ActionEvent): Unit = {
val countryFile: File = chooserCountryButtonListener.file
val provincesFile: File = chooserProvincesButtonListener.file
if(countryFile != null){
if(importer != null)
importer.stopProcess = true
importer = new Importer(countryFile, provincesFile, tool)
importer.setPriority(Thread.MAX_PRIORITY)
importer.start()
viewStartImport()
}
}
})
form.cancelButton.addActionListener(new ActionListener {
override def actionPerformed(e: ActionEvent): Unit = {
if(importer != null){
importer.stopProcess = true
viewStopImport()
}
tool.endFunction(tool.parentToolFrame)
}
})
def changeSelectedFileName(file: File) {
form.fileCountry.setText(file.getAbsolutePath)
}
def viewStartImport(){
form.progressBar.setIndeterminate(true)
form.importButton.setEnabled(false)
form.cancelButton.setEnabled(true)
}
def viewStopImport(){
form.progressBar.setIndeterminate(false)
form.importButton.setEnabled(true)
form.cancelButton.setEnabled(false)
}
}
private class ChooserFileButtonListener(val importer: ImporterAdminBorder, val textFieldButton: JTextField) extends ActionListener {
var file: File = null
val fileFilter = new FileFilter {
override def accept(f: File): Boolean = {
if (f.isDirectory)
return true
val nameFile = f.getName
for (validName: String <- VALID_NAME_ADMIN_BORDER_FILES) {
if (validName == nameFile)
return true
}
false
}
override def getDescription: String = "*.csv"
}
val fileChooser = new JFileChooser()
fileChooser.setFileFilter(fileFilter)
fileChooser.setCurrentDirectory(new File("resources/"))
override def actionPerformed(e: ActionEvent): Unit = {
val res = fileChooser.showDialog(importer.tool.form.rootPanel().asInstanceOf[Component], ResourceBundle.getBundle("bundles/frameTitle").getString("importAdminBorder_dialog_fileChooser_title"))
if (res == JFileChooser.APPROVE_OPTION) {
file = fileChooser.getSelectedFile
textFieldButton.setText(file.getAbsolutePath)
}
}
}
protected class Importer(val countryFile: File, val provincesFile: File, val tool: ImportAdminBorderTool) extends Thread with StopProcess {
stopProcess = false
override def run() {
if(countryFile.getName == MAP_COUNTRIES && (provincesFile == null || provincesFile.getName == STATES_PROVINCES_SHP )) {
ImporterAdminBorders(countryFile, provincesFile, this)
tool.endFunction.apply(tool.parentToolFrame)
} else {
tool.importer.viewStopImport()
}
}
} | aleo72/ww-ceem-radar | src/main/scala/ua/edu/odeku/ceem/mapRadar/tools/adminBorder/imports/importerAdminBorder.scala | Scala | apache-2.0 | 3,659 |
package scalan.compilation.lms.cxx.sharedptr
import scalan.compilation.language.Cxx
import scalan.compilation.lms.{ObjectOrientedLmsBridge, CoreLmsBridge}
trait CoreLmsBridgeCxx extends CoreLmsBridge with ObjectOrientedLmsBridge {
val language = Cxx
override def staticReceiverString(typeMapping: language.TypeMapping): String =
typeMapping.library.namespace.fold("")(_ + "::") + typeMapping.tpe.mappedName
}
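// For example, a type mapping whose library namespace is Some("std") and whose
// mapped type name is "vector" yields the receiver string "std::vector".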
| scalan/scalan | lms-backend/core/src/main/scala/scalan/compilation/lms/cxx/sharedptr/CoreLmsBridgeCxx.scala | Scala | apache-2.0 | 420 |
package slick.relational
import slick.ast._
import slick.basic.{BasicActionComponent, BasicProfile}
import slick.compiler.{EmulateOuterJoins, Phase, QueryCompiler}
import slick.dbio._
import slick.lifted.FunctionSymbolExtensionMethods._
import slick.lifted._
import scala.language.{higherKinds, implicitConversions}
import scala.reflect.ClassTag
/** A profile for relational databases that does not assume the existence
* of SQL (or any other text-based language for executing statements).
* It requires a relational table structure as its basic model of data. */
trait RelationalProfile extends BasicProfile with RelationalTableComponent
with RelationalSequenceComponent with RelationalTypesComponent
with RelationalActionComponent { self: RelationalProfile =>
@deprecated("Use the Profile object directly instead of calling `.profile` on it", "3.2")
override val profile: RelationalProfile = this
type Backend <: RelationalBackend
override protected def computeCapabilities = super.computeCapabilities ++ RelationalCapabilities.all
trait API extends super.API with ImplicitColumnTypes {
type FastPath[T] = SimpleFastPathResultConverter[ResultConverterDomain, T]
type Table[T] = self.Table[T]
type Sequence[T] = self.Sequence[T]
val Sequence = self.Sequence
type ColumnType[T] = self.ColumnType[T]
type BaseColumnType[T] = self.BaseColumnType[T]
val MappedColumnType = self.MappedColumnType
@deprecated("Use an explicit conversion to an Option column with `.?`", "3.0")
implicit def columnToOptionColumn[T : BaseTypedType](c: Rep[T]): Rep[Option[T]] = c.?
implicit def valueToConstColumn[T : TypedType](v: T): LiteralColumn[T] = new LiteralColumn[T](v)
implicit def columnToOrdered[T : TypedType](c: Rep[T]): ColumnOrdered[T] = ColumnOrdered[T](c, Ordering())
implicit def tableQueryToTableQueryExtensionMethods[T <: RelationalProfile#Table[_], U](q: Query[T, U, Seq] with TableQuery[T]): TableQueryExtensionMethods[T, U] =
new TableQueryExtensionMethods[T, U](q)
implicit def streamableCompiledInsertActionExtensionMethods[EU](c: StreamableCompiled[_, _, EU]): InsertActionExtensionMethods[EU] = createInsertActionExtensionMethods[EU](c.compiledInsert.asInstanceOf[CompiledInsert])
implicit def queryInsertActionExtensionMethods[U, C[_]](q: Query[_, U, C]): InsertActionExtensionMethods[U] = createInsertActionExtensionMethods[U](compileInsert(q.toNode))
implicit def schemaActionExtensionMethods(sd: SchemaDescription): SchemaActionExtensionMethods = createSchemaActionExtensionMethods(sd)
implicit def fastPathExtensionMethods[T, P](mp: MappedProjection[T, P]): FastPathExtensionMethods[ResultConverterDomain, T, P] = new FastPathExtensionMethods[ResultConverterDomain, T, P](mp)
}
val api: API
final lazy val compiler = computeQueryCompiler
protected def computeQueryCompiler: QueryCompiler = {
val base = QueryCompiler.standard
val canJoinLeft = capabilities contains RelationalCapabilities.joinLeft
val canJoinRight = capabilities contains RelationalCapabilities.joinRight
val canJoinFull = capabilities contains RelationalCapabilities.joinFull
if(canJoinLeft && canJoinRight && canJoinFull) base
else base.addBefore(new EmulateOuterJoins(canJoinLeft, canJoinRight), Phase.expandRecords)
}
class TableQueryExtensionMethods[T <: RelationalProfile#Table[_], U](val q: Query[T, U, Seq] with TableQuery[T]) {
/** Get the schema description (DDL) for this table. */
def schema: SchemaDescription = buildTableSchemaDescription(q.shaped.value.asInstanceOf[Table[_]])
/** Create a `Compiled` query which selects all rows where the specified
* key matches the parameter value. */
def findBy[P](f: (T => Rep[P]))(implicit ashape: Shape[ColumnsShapeLevel, Rep[P], P, Rep[P]], pshape: Shape[ColumnsShapeLevel, P, P, _]): CompiledFunction[Rep[P] => Query[T, U, Seq], Rep[P], P, Query[T, U, Seq], Seq[U]] = {
import self.api._
Compiled { (p: Rep[P]) => (q: Query[T, U, Seq]).filter(table => Library.==.column[Boolean](f(table).toNode, p.toNode)) }
}
}
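  // Usage sketch (illustrative; the `people` table and its `name` column are
  // assumptions, not part of this profile):
  //   val byName = people.findBy(_.name)
  //   db.run(byName("Alice").result)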
/** Run a query synchronously on the provided session. This is used by DistributedProfile until we
* can make it fully asynchronous. */
def runSynchronousQuery[R](tree: Node, param: Any)(implicit session: Backend#Session): R
class FastPathExtensionMethods[M <: ResultConverterDomain, T, P](val mp: MappedProjection[T, P]) {
def fastPath(fpf: (TypeMappingResultConverter[M, T, _] => SimpleFastPathResultConverter[M, T])): MappedProjection[T, P] = mp.genericFastPath {
case tm @ TypeMappingResultConverter(_: ProductResultConverter[_, _], _, _) =>
fpf(tm.asInstanceOf[TypeMappingResultConverter[M, T, _]])
case tm => tm
}
}
}
object RelationalProfile {
/** Extra column options for RelationalProfile */
object ColumnOption {
/** Default value for the column. Needs to wrap an Option for nullable Columns. */
case class Default[T](defaultValue: T) extends ColumnOption[T]
/** Number of unicode characters for string-like types. Unlike DBType this is portable
* between different DBMS. Note that for DDL Slick currently picks type CHAR when
* varying=false and VARCHAR when varying=true. Slick uses VARCHAR or VARCHAR(254) in DDL for
* String columns if neither ColumnOption DBType nor Length are given.
*
      * @param varying indicates whether this is just the maximum length of a varying column */
case class Length(length: Int, varying: Boolean = true) extends ColumnOption[Nothing]
}
}
trait RelationalTableComponent { self: RelationalProfile =>
def buildTableSchemaDescription(table: Table[_]): SchemaDescription
trait ColumnOptions {
val PrimaryKey = ColumnOption.PrimaryKey
def Default[T](defaultValue: T) = RelationalProfile.ColumnOption.Default[T](defaultValue)
val AutoInc = ColumnOption.AutoInc
val Unique = ColumnOption.Unique
val Length = RelationalProfile.ColumnOption.Length
}
val columnOptions: ColumnOptions = new ColumnOptions {}
abstract class Table[T](_tableTag: Tag, _schemaName: Option[String], _tableName: String) extends AbstractTable[T](_tableTag, _schemaName, _tableName) { table =>
final type TableElementType = T
def this(_tableTag: Tag, _tableName: String) = this(_tableTag, None, _tableName)
def tableProvider: RelationalProfile = self
def tableIdentitySymbol: TableIdentitySymbol = SimpleTableIdentitySymbol(self, schemaName.getOrElse("_"), tableName)
val O: self.columnOptions.type = columnOptions
/**
* Note that Slick uses VARCHAR or VARCHAR(254) in DDL for String
* columns if neither ColumnOption DBType nor Length are given.
*/
def column[C](n: String, options: ColumnOption[C]*)(implicit tt: TypedType[C]): Rep[C] = {
if(tt == null) throw new NullPointerException(
"implicit TypedType[C] for column[C] is null. "+
"This may be an initialization order problem. "+
"When using a MappedColumnType, you may want to change it from a val to a lazy val or def.")
new Rep.TypedRep[C] {
override def toNode =
Select((tableTag match {
case r: RefTag => r.path
case _ => tableNode
}), FieldSymbol(n)(options, tt)) :@ tt
override def toString = (tableTag match {
case r: RefTag => "(" + _tableName + " " + r.path + ")"
case _ => _tableName
}) + "." + n
}
}
}
}
trait RelationalSequenceComponent { self: RelationalProfile =>
def buildSequenceSchemaDescription(seq: Sequence[_]): SchemaDescription
class Sequence[T] private[Sequence] (val name: String,
val _minValue: Option[T],
val _maxValue: Option[T],
val _increment: Option[T],
val _start: Option[T],
val _cycle: Boolean)(implicit val tpe: TypedType[T], val integral: Integral[T])
{ seq =>
def min(v: T) = new Sequence[T](name, Some(v), _maxValue, _increment, _start, _cycle)
def max(v: T) = new Sequence[T](name, _minValue, Some(v), _increment, _start, _cycle)
def inc(v: T) = new Sequence[T](name, _minValue, _maxValue, Some(v), _start, _cycle)
def start(v: T) = new Sequence[T](name, _minValue, _maxValue, _increment, Some(v), _cycle)
def cycle = new Sequence[T](name, _minValue, _maxValue, _increment, _start, true)
final def next = Library.NextValue.column[T](toNode)
final def curr = Library.CurrentValue.column[T](toNode)
def toNode = SequenceNode(name)(_increment.map(integral.toLong).getOrElse(1))
def schema: SchemaDescription = buildSequenceSchemaDescription(this)
}
object Sequence {
def apply[T : TypedType : Integral](name: String) = new Sequence[T](name, None, None, None, None, false)
}
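  // Usage sketch (illustrative): `Sequence[Int]("ids").start(1).inc(1)` builds a
  // sequence description; `next`/`curr` lift next-value/current-value reads into
  // columns, and `schema` yields its DDL.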
}
trait RelationalTypesComponent { self: RelationalProfile =>
type ColumnType[T] <: TypedType[T]
type BaseColumnType[T] <: ColumnType[T] with BaseTypedType[T]
val MappedColumnType: MappedColumnTypeFactory
trait MappedColumnTypeFactory {
def base[T : ClassTag, U : BaseColumnType](tmap: T => U, tcomap: U => T): BaseColumnType[T]
protected[this] def assertNonNullType(t: BaseColumnType[_]): Unit =
if(t == null)
throw new NullPointerException("implicit BaseColumnType[U] for MappedColumnType.base[T, U] is null. This may be an initialization order problem.")
}
trait ImplicitColumnTypes {
implicit def isomorphicType[A, B](implicit iso: Isomorphism[A, B], ct: ClassTag[A], jt: BaseColumnType[B]): BaseColumnType[A] =
MappedColumnType.base[A, B](iso.map, iso.comap)
implicit def booleanColumnType: BaseColumnType[Boolean]
implicit def bigDecimalColumnType: BaseColumnType[BigDecimal] with NumericTypedType
implicit def byteColumnType: BaseColumnType[Byte] with NumericTypedType
implicit def charColumnType: BaseColumnType[Char]
implicit def doubleColumnType: BaseColumnType[Double] with NumericTypedType
implicit def floatColumnType: BaseColumnType[Float] with NumericTypedType
implicit def intColumnType: BaseColumnType[Int] with NumericTypedType
implicit def longColumnType: BaseColumnType[Long] with NumericTypedType
implicit def shortColumnType: BaseColumnType[Short] with NumericTypedType
implicit def stringColumnType: BaseColumnType[String]
}
}
trait RelationalActionComponent extends BasicActionComponent { self: RelationalProfile =>
//////////////////////////////////////////////////////////// Insert Actions
type InsertActionExtensionMethods[T] <: InsertActionExtensionMethodsImpl[T]
def createInsertActionExtensionMethods[T](compiled: CompiledInsert): InsertActionExtensionMethods[T]
trait InsertActionExtensionMethodsImpl[T] {
/** The result type when inserting a single value. */
type SingleInsertResult
/** The result type when inserting a collection of values. */
type MultiInsertResult
/** An Action that inserts a single value. */
def += (value: T): ProfileAction[SingleInsertResult, NoStream, Effect.Write]
/** An Action that inserts a collection of values. */
def ++= (values: Iterable[T]): ProfileAction[MultiInsertResult, NoStream, Effect.Write]
}
//////////////////////////////////////////////////////////// Schema Actions
type SchemaActionExtensionMethods <: SchemaActionExtensionMethodsImpl
def createSchemaActionExtensionMethods(schema: SchemaDescription): SchemaActionExtensionMethods
trait SchemaActionExtensionMethodsImpl {
/** Create an Action that creates the entities described by this schema description. */
def create: ProfileAction[Unit, NoStream, Effect.Schema]
/** Create an Action that creates the entities described by this schema description if the entities do not exist. */
def createIfNotExists: ProfileAction[Unit, NoStream, Effect.Schema]
/** Create an Action that drops the entities described by this schema description. */
def drop: ProfileAction[Unit, NoStream, Effect.Schema]
/** Create an Action that drops the entities described by this schema description only if the entities exist. */
def dropIfExists: ProfileAction[Unit, NoStream, Effect.Schema]
/** Create an Action that truncates entries described by this schema description */
def truncate: ProfileAction[Unit, NoStream, Effect.Schema]
}
}
| slick/slick | slick/src/main/scala/slick/relational/RelationalProfile.scala | Scala | bsd-2-clause | 12,509 |
package edu.depauw.scales.music;
/* Bug: throws an UnsupportedAudioFileException for some audio files. */
object DemoSampled extends Application {
val test = new Clip(0, System.getProperty("user.dir") + "/resources/bassoon-g4.wav")
val director = new Director(test + C + test + E + G + C.> + G + E + C + test~3)
director.start()
}
| bhoward/EscalatorOld | Scales2/test/edu/depauw/scales/music/TestSampled.scala | Scala | apache-2.0 | 344 |
package com.sksamuel.elastic4s.requests.get
import com.sksamuel.elastic4s.json.{XContentBuilder, XContentFactory}
object MultiGetBodyBuilder {
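  // Shape of the emitted body (illustrative):
  //   { "docs": [ { "_index": "idx", "_id": "1", "routing": "r1",
  //                 "_source": false, "stored_fields": ["f1"] } ] }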
def apply(request: MultiGetRequest): XContentBuilder = {
val builder = XContentFactory.obj()
builder.startArray("docs")
request.gets.foreach { get =>
builder.startObject()
builder.field("_index", get.index.index)
builder.field("_id", get.id)
get.routing.foreach(builder.field("routing", _))
get.fetchSource.foreach { context =>
if (context.includes.nonEmpty || context.excludes.nonEmpty) {
builder.startObject("_source")
if (context.includes.nonEmpty)
builder.array("include", context.includes.toList)
if (context.excludes.nonEmpty)
builder.array("exclude", context.excludes.toList)
builder.endObject()
} else
builder.field("_source", boolean = false)
}
if (get.storedFields.nonEmpty)
builder.array("stored_fields", get.storedFields.toArray)
builder.endObject()
}
builder.endArray()
builder.endObject()
}
}
| stringbean/elastic4s | elastic4s-core/src/main/scala/com/sksamuel/elastic4s/requests/get/MultiGetBodyBuilder.scala | Scala | apache-2.0 | 1,126 |
package aima.core.environment.vacuum
import aima.core.random.{DefaultRandomness, SetRandomness}
/**
* @author Shawn Garner
*/
sealed trait VacuumAction
sealed trait MoveAction extends VacuumAction
case object LeftMoveAction extends MoveAction
case object RightMoveAction extends MoveAction
object MoveAction extends SetRandomness[MoveAction] with DefaultRandomness {
val valueSet: Set[MoveAction] = Set(LeftMoveAction, RightMoveAction)
}
sealed trait SuckerAction extends VacuumAction
case object Suck extends SuckerAction
object SuckerAction extends SetRandomness[SuckerAction] with DefaultRandomness {
val valueSet: Set[SuckerAction] = Set(Suck)
}
case object NoAction extends VacuumAction
| aimacode/aima-scala | core/src/main/scala/aima/core/environment/vacuum/actions.scala | Scala | mit | 721 |
package com.cloudray.scalapress.plugin.search.tags
import com.cloudray.scalapress.widgets.Widget
import javax.persistence.{Entity, Table}
import com.cloudray.scalapress.search.Search
import scala.xml.Utility
import com.cloudray.scalapress.framework.ScalapressRequest
/** @author Stephen Samuel */
@Table(name = "plugin_tagswidget")
@Entity
class TagsWidget extends Widget {
def render(req: ScalapressRequest): Option[String] = {
val search = new Search
//search.facets = Seq(SearchService.FACET_TAGS)
val result = req.context.searchService.search(search)
val tags = Nil
    // result.facets.find(_.name == SearchService.FACET_TAGS) match {
    //   case None => Nil
    //   case Some(facet) => facet.terms.map(_.term)
    // }
val xml = <ul class="tags-widget">
{_renderTags(tags)}
</ul>
Some(xml.toString())
}
def _renderTags(tags: Iterable[String]) =
tags.map(tag => <span>
{tag}
</span>).map(Utility.trim(_))
}
| vidyacraghav/scalapress | src/main/scala/com/cloudray/scalapress/plugin/search/tags/TagsWidget.scala | Scala | apache-2.0 | 977 |
package io.github.psychicwaddle.kappadi.graph
/**
* @author Jaca777
* Created 2016-01-28 at 14
*/
abstract class GraphVisitor[-A, +Self] { _: Self =>
def visit(value: A): Self
def enter(value: A): Boolean = true
}
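// Illustrative subclass (shape assumed, not from this repo): an immutable
// visitor that accumulates the values it sees.
//   case class Collecting[A](seen: Vector[A] = Vector.empty)
//     extends GraphVisitor[A, Collecting[A]] {
//     def visit(value: A): Collecting[A] = copy(seen = seen :+ value)
//   }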
| KapibaraInc/Kapibara | miner/src/main/scala/io/github/psychicwaddle/kappadi/graph/GraphVisitor.scala | Scala | lgpl-3.0 | 234 |
package blended.jms.bridge.internal
import java.io.File
import akka.NotUsed
import akka.actor.ActorSystem
import akka.stream.scaladsl.{Flow, GraphDSL, Merge}
import akka.stream.{FlowShape, Graph, KillSwitch}
import blended.jms.utils.IdAwareConnectionFactory
import blended.streams.message.FlowEnvelope
import blended.streams.{BlendedStreamsConfig, FlowProcessor}
import blended.testsupport.{BlendedTestSupport, RequiresForkedJVM}
import scala.concurrent.duration._
/**
* Verify that a message is routed correctly after it has been redirected through the
* retry queue at least once.
*/
@RequiresForkedJVM
class RouteAfterRetrySpec extends BridgeSpecSupport {
override def baseDir: String = new File(BlendedTestSupport.projectTestOutput, "withRetries").getAbsolutePath()
private def sendOutbound(cf : IdAwareConnectionFactory, timeout : FiniteDuration, msgCount : Int, track : Boolean) : KillSwitch = {
val msgs : Seq[FlowEnvelope] = generateMessages(msgCount){ env =>
env
.withHeader(headerCfg.headerBridgeVendor, "activemq").get
.withHeader(headerCfg.headerBridgeProvider, "external").get
.withHeader("SIBBridgeDestination", "sampleOut").get
//.withHeader(destHeader(headerCfg.prefix), s"sampleOut").get
.withHeader(headerCfg.headerTrack, track).get
}.get
sendMessages("bridge.data.out", cf, timeout)(msgs:_*)
}
// We override the send flow with a flow simply triggering an exception, so that the
// exceptional path will be triggered
override protected def bridgeActivator: BridgeActivator = new BridgeActivator() {
override protected def streamBuilderFactory(system: ActorSystem)(
cfg: BridgeStreamConfig, streamsCfg : BlendedStreamsConfig
): BridgeStreamBuilder =
new BridgeStreamBuilder(cfg, streamsCfg)(system) {
override protected def jmsSend: Flow[FlowEnvelope, FlowEnvelope, NotUsed] = {
val g : Graph[FlowShape[FlowEnvelope, FlowEnvelope], NotUsed] = GraphDSL.create() { implicit b =>
import GraphDSL.Implicits._
val partition = b.add(FlowProcessor.partition[FlowEnvelope]{ env =>
env.header[Long](headerCfg.headerRetryCount).getOrElse(0L) < 2
})
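          // out0 carries envelopes retried fewer than two times (forced to fail
          // below); out1 carries the rest, which are sent normally.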
val merge = b.add(Merge[FlowEnvelope](2))
val fail = b.add(
Flow.fromFunction[FlowEnvelope, FlowEnvelope] { env => env.withException(new Exception("Boom")) }
)
val send = b.add(super.jmsSend)
partition.out0 ~> fail ~> merge.in(0)
partition.out1 ~> send ~> merge.in(1)
FlowShape(partition.in, merge.out)
}
Flow.fromGraph(g)
}
}
}
"The outbound bridge should " - {
"correctly route outbound messages after one or more retries" in logException {
val timeout : FiniteDuration = 1.second
val msgCount = 1
val actorSys = system(registry)
val (internal, external) = getConnectionFactories(registry)
val switch = sendOutbound(internal, timeout, msgCount, track = true)
      println("Waiting for the retry loop to complete ...")
      (0 to 15).foreach { _ =>
        Thread.sleep(1000)
        print("-")
      }
      println()
val retried : List[FlowEnvelope] = consumeMessages(
cf = internal,
destName = "retries",
timeout = timeout
)(actorSys).get
retried should be (empty)
consumeEvents(internal, timeout)(actorSys).get should not be empty
val messages : List[FlowEnvelope] =
consumeMessages(
cf = external,
destName = "sampleOut",
expected = msgCount,
timeout = timeout
)(actorSys).get
messages should have size(msgCount)
switch.shutdown()
}
}
}
| woq-blended/blended | blended.jms.bridge/src/test/scala/blended/jms/bridge/internal/RouteAfterRetrySpec.scala | Scala | apache-2.0 | 3,797 |
/**
* Copyright (C) 2008 Scalable Solutions.
*/
package se.scalablesolutions.skalman
import org.scalatest._
import org.scalatest.testng.TestNGSuite
import se.scalablesolutions.skalman.util.Helpers._
import se.scalablesolutions.skalman.util.{Can, Full, Empty, Failure}
import org.testng.annotations.{
BeforeSuite,
BeforeMethod,
Test
}
import org.easymock.EasyMock.{
expect => exp,
createMock,
verify,
replay,
reset,
notNull,
isA
}
import javax.ejb.{TransactionAttribute, TransactionAttributeType}
import scala.actors.Actor
import scala.actors.Actor._
// FIXME: Turn into a real test; currently there are no asserts, just print-outs
class ManagedComponentProxySuite extends TestNGSuite with Logging {
var status = ""
trait Foo extends ManagedComponent {
@TransactionAttribute(TransactionAttributeType.REQUIRED)
def foo(msg: String)
def bar(msg: String)
}
class FooImpl extends Foo {
    val barImpl: Bar = new BarImpl // is normally injected by Guice
    def foo(msg: String) = log.info("msg: " + msg)
    def bar(msg: String) = barImpl.bar(msg)
}
trait Bar extends ManagedComponent {
def bar(msg: String)
}
class BarImpl extends Bar {
def bar(msg: String) = log.info("msg: " + msg)
}
// Logging interceptor
trait LoggingInterceptor extends Interceptor {
val logPointcut = parser.parsePointcutExpression("execution(* *.foo(..))")
abstract override def invoke(invocation: Invocation): AnyRef = if (matches(logPointcut, invocation)) {
log.info("=====> Enter: " + invocation.method.getName + " @ " + invocation.target.getClass.getName)
val result = super.invoke(invocation)
log.info("=====> Exit: " + invocation.method.getName + " @ " + invocation.target.getClass.getName)
result
} else super.invoke(invocation)
}
// Transaction interceptor
trait TransactionInterceptor extends Interceptor {
val txPointcut = parser.parsePointcutExpression("execution(* *.bar(..))")
abstract override def invoke(invocation: Invocation): AnyRef = if (matches(txPointcut, invocation)) {
log.info("=====> TX begin")
val result = super.invoke(invocation)
log.info("=====> TX commit")
result
} else super.invoke(invocation)
}
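  // Note on stacking (illustrative): in `new ManagedComponentProxy(impl)
  // with LoggingInterceptor with TransactionInterceptor`, the last-mixed
  // trait's `invoke` runs first and delegates inward via `super.invoke`.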
@BeforeMethod { val groups = Array("unit") }
def resetLog = status = ""
@Test { val groups=Array("unit") }
def testCreateNonActorBasedComponent = {
log.info("\\n-------- CREATING NON ACTOR BASED COMPONENT ------")
val foo = ManagedComponentFactory.createComponent[Foo](
classOf[Foo],
new ManagedComponentProxy(new FooImpl)
with LoggingInterceptor
with TransactionInterceptor)
foo.foo("foo")
foo.bar("bar")
assert(true === true)
}
@Test { val groups=Array("unit") }
def testCreateActorBasedComponent = {
log.info("\\n-------- CREATING ACTOR BASED COMPONENT USING SAME INTERFACE AND IMPL ------")
val foo = ManagedComponentFactory.createComponent[Foo](
classOf[Foo],
new ManagedComponentProxy(new FooImpl)
with LoggingInterceptor
with TransactionInterceptor)
foo.foo("foo")
foo.bar("bar")
assert(true === true)
}
@Test { val groups=Array("unit") }
def testCreateCustomActorBasedComponent = {
log.info("\\n-------- CREATING CUSTOM ACTOR BASED COMPONENT USING SAME INTERFACE AND IMPL ------")
// Usually spawned up once and stored away somewhere
val customActor = actor(loop(react({
case invocation: Invocation => reply(Full(invocation.invoke))
      case 'exit => reply(Empty); exit()
})))
val foo = ManagedComponentFactory.createActorBasedComponent[Foo](
classOf[Foo],
new ManagedComponentProxy(new FooImpl)
with LoggingInterceptor
with TransactionInterceptor,
customActor)
foo.foo("foo")
foo.bar("bar")
// to close down in an elegant way
customActor ! 'exit
Runtime.getRuntime.gc()
assert(true === true)
}
}
| jboner/skalman | core/src/test/scala/ManagedComponentProxySuite.scala | Scala | bsd-3-clause | 3,957 |
package com.softwaremill.bootzooka.api.swagger
import com.softwaremill.bootzooka.api.Mappable
import com.typesafe.scalalogging.LazyLogging
import org.scalatra.ScalatraServlet
import org.scalatra.swagger.{ApiInfo, NativeSwaggerBase, Swagger}
class SwaggerServlet(implicit val swagger: Swagger) extends ScalatraServlet with NativeSwaggerBase with LazyLogging with Mappable {
override def mappingPath: String = "api-docs"
}
object AppSwagger {
val Info = ApiInfo(
"Web API",
"Docs for the web API",
"http://bootzooka.softwaremill.com",
"hello@softwaremill.com",
"Apache License, Version 2.0",
"http://www.apache.org/licenses/LICENSE-2.0.html"
)
}
class AppSwagger extends Swagger(Swagger.SpecVersion, "1.0.0", AppSwagger.Info) | umitunal/bootzooka | backend/src/main/scala/com/softwaremill/bootzooka/api/swagger/SwaggerServlet.scala | Scala | apache-2.0 | 759 |
package com.ubirch.util.elasticsearch.util
import com.typesafe.scalalogging.slf4j.StrictLogging
import org.elasticsearch.action.admin.indices.exists.indices.IndicesExistsRequest
import org.elasticsearch.client.transport.TransportClient
import org.elasticsearch.common.xcontent.XContentType
/**
  * This is a util that helps us create Elasticsearch indexes and mappings. To use it, override only the fields
  * marked below.
*
* author: cvandrei
* since: 2017-01-10
*/
trait ElasticsearchMappingsBase extends StrictLogging {
/**
* All indexes and their mappings (<b>OVERWRITE!!!</b>).
*
* A Map of indexes and optional mappings. The data is structured as follows:
* <code>
* Map(
* "INDEX_1_NAME" -> Map(
* "TYPE_1_NAME" -> "MAPPING_TYPE_1",
* "TYPE_2_NAME" -> "MAPPING_TYPE_2"
* ),
* "INDEX_2_NAME" -> Map.empty
* )
* </code>
*/
val indexesAndMappings: Map[String, Map[String, String]]
lazy final val indicesToDelete: Set[String] = indexesAndMappings.keys.toSet
final def createElasticsearchMappings()(implicit esClient: TransportClient): Unit = indexesAndMappings foreach {
case (index, indexMappings) => create(index, indexMappings)
}
private def create(index: String, mappings: Map[String, String])(implicit esClient: TransportClient) = {
val indicesClient = esClient.admin.indices()
val existsRequest = new IndicesExistsRequest(index)
if (indicesClient.exists(existsRequest).get().isExists) {
logger.info(s"index already exists: '$index'")
} else {
val indexCreated = indicesClient.prepareCreate(index).get
if (indexCreated.isAcknowledged) {
logger.info(s"created index: '$index'")
var putMappingRequestBuilder = indicesClient.preparePutMapping(index)
mappings foreach {
case (typeName, typeMapping) =>
putMappingRequestBuilder = putMappingRequestBuilder.setType(typeName)
.setSource(typeMapping, XContentType.JSON)
}
if (putMappingRequestBuilder.get().isAcknowledged) {
logger.info(s"created mapping: index='$index'")
} else {
logger.error(s"failed to created mappings: index='$index'")
}
} else {
logger.error(s"failed to create index: '$index'")
}
}
}
}
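// Illustrative implementor (index name, type name and mapping JSON are
// assumptions):
//   object MyMappings extends ElasticsearchMappingsBase {
//     override val indexesAndMappings = Map(
//       "user-index" -> Map(
//         "user" -> """{"user":{"properties":{"name":{"type":"keyword"}}}}"""
//       )
//     )
//   }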
| ubirch/ubirch-scala-utils | elasticsearch-util/src/main/scala/com/ubirch/util/elasticsearch/util/ElasticsearchMappingsBase.scala | Scala | apache-2.0 | 2,341 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.streaming.sources
import java.util
import java.util.Collections
import scala.collection.JavaConverters._
import org.apache.spark.sql.{DataFrame, SQLContext}
import org.apache.spark.sql.connector.catalog.{SessionConfigSupport, SupportsRead, SupportsWrite, Table, TableCapability, TableProvider}
import org.apache.spark.sql.connector.catalog.TableCapability._
import org.apache.spark.sql.connector.read.{InputPartition, PartitionReaderFactory, Scan, ScanBuilder}
import org.apache.spark.sql.connector.read.streaming.{ContinuousPartitionReaderFactory, ContinuousStream, MicroBatchStream, Offset, PartitionOffset}
import org.apache.spark.sql.connector.write.{LogicalWriteInfo, PhysicalWriteInfo, WriteBuilder, WriterCommitMessage}
import org.apache.spark.sql.connector.write.streaming.{StreamingDataWriterFactory, StreamingWrite}
import org.apache.spark.sql.execution.datasources.DataSource
import org.apache.spark.sql.execution.streaming.{ContinuousTrigger, RateStreamOffset, Sink, StreamingQueryWrapper}
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.sources.{DataSourceRegister, StreamSinkProvider}
import org.apache.spark.sql.streaming.{OutputMode, StreamingQuery, StreamTest, Trigger}
import org.apache.spark.sql.types.StructType
import org.apache.spark.sql.util.CaseInsensitiveStringMap
import org.apache.spark.util.Utils
class FakeDataStream extends MicroBatchStream with ContinuousStream {
override def deserializeOffset(json: String): Offset = RateStreamOffset(Map())
override def commit(end: Offset): Unit = {}
override def stop(): Unit = {}
override def initialOffset(): Offset = RateStreamOffset(Map())
override def latestOffset(): Offset = RateStreamOffset(Map())
override def mergeOffsets(offsets: Array[PartitionOffset]): Offset = RateStreamOffset(Map())
override def planInputPartitions(start: Offset, end: Offset): Array[InputPartition] = {
throw new IllegalStateException("fake source - cannot actually read")
}
override def planInputPartitions(start: Offset): Array[InputPartition] = {
throw new IllegalStateException("fake source - cannot actually read")
}
override def createReaderFactory(): PartitionReaderFactory = {
throw new IllegalStateException("fake source - cannot actually read")
}
override def createContinuousReaderFactory(): ContinuousPartitionReaderFactory = {
throw new IllegalStateException("fake source - cannot actually read")
}
}
class FakeScanBuilder extends ScanBuilder with Scan {
override def build(): Scan = this
override def readSchema(): StructType = StructType(Seq())
override def toMicroBatchStream(checkpointLocation: String): MicroBatchStream = new FakeDataStream
override def toContinuousStream(checkpointLocation: String): ContinuousStream = new FakeDataStream
}
class FakeWriteBuilder extends WriteBuilder with StreamingWrite {
override def buildForStreaming(): StreamingWrite = this
override def createStreamingWriterFactory(
info: PhysicalWriteInfo): StreamingDataWriterFactory = {
throw new IllegalStateException("fake sink - cannot actually write")
}
override def commit(epochId: Long, messages: Array[WriterCommitMessage]): Unit = {
throw new IllegalStateException("fake sink - cannot actually write")
}
override def abort(epochId: Long, messages: Array[WriterCommitMessage]): Unit = {
throw new IllegalStateException("fake sink - cannot actually write")
}
}
trait FakeStreamingWriteTable extends Table with SupportsWrite {
override def name(): String = "fake"
override def schema(): StructType = StructType(Seq())
override def capabilities(): util.Set[TableCapability] = {
Set(STREAMING_WRITE).asJava
}
override def newWriteBuilder(info: LogicalWriteInfo): WriteBuilder = {
new FakeWriteBuilder
}
}
class FakeReadMicroBatchOnly
extends DataSourceRegister
with TableProvider
with SessionConfigSupport {
override def shortName(): String = "fake-read-microbatch-only"
override def keyPrefix: String = shortName()
override def getTable(options: CaseInsensitiveStringMap): Table = {
LastReadOptions.options = options
new Table with SupportsRead {
override def name(): String = "fake"
override def schema(): StructType = StructType(Seq())
override def capabilities(): util.Set[TableCapability] = {
Set(MICRO_BATCH_READ).asJava
}
override def newScanBuilder(options: CaseInsensitiveStringMap): ScanBuilder = {
new FakeScanBuilder
}
}
}
}
class FakeReadContinuousOnly
extends DataSourceRegister
with TableProvider
with SessionConfigSupport {
override def shortName(): String = "fake-read-continuous-only"
override def keyPrefix: String = shortName()
override def getTable(options: CaseInsensitiveStringMap): Table = {
LastReadOptions.options = options
new Table with SupportsRead {
override def name(): String = "fake"
override def schema(): StructType = StructType(Seq())
override def capabilities(): util.Set[TableCapability] = {
Set(CONTINUOUS_READ).asJava
}
override def newScanBuilder(options: CaseInsensitiveStringMap): ScanBuilder = {
new FakeScanBuilder
}
}
}
}
class FakeReadBothModes extends DataSourceRegister with TableProvider {
override def shortName(): String = "fake-read-microbatch-continuous"
override def getTable(options: CaseInsensitiveStringMap): Table = {
new Table with SupportsRead {
override def name(): String = "fake"
override def schema(): StructType = StructType(Seq())
override def capabilities(): util.Set[TableCapability] = {
Set(MICRO_BATCH_READ, CONTINUOUS_READ).asJava
}
override def newScanBuilder(options: CaseInsensitiveStringMap): ScanBuilder = {
new FakeScanBuilder
}
}
}
}
class FakeReadNeitherMode extends DataSourceRegister with TableProvider {
override def shortName(): String = "fake-read-neither-mode"
override def getTable(options: CaseInsensitiveStringMap): Table = {
new Table {
override def name(): String = "fake"
override def schema(): StructType = StructType(Nil)
override def capabilities(): util.Set[TableCapability] = Collections.emptySet()
}
}
}
class FakeWriteOnly
extends DataSourceRegister
with TableProvider
with SessionConfigSupport {
override def shortName(): String = "fake-write-microbatch-continuous"
override def keyPrefix: String = shortName()
override def getTable(options: CaseInsensitiveStringMap): Table = {
LastWriteOptions.options = options
new Table with FakeStreamingWriteTable {
override def name(): String = "fake"
override def schema(): StructType = StructType(Nil)
}
}
}
class FakeNoWrite extends DataSourceRegister with TableProvider {
override def shortName(): String = "fake-write-neither-mode"
override def getTable(options: CaseInsensitiveStringMap): Table = {
new Table {
override def name(): String = "fake"
override def schema(): StructType = StructType(Nil)
override def capabilities(): util.Set[TableCapability] = Collections.emptySet()
}
}
}
case class FakeWriteV1FallbackException() extends Exception
class FakeSink extends Sink {
override def addBatch(batchId: Long, data: DataFrame): Unit = {}
}
class FakeWriteSupportProviderV1Fallback extends DataSourceRegister
with TableProvider with StreamSinkProvider {
override def createSink(
sqlContext: SQLContext,
parameters: Map[String, String],
partitionColumns: Seq[String],
outputMode: OutputMode): Sink = {
new FakeSink()
}
override def shortName(): String = "fake-write-v1-fallback"
override def getTable(options: CaseInsensitiveStringMap): Table = {
new Table with FakeStreamingWriteTable {
override def name(): String = "fake"
override def schema(): StructType = StructType(Nil)
}
}
}
object LastReadOptions {
var options: CaseInsensitiveStringMap = _
def clear(): Unit = {
options = null
}
}
object LastWriteOptions {
var options: CaseInsensitiveStringMap = _
def clear(): Unit = {
options = null
}
}
class StreamingDataSourceV2Suite extends StreamTest {
override def beforeAll(): Unit = {
super.beforeAll()
val fakeCheckpoint = Utils.createTempDir()
spark.conf.set(SQLConf.CHECKPOINT_LOCATION.key, fakeCheckpoint.getCanonicalPath)
}
override def afterEach(): Unit = {
LastReadOptions.clear()
LastWriteOptions.clear()
}
val readFormats = Seq(
"fake-read-microbatch-only",
"fake-read-continuous-only",
"fake-read-microbatch-continuous",
"fake-read-neither-mode")
val writeFormats = Seq(
"fake-write-microbatch-continuous",
"fake-write-neither-mode")
val triggers = Seq(
Trigger.Once(),
Trigger.ProcessingTime(1000),
Trigger.Continuous(1000))
private def testPositiveCase(readFormat: String, writeFormat: String, trigger: Trigger): Unit = {
testPositiveCaseWithQuery(readFormat, writeFormat, trigger)(() => _)
}
private def testPositiveCaseWithQuery(
readFormat: String,
writeFormat: String,
trigger: Trigger)(check: StreamingQuery => Unit): Unit = {
val query = spark.readStream
.format(readFormat)
.load()
.writeStream
.format(writeFormat)
.trigger(trigger)
.start()
check(query)
query.stop()
}
private def testNegativeCase(
readFormat: String,
writeFormat: String,
trigger: Trigger,
errorMsg: String) = {
val ex = intercept[UnsupportedOperationException] {
testPositiveCase(readFormat, writeFormat, trigger)
}
assert(ex.getMessage.contains(errorMsg))
}
private def testPostCreationNegativeCase(
readFormat: String,
writeFormat: String,
trigger: Trigger,
errorMsg: String) = {
val query = spark.readStream
.format(readFormat)
.load()
.writeStream
.format(writeFormat)
.trigger(trigger)
.start()
eventually(timeout(streamingTimeout)) {
assert(query.exception.isDefined)
assert(query.exception.get.cause != null)
assert(query.exception.get.cause.getMessage.contains(errorMsg))
}
}
test("disabled v2 write") {
    // Ensure the V2 path works normally and generates a V2 sink.
testPositiveCaseWithQuery(
"fake-read-microbatch-continuous", "fake-write-v1-fallback", Trigger.Once()) { v2Query =>
assert(v2Query.asInstanceOf[StreamingQueryWrapper].streamingQuery.sink
.isInstanceOf[Table])
}
// Ensure we create a V1 sink with the config. Note the config is a comma separated
// list, including other fake entries.
val fullSinkName = classOf[FakeWriteSupportProviderV1Fallback].getName
withSQLConf(SQLConf.DISABLED_V2_STREAMING_WRITERS.key -> s"a,b,c,test,$fullSinkName,d,e") {
testPositiveCaseWithQuery(
"fake-read-microbatch-continuous", "fake-write-v1-fallback", Trigger.Once()) { v1Query =>
assert(v1Query.asInstanceOf[StreamingQueryWrapper].streamingQuery.sink
.isInstanceOf[FakeSink])
}
}
}
Seq(
Tuple2(classOf[FakeReadMicroBatchOnly], Trigger.Once()),
Tuple2(classOf[FakeReadContinuousOnly], Trigger.Continuous(1000))
).foreach { case (source, trigger) =>
test(s"SPARK-25460: session options are respected in structured streaming sources - $source") {
// `keyPrefix` and `shortName` are the same in this test case
val readSource = source.getConstructor().newInstance().shortName()
val writeSource = "fake-write-microbatch-continuous"
val readOptionName = "optionA"
withSQLConf(s"spark.datasource.$readSource.$readOptionName" -> "true") {
testPositiveCaseWithQuery(readSource, writeSource, trigger) { _ =>
eventually(timeout(streamingTimeout)) {
// Write options should not be set.
assert(!LastWriteOptions.options.containsKey(readOptionName))
assert(LastReadOptions.options.getBoolean(readOptionName, false))
}
}
}
val writeOptionName = "optionB"
withSQLConf(s"spark.datasource.$writeSource.$writeOptionName" -> "true") {
testPositiveCaseWithQuery(readSource, writeSource, trigger) { _ =>
eventually(timeout(streamingTimeout)) {
// Read options should not be set.
assert(!LastReadOptions.options.containsKey(writeOptionName))
assert(LastWriteOptions.options.getBoolean(writeOptionName, false))
}
}
}
}
}
// Get a list of (read, write, trigger) tuples for test cases.
  val cases = for {
    read <- readFormats
    write <- writeFormats
    trigger <- triggers
  } yield (read, write, trigger)
for ((read, write, trigger) <- cases) {
testQuietly(s"stream with read format $read, write format $write, trigger $trigger") {
val sourceTable = DataSource.lookupDataSource(read, spark.sqlContext.conf).getConstructor()
.newInstance().asInstanceOf[TableProvider].getTable(CaseInsensitiveStringMap.empty())
val sinkTable = DataSource.lookupDataSource(write, spark.sqlContext.conf).getConstructor()
.newInstance().asInstanceOf[TableProvider].getTable(CaseInsensitiveStringMap.empty())
import org.apache.spark.sql.execution.datasources.v2.DataSourceV2Implicits._
trigger match {
// Invalid - can't read at all
case _ if !sourceTable.supportsAny(MICRO_BATCH_READ, CONTINUOUS_READ) =>
testNegativeCase(read, write, trigger,
s"Data source $read does not support streamed reading")
// Invalid - can't write
case _ if !sinkTable.supports(STREAMING_WRITE) =>
testNegativeCase(read, write, trigger,
s"Data source $write does not support streamed writing")
case _: ContinuousTrigger =>
if (sourceTable.supports(CONTINUOUS_READ)) {
          // Valid continuous queries.
testPositiveCase(read, write, trigger)
} else {
// Invalid - trigger is continuous but reader is not
testNegativeCase(
read, write, trigger, s"Data source $read does not support continuous processing")
}
case microBatchTrigger =>
if (sourceTable.supports(MICRO_BATCH_READ)) {
          // Valid microbatch queries.
testPositiveCase(read, write, trigger)
} else {
// Invalid - trigger is microbatch but reader is not
testPostCreationNegativeCase(read, write, trigger,
s"Data source $read does not support microbatch processing")
}
}
}
}
}
| ptkool/spark | sql/core/src/test/scala/org/apache/spark/sql/streaming/sources/StreamingDataSourceV2Suite.scala | Scala | apache-2.0 | 15,590 |
package dtos
import models.sunerp._
import play.api.libs.json.{Json, JsValue, Writes}
import models.sunerp.DonVi
import models.sunerp.Company
/**
* The Class DonViDto.
*
* @author Nguyen Duc Dung
* @since 3/19/14 10:11 AM
*
*/
case class DonViDto(
id: Long,
name: String,
shortName: Option[String],
companyId: Long,
company: Company
)
object DonViDto {
def apply(tuple: (DonVi, Company)) = {
val (donVi, company) = tuple
new DonViDto(
id = donVi.id.get,
name = donVi.name,
shortName = donVi.shortName,
companyId = donVi.companyId,
company = company
)
}
implicit def jsonWrite = new Writes[DonViDto] {
override def writes(o: DonViDto): JsValue = Json.obj(
"id" -> o.id,
"name" -> o.name,
"shortName" -> o.shortName,
"companyId" -> o.companyId,
"company" -> Companies.companyJsonFormat.writes(o.company)
)
}
}
| SunriseSoftVN/sunerp | app/dtos/DonViDto.scala | Scala | apache-2.0 | 1,032 |
/*
* Copyright 2014 Commonwealth Computer Research, Inc.
*
 * Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.locationtech.geomesa.jobs.index
import com.twitter.scalding._
import org.apache.accumulo.core.data.{Key, Mutation, Value}
import org.apache.accumulo.core.security.ColumnVisibility
import org.apache.hadoop.conf.Configuration
import org.geotools.data.DataStoreFinder
import org.locationtech.geomesa.core.data.{AccumuloDataStore, AccumuloFeatureWriter}
import org.locationtech.geomesa.jobs.JobUtils
import org.locationtech.geomesa.jobs.scalding.{AccumuloInputOptions, AccumuloOutputOptions, AccumuloSource, AccumuloSourceOptions, ConnectionParams}
import scala.collection.JavaConverters._
class AttributeIndexJob(args: Args) extends Job(args) {
lazy val feature = args(ConnectionParams.FEATURE_NAME)
lazy val attributes = args.list(AttributeIndexJob.Params.ATTRIBUTES_TO_INDEX)
lazy val zookeepers = args(ConnectionParams.ZOOKEEPERS)
lazy val instance = args(ConnectionParams.ACCUMULO_INSTANCE)
lazy val user = args(ConnectionParams.ACCUMULO_USER)
lazy val password = args(ConnectionParams.ACCUMULO_PASSWORD)
lazy val catalog = args(ConnectionParams.CATALOG_TABLE)
lazy val recordTable = args(ConnectionParams.RECORD_TABLE)
lazy val attributeTable = args(ConnectionParams.ATTRIBUTE_TABLE)
lazy val auths = args.optional(ConnectionParams.AUTHORIZATIONS).getOrElse("")
lazy val input = AccumuloInputOptions(recordTable)
lazy val output = AccumuloOutputOptions(attributeTable)
lazy val options = AccumuloSourceOptions(instance, zookeepers, user, password, input, output)
lazy val params = Map("zookeepers" -> zookeepers,
"instanceId" -> instance,
"tableName" -> catalog,
"user" -> user,
"password" -> password,
"auths" -> auths)
// non-serializable resources we want to re-use if possible
class Resources {
val ds = DataStoreFinder.getDataStore(params.asJava).asInstanceOf[AccumuloDataStore]
val sft = ds.getSchema(feature)
val visibilities = ds.writeVisibilities
val decoder = ds.getFeatureEncoder(feature)
// the attributes we want to index
val attributeDescriptors = sft.getAttributeDescriptors
.asScala
.filter(ad => attributes.contains(ad.getLocalName))
val attributesWithNames = AccumuloFeatureWriter.getAttributesWithNames(attributeDescriptors)
// required by scalding
def release(): Unit = {}
}
// scalding job
AccumuloSource(options)
.using(new Resources())
.flatMap(('key, 'value) -> 'mutation) {
(r: Resources, kv: (Key, Value)) => getAttributeIndexMutation(r, kv._1, kv._2)
}.write(AccumuloSource(options))
/**
* Converts a key/value pair from the record table into attribute index mutations
*
   * @param r shared, re-usable resources (data store, feature type, decoder)
   * @param key record table key
   * @param value encoded simple feature from the record table
   * @return mutations for the attribute index table
*/
def getAttributeIndexMutation(r: Resources, key: Key, value: Value): Seq[Mutation] = {
val feature = r.decoder.decode(r.sft, value)
AccumuloFeatureWriter.getAttributeIndexMutations(feature,
r.attributesWithNames,
new ColumnVisibility(r.visibilities))
}
}
object AttributeIndexJob {
object Params {
val ATTRIBUTES_TO_INDEX = "geomesa.index.attributes"
}
def runJob(conf: Configuration, params: Map[String, String], feature: String, attributes: Seq[String]) = {
if (attributes.isEmpty) {
throw new IllegalArgumentException("No attributes specified")
}
val ds = DataStoreFinder.getDataStore(params.asJava).asInstanceOf[AccumuloDataStore]
if (ds == null) {
throw new IllegalArgumentException("Data store could not be loaded")
} else if (!ds.catalogTableFormat(feature)) {
throw new IllegalStateException("Feature does not have an attribute index")
}
val jParams = params.asJava
import org.locationtech.geomesa.core.data.AccumuloDataStoreFactory.params._
// create args to pass to scalding job based on our input parameters
val args = new collection.mutable.ListBuffer[String]()
args.append("--" + ConnectionParams.FEATURE_NAME, feature)
args.appendAll(Seq("--" + Params.ATTRIBUTES_TO_INDEX) ++ attributes)
args.append("--" + ConnectionParams.RECORD_TABLE, ds.getRecordTableForType(feature))
args.append("--" + ConnectionParams.ATTRIBUTE_TABLE, ds.getAttrIdxTableName(feature))
args.append("--" + ConnectionParams.ZOOKEEPERS,
zookeepersParam.lookUp(jParams).asInstanceOf[String])
args.append("--" + ConnectionParams.ACCUMULO_INSTANCE,
instanceIdParam.lookUp(jParams).asInstanceOf[String])
args.append("--" + ConnectionParams.ACCUMULO_USER,
userParam.lookUp(jParams).asInstanceOf[String])
args.append("--" + ConnectionParams.ACCUMULO_PASSWORD,
passwordParam.lookUp(jParams).asInstanceOf[String])
args.append("--" + ConnectionParams.CATALOG_TABLE,
tableNameParam.lookUp(jParams).asInstanceOf[String])
Option(authsParam.lookUp(jParams).asInstanceOf[String]).foreach(a =>
args.append("--" + ConnectionParams.AUTHORIZATIONS, a))
Option(visibilityParam.lookUp(jParams).asInstanceOf[String]).foreach(v =>
args.append("--" + ConnectionParams.VISIBILITIES, v))
// set libjars so that our dependent libs get propagated to the cluster
JobUtils.setLibJars(conf)
// run the scalding job on HDFS
val hdfsMode = Hdfs(strict = true, conf)
val arguments = Mode.putMode(hdfsMode, Args(args))
val job = new AttributeIndexJob(arguments)
val flow = job.buildFlow
flow.complete() // this blocks until the job is done
}
}
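// Hedged invocation sketch (illustrative only; the connection values and the
// feature/attribute names below are placeholders, not part of this module):
//   AttributeIndexJob.runJob(new Configuration(),
//     Map("instanceId" -> "myInstance", "zookeepers" -> "zoo1:2181",
//         "user" -> "root", "password" -> "secret", "tableName" -> "geomesa_catalog"),
//     feature = "gdelt", attributes = Seq("eventCode"))
// runJob blocks until the scalding flow completes (see flow.complete() above).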
| nhambletCCRI/geomesa | geomesa-jobs/src/main/scala/org/locationtech/geomesa/jobs/index/AttributeIndexJob.scala | Scala | apache-2.0 | 6,457 |
package org.zouzias.spark.lucenerdd.aws.utils
case class SparkInfo(executorInstances: String, executorMemory: String, executorCores: String){
override def toString(): String = {
s"instances-${executorInstances}-mem-${executorMemory}-cores-${executorCores}"
}
}
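// Worked example: SparkInfo("4", "8g", "2").toString() == "instances-4-mem-8g-cores-2"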
| zouzias/spark-lucenerdd-aws | src/main/scala/org/zouzias/spark/lucenerdd/aws/utils/SparkInfo.scala | Scala | apache-2.0 | 271 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.log
import kafka.utils._
import kafka.message._
import org.junit._
import org.junit.Assert._
import org.junit.runner.RunWith
import org.junit.runners.Parameterized
import org.junit.runners.Parameterized.Parameters
import org.apache.kafka.common.record.{CompressionType, MemoryRecords, RecordBatch, SimpleRecord}
import org.apache.kafka.common.utils.Utils
import java.util.{Collection, Properties}
import kafka.server.{BrokerTopicStats, FetchLogEnd, LogDirFailureChannel}
import scala.jdk.CollectionConverters._
@RunWith(value = classOf[Parameterized])
class BrokerCompressionTest(messageCompression: String, brokerCompression: String) {
val tmpDir = TestUtils.tempDir()
val logDir = TestUtils.randomPartitionLogDir(tmpDir)
val time = new MockTime(0, 0)
val logConfig = LogConfig()
@After
def tearDown(): Unit = {
Utils.delete(tmpDir)
}
/**
* Test broker-side compression configuration
*/
@Test
def testBrokerSideCompression(): Unit = {
val messageCompressionCode = CompressionCodec.getCompressionCodec(messageCompression)
val logProps = new Properties()
logProps.put(LogConfig.CompressionTypeProp, brokerCompression)
    /* configure broker-side compression */
val log = Log(logDir, LogConfig(logProps), logStartOffset = 0L, recoveryPoint = 0L, scheduler = time.scheduler,
time = time, brokerTopicStats = new BrokerTopicStats, maxProducerIdExpirationMs = 60 * 60 * 1000,
producerIdExpirationCheckIntervalMs = LogManager.ProducerIdExpirationCheckIntervalMs,
logDirFailureChannel = new LogDirFailureChannel(10))
/* append two messages */
log.appendAsLeader(MemoryRecords.withRecords(CompressionType.forId(messageCompressionCode.codec), 0,
new SimpleRecord("hello".getBytes), new SimpleRecord("there".getBytes)), leaderEpoch = 0)
def readBatch(offset: Int): RecordBatch = {
val fetchInfo = log.read(offset,
maxLength = 4096,
isolation = FetchLogEnd,
minOneMessage = true)
fetchInfo.records.batches.iterator.next()
}
if (!brokerCompression.equals("producer")) {
val brokerCompressionCode = BrokerCompressionCodec.getCompressionCodec(brokerCompression)
assertEquals("Compression at offset 0 should produce " + brokerCompressionCode.name, brokerCompressionCode.codec, readBatch(0).compressionType.id)
}
else
assertEquals("Compression at offset 0 should produce " + messageCompressionCode.name, messageCompressionCode.codec, readBatch(0).compressionType.id)
}
}
object BrokerCompressionTest {
@Parameters
def parameters: Collection[Array[String]] = {
(for (brokerCompression <- BrokerCompressionCodec.brokerCompressionOptions;
messageCompression <- CompressionType.values
) yield Array(messageCompression.name, brokerCompression)).asJava
}
}
| sslavic/kafka | core/src/test/scala/unit/kafka/log/BrokerCompressionTest.scala | Scala | apache-2.0 | 3,645 |
package com.eevolution.context.dictionary.domain.api.repository
import com.eevolution.context.dictionary._
/**
* Copyright (C) 2003-2017, e-Evolution Consultants S.A. , http://www.e-evolution.com
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
* Email: emeris.hernandez@e-evolution.com, http://www.e-evolution.com , http://github.com/EmerisScala
* Created by emeris.hernandez@e-evolution.com , www.e-evolution.com on 01/11/17.
*/
trait ImportFormatRowRepository[ImportFormatRow, Int] extends api.Repostory[ImportFormatRow, Int] {
} | adempiere/ADReactiveSystem | dictionary-api/src/main/scala/com/eevolution/context/dictionary/domain/api/repository/ImportFormatRowRepository.scala | Scala | gpl-3.0 | 1,148 |
package mixr.isabelle.hol.util
import org.junit.Test
import isabelle.Term.{TFree, Free, Bound, App}
import mixr.isabelle.pure.lib.TermYXMLTest.parseYXMLFile
import Sets._
import SetsTest._
import org.junit.Assert.assertEquals
import isabelle.Term
import mixr.isabelle.pure.lib.TermUtils
class SetsTest {
@Test(expected = classOf[IllegalArgumentException])
def element_extraction_must_throw_an_exception_if_the_term_is_not_a_set_specification(): Unit = {
extractInsertedSetElements(App(Bound(0), Bound(1)))
}
@Test
def element_extraction_must_return_an_empty_list_for_bottom(): Unit = {
val noElements = extractInsertedSetElements(emptySetTerm)
assertEquals(Nil, noElements)
}
@Test
  def element_extraction_must_return_all_elements_of_nonempty_sets(): Unit = {
assertEquals(List(freeVar("a")), extractInsertedSetElements(singletonSetTerm))
assertEquals(List(freeVar("a"), freeVar("b")), extractInsertedSetElements(twoSetTerm))
assertEquals(List(freeVar("a"), freeVar("b"), freeVar("c"), freeVar("d")), extractInsertedSetElements(largeSetTerm))
}
}
object SetsTest {
/**
* `{} ⊆ {a}`
*/
val SETS_TESTS_FORMULA = parseYXMLFile("/mixr/isabelle/pure/lib/UnescapedYXML_sets_element_extraction_test_emptySet_subsetOf_singletonSet")
/**
* `{a,b} ⊆ {a,b,c,d}`
*/
val LARGE_SETS_TESTS_FORMULA = parseYXMLFile("/mixr/isabelle/pure/lib/UnescapedYXML_sets_element_extraction_large_sets")
val (emptySetTerm, singletonSetTerm) = SETS_TESTS_FORMULA match {
case App(truePropConst, App(App(lessEqConst, emptySet), setA)) => (emptySet, setA)
case _ => throw new IllegalStateException()
}
val (twoSetTerm, largeSetTerm) = LARGE_SETS_TESTS_FORMULA match {
case App(truePropConst, App(App(lessEqConst, emptySet), setA)) => (emptySet, setA)
case _ => throw new IllegalStateException()
}
def freeVar(freeVarName: String): Term.Free = {
Free(freeVarName, TFree("'a", TermUtils.HOL_LIST_TYPE))
}
} | urbas/mixr | libs/MixRIsabelleApi/src/test/scala/mixr/isabelle/hol/util/SetsTest.scala | Scala | mit | 1,982 |
/*
* Copyright 2011-2022 GatlingCorp (https://gatling.io)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.gatling.http.request
import java.{ util => ju }
import java.nio.charset.Charset
import io.gatling.commons.validation.Validation
import io.gatling.core.body._
import io.gatling.core.session._
import io.gatling.http.client.Param
import io.gatling.http.client.body.multipart._
import com.softwaremill.quicklens._
object BodyPart {
def rawFileBodyPart(
name: Option[Expression[String]],
filePath: Expression[String],
rawFileBodies: RawFileBodies
): BodyPart =
BodyPart(name, fileBodyPartBuilder(rawFileBodies.asResourceAndCachedBytes(filePath)), BodyPartAttributes.Empty)
def elFileBodyPart(
name: Option[Expression[String]],
filePath: Expression[String],
defaultCharset: Charset,
elFileBodies: ElFileBodies
): BodyPart =
stringBodyPart(name, new ElBody(elFileBodies.parse(filePath)), defaultCharset)
def pebbleStringBodyPart(name: Option[Expression[String]], string: String, defaultCharset: Charset): BodyPart =
stringBodyPart(name, PebbleStringBody(string, defaultCharset), defaultCharset)
def pebbleFileBodyPart(
name: Option[Expression[String]],
filePath: Expression[String],
defaultCharset: Charset,
pebbleFileBodies: PebbleFileBodies
): BodyPart =
stringBodyPart(name, PebbleFileBody(filePath, pebbleFileBodies, defaultCharset), defaultCharset)
def stringBodyPart(name: Option[Expression[String]], string: Expression[String], defaultCharset: Charset): BodyPart =
BodyPart(name, stringBodyPartBuilder(string, defaultCharset), BodyPartAttributes.Empty)
def byteArrayBodyPart(name: Option[Expression[String]], bytes: Expression[Array[Byte]]): BodyPart =
BodyPart(name, byteArrayBodyPartBuilder(bytes), BodyPartAttributes.Empty)
private def stringBodyPartBuilder(string: Expression[String], defaultCharset: Charset)(
name: String,
charset: Option[Charset],
transferEncoding: Option[String],
contentId: Option[String],
dispositionType: Option[String],
contentType: Option[String],
customHeaders: ju.List[Param],
fileName: Option[String]
): Expression[Part[_]] =
fileName match {
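      // When no fileName is supplied the part can stay a lightweight StringPart;
      // once a fileName is present the multipart encoding needs a file-style part,
      // so the string is encoded to bytes and delegated to byteArrayBodyPartBuilder.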
case None =>
string.map { resolvedString =>
new StringPart(
name,
resolvedString,
charset.getOrElse(defaultCharset),
transferEncoding.orNull,
contentId.orNull,
dispositionType.orNull,
contentType.orNull,
customHeaders
)
}
case _ =>
byteArrayBodyPartBuilder(string.map(_.getBytes(charset.getOrElse(defaultCharset))))(
name,
charset,
transferEncoding,
contentId,
dispositionType,
contentType,
customHeaders,
fileName
)
}
private def byteArrayBodyPartBuilder(bytes: Expression[Array[Byte]])(
name: String,
charset: Option[Charset],
transferEncoding: Option[String],
contentId: Option[String],
dispositionType: Option[String],
contentType: Option[String],
customHeaders: ju.List[Param],
fileName: Option[String]
): Expression[Part[_]] =
bytes.map { resolvedBytes =>
new ByteArrayPart(
name,
resolvedBytes,
charset.orNull,
transferEncoding.orNull,
contentId.orNull,
dispositionType.orNull,
contentType.orNull,
customHeaders,
fileName.orNull
)
}
private def fileBodyPartBuilder(resource: Expression[ResourceAndCachedBytes])(
name: String,
charset: Option[Charset],
transferEncoding: Option[String],
contentId: Option[String],
dispositionType: Option[String],
contentType: Option[String],
customHeaders: ju.List[Param],
fileName: Option[String]
): Expression[Part[_]] =
session =>
for {
ResourceAndCachedBytes(resource, cachedBytes) <- resource(session)
} yield cachedBytes match {
case Some(bytes) =>
new ByteArrayPart(
name,
bytes,
charset.orNull,
transferEncoding.orNull,
contentId.orNull,
dispositionType.orNull,
contentType.orNull,
customHeaders,
fileName.getOrElse(resource.name)
)
case _ =>
new FilePart(
name,
resource.file,
charset.orNull,
transferEncoding.orNull,
contentType.orNull,
dispositionType.orNull,
contentId.orNull,
customHeaders,
fileName.getOrElse(resource.name)
)
}
}
object BodyPartAttributes {
val Empty: BodyPartAttributes = BodyPartAttributes(None, None, None, None, None, None, Nil)
}
final case class BodyPartAttributes(
contentType: Option[Expression[String]],
charset: Option[Charset],
dispositionType: Option[Expression[String]],
fileName: Option[Expression[String]],
contentId: Option[Expression[String]],
transferEncoding: Option[String],
customHeaders: List[(Expression[String], Expression[String])]
) {
lazy val customHeadersExpression: Expression[Seq[(String, String)]] = expressionSeq2SeqExpression(customHeaders)
}
final case class BodyPart(
name: Option[Expression[String]],
partBuilder: (
String, // name
Option[Charset], // charset
Option[String], // transferEncoding
Option[String], // contentId
Option[String], // dispositionType
Option[String], // contentType
ju.List[Param], // customHeaders
Option[String] // fileName
) => Expression[Part[_]],
attributes: BodyPartAttributes
) {
def contentType(contentType: Expression[String]): BodyPart = this.modify(_.attributes.contentType).setTo(Some(contentType))
def charset(charset: String): BodyPart = this.modify(_.attributes.charset).setTo(Some(Charset.forName(charset)))
def dispositionType(dispositionType: Expression[String]): BodyPart = this.modify(_.attributes.dispositionType).setTo(Some(dispositionType))
def fileName(fileName: Expression[String]): BodyPart = this.modify(_.attributes.fileName).setTo(Some(fileName))
def contentId(contentId: Expression[String]): BodyPart = this.modify(_.attributes.contentId).setTo(Some(contentId))
def transferEncoding(transferEncoding: String): BodyPart = this.modify(_.attributes.transferEncoding).setTo(Some(transferEncoding))
def header(name: Expression[String], value: Expression[String]): BodyPart = this.modify(_.attributes.customHeaders)(_ ::: List(name -> value))
def toMultiPart(session: Session): Validation[Part[_]] =
for {
name <- resolveOptionalExpression(name, session)
contentType <- resolveOptionalExpression(attributes.contentType, session)
dispositionType <- resolveOptionalExpression(attributes.dispositionType, session)
fileName <- resolveOptionalExpression(attributes.fileName, session)
contentId <- resolveOptionalExpression(attributes.contentId, session)
customHeaders <- attributes.customHeadersExpression(session)
customHeadersAsParams =
if (customHeaders.nonEmpty) {
val params = new ju.ArrayList[Param](customHeaders.size)
customHeaders.foreach { case (headerName, value) => params.add(new Param(headerName, value)) }
params
} else {
ju.Collections.emptyList[Param]
}
part <- partBuilder(
name.orNull,
attributes.charset,
attributes.transferEncoding,
contentId,
dispositionType,
contentType,
customHeadersAsParams,
fileName
)(session)
} yield part
}
| gatling/gatling | gatling-http/src/main/scala/io/gatling/http/request/BodyPart.scala | Scala | apache-2.0 | 8,360 |
/*
* Copyright 2014-2021 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.atlas.core.model
import com.netflix.atlas.core.stacklang.BaseWordSuite
import com.netflix.atlas.core.stacklang.Interpreter
import com.netflix.atlas.core.stacklang.StandardVocabulary
import com.netflix.atlas.core.stacklang.Word
class AndWordSuite extends BaseWordSuite {
def interpreter: Interpreter =
Interpreter(QueryVocabulary.allWords ::: StandardVocabulary.allWords)
def word: Word = QueryVocabulary.And
def shouldMatch: List[(String, List[Any])] = List(
"a,b,:eq,:true" -> List(Query.Equal("a", "b")),
":true,a,b,:eq" -> List(Query.Equal("a", "b")),
"a,b,:eq,:false" -> List(Query.False),
":false,a,b,:eq" -> List(Query.False),
"a,b,:eq,c,:has" -> List(Query.And(Query.Equal("a", "b"), Query.HasKey("c")))
)
def shouldNotMatch: List[String] = List("", "a", ":true", "a,:true")
}
| copperlight/atlas | atlas-core/src/test/scala/com/netflix/atlas/core/model/AndWordSuite.scala | Scala | apache-2.0 | 1,448 |
/*
* Copyright 2014 Ralf Steuerwald
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ifmaps
import org.scalatest.FunSuite
import ifmaps._
import ifmaps.transformation.Converter._
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
@RunWith(classOf[JUnitRunner])
class ExtendedIdentifierTest extends FunSuite {
test("enclosing identity must not have an administrative-domain") {
val extendedIdentifier = (
<network xmlns="http://www.example.com/extended-identifiers"
address="10.0.0.0" netmask="255.0.0.0" type="IPv4"
administrative-domain=""></network>).toExtendedIdentifier()
val ifmapjIdentifier = extendedIdentifier.toIfmapj.asInstanceOf[JIdentity]
assert(ifmapjIdentifier.getAdministrativeDomain() === null)
}
test("ignore administrative-domain for identity if type is other and other-type-def is extended") {
val extendedIdentifier =
Identity("someXML", IDType.other, Some("adminDomainForbidden"), Some("extended"))
val ifmapjIdentifier = extendedIdentifier.toIfmapj.asInstanceOf[JIdentity]
assert(ifmapjIdentifier.getAdministrativeDomain() === null)
}
} | rsteuerw/ifmaps | src/test/scala/ifmaps/ExtendedIdentifierTest.scala | Scala | apache-2.0 | 1,688 |
package uk.ac.surrey.xw.multichooser
import java.awt.BorderLayout.CENTER
import javax.swing.BorderFactory
import javax.swing.JList
import javax.swing.JScrollPane
import javax.swing.ListSelectionModel.MULTIPLE_INTERVAL_SELECTION
import org.nlogo.core.LogoList
import org.nlogo.window.GUIWorkspace
import uk.ac.surrey.xw.api.IntegerProperty
import uk.ac.surrey.xw.api.LabeledPanelWidget
import uk.ac.surrey.xw.api.LabeledPanelWidgetKind
import uk.ac.surrey.xw.api.ListProperty
import uk.ac.surrey.xw.api.State
import uk.ac.surrey.xw.api.WidgetKey
import uk.ac.surrey.xw.api.swing.LogoObjectListCellRenderer
import uk.ac.surrey.xw.api.swing.enrichJList
class MultiChooserKind[W <: MultiChooser] extends LabeledPanelWidgetKind[W] {
override val name = "MULTI-CHOOSER"
override val newWidget = new MultiChooser(_, _, _)
override val heightProperty = new IntegerProperty[W](
"HEIGHT", Some(_.setHeight(_)), _.getHeight, 100)
private def items(jl: JList[AnyRef]) =
(0 until jl.getModel.getSize).map(jl.getModel.getElementAt)
val selectedItemsProperty = new ListProperty[W](
"SELECTED-ITEMS",
Some((w, xs) ⇒ {
val _items = items(w.jList)
w.jList.setSelectedIndices(
xs.map(x ⇒ _items.indexOf(x)).filterNot(_ == -1).toArray
)
}),
w ⇒ LogoList.fromJava(w.jList.getSelectedValuesList)
)
val itemsProperty = new ListProperty[W](
"ITEMS",
Some((w, xs) ⇒ {
w.jList.setListData(xs.toVector.toArray)
}),
w ⇒ LogoList(items(w.jList): _*)
)
override def propertySet = super.propertySet ++ Set(
selectedItemsProperty, itemsProperty
)
override def defaultProperty = Some(selectedItemsProperty)
}
class MultiChooser(
val key: WidgetKey,
val state: State,
val ws: GUIWorkspace)
extends LabeledPanelWidget {
override val kind = new MultiChooserKind[this.type]
val jList = new JList[AnyRef]() {
setSelectionMode(MULTIPLE_INTERVAL_SELECTION)
setBorder(BorderFactory.createRaisedBevelBorder)
}
add(new JScrollPane(jList), CENTER)
/* Use a custom renderer so Dump.logoObject is used instead of toString */
jList.setCellRenderer(new LogoObjectListCellRenderer)
jList.onValueChanged { event ⇒
if (!event.getValueIsAdjusting)
updateInState(kind.selectedItemsProperty)
}
}
| CRESS-Surrey/eXtraWidgets | xw/widgets/MultiChooserWidget/src/main/scala/uk/ac/surrey/xw/multichooser/MultiChooser.scala | Scala | mit | 2,311 |
package me.yingrui.segment.math
import org.nd4j.linalg.factory.Nd4j._
import scala.util.Random
class NDMatrixBuilder extends MatrixBuilder {
override def vector(data: Seq[Double]): Matrix = new NDMatrix(create(data.toArray))
override def apply(data: Seq[Double]): Matrix = vector(data)
override def apply(data: Array[Double]): Matrix = vector(data)
override def apply(row: Int, col: Int): Matrix = new NDMatrix(zeros(row, col))
override def apply(size: Int, identity: Boolean): Matrix = if(identity) new NDMatrix(eye(size)) else apply(size, size)
override def apply(data: Array[Array[Double]]): Matrix = new NDMatrix(create(data))
override def apply(row: Int, col: Int, data: Array[Double]): Matrix = new NDMatrix(create(data, Array(row, col)))
override def applyBoolean(row: Int, col: Int, data: Array[Boolean]): Matrix = apply(row, col, data.map(b => if(b) 1D else -1D))
override def randomize(row: Int, col: Int, min: Double, max: Double) = {
val data = new Array[Double](row * col)
for (i <- 0 until data.length) {
data(i) = (Math.random() * (max - min)) + min
}
apply(row, col, data)
}
  override def randomize(row: Int, col: Int): Matrix = {
    val data = (0 until row * col).map(i => 1e-5 * Random.nextInt(100).toDouble)
    // build an NDMatrix, consistent with the other factory methods of this builder
    apply(row, col, data.toArray)
  }
}
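// Hedged usage examples (values are illustrative):
//   val b = new NDMatrixBuilder()
//   b.vector(Seq(1.0, 2.0, 3.0))   // 1 x 3 row vector
//   b(2, 2)                        // 2 x 2 zero matrix
//   b(3, identity = true)          // 3 x 3 identity matrix
//   b.randomize(2, 3, -1.0, 1.0)   // uniform random entries in [-1, 1)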
| yingrui/mahjong | lib-segment/src/main/scala/me/yingrui/segment/math/NDMatrixBuilder.scala | Scala | gpl-3.0 | 1,335 |
// Project: scalanative-cocoa
// Module: Foundation
// Description: Generated with scala-obj-bindgen (with manual postprocessing) from:
// objc/objc.h, objc/NSObject.h, objc/Protocol.h, Foundation/NSObject.h
package cocoa.foundation
import scalanative.native._
import objc._
@ObjC
trait NSCopying {
@inline def copyWithZone_(zone: NSZone): id = extern
}
| jokade/scalanative-cocoa | foundation/src/main/scala/cocoa/foundation/NSCopying.scala | Scala | mit | 381 |
package tests
import strategy.TextEditor
/**
* TextFormatter tests
* Some tests for the TextEditor fulfilling "Strategy Pattern" exercise
* in week 8
*
* @author lmignot
*/
class StrategyTest extends BaseTest {
private val TextA = "Testing text in caps formatter"
private val TextB = "T3st1ng 50m3 g33k 5tuff"
private val TextAUC = TextA.toUpperCase
private val TextALC = TextA.toLowerCase
private val TextBFormatted = "Tstngmgktuff"
describe("A text editor") {
it("format() should format the text according to the provided formatter") {
def formatter(s:String): String = s.toUpperCase
val editor: TextEditor = TextEditor(formatter)
val res = editor.publishText(TextA)
res should be (TextAUC)
}
it("format() should format the text according to any valid strategy") {
val editor: TextEditor = TextEditor(s => s.toLowerCase)
val res = editor.publishText(TextA)
res should be (TextALC)
}
it("and yet another strategy") {
def formatter (s: String): String = s.filter(_.isLetter)
val editor: TextEditor = TextEditor(formatter)
val res = editor.publishText(TextB)
res should be (TextBFormatted)
}
}
}
| BBK-PiJ-2015-67/sdp-portfolio | exercises/week08/src/test/scala/tests/StrategyTest.scala | Scala | unlicense | 1,217 |
package chapter1
object TokenType extends Enumeration {
type TokenType = Value
val Space, Punctuation, Symbolic, Numeric, Alphanumeric = Value
def charType(c: Char): TokenType =
// here the order of if's matters
if(" \t\n\r".contains(c)) Space else
if("()[]{},".contains(c)) Punctuation else
if("~‘!@#$%^&*-+=|\\:;<>.?/".contains(c)) Symbolic else
if("0123456789".contains(c)) Numeric else
// this is called alphanumeric and does not contain any numeric characters intentionally
if("abcdefghijklmnopqrstuvwxyz_’ABCDEFGHIJKLMNOPQRSTUVWXYZ".contains(c)) Alphanumeric else
throw SyntaxErrorException(s"Unknown character: $c")
}
| inpefess/practical-logic-handbook | src/main/scala/chapter1/TokenType.scala | Scala | mit | 671 |
package com.intentmedia.mario
import com.intentmedia.mario.Pipeline._
import org.scalatest._
class PipelineSuite extends FlatSpec with Matchers {
def time(f: => Unit) = {
val s = System.currentTimeMillis
f
(System.currentTimeMillis - s).toInt
}
trait GeneratorBuilder {
val generatorResult = 1
val generator = pipe(generatorResult)
}
"A pipeline without dependencies" should "run standalone" in new GeneratorBuilder {
generator.run() should be(generatorResult)
}
"A unary pipeline" should "depend on one matching pipeline to run successfully" in new GeneratorBuilder {
val transformer = pipe((a: Int) => a + 2, generator)
transformer.run() should be(generatorResult + 2)
}
"A binary pipeline" should "depend on two matching pipelines to run successfully" in new GeneratorBuilder {
val transformer = pipe((a: Int) => a.toString, generator)
val binaryTransformer = pipe((a: String, b: Int) => a + b.toString, transformer, generator)
binaryTransformer.run() should be(generatorResult.toString + generatorResult.toString)
}
"runWith" should "execute other pipelines successfully" in new GeneratorBuilder {
var a = 1
var b = 1
val generator1 = pipe(a += 1)
val generator2 = pipe(b += 2)
val result = generator.runWith(generator1, generator2)
result should be(generatorResult)
a should be(2)
b should be(3)
}
"A pipeline step" should "be executed only once when being a dependency of multiple pipelines" in {
var a = 1
val modifier = pipe {
a += 1
a
}
val transformer1 = pipe((x: Int) => x + 1, modifier)
val transformer2 = pipe((x: Int) => x + 2, modifier)
val transformer3 = pipe((x: Int, y: Int) => x + y, transformer1, transformer2)
val pipelineResult = transformer3.run()
a should be(2)
pipelineResult should be(a * 2 + 3)
}
"Independent pipelines" should "be executed independently" in {
val time1 = 100
val time2 = 200
val delayedStep1 = pipe(Thread.sleep(time1))
val delayedStep2 = pipe(Thread.sleep(time2))
val joinStep = pipe((a: Unit, b: Unit) => {}, delayedStep1, delayedStep2)
time {
joinStep.run()
} should be < time1 + time2
}
"A Reducer" should "generate a tree of pipelines reductions" in new GeneratorBuilder {
generator.setId("A")
val gen2 = pipe(2).setId("B")
val gen3 = pipe(3).setId("C")
val steps = Seq(generator, gen2, gen3)
val reducer = steps.reduceToPipe((a: Int, b: Int) => a + b)
reducer.run() should be(6)
reducer.dependencyTree() should be(
"Transformer" + "\n" +
"|- Transformer" + "\n" +
"| |- A" + "\n" +
"| |- B" + "\n" +
"|- C")
}
} | intentmedia/mario | src/test/scala/com/intentmedia/mario/PipelineSuite.scala | Scala | mit | 2,736 |
/*
* Copyright 2021 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.urls
import org.scalatest.GivenWhenThen
import org.scalatest.funspec.AnyFunSpecLike
import org.scalatest.matchers.should.Matchers
import org.scalatestplus.play.guice.GuiceOneServerPerSuite
import play.api.i18n.{I18nSupport, MessagesApi}
import play.api.mvc.AnyContentAsEmpty
import play.api.test.FakeRequest
class LinkSpec extends AnyFunSpecLike with GivenWhenThen with Matchers with GuiceOneServerPerSuite with I18nSupport {
implicit val request: FakeRequest[AnyContentAsEmpty.type] = FakeRequest()
implicit val messagesApi: MessagesApi = app.injector.instanceOf[MessagesApi]
describe("portal page link should") {
it("be created with no value for same window target") {
Given("the link has no value attribute supplied")
When("portal page link is created")
val portalLink = Link.toPortalPage.apply(url = "https://someurl", value = None)
Then("the link should be rendered without a value")
portalLink.toHtml.toString() shouldBe "<a href=\\"https://someurl\\" target=\\"_self\\" data-sso=\\"client\\"></a>"
}
it("be created with the given the value,id and css ") {
Given("the link has id, css and value")
val value = Some("link text")
When("portal page link is created")
val portalLink = Link.toPortalPage.apply(url = "https://someurl",
value = value,
id = Some("link-id"),
cssClasses = Some("link-style blink"))
Then("the link should be rendered with id and styles")
portalLink.toHtml.toString() shouldBe "<a id=\\"link-id\\" href=\\"https://someurl\\" target=\\"_self\\" data-sso=\\"client\\" class=\\"link-style blink\\">link text</a>"
}
it("be created with the given the value for same window target without escaping the text") {
Given("the link value attribute as 'Pay £4,000 now - it's due'")
val value = Some("Pay £4,000 now - it's due")
When("portal page link is created")
val portalLink = Link.toPortalPage.apply(url = "https://someurl", value = value)
Then("the link should be rendered in the same way")
portalLink.toHtml.toString() shouldBe """<a href="https://someurl" target="_self" data-sso="client">Pay £4,000 now - it's due</a>"""
}
it("be created with the hidden info span when specified") {
Given("the hiddenInfo value is 'my hiddenInfo'")
val hiddenInfo = Some("my hiddenInfo")
When("portal page link is created")
val portalLink = Link.toPortalPage.apply(url = "https://someurl", value = None, hiddenInfo = hiddenInfo)
Then("the link should have hidden span")
portalLink.toHtml.toString() shouldBe """<a href="https://someurl" target="_self" data-sso="client"><span class="visuallyhidden">my hiddenInfo</span></a>"""
}
}
describe("internal page link should") {
it("be created with no value for same window target") {
Given("the link has no value attribute supplied")
When("internal page link is created")
val portalLink = Link.toInternalPage.apply(url = "https://someurl", value = None)
Then("the link should be rendered with no sso in the same window")
portalLink.toHtml.toString() shouldBe "<a href=\\"https://someurl\\" target=\\"_self\\" data-sso=\\"false\\"></a>"
}
it("be created with data attribute") {
Given("the data attribute as 'data-some=test'")
val data = Some(Map("some" -> "test"))
When("link is created")
val linkWithDataAttr = Link.toInternalPage.apply(url = "https://someurl", value = None, dataAttributes = data)
Then("the link should render with data attribute")
      linkWithDataAttr.toHtml.toString() shouldBe "<a href=\"https://someurl\" target=\"_self\" data-sso=\"false\" data-some=\"test\"></a>"
}
it("be created with multiple data attributes") {
Given("the data attributes as 'data-some1=test1' and 'data-some2=test2'")
val data = Some(Map("some1" -> "test1", "some2" -> "test2"))
When("link is created")
val linkWithDataAttr = Link.toInternalPage.apply(url = "https://someurl", value = None, dataAttributes = data)
Then("the link should render with data attribute")
      linkWithDataAttr.toHtml.toString() shouldBe "<a href=\"https://someurl\" target=\"_self\" data-sso=\"false\" data-some1=\"test1\" data-some2=\"test2\"></a>"
}
}
describe("internal page with sso link should") {
it("be created with no value for same window target") {
Given("the link has no value attribute supplied")
When("internal page link is created")
val portalLink = Link.toInternalPageWithSso.apply(url = "https://someurl", value = None)
Then("the link should be rendered with no sso in a new window")
      portalLink.toHtml.toString() shouldBe "<a href=\"https://someurl\" target=\"_self\" data-sso=\"server\"></a>"
}
}
describe("external page link should") {
it("be created with no value for same window target") {
Given("the link has no value attribute supplied")
When("external page link is created")
val portalLink = Link.toExternalPage.apply(url = "https://someurl", value = None)
Then("the link should be rendered with no sso in a new window")
portalLink.toHtml.toString() shouldBe """<a href="https://someurl" target="_blank" data-sso="false" rel="external noopener noreferrer"><span class="visuallyhidden">link opens in a new window</span></a>"""
}
it("be created with hidden info span for screen readers") {
Given("the link value attribute as 'Pay £4,000 now - it's due'")
val value = Some("Pay £4,000 now - it's due")
When("external page link is created")
val portalLink = Link.toExternalPage.apply(url = "https://someurl", value = value)
Then("the link should be rendered with title including a new window prompt")
portalLink.toHtml.toString() shouldBe """<a href="https://someurl" target="_blank" data-sso="false" rel="external noopener noreferrer">Pay £4,000 now - it's due<span class="visuallyhidden">link opens in a new window</span></a>"""
}
}
}
| hmrc/url-builder | src/test/scala/uk/gov/hmrc/urls/LinkSpec.scala | Scala | apache-2.0 | 6,712 |
package debop4s.core.utils
import java.util.concurrent.{CountDownLatch => JCountDownLatch}
import scala.annotation.tailrec
import scala.util.{Failure, Success, Try}
/**
* Memorize
* @author Sunghyouk Bae
*/
object Memorize {
/**
* Thread-safe memoization for a function.
*
* This works like a lazy val indexed by the input value. The memo
* is held as part of the state of the returned function, so keeping
* a reference to the function will keep a reference to the
* (unbounded) memo table. The memo table will never forget a
* result, and will retain a reference to the corresponding input
* values as well.
*
* If the computation has side-effects, they will happen exactly
* once per input, even if multiple threads attempt to memoize the
* same input at one time, unless the computation throws an
* exception. If an exception is thrown, then the result will not be
* stored, and the computation will be attempted again upon the next
* access. Only one value will be computed at a time. The overhead
* required to ensure that the effects happen only once is paid only
* in the case of a miss (once per input over the life of the memo
* table). Computations for different input values will not block
* each other.
*
* The combination of these factors means that this method is useful
* for functions that will only ever be called on small numbers of
* inputs, are expensive compared to a hash lookup and the memory
* overhead, and will be called repeatedly.
*/
def apply[@miniboxed A, @miniboxed B](f: A => B): A => B = new ((A) => B) {
private[this] var memo = Map.empty[A, Either[JCountDownLatch, B]]
/**
* What to do if we do not find the value already in the memo
* table.
*/
@tailrec private[this] def missing(a: A): B = synchronized {
// With the lock, check to see what state the value is in.
memo.get(a) match {
case None =>
// If it's missing, then claim the slot by putting in a
// CountDownLatch that will be completed when the value is
// available.
val latch = new JCountDownLatch(1)
memo = memo + (a -> Left(latch))
// The latch wrapped in Left indicates that the value
// needs to be computed in this thread, and then the
// latch counted down.
Left(latch)
case Some(other) =>
// This is either the latch that will indicate that the
// work has been done, or the computed value.
Right(other)
}
} match {
case Right(Right(b)) =>
// The computation is already done.
b
case Right(Left(latch)) =>
// Someone else is doing the computation.
latch.await()
// This recursive call will happen when there is an
// exception computing the value, or if the value is
// currently being computed.
missing(a)
case Left(latch) =>
// Compute the value outside of the synchronized block.
val b = Try { f(a) }
b match {
case Success(_) =>
case Failure(t) =>
// If there was an exception running the
// computation, then we need to make sure we do not
// starve any waiters before propagating the
// exception.
synchronized { memo = memo - a }
latch.countDown()
throw t
}
// Update the memo table to indicate that the work has
// been done, and signal to any waiting threads that the
// work is complete.
synchronized { memo = memo + (a -> Right(b.get)) }
latch.countDown()
b.get
}
override def apply(a: A): B = {
// Look in the (possibly stale) memo table. If the value is
// present, then it is guaranteed to be the final value. If it
// is absent, call missing() to determine what to do.
memo.get(a) match {
case Some(Right(b)) => b
case _ => missing(a)
}
}
}
}
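// A minimal runnable sketch of the once-per-input guarantee documented above.
// `MemorizeExample` and its names are illustrative additions, not part of the API.
private[utils] object MemorizeExample {
  def main(args: Array[String]): Unit = {
    val invocations = new java.util.concurrent.atomic.AtomicInteger(0)
    val square = Memorize { (n: Int) =>
      invocations.incrementAndGet() // the side effect runs exactly once per input
      n * n
    }
    assert(square(4) == 16) // computed
    assert(square(4) == 16) // served from the memo table
    assert(invocations.get == 1)
  }
}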
| debop/debop4s | debop4s-core/src/main/scala/debop4s/core/utils/Memorize.scala | Scala | apache-2.0 | 4,067 |
package us.blelbinha.scalaredisexample
import akka.util.ByteString
import redis.{ByteStringFormatter, RedisClient}
import us.blelbinha.scalaredisexample.Model.Id
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future
import scala.pickling._
import scala.pickling.json._
object Redis {
implicit private val akkaSystem = akka.actor.ActorSystem()
private val client = RedisClient()
private val applicationString = "dorloch"
def save[T <: Model[T] : FastTypeTag : SPickler : Unpickler](obj: T): Future[T] = {
val objWithId: T = obj.id.fold {
val newId = java.util.UUID.randomUUID.toString
obj.withId(newId)
} {
_ => obj
}
val resultFuture = client.set(s"$applicationString:${objWithId.id}", objWithId)
resultFuture map (_ => objWithId)
}
def get[T <: Model[T] : FastTypeTag : SPickler : Unpickler](id: Id): Future[Option[T]] =
client.get(s"$applicationString:$id")
implicit private def genericByteStringFormatter[T <: Model[T] : FastTypeTag : SPickler : Unpickler]: ByteStringFormatter[T] =
new ByteStringFormatter[T] {
override def deserialize(bs: ByteString): T = JSONPickle(bs.utf8String).unpickle[T]
override def serialize(data: T): ByteString = ByteString(data.pickle.value)
}
}
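// Hedged usage sketch: `User` is a hypothetical Model[User] subtype with an
// `id: Option[Id]` and a `withId` copy method, mirroring what `save` relies on.
//   for {
//     saved <- Redis.save(User(None, "alice")) // save assigns a random UUID id
//     found <- Redis.get[User](saved.id.get)
//   } yield found // Some(saved) on a successful round trip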
| ExNexu/scala-redis-example | src/main/scala/us/blelbinha/scalaredisexample/Redis.scala | Scala | bsd-3-clause | 1,297 |
package lang.keywords
import scala.util.control.Breaks
object Assert {
def main(args: Array[String]) {
assert(!listValuesAllValid(null))
assert(listValuesAllValid(List("")))
assert(!listValuesAllValid(List("", null)))
listValuesAllValid(List("1", "2", null, "3"))
}
def listValuesAllValid(values: List[String]): Boolean = {
if (values == null) {
false
} else {
var hadConfigValues = true
val loop = new Breaks
loop.breakable {
for (value <- values) {
if (value == null) {
hadConfigValues = false
loop.break()
}
}
}
hadConfigValues
}
}
}
| congdepeng/scalab | src/main/scala/lang/keywords/Assert.scala | Scala | apache-2.0 | 676 |
package glasskey.util
import java.security.interfaces.RSAPublicKey
import com.nimbusds.jose.jwk.{JWK, RSAKey}
import com.typesafe.config.ConfigFactory
import glasskey.model.fetchers.IDTokenAccessTokenValidatorMock
import org.scalatest._
import org.scalatest.enablers.Emptiness._
import scala.collection.JavaConverters._
import scala.collection.mutable
class PEMDERKeySourceSpec extends FlatSpec with Matchers with IDTokenAccessTokenValidatorMock {
"Iterating RSA keys" should "convert to RSA public keys" in {
val util = new PEMDERKeySource { override def source = ConfigFactory.load.getConfig("oauth").getConfig("provider").getString("access-token-JWT-key-url") }
val pubKey = util.getPublicKey("1")
pubKey shouldBe a [Option[RSAPublicKey]]
pubKey shouldBe 'defined
}
"From file: Iterating RSA keys" should "convert to RSA public keys" in {
val util = new FilePEMDERKeySource { override def source = staticX509PEMDER }
val pubKey = util.getPublicKey("1")
pubKey shouldBe a [Option[RSAPublicKey]]
pubKey shouldBe 'defined
}
}
| MonsantoCo/glass-key | glass-key-common/src/test/scala/glasskey/util/PEMDERKeySourceSpec.scala | Scala | bsd-3-clause | 1,071 |
package tu.model.knowledge
/**
* Stores typed KLine
* @author talanov max
* date 2012-06-08
* time: 10:51 PM
* @see KLine
*/
case class TypedKLine[Type <: Resource](var _frames: Map[KnowledgeURI, Type], _uri: KnowledgeURI, _probability: Probability)
extends Resource(_uri, _probability) {
def size = frames.size
def this(_frames: Map[KnowledgeURI, Type], _uri: KnowledgeURI) = {
this(_frames, _uri, new Probability())
}
def frames: Map[KnowledgeURI, Type] = _frames
def frames_=(value: Map[KnowledgeURI, Type]): TypedKLine[Type] = {
_frames = value
this
}
def +(in: Pair[KnowledgeURI, Type]): TypedKLine[Type] = {
_frames = _frames + in
this
}
def +(in: Type): TypedKLine[Type] = {
_frames = _frames + (in.uri -> in)
this
}
/**
* Returns Iterable of values of Type.
* @return Iterable of values of Type.
*/
def values: Iterable[Type] = frames.values
/**
* Returns Some[Type] if frames contains Resource with specified KnowledgeURI.
* @param uri to search resource with.
* @return Option[Type] with specified KnowledgeURI.
*/
def get(uri: KnowledgeURI): Option[Type] = {
_frames.get(uri)
}
/**
* Searches for Resource with KnowledgeURI with specified UID.
* @param uid UID to search with.
* @return Option[Type] with specified UID in KnowledgeURI.
*/
def get(uid: String): List[Type] = {
_frames.filter {
keyValue: Pair[KnowledgeURI, Type] => {
keyValue._1.uid.equals(uid)
}
}.map {
keyValue: Pair[KnowledgeURI, Type] => keyValue._2
}.toList
}
override def toString: String = {
frames.map(f => f._1.toString()).mkString("(", ",", ")")
}
//TODO correct this
/*def this(map: Map[String, String]) = {
val typeString = map.get("type") match {
case Some(x) => x
case None => throw new UnexpectedException("$Type_not_specified")
}
val frames = map.get("frames") match {
case Some(x) => {
x
}
case None => Map[KnowledgeURI, Resource]()
}
this(Map[KnowledgeURI, Resource](), new KnowledgeURI(map), new Probability(map))
}*/
/**
* Merges the frames of current TypedKLine with specified TypedKLine of the same Type.
* @param in KLine to merge.
* @return current TypedKLine with merged _frames.
*/
def merge(in: TypedKLine[Type]): TypedKLine[Type] = {
_frames = frames ++ in.frames
this
}
}
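// Hedged usage sketch (identifiers are illustrative; factories are defined below):
//   val kl = TypedKLine[Resource]("demo")    // empty KLine named "demoTypedKLine"
//   val withRes = kl + someResource          // keyed by someResource.uri
//   val merged = withRes.merge(otherKLine)   // Map ++ semantics: on a colliding
//                                            // KnowledgeURI, otherKLine's entry wins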
object TypedKLine {
//TODO correct this
/*
def translateStringMap[Type <: Resource](stringMap: String, typeString: String): Map[KnowledgeURI, Type] = {
val res = Map.empty[KnowledgeURI, Type]
val listPairsString: List[String] = stringMap.replaceFirst("Map\(", "").replace(")", "").split(",").toList
val listPairs: List[Pair[String, String]] = listPairsString.map {
x: String => {
val twoString = x.split("=>").toList
if (twoString.size > 1) {
(twoString(0), twoString(1))
} else {
throw new UnexpectedException("$Invalid_map")
}
}
}
/*listPairs.map {
}*/
null
} */
def apply[Type <: Resource](uri: KnowledgeURI): TypedKLine[Type] = {
new TypedKLine(Map[KnowledgeURI, Type](), uri)
}
def apply[Type <: Resource](name: String): TypedKLine[Type] = {
new TypedKLine(Map[KnowledgeURI, Type](), KnowledgeURI(name + "TypedKLine"))
}
def apply[Type <: Resource](name: String, entity: Type): TypedKLine[Type] = {
new TypedKLine(Map[KnowledgeURI, Type](entity.uri -> entity), KnowledgeURI(name + "TypedKLine"))
}
def apply[Type <: Resource](name: String, entities: Map[KnowledgeURI, Type]): TypedKLine[Type] = {
new TypedKLine(entities, KnowledgeURI(name + "TypedKLine"))
}
} | keskival/2 | model.knowledge/src/main/scala/tu/model/knowledge/TypedKLine.scala | Scala | gpl-3.0 | 3,759 |
/**********************************************************************************************************************
* Copyright © 2014 Reactific Software, Inc. *
* *
* This file is part of Scrupal, an Opinionated Web Application Framework. *
* *
* Scrupal is free software: you can redistribute it and/or modify it under the terms *
* of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, *
* or (at your option) any later version. *
* *
* Scrupal is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied *
* warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more *
* details. *
* *
* You should have received a copy of the GNU General Public License along with Scrupal. If not, see either: *
* http://www.gnu.org/licenses or http://opensource.org/licenses/GPL-3.0. *
**********************************************************************************************************************/
package scrupal.store.reactivemongo
import java.util.concurrent.atomic.AtomicInteger
import akka.util.Timeout
import org.specs2.mutable.Specification
import org.specs2.specification._
import reactivemongo.api.DefaultDB
import scrupal.utils.ScrupalComponent
import scala.concurrent.Await
import scala.concurrent.duration.{Duration, FiniteDuration}
/** A Fake DB Context used for testing
* Created by reidspencer on 10/21/14.
*/
abstract class DBContextSpecification(val specName: String,
val timeout: FiniteDuration = Duration(5,"seconds"))
extends Specification with ScrupalComponent {
  // WARNING: Do NOT put anything but def and lazy val in here: because of DelayedInit, app startup would
  // otherwise get invoked twice and you'll have a real MESS on your hands!!!! (i.e. no db interaction will work!)
// Handle one time startup and teardown of the DBContext
object dbContextActions {
lazy val startDB = { DBContext.startup() }
lazy val stopDB = { DBContext.shutdown() }
}
lazy val dbActions = dbContextActions
override def map(fs: ⇒ Fragments) = Step(dbActions.startDB) ^ fs ^ Step(dbActions.stopDB)
override def logger_identity = specName
implicit lazy val akka_timeout : Timeout = timeout
lazy val uri = "mongodb://localhost:27017/"
lazy val counter : AtomicInteger = new AtomicInteger(0)
def getDBContext() : DBContext = {
val name = Symbol(specName + "-" + counter.incrementAndGet() )
DBContext.fromURI(name, uri)
}
private def doWithDBC[T]( f: (DBContext) ⇒ T) : T = {
val dbc = getDBContext()
try {
f (dbc)
}
finally {
dbc.close()
}
}
def withDBContext[T]( f: (DBContext) ⇒ T ) : T = {
doWithDBC { implicit dbc ⇒
f(dbc)
}
}
def withDB[T](dbName: String) ( f : (DefaultDB) ⇒ T) : T = {
doWithDBC { dbc ⇒
dbc.withDatabase(dbName) { implicit db ⇒
f(db)
}
}
}
def withEmptyDB[T](dbName: String)( f : (ScrupalDB) ⇒ T) : T = {
doWithDBC { dbc ⇒
dbc.withDatabase(dbName) { implicit db ⇒
val future = db.emptyDatabase
Await.result(future, timeout)
f(db)
}
}
}
}
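// Usage sketch (illustrative): a concrete suite extends this specification and runs
// each example against a scratch database; the names below are hypothetical.
//
//   class WidgetDAOSpec extends DBContextSpecification("WidgetDAOSpec") {
//     "WidgetDAO" should {
//       "start from an empty database" in {
//         withEmptyDB("WidgetDAOSpec_db") { db ⇒
//           // exercise DAO code against `db` here
//           success
//         }
//       }
//     }
//   }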
| scrupal/scrupal-store-reactivemongo | src/test/scala/scrupal/scrupal/reactivemongo/DBContextSpecification.scala | Scala | apache-2.0 | 4,124 |
// Copyright (C) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See LICENSE in project root for information.
package com.microsoft.ml.spark
import org.apache.spark.sql.{DataFrame, Dataset}
import org.apache.spark.ml.{PipelineStage, Transformer}
import org.apache.spark.ml.param.{Param, ParamMap, TransformerParam}
import org.apache.spark.ml.util.{DefaultParamsReadable, Identifiable}
import org.apache.spark.sql.types._
object MultiColumnAdapter extends DefaultParamsReadable[MultiColumnAdapter]
/** The <code>MultiColumnAdapter</code> takes a unary transformer and a list of input output column pairs
* and applies the transformer to each column
*/
class MultiColumnAdapter(override val uid: String) extends Transformer with MMLParams {
def this() = this(Identifiable.randomUID("MultiColumnAdapter"))
/** Comma separated list of input column names, encoded as a string. These are the columns to be transformed.
* @group param
*/
val inputCols: Param[String] =
StringParam(
this,
"inputCols",
"comma separated list of column names encoded as a string")
/** @group getParam */
final def getInputCols: String = $(inputCols)
/** @group setParam */
def setInputCols(value: String): this.type = set(inputCols, value)
/** Comma separated list of column names for the transformed columns, encoded as a string.
* @group param
*/
val outputCols: Param[String] =
StringParam(
this,
"outputCols",
"comma separated list of column names encoded as a string")
/** @group getParam */
final def getOutputCols: String = $(outputCols)
/** @group setParam */
def setOutputCols(value: String): this.type = set(outputCols, value)
/** @return List of input/output column name pairs. */
def getInputOutputPairs: List[(String, String)] =
getInputCols.split(",").zip(getOutputCols.split(",")).toList
/** Base transformer to apply to every column in the input column list.
* @group param
*/
val baseTransformer: TransformerParam =
new TransformerParam(this,
"baseTransformer",
"base transformer to apply to every column")
/** @group getParam */
final def getBaseTransformer: Transformer = $(baseTransformer)
/** @group setParam */
def setBaseTransformer(value: Transformer): this.type = {
try {
//Test to see whether the class has the appropriate getters and setters
value.getParam("inputCol")
value.getParam("outputCol")
setParamInternal(value, "inputCol", this.uid + "__in")
setParamInternal(value, "outputCol", this.uid + "__out")
} catch {
      case e: Exception =>
        throw new IllegalArgumentException(
          "Need to pass a transformer with inputCol and outputCol params", e)
}
set(baseTransformer, value)
}
private def setParamInternal[M <: PipelineStage, V](model: M,
name: String,
value: V) = {
model.set(model.getParam(name), value)
}
private def getParamInternal[M <: PipelineStage](model: M, name: String) = {
model.getOrDefault(model.getParam(name))
}
private def setInOutCols[M <: PipelineStage](
model: M,
inputOutputPair: (String, String)) = {
setParamInternal(setParamInternal(model, "inputCol", inputOutputPair._1),
"outputCol",
inputOutputPair._2)
}
/** Apply the transform to all the columns in the input column list
    * @param dataset input dataset whose columns will be transformed
* @return DataFrame with transformed columns bearing the output column names
*/
override def transform(dataset: Dataset[_]): DataFrame = {
transformSchema(dataset.schema)
val firstOutput = setInOutCols(getBaseTransformer,
getInputOutputPairs.head).transform(dataset)
getInputOutputPairs.tail.foldLeft(firstOutput: DataFrame) { (df, pair) =>
setInOutCols(getBaseTransformer, pair).transform(df)
}
}
def copy(extra: ParamMap): this.type = defaultCopy(extra)
private def verifyCols(df: DataFrame,
inputOutputPairs: List[(String, String)]) = {
inputOutputPairs.foreach {
case (s1, s2) if !df.columns.contains(s1) =>
throw new IllegalArgumentException(
s"DataFrame does not contain specified column: $s1")
case (s1, s2) if df.columns.contains(s2) =>
throw new IllegalArgumentException(
s"DataFrame already contains specified column: $s2")
case _ =>
}
}
override def transformSchema(schema: StructType): StructType = {
getInputOutputPairs.foldLeft(schema) { (schema, pair) =>
setInOutCols(getBaseTransformer, pair).transformSchema(schema)
}
}
}
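// Usage sketch (illustrative): applying a single Tokenizer across several columns;
// the column names and the input DataFrame `df` are hypothetical.
//
//   val adapter = new MultiColumnAdapter()
//     .setBaseTransformer(new org.apache.spark.ml.feature.Tokenizer())
//     .setInputCols("title,body")
//     .setOutputCols("titleTokens,bodyTokens")
//   val transformed = adapter.transform(df)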
| rastala/mmlspark | src/multi-column-adapter/src/main/scala/MultiColumnAdapter.scala | Scala | mit | 4,822 |
package colossus.protocols.http
import colossus.controller.Controller
import colossus.core.{IOSystem, InitContext, PipelineHandler, ServerContext}
import colossus.service._
class HttpServiceHandler(rh: RequestHandler) extends ServiceServer[Http](rh) {}
class Generator(context: InitContext) extends HandlerGenerator[RequestHandler](context) {
val DateHeader = new DateHeader
val ServerHeader = HttpHeader("Server", context.server.name.idString)
val defaultHeaders = HttpHeaders(DateHeader, ServerHeader)
def fullHandler =
requestHandler =>
new PipelineHandler(
new Controller(
new HttpServiceHandler(requestHandler),
new HttpServerCodec(defaultHeaders, requestHandler.config.maxRequestSize)
),
requestHandler
)
}
abstract class Initializer(ctx: InitContext) extends Generator(ctx) with ServiceInitializer[RequestHandler]
/**
* A RequestHandler contains the business logic for transforming [[HttpRequest]] into [[HttpResponse]] objects.
*/
abstract class RequestHandler(ctx: ServerContext, config: ServiceConfig) extends GenRequestHandler[Http](ctx, config) {
def this(ctx: ServerContext) = this(ctx, ServiceConfig.load(ctx.name))
val defaults = new Http.ServerDefaults
override def tagDecorator = new ReturnCodeTagDecorator
override def handleRequest(input: Http#Request): Callback[Http#Response] = {
val response = super.handleRequest(input)
if (!input.head.persistConnection) connection.disconnect()
response
}
def unhandledError = {
case error => defaults.errorResponse(error)
}
}
/**
* Entry point for starting a Http server
*/
object HttpServer extends ServiceDSL[RequestHandler, Initializer] {
def basicInitializer = initContext => new Generator(initContext)
def basic(name: String, port: Int)(handler: PartialFunction[HttpRequest, Callback[HttpResponse]])(
implicit io: IOSystem) = start(name, port) { initContext =>
new Initializer(initContext) {
def onConnect = serverContext => new RequestHandler(serverContext) { def handle = handler }
}
}
}
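// Usage sketch (illustrative): a minimal server started via the `basic` helper,
// assuming an implicit IOSystem is in scope; the response body is hypothetical.
//
//   HttpServer.basic("example-server", 9000) {
//     case request => Callback.successful(request.ok("Hello!"))
//   }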
| tumblr/colossus | colossus/src/main/scala/colossus/protocols/http/HttpServer.scala | Scala | apache-2.0 | 2,100 |
package blended.streams.jms
import java.util.UUID
import java.util.concurrent.Semaphore
import akka.actor.ActorSystem
import akka.stream._
import akka.stream.stage._
import blended.jms.utils.{JmsAckSession, JmsConsumerSession, JmsDestination}
import blended.streams.message._
import blended.streams.transaction.FlowHeaderConfig
import blended.util.logging.Logger
import javax.jms._
import scala.util.{Failure, Success}
class JmsSourceStage(
name : String,
settings: JMSConsumerSettings,
headerConfig : FlowHeaderConfig,
log : Logger = Logger[JmsSourceStage]
)(implicit actorSystem: ActorSystem) extends GraphStage[SourceShape[FlowEnvelope]] {
private val out = Outlet[FlowEnvelope](s"JmsSource($name.out)")
override def shape: SourceShape[FlowEnvelope] = SourceShape(out)
override protected def initialAttributes: Attributes =
ActorAttributes.dispatcher("FixedPool")
override def createLogic(inheritedAttributes: Attributes): GraphStageLogic = {
val logic : GraphStageLogic = new SourceStageLogic[JmsConsumerSession](shape, out, settings, inheritedAttributes) {
private val bufferSize = (settings.bufferSize + 1) * settings.sessionCount
private val backpressure = new Semaphore(bufferSize)
override private[jms] val handleError = getAsyncCallback[Throwable]{ ex =>
fail(out, ex)
}
private val dest : JmsDestination = jmsSettings.jmsDestination match {
case Some(d) => d
case None => throw new IllegalArgumentException("Destination must be defined for consumer")
}
      override protected def createSession(connection: Connection): JmsConsumerSession = {
val session = connection.createSession(false, AcknowledgeMode.AutoAcknowledge.mode)
new JmsConsumerSession(
connection = connection,
session = session,
sessionId = nextSessionId(),
jmsDestination = dest
)
}
override protected def pushMessage(msg: FlowEnvelope): Unit = {
log.trace("Pushing message downstream")
push(out, msg)
backpressure.release()
}
override protected def onSessionOpened(jmsSession: JmsConsumerSession): Unit = {
log.debug(s"Creating JMS consumer in [$id] for destination [$dest]")
jmsSession.createConsumer(settings.selector) match {
case Success(consumer) =>
try {
consumer.setMessageListener(new MessageListener {
override def onMessage(message: Message): Unit = {
backpressure.acquire()
// Use a Default Envelope that simply ignores calls to acknowledge if any
val flowMessage = JmsFlowSupport.jms2flowMessage(headerConfig)(jmsSettings)(message).get
log.debug(s"Message received for [${settings.jmsDestination.map(_.asString)}] [$id] : $flowMessage")
val envelopeId : String = flowMessage.header[String](headerConfig.headerTrans) match {
case None =>
val newId = UUID.randomUUID().toString()
log.debug(s"Created new envelope id [$newId]")
newId
case Some(s) =>
log.debug(s"Reusing transaction id [$s] as envelope id")
s
}
handleMessage.invoke(
FlowEnvelope(
flowMessage.withHeader(headerConfig.headerTrans, envelopeId).get, envelopeId
)
)
}
})
} catch {
case jmse : JMSException =>
log.warn(jmse)(s"Error setting up message listener [${settings.jmsDestination}] in [${jmsSession.sessionId}]")
closeSession(jmsSession)
}
case Failure(t) =>
log.warn(t)(s"Error setting up consumer [${settings.jmsDestination}] in [${jmsSession.sessionId}]")
closeSession(jmsSession)
}
}
}
logic
}
}
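// Wiring sketch (illustrative): running the stage as an Akka Streams source.
// `consumerSettings` and `headerCfg` are assumed to be configured elsewhere, and an
// implicit ActorSystem and materializer are expected to be in scope.
//
//   val envelopes = Source.fromGraph(new JmsSourceStage("in", consumerSettings, headerCfg))
//   envelopes.runWith(Sink.foreach(env => println(env)))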
| lefou/blended | blended.streams/src/main/scala/blended/streams/jms/JmsSourceStage.scala | Scala | apache-2.0 | 4,076 |
/*
* Copyright (c) 2015 Steven Soloff
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package io.github.ssoloff.polyhedra
import io.github.ssoloff.polyhedra.internal.{
ExpressionBaseVisitor => InternalExpressionBaseVisitor,
ExpressionLexer => InternalExpressionLexer,
ExpressionParser => InternalExpressionParser
}
import org.antlr.v4.runtime.{ANTLRInputStream, BaseErrorListener, CommonTokenStream}
import scala.util.{Failure, Success, Try}
/** Provides a set of method for parsing dice expressions.
*/
object ExpressionParser {
private[this] class ExpressionVisitor(context: Context) extends InternalExpressionBaseVisitor[Expression[_]] {
/** Creates a new die expression from the specified die literal.
*
* @param literal
* The die literal.
*
* @return A new die expression.
*/
private[this] def createDieExpression(literal: String): DieExpression = {
val sides = literal.tail match {
case "%" => 100 // scalastyle:ignore magic.number
case x => x.toInt
}
new DieExpression(context.bag.d(sides))
}
/** Creates a new function call expression.
*
* @param name
* The function name.
* @param argumentListExpressions
* The collection of expressions used as the function arguments.
*
* @return A new function call expression.
*/
private[this] def createFunctionCallExpression(name: String, argumentListExpressions: Seq[Expression[_]]): FunctionCallExpression[_, _] = {
val func = lookupFunction(name)
new FunctionCallExpression[Any, Any](name, func, argumentListExpressions)
}
/** Returns the name of the function to use for the specified roll
* modifier.
*
* @param rollModifierOperation
* The roll modifier operation: "+" for clone or "-" for drop.
* @param rollModifierDieType
* The roll modifier die type: "H" for highest rolls or "L" for lowest
* rolls.
*
* @return The name of the function to use for the specified roll
* modifier.
*/
private[this] def getRollModifierFunctionName(rollModifierOperation: String, rollModifierDieType: String): String = {
rollModifierOperation match {
case "+" => rollModifierDieType match {
case "H" => "cloneHighestRolls"
case "L" => "cloneLowestRolls"
}
case "-" => rollModifierDieType match {
case "H" => "dropHighestRolls"
case "L" => "dropLowestRolls"
}
}
}
/** Returns the function with the specified name.
*
* <p>
* This method first looks up the function in the expression parser
* context. If it does not exist, it then looks up the function in the
* collection of built-in functions. Otherwise, it throws an exception.
* </p>
*
* @param name
* The function name.
*
* @return The function.
*
* @throws java.lang.IllegalArgumentException
* If a function with the specified name does not exist.
*/
private[this] def lookupFunction(name: String): Seq[_] => _ =
context.functions.get(name)
.orElse(ExpressionFunctions(name))
.getOrElse(throw new IllegalArgumentException(s"unknown function '$name'"))
override def visitAddition(ctx: InternalExpressionParser.AdditionContext): Expression[Double] =
new AdditionExpression(
visit(ctx.additive_expression()).asInstanceOf[Expression[Double]],
visit(ctx.multiplicative_expression()).asInstanceOf[Expression[Double]]
)
override def visitArrayLiteral(ctx: InternalExpressionParser.ArrayLiteralContext): ArrayExpression[_] =
visit(ctx.expression_list()).asInstanceOf[ArrayExpression[_]]
override def visitDiceRollLiteral(ctx: InternalExpressionParser.DiceRollLiteralContext): Expression[_] = {
val literal = ctx.DICE_ROLL_LITERAL().getText()
      val pattern = """^(\d+)(d[\d%]+)(([-+])(\d*)([HL]))?$""".r
literal match {
case pattern(rollCount, dieLiteral, rollModifier, rollModifierOperation, rollModifierCountAsString, rollModifierDieType) => {
var rollExpression = createFunctionCallExpression("roll", List(
new ConstantExpression(rollCount.toDouble),
createDieExpression(dieLiteral)
))
val rollModifierCount = rollModifierCountAsString match {
case x: String if !x.isEmpty => x.toDouble
case _ => 1.0
}
if (rollModifier != null) { // scalastyle:ignore null
            val rollModifierFunctionName = getRollModifierFunctionName(rollModifierOperation, rollModifierDieType)
rollExpression = createFunctionCallExpression(rollModifierFunctionName, List(
rollExpression,
new ConstantExpression(rollModifierCount.toDouble)
))
}
createFunctionCallExpression("sum", List(rollExpression))
}
}
}
override def visitDieLiteral(ctx: InternalExpressionParser.DieLiteralContext): DieExpression = {
val literal = ctx.DIE_LITERAL().getText()
createDieExpression(literal)
}
override def visitDivision(ctx: InternalExpressionParser.DivisionContext): Expression[Double] =
new DivisionExpression(
visit(ctx.multiplicative_expression()).asInstanceOf[Expression[Double]],
visit(ctx.unary_expression()).asInstanceOf[Expression[Double]]
)
override def visitDivisionAndRound(ctx: InternalExpressionParser.DivisionAndRoundContext): Expression[_] =
createFunctionCallExpression("round", List(
new DivisionExpression(
visit(ctx.multiplicative_expression()).asInstanceOf[Expression[Double]],
visit(ctx.unary_expression()).asInstanceOf[Expression[Double]]
)
))
override def visitDivisionAndRoundDown(ctx: InternalExpressionParser.DivisionAndRoundDownContext): Expression[_] =
createFunctionCallExpression("floor", List(
new DivisionExpression(
visit(ctx.multiplicative_expression()).asInstanceOf[Expression[Double]],
visit(ctx.unary_expression()).asInstanceOf[Expression[Double]]
)
))
override def visitDivisionAndRoundTowardsZero(ctx: InternalExpressionParser.DivisionAndRoundTowardsZeroContext): Expression[_] =
createFunctionCallExpression("trunc", List(
new DivisionExpression(
visit(ctx.multiplicative_expression()).asInstanceOf[Expression[Double]],
visit(ctx.unary_expression()).asInstanceOf[Expression[Double]]
)
))
override def visitDivisionAndRoundUp(ctx: InternalExpressionParser.DivisionAndRoundUpContext): Expression[_] =
createFunctionCallExpression("ceil", List(
new DivisionExpression(
visit(ctx.multiplicative_expression()).asInstanceOf[Expression[Double]],
visit(ctx.unary_expression()).asInstanceOf[Expression[Double]]
)
))
override def visitEmptyExpressionList(ctx: InternalExpressionParser.EmptyExpressionListContext): ArrayExpression[_] =
new ArrayExpression(Nil)
override def visitFunctionCall(ctx: InternalExpressionParser.FunctionCallContext): Expression[_] = {
val name = ctx.IDENTIFIER().getText()
val argumentListExpressions = visit(ctx.expression_list()).asInstanceOf[ArrayExpression[_]].expressions
createFunctionCallExpression(name, argumentListExpressions)
}
override def visitGroup(ctx: InternalExpressionParser.GroupContext): Expression[_] =
new GroupExpression(visit(ctx.expression()))
override def visitIntegerLiteral(ctx: InternalExpressionParser.IntegerLiteralContext): Expression[Double] =
new ConstantExpression(ctx.INTEGER_LITERAL().getText().toDouble)
override def visitModulo(ctx: InternalExpressionParser.ModuloContext): Expression[Double] =
new ModuloExpression(
visit(ctx.multiplicative_expression()).asInstanceOf[Expression[Double]],
visit(ctx.unary_expression()).asInstanceOf[Expression[Double]]
)
override def visitMultiElementExpressionList(ctx: InternalExpressionParser.MultiElementExpressionListContext): ArrayExpression[_] = {
val front = visit(ctx.expression_list()).asInstanceOf[ArrayExpression[_]]
new ArrayExpression(front.expressions :+ visit(ctx.expression()))
}
override def visitMultiplication(ctx: InternalExpressionParser.MultiplicationContext): Expression[Double] =
new MultiplicationExpression(
visit(ctx.multiplicative_expression()).asInstanceOf[Expression[Double]],
visit(ctx.unary_expression()).asInstanceOf[Expression[Double]]
)
override def visitNegative(ctx: InternalExpressionParser.NegativeContext): Expression[Double] =
new NegativeExpression(visit(ctx.primary_expression()).asInstanceOf[Expression[Double]])
override def visitPositive(ctx: InternalExpressionParser.PositiveContext): Expression[Double] =
new PositiveExpression(visit(ctx.primary_expression()).asInstanceOf[Expression[Double]])
override def visitProgram(ctx: InternalExpressionParser.ProgramContext): Expression[_] =
visit(ctx.expression())
override def visitSingleElementExpressionList(ctx: InternalExpressionParser.SingleElementExpressionListContext): ArrayExpression[_] =
new ArrayExpression(List(visit(ctx.expression())))
override def visitSubtraction(ctx: InternalExpressionParser.SubtractionContext): Expression[Double] =
new SubtractionExpression(
visit(ctx.additive_expression()).asInstanceOf[Expression[Double]],
visit(ctx.multiplicative_expression()).asInstanceOf[Expression[Double]]
)
}
private[this] object ThrowingErrorListener extends BaseErrorListener {
import org.antlr.v4.runtime.{RecognitionException, Recognizer}
import org.antlr.v4.runtime.misc.ParseCancellationException
override def syntaxError(
recognizer: Recognizer[_, _],
offendingSymbol: Any,
line: Int,
charPositionInLine: Int,
msg: String,
e: RecognitionException): Unit =
throw new ParseCancellationException("line " + line + ":" + charPositionInLine + " " + msg)
}
/** The execution context for an expression parser.
*
* @constructor Creates a new expression parser context.
*
* @param bag
* The dice bag used by the parser whenever a die literal is encountered.
* @param functions
* A map used by the parser to lookup function implementations when a
* function call is encountered. The functions in this object override
* any function with the same name in the parser's default function list.
*/
final class Context(
val bag: Bag,
val functions: Map[String, Seq[_] => _]
)
/** The default expression parser context.
*
* <p>
* Uses a default dice bag and includes no additional function implementations.
* </p>
*/
final val DefaultContext = new Context(new Bag, Map())
/** Parses the specified dice expression text.
*
* @param source
* The dice expression text to parse.
* @param context
* The expression parser context. If not specified, uses the default
* context.
*
* @return The parsed expression if successful or an exception if the
* expression could not be parsed.
*/
def parse(
source: String,
context: Context = DefaultContext
): Try[Expression[_]] = {
try {
val input = new ANTLRInputStream(source)
val lexer = new InternalExpressionLexer(input)
lexer.removeErrorListeners()
lexer.addErrorListener(ThrowingErrorListener)
val tokens = new CommonTokenStream(lexer)
val parser = new InternalExpressionParser(tokens)
parser.removeErrorListeners()
parser.addErrorListener(ThrowingErrorListener)
val tree = parser.program()
val visitor = new ExpressionVisitor(context)
Success(visitor.visit(tree))
} catch {
case e: Exception => Failure(new IllegalArgumentException(s"invalid expression '$source'", e))
}
}
}
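// Usage sketch (illustrative): parsing a dice expression with the default context;
// assumes scala.util.{Success, Failure} are imported at the call site.
//
//   ExpressionParser.parse("3d6 + 2") match {
//     case Success(expression) => println(expression)
//     case Failure(e)          => println(s"invalid expression: ${e.getMessage}")
//   }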
| ssoloff/polyhedra-jvm | src/main/scala/io/github/ssoloff/polyhedra/ExpressionParser.scala | Scala | mit | 13,165 |
package lila.notify
import lila.db.dsl._
import org.joda.time.DateTime
private final class NotificationRepo(val coll: Coll) {
import BSONHandlers._
def insert(notification: Notification) = {
coll.insert(notification).void
}
def remove(notifies: Notification.Notifies, selector: Bdoc): Funit =
coll.remove(userNotificationsQuery(notifies) ++ selector).void
def markAllRead(notifies: Notification.Notifies): Funit = {
coll.update(unreadOnlyQuery(notifies), $set("read" -> true), multi = true).void
}
def unreadNotificationsCount(userId: Notification.Notifies): Fu[Int] = {
coll.count(unreadOnlyQuery(userId).some)
}
private val hasOld = $doc(
"read" -> false,
"createdAt" $gt DateTime.now.minusDays(3))
  private val hasUnread = $doc( // recent, even if already read
"createdAt" $gt DateTime.now.minusMinutes(10))
private val hasOldOrUnread =
$doc("$or" -> List(hasOld, hasUnread))
def hasRecentStudyInvitation(userId: Notification.Notifies, studyId: InvitedToStudy.StudyId): Fu[Boolean] =
coll.exists($doc(
"notifies" -> userId,
"content.type" -> "invitedStudy",
"content.studyId" -> studyId
) ++ hasOldOrUnread)
def hasRecentNotificationsInThread(userId: Notification.Notifies, topicId: MentionedInThread.TopicId): Fu[Boolean] =
coll.exists($doc(
"notifies" -> userId,
"content.type" -> "mention",
"content.topicId" -> topicId
) ++ hasOldOrUnread)
def hasRecentPrivateMessageFrom(userId: Notification.Notifies, thread: PrivateMessage.Thread): Fu[Boolean] =
coll.exists($doc(
"notifies" -> userId,
"content.type" -> "privateMessage",
"content.thread.id" -> thread.id
) ++ hasOld)
def hasRecentQaAnswer(userId: Notification.Notifies, question: QaAnswer.Question): Fu[Boolean] = {
coll.exists($doc(
"notifies" -> userId,
"content.type" -> "qaAnswer",
"content.questionId" -> question.id
) ++ hasOldOrUnread)
}
def exists(notifies: Notification.Notifies, selector: Bdoc): Fu[Boolean] =
coll.exists(userNotificationsQuery(notifies) ++ selector)
val recentSort = $sort desc "createdAt"
def userNotificationsQuery(userId: Notification.Notifies) = $doc("notifies" -> userId)
private def unreadOnlyQuery(userId: Notification.Notifies) = $doc("notifies" -> userId, "read" -> false)
}
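// Usage sketch (illustrative): typical calls from the notify layer; `repo` and
// `userId` are hypothetical.
//
//   repo.unreadNotificationsCount(userId) // Fu[Int]
//   repo.markAllRead(userId)              // Funit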
| clarkerubber/lila | modules/notify/src/main/NotificationRepo.scala | Scala | agpl-3.0 | 2,353 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.streaming.kafka
import java.io.File
import java.lang.{Integer => JInt}
import java.net.InetSocketAddress
import java.util.concurrent.TimeoutException
import java.util.{Map => JMap, Properties}
import scala.annotation.tailrec
import scala.language.postfixOps
import scala.util.control.NonFatal
import kafka.admin.AdminUtils
import kafka.api.Request
import kafka.producer.{KeyedMessage, Producer, ProducerConfig}
import kafka.serializer.StringEncoder
import kafka.server.{KafkaConfig, KafkaServer}
import kafka.utils.{ZKStringSerializer, ZkUtils}
import org.I0Itec.zkclient.ZkClient
import org.apache.zookeeper.server.{NIOServerCnxnFactory, ZooKeeperServer}
import org.apache.spark.streaming.Time
import org.apache.spark.util.Utils
import org.apache.spark.{Logging, SparkConf}
/**
 * This is a helper class for Kafka test suites. It has the functionality to set up
 * and tear down local Kafka servers, and to push data using Kafka producers.
 *
 * The reason to put the Kafka test utility class in src is to test Python related Kafka APIs.
 */
private[kafka] class KafkaTestUtils extends Logging {
  // Zookeeper related configurations
private val zkHost = "localhost"
private var zkPort: Int = 0
private val zkConnectionTimeout = 6000
private val zkSessionTimeout = 6000
private var zookeeper: EmbeddedZookeeper = _
private var zkClient: ZkClient = _
  // Kafka broker related configurations
private val brokerHost = "localhost"
private var brokerPort = 9092
private var brokerConf: KafkaConfig = _
  // Kafka broker server
private var server: KafkaServer = _
  // Kafka producer
private var producer: Producer[String, String] = _
  // Flag to test whether the system is correctly started
private var zkReady = false
private var brokerReady = false
def zkAddress: String = {
assert(zkReady, "Zookeeper not setup yet or already torn down, cannot get zookeeper address")
s"$zkHost:$zkPort"
}
def brokerAddress: String = {
assert(brokerReady, "Kafka not setup yet or already torn down, cannot get broker address")
s"$brokerHost:$brokerPort"
}
def zookeeperClient: ZkClient = {
assert(zkReady, "Zookeeper not setup yet or already torn down, cannot get zookeeper client")
Option(zkClient).getOrElse(
throw new IllegalStateException("Zookeeper client is not yet initialized"))
}
  // Set up the Embedded Zookeeper server and get the proper Zookeeper port
private def setupEmbeddedZookeeper(): Unit = {
// Zookeeper server startup
zookeeper = new EmbeddedZookeeper(s"$zkHost:$zkPort")
    // Get the actual zookeeper binding port
zkPort = zookeeper.actualPort
zkClient = new ZkClient(s"$zkHost:$zkPort", zkSessionTimeout, zkConnectionTimeout,
ZKStringSerializer)
zkReady = true
}
  // Set up the Embedded Kafka server
private def setupEmbeddedKafkaServer(): Unit = {
assert(zkReady, "Zookeeper should be set up beforehand")
    // Kafka broker startup
Utils.startServiceOnPort(brokerPort, port => {
brokerPort = port
brokerConf = new KafkaConfig(brokerConfiguration)
server = new KafkaServer(brokerConf)
server.startup()
(server, port)
}, new SparkConf(), "KafkaBroker")
brokerReady = true
}
  /** Set up all the embedded servers, including Zookeeper and the Kafka broker */
def setup(): Unit = {
setupEmbeddedZookeeper()
setupEmbeddedKafkaServer()
}
  /** Tear down all the servers, including the Kafka broker and Zookeeper */
def teardown(): Unit = {
brokerReady = false
zkReady = false
if (producer != null) {
producer.close()
producer = null
}
if (server != null) {
server.shutdown()
server = null
}
brokerConf.logDirs.foreach { f => Utils.deleteRecursively(new File(f)) }
if (zkClient != null) {
zkClient.close()
zkClient = null
}
if (zookeeper != null) {
zookeeper.shutdown()
zookeeper = null
}
}
  /** Create a Kafka topic and wait until it has propagated to the whole cluster */
def createTopic(topic: String): Unit = {
AdminUtils.createTopic(zkClient, topic, 1, 1)
    // wait until metadata is propagated
waitUntilMetadataIsPropagated(topic, 0)
}
  /** Java-friendly function for sending messages to the Kafka broker */
def sendMessages(topic: String, messageToFreq: JMap[String, JInt]): Unit = {
import scala.collection.JavaConversions._
sendMessages(topic, Map(messageToFreq.mapValues(_.intValue()).toSeq: _*))
}
  /** Send the messages to the Kafka broker */
def sendMessages(topic: String, messageToFreq: Map[String, Int]): Unit = {
val messages = messageToFreq.flatMap { case (s, freq) => Seq.fill(freq)(s) }.toArray
sendMessages(topic, messages)
}
  /** Send the array of messages to the Kafka broker */
def sendMessages(topic: String, messages: Array[String]): Unit = {
producer = new Producer[String, String](new ProducerConfig(producerConfiguration))
producer.send(messages.map { new KeyedMessage[String, String](topic, _ ) }: _*)
producer.close()
producer = null
}
private def brokerConfiguration: Properties = {
val props = new Properties()
props.put("broker.id", "0")
props.put("host.name", "localhost")
props.put("port", brokerPort.toString)
props.put("log.dir", Utils.createTempDir().getAbsolutePath)
props.put("zookeeper.connect", zkAddress)
props.put("log.flush.interval.messages", "1")
props.put("replica.socket.timeout.ms", "1500")
props
}
private def producerConfiguration: Properties = {
val props = new Properties()
props.put("metadata.broker.list", brokerAddress)
props.put("serializer.class", classOf[StringEncoder].getName)
    // wait for all in-sync replicas to ack sends
props.put("request.required.acks", "-1")
props
}
  // A simplified version of scalatest's eventually, rewritten here to avoid adding an
  // extra test dependency
def eventually[T](timeout: Time, interval: Time)(func: => T): T = {
def makeAttempt(): Either[Throwable, T] = {
try {
Right(func)
} catch {
case e if NonFatal(e) => Left(e)
}
}
val startTime = System.currentTimeMillis()
@tailrec
def tryAgain(attempt: Int): T = {
makeAttempt() match {
case Right(result) => result
case Left(e) =>
val duration = System.currentTimeMillis() - startTime
if (duration < timeout.milliseconds) {
Thread.sleep(interval.milliseconds)
} else {
throw new TimeoutException(e.getMessage)
}
tryAgain(attempt + 1)
}
}
tryAgain(1)
}
private def waitUntilMetadataIsPropagated(topic: String, partition: Int): Unit = {
def isPropagated = server.apis.metadataCache.getPartitionInfo(topic, partition) match {
case Some(partitionState) =>
val leaderAndInSyncReplicas = partitionState.leaderIsrAndControllerEpoch.leaderAndIsr
ZkUtils.getLeaderForPartition(zkClient, topic, partition).isDefined &&
Request.isValidBrokerId(leaderAndInSyncReplicas.leader) &&
leaderAndInSyncReplicas.isr.size >= 1
case _ =>
false
}
eventually(Time(10000), Time(100)) {
assert(isPropagated, s"Partition [$topic, $partition] metadata not propagated after timeout")
}
}
private class EmbeddedZookeeper(val zkConnect: String) {
val snapshotDir = Utils.createTempDir()
val logDir = Utils.createTempDir()
val zookeeper = new ZooKeeperServer(snapshotDir, logDir, 500)
val (ip, port) = {
val splits = zkConnect.split(":")
(splits(0), splits(1).toInt)
}
val factory = new NIOServerCnxnFactory()
factory.configure(new InetSocketAddress(ip, port), 16)
factory.startup(zookeeper)
val actualPort = factory.getLocalPort
def shutdown() {
factory.shutdown()
Utils.deleteRecursively(snapshotDir)
Utils.deleteRecursively(logDir)
}
}
}
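// Usage sketch (illustrative): typical lifecycle from a streaming test suite.
//
//   val kafkaTestUtils = new KafkaTestUtils
//   kafkaTestUtils.setup()
//   kafkaTestUtils.createTopic("topic")
//   kafkaTestUtils.sendMessages("topic", Array("a", "b", "c"))
//   // ... run the streaming assertions, then:
//   kafkaTestUtils.teardown()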
| tophua/spark1.52 | external/kafka/src/main/scala/org/apache/spark/streaming/kafka/KafkaTestUtils.scala | Scala | apache-2.0 | 9,859 |
package com.dadrox.sbt.junit
/** Captures output written to scala.Console's out and err streams, e.g. for test assertions. */
class Capture {
import java.io.ByteArrayOutputStream
import java.io.PrintStream
val scalaout = scala.Console.out
val scalaerr = scala.Console.err
val serr = new ByteArrayOutputStream
val sout = new ByteArrayOutputStream
    /** Redirect Console out and err into in-memory buffers. */
    def start = {
scala.Console.setErr(new PrintStream(serr, true))
scala.Console.setOut(new PrintStream(sout, true))
this
}
    /** Return the (stdout, stderr) captured so far and reset the buffers. */
    def take = {
val result = (new String(sout.toByteArray()), new String(serr.toByteArray()))
serr.reset()
sout.reset()
result
}
    /** Restore the original Console streams. */
    def stop = {
scala.Console.setErr(scalaerr)
scala.Console.setOut(scalaout)
}
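    // Usage sketch (illustrative): capture console output around a block.
    //   val capture = new Capture().start
    //   println("hello")
    //   val (out, err) = capture.take // ("hello\n", "")
    //   capture.stop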
} | dadrox/sbt-junit | src/main/scala.bak/com/dadrox/sbt/junit/Capture.scala | Scala | bsd-2-clause | 700 |
/*
* Copyright (C) 2017 The Proteus Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.proteus.peach.redis.server
import com.proteus.peach.client.PeachAkkaClient
import com.proteus.peach.client.PeachClient
import com.proteus.peach.client.PeachClientValidator
import org.junit.BeforeClass
object RedisPeachServerAppIT {
  /**
   * Start the server application once, before the suite runs.
   */
@BeforeClass
def beforeAll(): Unit = {
RedisPeachServerApp.main(Array())
}
}
class RedisPeachServerAppIT extends PeachClientValidator {
/**
* Client cache to test.
*/
override val clientCache: PeachClient = PeachAkkaClient()
}
| aagea/peach | peach-redis-server/src/test/scala/com/proteus/peach/redis/server/RedisPeachServerAppIT.scala | Scala | apache-2.0 | 1,135 |
package org.bitcoins.core.script.constant
import org.bitcoins.core.util.BytesUtil
import org.bitcoins.testkitcore.util.BitcoinSUnitTest
/** Created by chris on 6/5/16.
*/
class ScriptNumberUtilTest extends BitcoinSUnitTest {
"ScriptNumberUtil" must "convert a positive hex number to its corresponding long number" in {
val hex = "01"
val long = ScriptNumberUtil.toLong(hex)
long must be(1)
//127
val hex1 = "7f"
val long1 = ScriptNumberUtil.toLong(hex1)
long1 must be(127)
//128
val hex2 = "8000"
val long2 = ScriptNumberUtil.toLong(hex2)
long2 must be(128)
//32767
val hex3 = "ff7f"
val long3 = ScriptNumberUtil.toLong(hex3)
long3 must be(32767)
//32768
val hex4 = "008000"
val long4 = ScriptNumberUtil.toLong(hex4)
long4 must be(32768)
//20
val hex5 = "14"
val long5 = ScriptNumberUtil.toLong(hex5)
long5 must be(20)
//0
val hex6 = "00"
val long6 = ScriptNumberUtil.toLong(hex6)
long6 must be(0)
}
it must "convert a negative hex number to its corresponding long number" in {
//-1
val hex = "81"
val long = ScriptNumberUtil.toLong(hex)
long must be(-1)
//-127
val hex1 = "ff"
val long1 = ScriptNumberUtil.toLong(hex1)
long1 must be(-127)
//-128
val hex2 = "8080"
val long2 = ScriptNumberUtil.toLong(hex2)
long2 must be(-128)
//-32767
val hex3 = "ffff"
val long3 = ScriptNumberUtil.toLong(hex3)
long3 must be(-32767)
//-32768
val hex4 = "008080"
val long4 = ScriptNumberUtil.toLong(hex4)
long4 must be(-32768)
}
it must "determine if a hex string is a positive number" in {
val hex = "01"
val hexIsPositive = ScriptNumberUtil.isPositive(hex)
hexIsPositive must be(true)
//128
val hex1 = "8000"
val hexIsPositive1 = ScriptNumberUtil.isPositive(hex1)
hexIsPositive1 must be(true)
val hex2 = "ff7f"
val hexIsPositive2 = ScriptNumberUtil.isPositive(hex2)
hexIsPositive2 must be(true)
}
it must "determine if a hex string is a negative number" in {
//-1
val hex = "81"
val hexIsNegative = ScriptNumberUtil.isNegative(hex)
hexIsNegative must be(true)
//-128
val hex1 = "8080"
val hexIsNegative1 = ScriptNumberUtil.isNegative(hex1)
hexIsNegative1 must be(true)
//-32767
val hex2 = "ffff"
val hexIsNegative2 = ScriptNumberUtil.isNegative(hex2)
hexIsNegative2 must be(true)
//must also work for bytes
ScriptNumberUtil.isNegative(BytesUtil.decodeHex(hex2)) must be(true)
}
it must "change a sign bit from negative to positive" in {
val hex = "ff"
val expectedHex = "7f"
BytesUtil.encodeHex(
ScriptNumberUtil
.changeSignBitToPositive(BytesUtil.decodeHex(hex))) must be(expectedHex)
//-32767
val hex1 = "ffff"
val expectedHex1 = "7fff"
BytesUtil.encodeHex(ScriptNumberUtil.changeSignBitToPositive(hex1)) must be(
expectedHex1)
}
it must "change a sign bit from positive to negative" in {
val hex = "01"
val expectedHex = "81"
BytesUtil.encodeHex(ScriptNumberUtil.changeSignBitToNegative(hex)) must be(
expectedHex)
//32767
val hex1 = "7fff"
val expectedHex1 = "ffff"
BytesUtil.encodeHex(ScriptNumberUtil.changeSignBitToNegative(hex1)) must be(
expectedHex1)
//128
val hex2 = "8000"
val expectedHex2 = "8000"
BytesUtil.encodeHex(ScriptNumberUtil.changeSignBitToNegative(hex2)) must be(
expectedHex2)
}
it must "detect if the last two bytes are all zeros" in {
val hex = "00"
ScriptNumberUtil.firstByteAllZeros(hex) must be(true)
val hex1 = "8001"
ScriptNumberUtil.firstByteAllZeros(hex1) must be(false)
val hex2 = "80"
ScriptNumberUtil.firstByteAllZeros(hex2) must be(false)
}
it must "serialize negative numbers to the correct hex value" in {
val hex = ScriptNumberUtil.longToHex(-1)
val expectedHex = "81"
hex must be(expectedHex)
val hex1 = ScriptNumberUtil.longToHex(-127)
val expectedHex1 = "ff"
hex1 must be(expectedHex1)
val hex2 = ScriptNumberUtil.longToHex(-128)
val expectedHex2 = "8080"
hex2 must be(expectedHex2)
val hex3 = ScriptNumberUtil.longToHex(-32767)
val expectedHex3 = "ffff"
hex3 must be(expectedHex3)
val hex4 = ScriptNumberUtil.longToHex(-32768)
val expectedHex4 = "008080"
hex4 must be(expectedHex4)
}
it must "serialize a positive number to the correct hex value" in {
val hex = ScriptNumberUtil.longToHex(0L)
val expectedHex = ""
hex must be(expectedHex)
val hex1 = ScriptNumberUtil.longToHex(1)
val expectedHex1 = "01"
hex1 must be(expectedHex1)
val hex2 = ScriptNumberUtil.longToHex(127)
val expectedHex2 = "7f"
hex2 must be(expectedHex2)
val hex3 = ScriptNumberUtil.longToHex(128)
val expectedHex3 = "8000"
hex3 must be(expectedHex3)
val hex4 = ScriptNumberUtil.longToHex(32767)
val expectedHex4 = "ff7f"
hex4 must be(expectedHex4)
val hex5 = ScriptNumberUtil.longToHex(32768)
val expectedHex5 = "008000"
hex5 must be(expectedHex5)
}
it must "convert a sequence of bytes to a max value for int" in {
val max = Int.MaxValue
ScriptNumberUtil.toInt("FFFFFF7F") must be(max)
}
it must "convert a sequence of bytes to the min value for an int" in {
val min = Int.MinValue + 1
    // the minimum number we can represent in ScriptNumbers
    // is Int.MinValue + 1, since we have both a negative zero and a zero
ScriptNumberUtil.toInt("ffffffff") must be(min)
}
it must "throw an exception when we try and convert a 33 bit sequence to an int" in {
intercept[IllegalArgumentException] {
ScriptNumberUtil.toInt("FFFFFF7F00")
}
}
}
| bitcoin-s/bitcoin-s | core-test/src/test/scala/org/bitcoins/core/script/constant/ScriptNumberUtilTest.scala | Scala | mit | 5,808 |
package org.rebeam.tree.view.list
import chandu0101.scalajs.react.components.materialui._
import io.circe.Encoder
import japgolly.scalajs.react._
import japgolly.scalajs.react.vdom.VdomNode
import japgolly.scalajs.react.vdom.html_<^._
import org.rebeam.tree.Searchable
import org.rebeam.tree.ref._
import org.rebeam.tree.sync._
import org.rebeam.tree.view.View._
import org.rebeam.tree.view._
import org.rebeam.tree.view.infinite.Infinite
import org.rebeam.tree.view.measure.CursorHeightView
import org.rebeam.tree.view.pages.Pages
import org.rebeam.tree.view.transition._
import scala.scalajs.js
//object ListView {
//
// /**
// * Create a component viewing a list
// *
// * @param name The name of the component
// * @param toItem Function from an item to a finder, used for CursorP.zoomAllMatchesP
// * @param itemToKey Maps items to their react keys
// * @param itemView Component viewing items
// * @param subheader The subheader text to display in list
// * @param fEncoder Encoder for finder
// * @tparam A The type of list element
// * @tparam C The type of current page
// * @tparam P The type of all pages
// * @tparam F The type of finder used to find items
// * @return A view of the list, with infinite scrolling, suitable for use in a SortableContainer
// */
// def legacy[A, C, P, F <: A => Boolean](
// name: String,
// toItem: A => F,
// itemToKey: A => js.Any,
// itemView: Cursor[A, Pages[C, P]] => VdomElement,
// subheader: String,
// mode: ListMode = ListMode.Infinite
// )(implicit
// fEncoder: Encoder[F],
// s: Searchable[A, Guid]
// ): (IndexChange => Callback) => (Cursor[List[A], Pages[C, P]]) => JsComponent.Unmounted[js.Object, Null] = {
// ListView[List[A], Pages[C, P], Cursor[A, Pages[C, P]]](
// name,
// _.zoomAllMatches(toItem),
// c => itemToKey(c.model),
// itemView,
// subheader,
// mode
// )
// }
//
// /**
// * Create a list view
// * @param name Name of view
// * @param listCursorToItems Take a cursor with root model (type R) and location as a page (type P) and yield a list
// * of cursors to the list items (type A) and locations as actions on them (type Q). Note we
// * are using the location parameter as an action to perform on list items.
// * @param itemToKey Get a key for given list item
// * @param itemView Get a view for Cursor[A, Q], which displays list items of type A, allowing actions to
// * be performed using location of type Q.
// * @param subheader Subheader text for the list
// * @param mode Mode for list display
// * @tparam R The type of root model - some data type from which we can get to a list of items - doesn't
// * need to actually be a list
// * @tparam P The type of location for the root cursor - normally some kind of "page" location
// * @tparam A The type of list items
// * @tparam Q The type of location for list items - often some kind of action that can be performed
// * on list items, but can be an actual location.
// * @return A List view
// */
// def withAction[R, P, A, Q, CT[-p, +u] <: CtorType[p, u], U](
// name: String,
// listCursorToItems: Cursor[R, P] => List[Cursor[A, Q]],
// itemToKey: A => js.Any,
// itemView: GenericComponent[Cursor[A, Q], CT, U],
// subheader: String,
// mode: ListMode = ListMode.Infinite
// ): ((IndexChange) => Callback) => (Cursor[R, P]) => JsComponent.Unmounted[js.Object, Null] = {
// ListView[R, P, Cursor[A, Q]](
// name,
// listCursorToItems,
// c => itemToKey(c.model),
// itemView,
// subheader,
// mode
// )
// }
//
// def usingRef[R, P, A, Q](
// name: String,
// rootToItemRefs: Cursor[R, P] => Cursor[List[Ref[A]], P],
// itemAndCursorToAction: (A, Cursor[R, P]) => Q,
// itemView: Cursor[A, Q] => VdomElement,
// subheader: String,
// mode: ListMode = ListMode.Infinite
// )(implicit
// fEncoder: Encoder[FindRefById[A]],
// mCodec: MirrorCodec[A],
// toId: Identifiable[A],
// s: Searchable[A, Guid]
// ): ((IndexChange) => Callback) => (Cursor[R, P]) => JsComponent.Unmounted[js.Object, Null] = {
//
// ListView.withAction[R, P, A, Q](
// name,
// (cp: Cursor[R, P]) =>
// rootToItemRefs(cp)
// .zoomAllMatches(a => FindRefById(a.id))
// .flatMap(cursorToRef => cursorToRef.followRef(cursorToRef.model))
// .map(ca => ca.move(itemAndCursorToAction(ca.model, cp))),
// a => toId.id(a).toString(),
// itemView,
// subheader,
// mode
// )
// }
//
// def usingMatches[R, P, A, Q, F <: A => Boolean](
// name: String,
// rootToItems: Cursor[R, P] => Cursor[List[A], P],
// itemToFinder: A => F,
// itemAndCursorToAction: (A, Cursor[R, P]) => Q,
// itemToKey: A => js.Any,
// itemView: Cursor[A, Q] => VdomElement,
// subheader: String,
// mode: ListMode = ListMode.Infinite
// )(implicit
// fEncoder: Encoder[F],
// s: Searchable[A, Guid]
// ): ((IndexChange) => Callback) => (Cursor[R, P]) => JsComponent.Unmounted[js.Object, Null] = {
// ListView.withAction[R, P, A, Q](
// name = name,
// listCursorToItems = (cp: Cursor[R, P]) => rootToItems(cp).zoomAllMatches(itemToFinder).map(ca => ca.move(itemAndCursorToAction(ca.model, cp))),
// itemToKey = c => itemToKey(c),
// itemView = itemView,
// subheader = subheader,
// mode = mode
// )
// }
//
// def usingId[R, P, A <: Identified[A], Q](
// name: String,
// rootToItems: Cursor[R, P] => Cursor[List[A], P],
// itemAndCursorToAction: (A, Cursor[R, P]) => Q,
// itemView: Cursor[A, Q] => VdomElement,
// subheader: String,
// mode: ListMode = ListMode.Infinite
// )(implicit
// fEncoder: Encoder[FindById[A]],
// s: Searchable[A, Guid]
// ): ((IndexChange) => Callback) => (Cursor[R, P]) => JsComponent.Unmounted[js.Object, Null] = {
// ListView.usingMatches[R, P, A, Q, FindById[A]](
// name = name,
// rootToItems = rootToItems,
// itemToFinder = a => FindById[A](a.id),
// itemAndCursorToAction = itemAndCursorToAction,
// itemToKey = a => a.id.toString(),
// itemView = itemView,
// subheader = subheader,
// mode = mode
// )
// }
//
// sealed trait ListMode
// object ListMode {
// case object Infinite extends ListMode
// case object Finite extends ListMode
// }
//
// /**
// * Create a component viewing a list
// * @param name The name of the component
// * @param listCursorToItems Produce a list of elements from the model
// * @param itemToKey Produce a key for each element
// * @param itemView A view for an element
// * @param subheader The subheader text to display in list
// * @tparam L The type of the list-like model
// * @tparam P The type P for CursorP
// * @tparam A The type of list element
// * @return A view of the list, with infinite scrolling, suitable for use in a SortableContainer
// */
// def apply[L, P, A](
// name: String,
// listCursorToItems: Cursor[L, P] => List[A],
// itemToKey: A => js.Any,
// itemView: GenericComponent[A, CtorType.Props, _],
// subheader: String,
// mode: ListMode = ListMode.Infinite
// ): ((IndexChange) => Callback) => (Cursor[L, P]) => JsComponent.Unmounted[js.Object, Null] = {
// val sortableElement = SortableElement.wrap(itemView)
//
// mode match {
// // Wrap in an Infinite for performance on long lists
// case ListMode.Infinite =>
// // Use a height view to get us the height of the rendered element as an extra part of prop.
// // This lets us scale the Infinite list appropriately to fill space.
// val view = CursorHeightView[L, P](name) {
// (cp, height) =>
// val h: Int = height.map(_.toInt).getOrElse(60)
//
// // We need to apply a style by class to get the Infinite to be 100% height rather than the
// // "height: containerHeight" inline style it sets on itself. This allows it to resize to fill
// // available space, then be measured by Measure, which adjusts the containerHeight. This is
// // neater than wrapping in a "height: 100%" div, and also works with react-sortable-hoc, which
// // expects the top level component to be the one containing the sortable elements. Using a div
// // breaks this and so breaks the nice feature where dragging to container edge starts scrolling.
// Infinite(elementHeight = 60, containerHeight = h, className = "tree-infinite--height-100-percent")(
// MuiSubheader(
// inset = true,
// style = js.Dynamic.literal(
// "height" -> "60px",
// "padding-top" -> "8px"
// )
// )(subheader: VdomNode) :: listCursorToItems(cp).zipWithIndex.map {
// case (a, index) => sortableElement(SortableElement.Props(key = itemToKey(a), index = index))(a) : VdomElement
// }
// )
// }
// val sortableView = SortableContainer.wrap(view)
// (onIndexChange: IndexChange => Callback) => sortableView(p(onIndexChange))
//
// // Don't wrap with infinite, therefore doesn't need a height view.
// // For this case we can also provide enter/leave transitions.
// case ListMode.Finite =>
// val view = cursorView[L, P](name) {
// cp =>
// CSSTransitionGroup(
// "tree-list-view--transition",
// enterTimeout = 250, // Animation should take 225ms
// leaveTimeout = 220, // Animation should take 195ms
// component = "div",
// className = "tree-list-view__transition-container"
// )(
// (MuiSubheader(
// inset = true,
// style = js.Dynamic.literal(
// "height" -> "60px",
// "padding-top" -> "8px"
// )
// )(subheader: VdomNode):VdomNode) :: listCursorToItems(cp).zipWithIndex.map {
// case (a, index) => sortableElement(SortableElement.Props(key = itemToKey(a), index = index))(a) : VdomNode
// } : _*
// )
// }
// val sortableView = SortableContainer.wrap(view)
// (onIndexChange: IndexChange => Callback) => sortableView(p(onIndexChange))
// }
// }
//
// def p(onSortEnd: IndexChange => Callback = p => Callback{}): SortableContainer.Props =
// SortableContainer.Props(
// onSortEnd = onSortEnd,
// useDragHandle = true,
// helperClass = "react-sortable-handler"
// )
//
//}
object SortableListView {
/**
* Create a component viewing a list
*
* @param name The name of the component
* @param toItem Function from an item to a finder, used for CursorP.zoomAllMatchesP
* @param itemToKey Maps items to their react keys
* @param itemView Component viewing items
* @param subheader The subheader text to display in list
* @param fEncoder Encoder for finder
* @tparam A The type of list element
* @tparam C The type of current page
* @tparam P The type of all pages
* @tparam F The type of finder used to find items
* @return A view of the list, with infinite scrolling, suitable for use in a SortableContainer
*/
def legacy[A, C, P, F <: A => Boolean](
name: String,
toItem: A => F,
itemToKey: A => Key,
itemView: GenericComponent[Cursor[A, Pages[C, P]], CtorType.Props, _],
subheader: String,
mode: ListMode = ListMode.Infinite
)(implicit
fEncoder: Encoder[F],
s: Searchable[A, Guid]
): (IndexChange => Callback) => (Cursor[List[A], Pages[C, P]]) => JsComponent.Unmounted[js.Object, Null] = {
SortableListView[List[A], Pages[C, P], Cursor[A, Pages[C, P]]](
name,
_.zoomAllMatches(toItem),
c => itemToKey(c.model),
itemView,
subheader,
mode
)
}
/**
* Create a list view
* @param name Name of view
* @param listCursorToItems Take a cursor with root model (type R) and location as a page (type P) and yield a list
* of cursors to the list items (type A) and locations as actions on them (type Q). Note we
* are using the location parameter as an action to perform on list items.
* @param itemToKey Get a key for given list item
* @param itemView Get a view for Cursor[A, Q], which displays list items of type A, allowing actions to
* be performed using location of type Q.
* @param subheader Subheader text for the list
* @param mode Mode for list display
* @tparam R The type of root model - some data type from which we can get to a list of items - doesn't
* need to actually be a list
* @tparam P The type of location for the root cursor - normally some kind of "page" location
* @tparam A The type of list items
* @tparam Q The type of location for list items - often some kind of action that can be performed
* on list items, but can be an actual location.
* @return A List view
*/
def withAction[R, P, A, Q](
name: String,
listCursorToItems: Cursor[R, P] => List[Cursor[A, Q]],
itemToKey: A => Key,
itemView: GenericComponent[Cursor[A, Q], CtorType.Props, _],
subheader: String,
mode: ListMode = ListMode.Infinite
): ((IndexChange) => Callback) => (Cursor[R, P]) => JsComponent.Unmounted[js.Object, Null] = {
SortableListView[R, P, Cursor[A, Q]](
name,
listCursorToItems,
c => itemToKey(c.model),
itemView,
subheader,
mode
)
}
def usingMatches[R, P, A, Q, F <: A => Boolean](
name: String,
rootToItems: Cursor[R, P] => Cursor[List[A], P],
itemToFinder: A => F,
itemAndCursorToAction: (A, Cursor[R, P]) => Q,
itemToKey: A => Key,
itemView: GenericComponent[Cursor[A, Q], CtorType.Props, _],
subheader: String,
mode: ListMode = ListMode.Infinite
)(implicit
fEncoder: Encoder[F],
s: Searchable[A, Guid]
): ((IndexChange) => Callback) => (Cursor[R, P]) => JsComponent.Unmounted[js.Object, Null] = {
SortableListView.withAction[R, P, A, Q](
name = name,
listCursorToItems = (cp: Cursor[R, P]) => rootToItems(cp).zoomAllMatches(itemToFinder).map(ca => ca.move(itemAndCursorToAction(ca.model, cp))),
itemToKey = c => itemToKey(c),
itemView = itemView,
subheader = subheader,
mode = mode
)
}
def usingId[R, P, A <: Identified[A], Q](
name: String,
rootToItems: Cursor[R, P] => Cursor[List[A], P],
itemAndCursorToAction: (A, Cursor[R, P]) => Q,
itemView: GenericComponent[Cursor[A, Q], CtorType.Props, _],
subheader: String,
mode: ListMode = ListMode.Infinite
)(implicit
s: Searchable[A, Guid]
): ((IndexChange) => Callback) => (Cursor[R, P]) => JsComponent.Unmounted[js.Object, Null] =
SortableListView.withAction[R, P, A, Q](
name = name,
listCursorToItems = (cp: Cursor[R, P]) => rootToItems(cp).zoomAllIds.map(ca => ca.move(itemAndCursorToAction(ca.model, cp))),
itemToKey = a => a.id.toString(),
itemView = itemView,
subheader = subheader,
mode = mode
)
def usingRef[R, P, A, Q](
name: String,
rootToItemRefs: Cursor[R, P] => Cursor[List[Ref[A]], P],
itemAndCursorToAction: (A, Cursor[R, P]) => Q,
itemView: GenericComponent[Cursor[A, Q], CtorType.Props, _],
subheader: String,
mode: ListMode = ListMode.Infinite
)(implicit
mCodec: MirrorCodec[A],
    // TODO use the id from the ref instead - then we won't need this Identifiable
toId: Identifiable[A],
s: Searchable[A, Guid]
): ((IndexChange) => Callback) => (Cursor[R, P]) => JsComponent.Unmounted[js.Object, Null] = {
SortableListView.withAction[R, P, A, Q](
name = name,
listCursorToItems = (cp: Cursor[R, P]) => rootToItemRefs(cp)
.zoomAllRefsById
.flatMap(cursorToRef => cursorToRef.followRef(cursorToRef.model))
.map(ca => ca.move(itemAndCursorToAction(ca.model, cp))),
itemToKey = a => toId.id(a).toString,
itemView = itemView,
subheader = subheader,
mode = mode
)
}
sealed trait ListMode
object ListMode {
case object Infinite extends ListMode
case object Finite extends ListMode
}
/**
* Create a component viewing a list
* @param name The name of the component
* @param listCursorToItems Produce a list of elements from the model
* @param itemToKey Produce a key for each element
* @param itemView A view for an element
* @param subheader The subheader text to display in list
* @tparam L The type of the list-like model
* @tparam P The type P for CursorP
* @tparam A The type of list element
* @return A view of the list, with infinite scrolling, suitable for use in a SortableContainer
*/
def apply[L, P, A](
name: String,
listCursorToItems: Cursor[L, P] => List[A],
itemToKey: A => Key,
itemView: GenericComponent[A, CtorType.Props, _],
subheader: String,
mode: ListMode = ListMode.Infinite
): ((IndexChange) => Callback) => (Cursor[L, P]) => JsComponent.Unmounted[js.Object, Null] = {
val sortableElement = SortableElement.wrap(itemView)
mode match {
// Wrap in an Infinite for performance on long lists
case ListMode.Infinite =>
// Use a height view to get us the height of the rendered element as an extra part of prop.
// This lets us scale the Infinite list appropriately to fill space.
val view = CursorHeightView[L, P](name) {
(cp, height) =>
// Note we provide a minimum height of 1 since Infinite detects
// a height of 0 as a missing property!
val h: Int = Math.max(1, height.map(_.toInt).getOrElse(300))
// We need to apply a style by class to get the Infinite to be 100% height rather than the
// "height: containerHeight" inline style it sets on itself. This allows it to resize to fill
// available space, then be measured by Measure, which adjusts the containerHeight. This is
// neater than wrapping in a "height: 100%" div, and also works with react-sortable-hoc, which
// expects the top level component to be the one containing the sortable elements. Using a div
// breaks this and so breaks the nice feature where dragging to container edge starts scrolling.
Infinite(elementHeight = 60, containerHeight = h, className = "tree-infinite--height-100-percent")(
MuiSubheader(
inset = true,
style = js.Dynamic.literal(
"height" -> "60px",
"padding-top" -> "8px"
)
// )(subheader: VdomNode) :: listCursorToItems(cp).map {
// a => <.div(^.className := "tree-list-view__item", ^.key := itemToKey(a), itemView(a)): VdomElement
// }
)(subheader: VdomNode) :: listCursorToItems(cp).zipWithIndex.map {
case (a, index) => sortableElement(SortableElement.Props(key = itemToKey(a), index = index))(a) : VdomElement
}
)
}
// (onIndexChange: IndexChange => Callback) => view(_)
val sortableView = SortableContainer.wrap(view)
(onIndexChange: IndexChange => Callback) => sortableView(p(onIndexChange))
// Don't wrap with infinite, therefore doesn't need a height view.
// For this case we can also provide enter/leave transitions.
case ListMode.Finite =>
val view = cursorView[L, P](name) {
cp =>
CSSTransitionGroup(
"tree-list-view--transition",
enterTimeout = 250, // Animation should take 225ms
leaveTimeout = 220, // Animation should take 195ms
component = "div",
className = "tree-list-view__transition-container"
)(
(MuiSubheader(
inset = true,
style = js.Dynamic.literal(
"height" -> "60px",
"padding-top" -> "8px"
)
// )(subheader: VdomNode):VdomNode) :: listCursorToItems(cp).map {
// a => <.div(^.className := "tree-list-view__item", ^.key := itemToKey(a), itemView(a)): VdomElement
// } : _*
)(subheader: VdomNode):VdomNode) :: listCursorToItems(cp).zipWithIndex.map {
case (a, index) => sortableElement(SortableElement.Props(key = itemToKey(a), index = index))(a) : VdomNode
} : _*
)
}
// (onIndexChange: IndexChange => Callback) => view(_)
val sortableView = SortableContainer.wrap(view)
(onIndexChange: IndexChange => Callback) => sortableView(p(onIndexChange))
}
}
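  // Hypothetical usage sketch for `apply` above. The names `TodoItem`,
  // `todoItemView`, `reorderCallback` and `rootCursor` are invented for
  // illustration only and are not part of this library:
  //
  //   val listFactory = SortableListView[List[TodoItem], Unit, TodoItem](
  //     name = "TodoList",
  //     listCursorToItems = _.model,
  //     itemToKey = item => item.id.toString,
  //     itemView = todoItemView,
  //     subheader = "Todos",
  //     mode = ListMode.Finite
  //   )
  //   val unmounted = listFactory(reorderCallback)(rootCursor)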
def p(onSortEnd: IndexChange => Callback = p => Callback{}): SortableContainer.Props =
SortableContainer.Props(
onSortEnd = onSortEnd,
useDragHandle = true,
helperClass = "react-sortable-handler"
)
}
| trepidacious/tree-material-ui | js/src/main/scala/org/rebeam/tree/view/list/SortableListView.scala | Scala | gpl-3.0 | 22,548 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.ml.optim.aggregator
import org.apache.spark.broadcast.Broadcast
import org.apache.spark.ml.feature._
import org.apache.spark.ml.linalg._
/**
 * AFTAggregator computes the gradient and loss for an AFT loss function,
* as used in AFT survival regression for samples in sparse or dense vector in an online fashion.
*
* The loss function and likelihood function under the AFT model based on:
* Lawless, J. F., Statistical Models and Methods for Lifetime Data,
* New York: John Wiley & Sons, Inc. 2003.
*
 * Two AFTAggregators can be merged together to produce a summary of the loss and gradient
 * of the corresponding joint dataset.
*
* Given the values of the covariates $x^{'}$, for random lifetime $t_{i}$ of subjects i = 1,..,n,
* with possible right-censoring, the likelihood function under the AFT model is given as
*
* <blockquote>
* $$
* L(\\beta,\\sigma)=\\prod_{i=1}^n[\\frac{1}{\\sigma}f_{0}
* (\\frac{\\log{t_{i}}-x^{'}\\beta}{\\sigma})]^{\\delta_{i}}S_{0}
* (\\frac{\\log{t_{i}}-x^{'}\\beta}{\\sigma})^{1-\\delta_{i}}
* $$
* </blockquote>
*
 * where $\\delta_{i}$ indicates whether the event has occurred, i.e. whether the observation
 * is uncensored.
* Using $\\epsilon_{i}=\\frac{\\log{t_{i}}-x^{'}\\beta}{\\sigma}$, the log-likelihood function
* assumes the form
*
* <blockquote>
* $$
* \\iota(\\beta,\\sigma)=\\sum_{i=1}^{n}[-\\delta_{i}\\log\\sigma+
* \\delta_{i}\\log{f_{0}}(\\epsilon_{i})+(1-\\delta_{i})\\log{S_{0}(\\epsilon_{i})}]
* $$
* </blockquote>
 * where $S_{0}(\\epsilon_{i})$ is the baseline survivor function,
 * and $f_{0}(\\epsilon_{i})$ is the corresponding density function.
*
* The most commonly used log-linear survival regression method is based on the Weibull
 * distribution of the survival time. The Weibull distribution of the lifetime corresponds
 * to the extreme value distribution of the log of the lifetime,
* and the $S_{0}(\\epsilon)$ function is
*
* <blockquote>
* $$
* S_{0}(\\epsilon_{i})=\\exp(-e^{\\epsilon_{i}})
* $$
* </blockquote>
*
* and the $f_{0}(\\epsilon_{i})$ function is
*
* <blockquote>
* $$
* f_{0}(\\epsilon_{i})=e^{\\epsilon_{i}}\\exp(-e^{\\epsilon_{i}})
* $$
* </blockquote>
*
* The log-likelihood function for Weibull distribution of lifetime is
*
* <blockquote>
* $$
* \\iota(\\beta,\\sigma)=
* -\\sum_{i=1}^n[\\delta_{i}\\log\\sigma-\\delta_{i}\\epsilon_{i}+e^{\\epsilon_{i}}]
* $$
* </blockquote>
*
 * Since minimizing the negative log-likelihood is equivalent to maximizing the a posteriori
 * probability, the loss function we optimize is $-\\iota(\\beta,\\sigma)$.
* The gradient functions for $\\beta$ and $\\log\\sigma$ respectively are
*
* <blockquote>
* $$
* \\frac{\\partial (-\\iota)}{\\partial \\beta}=
 * \\sum_{i=1}^{n}[\\delta_{i}-e^{\\epsilon_{i}}]\\frac{x_{i}}{\\sigma} \\\\
*
* \\frac{\\partial (-\\iota)}{\\partial (\\log\\sigma)}=
* \\sum_{i=1}^{n}[\\delta_{i}+(\\delta_{i}-e^{\\epsilon_{i}})\\epsilon_{i}]
* $$
* </blockquote>
*
 * @param bcCoefficients The broadcast value includes three parts: 1) the regression
 *                       coefficients corresponding to the features; 2) the intercept;
 *                       3) the log of the scale parameter.
* @param fitIntercept Whether to fit an intercept term.
* @param bcFeaturesStd The broadcast standard deviation values of the features.
*/
private[ml] class AFTAggregator(
bcFeaturesStd: Broadcast[Array[Double]],
fitIntercept: Boolean)(bcCoefficients: Broadcast[Vector])
extends DifferentiableLossAggregator[Instance, AFTAggregator] {
protected override val dim: Int = bcCoefficients.value.size
/**
   * Add a new training instance to this AFTAggregator, and update the loss and gradient
* of the objective function.
*
* @param data The Instance representation for one data point to be added into this aggregator.
* @return This AFTAggregator object.
*/
def add(data: Instance): this.type = {
val coefficients = bcCoefficients.value.toArray
val intercept = coefficients(dim - 2)
// sigma is the scale parameter of the AFT model
val sigma = math.exp(coefficients(dim - 1))
val xi = data.features
val ti = data.label
val delta = data.weight
require(ti > 0.0, "The lifetime or label should be greater than 0.")
val localFeaturesStd = bcFeaturesStd.value
val margin = {
var sum = 0.0
xi.foreachNonZero { (index, value) =>
if (localFeaturesStd(index) != 0.0) {
sum += coefficients(index) * (value / localFeaturesStd(index))
}
}
sum + intercept
}
val epsilon = (math.log(ti) - margin) / sigma
lossSum += delta * math.log(sigma) - delta * epsilon + math.exp(epsilon)
val multiplier = (delta - math.exp(epsilon)) / sigma
xi.foreachNonZero { (index, value) =>
if (localFeaturesStd(index) != 0.0) {
gradientSumArray(index) += multiplier * (value / localFeaturesStd(index))
}
}
gradientSumArray(dim - 2) += { if (fitIntercept) multiplier else 0.0 }
gradientSumArray(dim - 1) += delta + multiplier * sigma * epsilon
weightSum += 1.0
this
}
}
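/**
 * Illustrative only (not part of the Spark API): the per-instance Weibull AFT loss and
 * gradient terms computed in `AFTAggregator.add` above, restated as a standalone pure
 * function. The parameter names mirror the local variables used in `add`.
 */
private[ml] object AFTLossSketch {
  /** Returns (loss_i, multiplier, logSigmaGradTerm) for a single instance. */
  def pointwise(label: Double, censor: Double, margin: Double, sigma: Double)
      : (Double, Double, Double) = {
    val epsilon = (math.log(label) - margin) / sigma
    val expEpsilon = math.exp(epsilon)
    // contribution of one instance to -iota (see the class doc above)
    val loss = censor * math.log(sigma) - censor * epsilon + expEpsilon
    // scales x_j / std_j to give the beta gradient components
    val multiplier = (censor - expEpsilon) / sigma
    // contribution to the d(-iota)/d(log sigma) accumulator
    val logSigmaGrad = censor + multiplier * sigma * epsilon
    (loss, multiplier, logSigmaGrad)
  }
}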
/**
* BlockAFTAggregator computes the gradient and loss as used in AFT survival regression
* for blocks in sparse or dense matrix in an online fashion.
*
 * Two BlockAFTAggregators can be merged together to produce a summary of the loss and gradient of
* the corresponding joint dataset.
*
* NOTE: The feature values are expected to be standardized before computation.
*
* @param bcCoefficients The coefficients corresponding to the features.
* @param fitIntercept Whether to fit an intercept term.
*/
private[ml] class BlockAFTAggregator(
fitIntercept: Boolean)(bcCoefficients: Broadcast[Vector])
extends DifferentiableLossAggregator[InstanceBlock,
BlockAFTAggregator] {
protected override val dim: Int = bcCoefficients.value.size
private val numFeatures = dim - 2
@transient private lazy val coefficientsArray = bcCoefficients.value match {
case DenseVector(values) => values
case _ => throw new IllegalArgumentException(s"coefficients only supports dense vector" +
s" but got type ${bcCoefficients.value.getClass}.")
}
@transient private lazy val linear = Vectors.dense(coefficientsArray.take(numFeatures))
/**
* Add a new training instance block to this BlockAFTAggregator, and update the loss and
* gradient of the objective function.
*
* @return This BlockAFTAggregator object.
*/
def add(block: InstanceBlock): this.type = {
require(block.matrix.isTransposed)
require(numFeatures == block.numFeatures, s"Dimensions mismatch when adding new " +
s"instance. Expecting $numFeatures but got ${block.numFeatures}.")
require(block.labels.forall(_ > 0.0), "The lifetime or label should be greater than 0.")
val size = block.size
val intercept = coefficientsArray(dim - 2)
// sigma is the scale parameter of the AFT model
val sigma = math.exp(coefficientsArray(dim - 1))
// vec here represents margins
val vec = if (fitIntercept) {
Vectors.dense(Array.fill(size)(intercept)).toDense
} else {
Vectors.zeros(size).toDense
}
BLAS.gemv(1.0, block.matrix, linear, 1.0, vec)
// in-place convert margins to gradient scales
// then, vec represents gradient scales
var localLossSum = 0.0
var i = 0
var sigmaGradSum = 0.0
while (i < size) {
val ti = block.getLabel(i)
// here use Instance.weight to store censor for convenience
val delta = block.getWeight(i)
val margin = vec(i)
val epsilon = (math.log(ti) - margin) / sigma
val expEpsilon = math.exp(epsilon)
localLossSum += delta * math.log(sigma) - delta * epsilon + expEpsilon
val multiplier = (delta - expEpsilon) / sigma
vec.values(i) = multiplier
sigmaGradSum += delta + multiplier * sigma * epsilon
i += 1
}
lossSum += localLossSum
weightSum += size
block.matrix match {
case dm: DenseMatrix =>
BLAS.nativeBLAS.dgemv("N", dm.numCols, dm.numRows, 1.0, dm.values, dm.numCols,
vec.values, 1, 1.0, gradientSumArray, 1)
case sm: SparseMatrix =>
val linearGradSumVec = Vectors.zeros(numFeatures).toDense
BLAS.gemv(1.0, sm.transpose, vec, 0.0, linearGradSumVec)
BLAS.getBLAS(numFeatures).daxpy(numFeatures, 1.0, linearGradSumVec.values, 1,
gradientSumArray, 1)
}
if (fitIntercept) gradientSumArray(dim - 2) += vec.values.sum
gradientSumArray(dim - 1) += sigmaGradSum
this
}
}
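/**
 * Illustrative sanity sketch (not part of Spark): for a tiny 2x2 block, the margins
 * produced by the gemv call in `BlockAFTAggregator.add` equal, row by row,
 * x_i dot linear + intercept. All values below are made up for the example.
 */
private object BlockMarginSketch {
  def main(args: Array[String]): Unit = {
    // column-major values: rows are (1.0, 2.0) and (3.0, 4.0)
    val block = Matrices.dense(2, 2, Array(1.0, 3.0, 2.0, 4.0))
    val linear = Vectors.dense(0.5, -1.0)
    val intercept = 0.1
    val margins = (0 until 2).map { i =>
      (0 until 2).map(j => block(i, j) * linear(j)).sum + intercept
    }
    println(margins) // Vector(-1.4, -2.4)
  }
}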
| witgo/spark | mllib/src/main/scala/org/apache/spark/ml/optim/aggregator/AFTAggregator.scala | Scala | apache-2.0 | 9,352 |
package com.myproject.play.acl
import java.security.cert.X509Certificate
import play.api.mvc.Request
/**
* Created by Surendra on 4/5/16.
*/
trait AuthenticatedRequest[+A] extends Request[A]{
val userId: Long
}
object AuthenticatedRequest {
def apply[A](userIdArg: Long, req: Request[A]) = new AuthenticatedRequest[A] {
def body = req.body
def headers = req.headers
def id = req.id
def method = req.method
def path = req.path
def queryString = req.queryString
def remoteAddress = req.remoteAddress
def secure = req.secure
def tags = req.tags
def uri = req.uri
def version = req.version
def clientCertificateChain: Option[Seq[X509Certificate]] = req.clientCertificateChain
val userId: Long = userIdArg
}
}
| surenyonjan/play-acl | app/com/myproject/play/acl/AuthenticatedRequest.scala | Scala | mit | 785 |
package monocle.std
import cats.Eq
import monocle.{Lens, MonocleSuite}
import monocle.law.discipline.{IsoTests, PrismTests}
import monocle.law.discipline.function.{EachTests, EmptyTests, PossibleTests}
import org.scalacheck.{Arbitrary, Cogen}
import scala.annotation.nowarn
class OptionSpec extends MonocleSuite {
checkAll("some", PrismTests(some[Int]))
checkAll("none", PrismTests(none[Long]))
checkAll("optionToDisjunction", IsoTests(optionToDisjunction[Int]))
checkAll("pOptionToDisjunction", IsoTests(pOptionToDisjunction[Int, Int]))
checkAll("each Option", EachTests[Option[Int], Int])
checkAll("possible Option", PossibleTests[Option[Int], Int]): @nowarn
checkAll("empty Option", EmptyTests[Option[Int]]): @nowarn
case class IntNoZero(value: Int)
object IntNoZero {
implicit val eq: Eq[IntNoZero] = Eq.fromUniversalEquals
implicit val arbitrary: Arbitrary[IntNoZero] =
Arbitrary(Arbitrary.arbitrary[Int].filterNot(_ == 0).map(IntNoZero(_)))
implicit val cogen: Cogen[IntNoZero] =
Cogen.cogenInt.contramap(_.value)
}
checkAll("withDefault Int 0", IsoTests(withDefault(IntNoZero(0))))
test("withDefault can break get-replace property") {
def mapAt(index: String): Lens[Map[String, Int], Option[Int]] =
at(index)
def mapDefaultTo0(index: String): Lens[Map[String, Int], Int] =
mapAt(index).andThen(withDefault(0))
assert(mapDefaultTo0("id").replace(0)(Map("id" -> 0)) == Map.empty)
}
}
| julien-truffaut/Monocle | test/shared/src/test/scala/monocle/std/OptionSpec.scala | Scala | mit | 1,472 |
package org.kirhgoff.morphoid.engine
import org.scalatest._
import org.scalamock.scalatest.MockFactory
/**
* Created by <a href="mailto:kirill.lastovirya@gmail.com">kirhgoff</a> on 2/9/17.
*/
class MorphoidEngineTest extends FlatSpec with Matchers with MockFactory {
// ------------------ Creature
"Creature" should "be able to calculate its origin point" in {
new Creature("", "", Map(Seed(0, 0), Mover(2, 5))).origin should be(Physical(0, 0))
new Creature("", "", Map(Mover(2, 5), Seed(0, 0))).origin should be(Physical(0, 0))
new Creature("", "", Map(Mover(0, 1), Seed(1, 0))).origin should be(Physical(0, 0))
new Creature("", "", Map(Seed(2, 5))).origin should be(Physical(2, 5))
}
"Creature" should "know its bounding rect" in {
def checkBoundBox(rect: Rect, cells: Map[Physical, CellType]) = {
rect should equal(new Creature("01", "test", cells).boundingRect)
}
checkBoundBox(Rect(2, 3, 2, 3), Map(Seed(2, 3)))
checkBoundBox(Rect(1, 2, 2, 3), Map(Seed(2, 3), Mover(1, 2)))
checkBoundBox(Rect(-1, -2, 2, 2), Map(
Seed(0, 0), Mover(-1, -2), Feeder(2,2)))
}
// ------------------ Rect
"Rect" should "be possible to check if it is inside" in {
Rect(0, 0, 10, 10).includes(Rect(1,1, 2,2)) shouldBe true
Rect(0, 0, 10, 10).includes(Rect(1,1, 12,2)) shouldBe false
Rect(0, 0, 10, 10).includes(Rect(-10,-10, -2, -2)) shouldBe false
}
"Rect" should "be able to detect intersects" in {
// include examples
Rect(0, 0, 10, 10).intersects(Rect(1,1, 2,2)) shouldBe true
Rect(0, 0, 10, 10).intersects(Rect(1,1, 12,2)) shouldBe true
Rect(0, 0, 10, 10).intersects(Rect(-10,-10, -2, -2)) shouldBe false
Rect(0, 0, 1, 1).intersects(Rect(1,1, 2,2)) shouldBe true
Rect(0, 0, 10, 10).intersects(Rect(1,1, 12,12)) shouldBe true
Rect(0, 0, 10, 10).intersects(Rect(-1,-1, 0,0)) shouldBe true
}
"Rect" should "be able to inflate" in {
Rect(0, 0, 10, 10).inflate(2) shouldBe Rect(-2, -2, 12, 12)
Rect(0, 0, 0, 0).inflate(1) shouldBe Rect(-1, -1, 1, 1)
}
"MorphoidEngine" should "provide surroundings" in {
val engine = MorphoidEngine.createEmpty(10, 10)
val creature = mock[Creature]
(creature.cells _).expects().returns(List(Physical(5, 5)))
    engine.surroundings(creature, 1) shouldBe List(
Physical(4, 4), Physical(5, 4), Physical(6, 4),
Physical(4, 5), Physical(5, 5), Physical(6, 5),
Physical(4, 6), Physical(5, 6), Physical(6, 6)
)
}
"MorphoidEngine" should "see shrooms without exceptions" in {
val size = 3
// Is that all?
for (
ox <- 0 to size;
oy <- 0 to size;
shx <- 0 to size;
shy <- 0 to size
)
{
val engine = MorphoidEngine(
Ooze("ooze", ox, oy, 1), Shroom("shrm", shx, shy)
).init()
val ooze = engine.soulById("ooze")
val creature = ooze.creature
ooze.act(engine.surroundings(creature, size))
}
}
// --------------------- Shroom
"Shroom" should "stay" in {
val plant = Shroom(2, 5)
val origin = plant.creature.origin
MorphoidEngine(plant).tick()
origin should equal(plant.creature.origin)
}
// TODO decide how to split tests
"Shroom" should "produce energy" in {
val engine = MorphoidEngine(Shroom(0, 0)).init()
val initialEnergy = engine.fullEnergy
engine.tick().fullEnergy should be > initialEnergy
}
// ----------------------- Ooze
"Ooze" should "roam" in {
val herbivore = Ooze(2, 5, 1)
MorphoidEngine(herbivore).tick()
//println("After ----------------------------------->")
Physical(2, 5) shouldNot equal(herbivore.creature.origin)
}
"Ooze" should "move in appropriate time" in {
val entity = Ooze(2, 5, 3)
val creature = entity.creature
val origin = creature.origin
val engine = MorphoidEngine(entity)
engine.tick()
origin should equal(creature.origin)
engine.tick()
origin should equal(creature.origin)
engine.tick() //Move here
origin shouldNot equal(creature.origin)
val newOrigin = creature.origin
engine.tick()
newOrigin should equal(creature.origin)
}
private def findCreatureByType(engine: MorphoidEngine, creature: String) =
engine.getCreatures.find(c => c.kind.equals(creature)).get
"Ooze" should "move towards shrooms" in {
val engine = MorphoidEngine(
Shroom(0, 0),
Ooze(0, 3, 1)
).init()
engine.creatureType(Physical(0, 0)) shouldEqual "shroom"
def ooze = findCreatureByType(engine, "ooze")
engine.tick()
ooze.origin shouldBe Physical(0, 2) // Moves towards shroom
engine.tick()
ooze.origin shouldBe Physical(0, 1) // Moves towards shroom
}
"Ooze" should "find best direction" in {
//One cell
def checkBestDirection(dx:Int, dy:Int, result:Direction) = {
val ooze = Ooze(0, 0, 1)
ooze.bestDirection(Physical(dx, dy)) shouldBe result
}
checkBestDirection(-3, 0, West)
checkBestDirection(2, 3, South)
checkBestDirection(1, 0, East)
checkBestDirection(2, 1, East)
checkBestDirection(-3, -6, North)
checkBestDirection(0, -3, North)
//TODO add more tests for multi-cells
}
// TODO ask Michal
"Ooze" should "die without food" in {
val engine = MorphoidEngine(Ooze(0, 0, 1)).init()
val initialEnergy = engine.fullEnergy
engine.tick().fullEnergy should be < initialEnergy
}
"Ooze" should "live near shroom" in {
val engine = MorphoidEngine(Shroom(0, 0), Ooze(0, 1, 1)).init()
val initialEnergy = engine.fullEnergy
engine.tick().fullEnergy should be > initialEnergy
}
/*
"Decoy" should "appear if ooze has died" in {
val engine = MorphoidEngine(new EnergyBalanceController {
override def oozeLife = 3
override def cellDecay = -1
override def decoyDecay = -0.1
override def decoyThreshold = 2
}, Ooze(0, 0, 1))
// Energy decreased at the end of the tick
engine.tick()
engine.getCreatures should not be empty
engine.getDecoy shouldBe empty
// Creature killed on the third when energy is zero
engine.tick()
engine.getCreatures shouldBe empty
engine.getDecoy should not be empty
}
"Decoy" should "decay with time" in {
val engine = MorphoidEngine(new EnergyBalanceController {
override def oozeLife = 3
override def cellDecay = -1
override def decoyDecay = -0.7
override def decoyThreshold = 2
}, Ooze(0, 0, 1))
engine.tick().tick()
engine.getCreatures shouldBe empty
engine.getDecoy should not be empty
val initialEnergy = engine.fullEnergy
engine.tick()
engine.fullEnergy should be < initialEnergy
engine.tick()
engine.getDecoy shouldBe empty
}
*/
}
| kirhgoff/morphoid | morphoid-engine/src/test/scala/org/kirhgoff/morphoid/engine/MorphoidEngineTest.scala | Scala | lgpl-2.1 | 6,747 |
package io.vamp.operation.workflow
import com.typesafe.scalalogging.LazyLogging
import io.vamp.common.akka._
import io.vamp.model.artifact.Workflow
import io.vamp.operation.notification._
import io.vamp.operation.workflow.WorkflowActor.Update
import io.vamp.operation.workflow.WorkflowSynchronizationActor.SynchronizeAll
import io.vamp.persistence.{ ArtifactPaginationSupport, ArtifactSupport, PersistenceActor }
import io.vamp.workflow_driver.WorkflowDriverActor
class WorkflowSynchronizationSchedulerActor extends SchedulerActor with OperationNotificationProvider {
def tick() = IoC.actorFor[WorkflowSynchronizationActor] ! SynchronizeAll
}
object WorkflowSynchronizationActor {
sealed trait WorkflowMessages
object SynchronizeAll extends WorkflowMessages
}
class WorkflowSynchronizationActor extends CommonSupportForActors with ArtifactSupport with ArtifactPaginationSupport with OperationNotificationProvider with LazyLogging {
import WorkflowSynchronizationActor._
def receive = {
case SynchronizeAll ⇒ synchronize()
case _ ⇒
}
private def synchronize() = {
logger.debug("WorkflowSynchronizationActor - Synchronizing workflows")
implicit val timeout = PersistenceActor.timeout()
forAll[Workflow](allArtifacts[Workflow], {
workflows ⇒
IoC.actorFor[WorkflowDriverActor] ! WorkflowDriverActor.GetScheduled(workflows)
workflows.foreach { workflow ⇒ IoC.actorFor[WorkflowActor] ! Update(workflow, running = workflow.instances.nonEmpty) }
})
}
}
| magneticio/vamp | operation/src/main/scala/io/vamp/operation/workflow/WorkflowSynchronizationActor.scala | Scala | apache-2.0 | 1,541 |
/*
* (c) Copyright 2016 Hewlett Packard Enterprise Development LP
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cogx.compiler.optimizer
import cogx.cogmath.geometry.Shape
import cogx.compiler.codegenerator.KernelCircuit
import cogx.compiler.codegenerator.opencl.hyperkernels.{ConvolveHyperKernel, ConvolveTiledHyperKernel2, SliceVectorsHyperKernel, TensorReduceHyperKernel}
import cogx.compiler.parser.op._
import cogx.parameters.Cog
import cogx.platform.opencl.OpenCLKernelCodeGenParams
import cogx.platform.types.{BorderValid, CrossCorrelationOrientation, UpsampleInputConvolution, UseSmallTensorWhenBest}
import cogx.runtime.execution.Profiler
/** Optimizer of kernel DAGs.
*
* This optimizer recognizes a ConvolveKernel(vectorMode = ProjectFrame) driving a TensorReduceHyperKernel and merges
 * them into a single ConvolveKernel(vectorMode = ProjectFrameBlockReduceSum) kernel that does the entire task. Also,
 * this optimizer recognizes a ConvolveKernel(vectorMode = BackProjectFrame) driving a TensorReduceHyperKernel and merges
 * them into a single ConvolveKernel(vectorMode = BackProjectFrameBlockReduceSum) kernel that does the entire task.
*
* Finally, this optimizer helps create an optimized FilterAdjointBlockReduceSum through an admittedly complicated path:
*
* Before this kernel-circuit-level optimizer is run, the user creates the following sequence of operations
* at the SyntaxTree level:
*
* crossCorrelateFilterAdjoint(...).blockReduceSum(batchSize)
*
* The VectorFieldGenerator, will translate each of these operations into their respective kernels. The
* crossCorrelateFilterAdjoint operation, when translated, knows nothing of the following blockReduceSum, so a
* sometimes inefficient ConvolveToSmallFieldHyperKernel is used that requires its own TensorReduceHyperKernel to
* complete its job. The first pass KernelCircuit is thus:
*
* ConvolveToSmallFieldHyperKernel -> TensorReduceHyperKernel(x) -> TensorReduceHyperKernel(batchSize)
*
* The next step in transforming this kernel sequence is performed by the TensorReduceOptimizer, which combines
* the kernels into the following sequence:
*
* ConvolveToSmallFieldHyperKernel -> TensorReduceHyperKernel(x * batchSize)
*
* Finally, this optimizer is run and recognizes the sequence of a convolve kernel (one that has a
 * ConvolveOp(vectorMode=FilterAdjoint)) followed by a tensor reduction down to the tensor shape expected of an
 * end-to-end FilterAdjointBlockReduceSum. After checking that the ConvolveHyperKernel factory method is prepared
 * to handle the new opcode, this optimizer replaces the two-kernel convolve-reduce sequence by the result of
 * ConvolveHyperKernel(vectorMode=FilterAdjointBlockReduceSum). This tortuous sequence of steps could be avoided
* if we did optimizations at the SyntaxTree level.
*
* @author Dick Carter
*/
private[cogx]
object ProjectFrameTensorReduceSumOptimizer extends Optimizer {
private[cogx] val Enabled = true
/** "Horizontally" merge all HyperKernels in `dag` when possible.
*
* @param dag Kernel circuit to be optimized.
* @param codeGenParams A bundle of device parameters that affect kernel code generation and optimization.
* @param profiler The profiler to use to pick the best variant
* @param report True if verbosity is desired.
* @return The number of optimizations made.
*/
def optimize(dag: KernelCircuit, codeGenParams: OpenCLKernelCodeGenParams, profiler: Profiler, report: Boolean = true) = {
val answer =
if (!Enabled) {
if (Cog.verboseOptimizer) {
println(" *** ProjectFrame/TensorReduceSum Optimizer: disabled")
}
0
}
else {
if (Cog.verboseOptimizer) {
println(" *** ProjectFrame/TensorReduceSum Optimizer: starting (" + dag.size + " nodes)")
}
val initialDagSize = dag.size
// Pre-order flattening not technically necessary, but it makes for the
// most understandable MergedOp.toString() in the kernel DAG printout
val kernels = dag.flattenPreorder
for (kernel <- kernels) {
if (!kernel.isDead) {
kernel match {
case reduceKernel: TensorReduceHyperKernel =>
reduceKernel.inputs(0).source.opcode match {
case convolveOp: AbstractConvolveOp =>
val convolveKernel = reduceKernel.inputs(0).source
val batchSize = convolveOp.batchSize
val in0VectorLength = convolveKernel.inputs(0).fieldType.tensorShape.points
val in1VectorLength = convolveKernel.inputs(1).fieldType.tensorShape.points
val convolveOutputType = reduceKernel.inputs(0).fieldType
val reduceFactor = reduceKernel.operation.factor
val resultVectorSize = reduceKernel.outputs(0).fieldType.tensorShape.points
// We could also optimize vectorMode == PlaneByPlane where the image and
// filter lengths are the same. Do we need a thread-count analysis here
// to make sure this is always a win?
val okToMergeTest1 = reduceKernel.inputs(0).sinks.length == 1 && !convolveKernel.outputs(0).probed
val okToMergeTest2 = convolveOp.vectorMode match {
case ProjectFrame =>
val planesPerImage = in0VectorLength / batchSize
val numLogicalFilters = in1VectorLength / planesPerImage
Cog.projectFrameMerging && (resultVectorSize == numLogicalFilters * batchSize)
case BackProjectFrame =>
val numLogicalFilters = in0VectorLength / batchSize
val planesPerImage = in1VectorLength / numLogicalFilters
Cog.backProjectFrameMerging && (resultVectorSize == planesPerImage * batchSize)
case FilterAdjoint =>
val planesPerImage = in0VectorLength / batchSize
val numLogicalFilters = in1VectorLength / batchSize
Cog.filterAdjointMerging &&
convolveOp.samplingPolicy.isInstanceOf[UpsampleInputConvolution] &&
convolveOp.filterOrientation == CrossCorrelationOrientation &&
batchSize > 1 &&
(resultVectorSize == planesPerImage * numLogicalFilters) &&
ConvolveHyperKernel.canUseFilterAdjointBlockReduceSum(convolveKernel.inputs.toArray, convolveOp, convolveOutputType.fieldShape, codeGenParams)
case _ =>
false
}
val okToMerge = okToMergeTest1 && okToMergeTest2
if (okToMerge) {
val newVectorMode = convolveOp.vectorMode match {
case ProjectFrame => ProjectFrameBlockReduceSum
case BackProjectFrame => BackProjectFrameBlockReduceSum
case FilterAdjoint => FilterAdjointBlockReduceSum
case _ => throw new RuntimeException("Unexpected vector mode.")
}
val newOp = ConvolveOp(convolveOp.borderPolicy, convolveOp.filterOrientation,
convolveOp.samplingPolicy, newVectorMode, convolveOp.batchSize)
val newResultType =
ConvolveHyperKernel.outputFieldType(convolveKernel.inputs(0).fieldType, convolveKernel.inputs(1).fieldType,
newOp.borderPolicy, newOp.samplingPolicy, newOp.vectorMode, newOp.batchSize)
val newConvolveKernel =
ConvolveHyperKernel(convolveKernel.inputs.toArray, newOp, newResultType, UseSmallTensorWhenBest, codeGenParams, profiler)
val lastKernel =
if (newResultType.tensorShape == Shape(1))
SliceVectorsHyperKernel(newConvolveKernel.outputs(0), TensorSliceOp(0), newResultType.resizeTensor(Shape()))
else
newConvolveKernel
require(lastKernel.resultTypes(0) == reduceKernel.resultTypes(0), "Internal compiler error.")
lastKernel.outputs(0).stealProbeAndNameFrom(reduceKernel.outputs(0))
lastKernel.outputs(0).stealSinksFrom(reduceKernel.outputs(0))
reduceKernel.removeFromCircuit(mustDo = true)
if (Cog.verboseKernelMerging) {
println("Merging " + convolveKernel + " ---> " + reduceKernel)
if (newConvolveKernel != lastKernel)
println("Result is " + newConvolveKernel + " ---> " + lastKernel)
else
println("Result is " + newConvolveKernel)
println("********************************************")
}
}
case _ => // Not the case we care about here
}
case _ => // Not the case we care about here
}
}
}
// We now have to fix up recurrences
fixRecurrences(dag)
val removedKernels = initialDagSize - dag.size
if (Cog.verboseOptimizer)
println(" *** ProjectFrame/TensorReduceSum Optimizer: " + removedKernels + " kernel" +
(if (removedKernels == 1) "" else "s") + " removed.")
removedKernels
}
answer
}
}
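// Hypothetical before/after illustration of the fusions performed above
// (kernel names abbreviated; this is not executable code):
//
//   before: Convolve(ProjectFrame)      --> TensorReduce(sum)
//   after:  Convolve(ProjectFrameBlockReduceSum)
//
//   before: Convolve(BackProjectFrame)  --> TensorReduce(sum)
//   after:  Convolve(BackProjectFrameBlockReduceSum)
//
//   before: ConvolveToSmallField(FilterAdjoint) --> TensorReduce(x * batchSize)
//   after:  Convolve(FilterAdjointBlockReduceSum) [--> SliceVectors when the
//           reduced result is a scalar field]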
| hpe-cct/cct-core | src/main/scala/cogx/compiler/optimizer/ProjectFrameTensorReduceSumOptimizer.scala | Scala | apache-2.0 | 10,283 |
package models.domain
import app.XMLValues._
import models.{DayMonthYear, NationalInsuranceNumber}
object CircumstancesIdentification extends Identifier(id = "c1")
case class ReportChangeOrigin(origin: String = NotAsked) extends QuestionGroup(ReportChangeOrigin)
object ReportChangeOrigin extends QGIdentifier(id = s"${CircumstancesIdentification.id}.g1")
case class ReportChangeReason(jsEnabled: Boolean = false, reportChanges: String = NotAsked) extends QuestionGroup(ReportChangeReason)
object ReportChangeReason extends QGIdentifier(id = s"${CircumstancesIdentification.id}.g1")
case class CircumstancesYourDetails( firstName: String = "",
surname: String = "",
nationalInsuranceNumber: NationalInsuranceNumber = NationalInsuranceNumber(Some("")),
dateOfBirth: DayMonthYear = DayMonthYear(None, None, None),
override val wantsContactEmail:String = "",
override val email:Option[String] = None,
override val emailConfirmation:Option[String] = None,
theirFirstName: String = "",
theirSurname: String = "",
theirRelationshipToYou: String = "",
furtherInfoContact: Option[String] = None
) extends QuestionGroup(CircumstancesYourDetails) with EMail{
}
object CircumstancesYourDetails extends QGIdentifier(id = s"${CircumstancesIdentification.id}.g2")
| Department-for-Work-and-Pensions/ClaimCapture | c3/app/models/domain/CircumstancesIdentification.scala | Scala | mit | 1,670 |
/******************************************************************************
* Copyright (c) 2014, Equal Experts Ltd
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
* TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
* PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER
* OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
* The views and conclusions contained in the software and documentation
* are those of the authors and should not be interpreted as representing
* official policies, either expressed or implied, of the Midas Project.
******************************************************************************/
package com.ee.midas.model
trait Watchable[T] {
def update(newWatchable: T)
}
| EqualExperts/Midas | src/main/scala/com/ee/midas/model/Watchable.scala | Scala | bsd-2-clause | 1,810 |
package com.geeksville.gcsapi
import com.geeksville.gcsapi._
import com.ridemission.rest.FileHandler
import android.content.Context
import com.geeksville.rest.AndroidFilesystem
import com.geeksville.andropilot.UsesDirectories
import com.ridemission.rest.JavaFileSystem
/**
* A GCSApi webserver that knows to pull static content from the local filesystem
*/
class AndroidWebserver(val acontext: Context, root: SmallAPI, localonly: Boolean = true) extends Webserver(root, localonly) with UsesDirectories {
// FIXME - we currently assume the cwd is the default of 'posixpilot'
server.addHandler(new FileHandler("/static", new AndroidFilesystem(acontext.getAssets, "webapp/")))
// Allow users to place custom checklists in /sdcard/andropilot/checklist/plane.html or copter.html.
checklistDirectory.foreach { dir =>
server.addHandler(new FileHandler("/static/checklist", new JavaFileSystem(dir)))
}
} | geeksville/arduleader | andropilot/src/main/scala/com/geeksville/gcsapi/AndroidWebserver.scala | Scala | gpl-3.0 | 914 |
package battleship.entity
/**
 * Rules of a classic Battleship game: a 10x10 map and a fleet whose ship sizes
 * and counts are given by `ships`.
*/
object Rules {
val mapSize = 10
  // key: ship size (number of decks), value: number of ships of that size on the map
val ships = Map((1, 4), (2, 3), (3, 2), (4, 1))
}
| overriderb/battleship3000 | src/main/scala/battleship/entity/Rules.scala | Scala | gpl-3.0 | 204 |
package test
class Thing {
def info: Info[this.type] = InfoRepository.getInfo(this)
def info2: Info[this.type] = {
def self: this.type = this
InfoRepository.getInfo(self)
}
}
trait Info[T]
case class InfoImpl[T](thing: T) extends Info[T]
object InfoRepository {
def getInfo(t: Thing): Info[t.type] = InfoImpl(t)
}
| som-snytt/dotty | tests/pos/z1720.scala | Scala | apache-2.0 | 333 |
package scalacookbook.chapter02
/**
* Created by liguodong on 2016/6/18.
*/
object FormatNumAndCurrency extends App{
val pi = scala.math.Pi
println(f"$pi%1.5f")
println(f"$pi%1.2f")
println(f"$pi%06.2f")
  // before Scala 2.10 (no string interpolation):
println("%06.2f".format(pi))
  //A simple way to add commas is to use the getIntegerInstance method of the
  //java.text.NumberFormat class
val formatter = java.text.NumberFormat.getIntegerInstance
println(formatter.format(10000))
println(formatter.format(1000000))
  //build a Locale from a language and a country
//You can also set a locale with the getIntegerInstance method:
val locale = new java.util.Locale("de", "DE")
println(locale)
val formatter2 = java.text.NumberFormat.getIntegerInstance(locale)
println(formatter2.format(1000000))
//You can handle floating-point values with a formatter returned by getInstance
val formatter3 = java.text.NumberFormat.getInstance
println(formatter3.format(10000.33))
  //currency output
//For currency output, use the getCurrencyInstance formatter
val formatter4 = java.text.NumberFormat.getCurrencyInstance
println(formatter4.format(123.456789))
println(formatter4.format(1234.56789))
println(formatter4.format(12345.6789))
println(formatter4.format(123456.789))
  //handling international currencies
//This approach handles international currency
import java.util.{Currency, Locale}
val de = Currency.getInstance(new Locale("de", "DE"))
println(de)
formatter4.setCurrency(de)
println(formatter4.format(123456.789))
}
| liguodongIOT/java-scala-mix-sbt | src/main/scala/scalacookbook/chapter02/FormatNumAndCurrency.scala | Scala | apache-2.0 | 1,549 |
package io.strd.build.sbtssh
import jassh.SSHOptions
import sbt._
/**
* @author chebba
*/
trait SshKeys {
val sshOptions = settingKey[SSHOptions]("")
val sshSession = settingKey[SshSession]("")
}
| strdio/sbt-ssh | src/main/scala/io/strd/build/sbtssh/SshKeys.scala | Scala | apache-2.0 | 206 |
package api
import java.text.SimpleDateFormat
import java.util.{Date, Locale}
import org.joda.time.DateTime
import org.joda.time.format.DateTimeFormat
import play.api.i18n.Lang
import play.api.mvc.{Call, RequestHeader}
/*
* Set of general values and methods for the API
*/
object Api {
//////////////////////////////////////////////////////////////////////
// Headers
final val HEADER_CONTENT_TYPE = "Content-Type"
final val HEADER_CONTENT_LANGUAGE = "Content-Language"
final val HEADER_ACCEPT_LANGUAGE = "Accept-Language"
final val HEADER_DATE = "Date"
final val HEADER_LOCATION = "Location"
final val HEADER_API_KEY = "X-Api-Key"
final val HEADER_AUTHORIZATION = "Authorization"
final val HEADER_PAGE = "X-Page"
final val HEADER_PAGE_FROM = "X-Page-From"
final val HEADER_PAGE_SIZE = "X-Page-Size"
final val HEADER_PAGE_TOTAL = "X-Page-Total"
def basicHeaders(implicit lang: Lang) = Seq(
HEADER_DATE -> printHeaderDate(new DateTime()),
HEADER_CONTENT_LANGUAGE -> lang.language
)
def locationHeader(uri: String): (String, String) = HEADER_LOCATION -> uri
def locationHeader(call: Call)(implicit request: RequestHeader): (String, String) = locationHeader(call.absoluteURL())
//////////////////////////////////////////////////////////////////////
// Date and joda.DateTime utils
private final val longDateTimeFormatter = DateTimeFormat.forPattern("E, dd MMM yyyy HH:mm:ss 'GMT'").withLocale(Locale.ENGLISH).withZoneUTC()
def parseHeaderDate(dateStr: String): DateTime = longDateTimeFormatter.parseDateTime(dateStr)
def printHeaderDate(date: DateTime): String = longDateTimeFormatter.print(date)
private final val dateTimeFormatter = new SimpleDateFormat("dd-MM-yyyy HH:mm:ss")
def parseDateTime(dateStr: String): Date = dateTimeFormatter.parse(dateStr)
def printDateTime(date: Date): String = dateTimeFormatter.format(date)
private final val dateFormatter = new SimpleDateFormat("dd-MM-yyyy")
def parseDate(dateStr: String): Date = dateFormatter.parse(dateStr)
def printDate(date: Date): String = dateFormatter.format(date)
//////////////////////////////////////////////////////////////////////
// Sorting
object Sorting {
final val ASC = false
final val DESC = true
}
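  // Illustrative round-trips (hypothetical values, not part of the API):
  //   printHeaderDate(parseHeaderDate("Fri, 01 Jan 2016 00:00:00 GMT"))
  //     == "Fri, 01 Jan 2016 00:00:00 GMT"
  //   printDate(parseDate("25-12-2015")) == "25-12-2015"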
}
| ygpark2/play-ain-board | app/api/Api.scala | Scala | bsd-3-clause | 2,263 |
/*
* Copyright 2015 Heiko Seeberger
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package de.heikoseeberger.sbtheader
sealed trait LicenseStyle
object LicenseStyle {
final case object Detailed extends LicenseStyle
final case object SpdxSyntax extends LicenseStyle
}
| sbt/sbt-header | src/main/scala/de/heikoseeberger/sbtheader/LicenseStyle.scala | Scala | apache-2.0 | 792 |
package smtlib
package common
class Hexadecimal private(val repr: String) {
//should be normalized to upper cases
require(repr.forall(c =>
(c >= '0' && c <= '9') || (c >= 'A' && c <= 'F')
))
/*
* Returns the Int value represented by this hexadecimal number.
* Assumes the hexadecimal represents 32 bits, by padding 0 in
* front if necessary. It can return negative numbers.
*/
def toInt: Int = {
    // 32 bits = 8 hex digits, so every digit beyond the lowest 8 must be '0'
    val padding = repr.reverse.drop(8)
require(padding.forall(c => c == '0'))
repr.foldLeft(0)((acc, c) => {
acc*16 + c.asDigit//asDigit works for 'A', 'F', ...
})
}
def toBinary: List[Boolean] = {
repr.flatMap{
case '0' => List(false, false, false, false)
case '1' => List(false, false, false, true )
case '2' => List(false, false, true , false)
case '3' => List(false, false, true , true )
case '4' => List(false, true , false, false)
case '5' => List(false, true , false, true )
case '6' => List(false, true , true , false)
case '7' => List(false, true , true , true )
case '8' => List(true , false, false, false)
case '9' => List(true , false, false, true )
case 'A' => List(true , false, true , false)
case 'B' => List(true , false, true , true )
case 'C' => List(true , true , false, false)
case 'D' => List(true , true , false, true )
case 'E' => List(true , true , true , false)
case 'F' => List(true , true , true , true )
}.toList
}
override def toString: String = "#x" + repr
override def equals(that: Any): Boolean = (that != null) && (that match {
case (h: Hexadecimal) => repr == h.repr
case _ => false
})
override def hashCode: Int = repr.hashCode
//TODO: take subpart of hexa (trunc from 32 bits to 8 bits for example)
}
object Hexadecimal {
def fromString(str: String): Option[Hexadecimal] = {
var error = false
val repr = str.map(c => {
if(isDigit(c))
c.toUpper
else {
error = true
c
}
})
if(error) None else Some(new Hexadecimal(repr))
}
/*
   * Returns a 32-bit hexadecimal integer.
*/
def fromInt(n: Int): Hexadecimal = {
if(n < 0) {
val res = "00000000".toArray
for(i <- 0 until 8) {
val digit = (n >> (32 - 4*(i+1))) & 15
res(i) = toDigit(digit)
}
fromString(res.mkString).get
} else {
var i = 0
var rest = n
var repr = ""
while(i < 8) {
val end = rest & 15
rest = rest >> 4
repr = toDigit(end) + repr
i += 1
}
fromString(repr).get
}
}
def toDigit(n: Int): Char = {
require(n >= 0 && n < 16)
if(n >= 0 && n < 10) (n + '0').toChar else ('A' + (n - 10)).toChar
}
def isDigit(c: Char): Boolean =
c.isDigit || (c >= 'a' && c <= 'f') || (c >= 'A' && c <= 'F')
}
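/**
 * Minimal usage sketch (illustrative only, not part of the library).
 */
object HexadecimalExamples {
  def main(args: Array[String]): Unit = {
    assert(Hexadecimal.fromString("FF").map(_.toInt) == Some(255))
    assert(Hexadecimal.fromInt(-1).repr == "FFFFFFFF")
    assert(Hexadecimal.fromInt(10).toString == "#x0000000A")
  }
}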
| colder/scala-smtlib | src/main/scala/smtlib/common/Hexadecimal.scala | Scala | mit | 2,877 |
import Utils._
object CROM {
def empty[NT >: Null, RT >: Null, CT >: Null, RST >: Null] = CROM[NT, RT, CT, RST](List.empty, List.empty, List.empty, List.empty, List.empty, Map.empty, Map.empty)
/**
* Little helper factory method for creating a ConstrainModel with Strings only.
*/
def forStrings(
nt: List[String],
rt: List[String],
ct: List[String],
rst: List[String],
fills: List[(String, String)],
parts: Map[String, List[String]],
rel: Map[String, List[String]]
): CROM[String, String, String, String] = CROM(nt, rt, ct, rst, fills, parts, rel)
}
case class CROM[NT >: Null, RT >: Null, CT >: Null, RST >: Null](
nt: List[NT],
rt: List[RT],
ct: List[CT],
rst: List[RST],
fills: List[(NT, RT)],
parts: Map[CT, List[RT]],
rel: Map[RST, List[RT]]
) {
assert(mutualDisjoint(List(nt, rt, ct, rst)))
assert(totalFunction(ct, parts))
assert(totalFunction(rst, rel))
def wellformed: Boolean = axiom1 && axiom2 && axiom3 && axiom4 && axiom5
def axiom1: Boolean =
all(rt.map(r => any(nt.union(ct).map(t => fills.contains((t, r))))))
def axiom2: Boolean =
all(ct.map(c => parts(c).size > 0))
def axiom3: Boolean = all(rt.map(r => (for (c <- ct if parts(c).contains(r)) yield true).size == 1))
def axiom4: Boolean =
all(rst.map(r => rel(r).head != rel(r).tail.head))
def axiom5: Boolean =
all(rst.map(r => any(ct.map(c => rel(r).toSet.subsetOf(parts(c).toSet)))))
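
  // Illustrative (hypothetical names): a tiny wellformed model built via the
  // String helper above.
  //
  //   val m = CROM.forStrings(
  //     nt = List("Person"), rt = List("Customer", "Account"),
  //     ct = List("Bank"), rst = List("owns"),
  //     fills = List("Person" -> "Customer", "Person" -> "Account"),
  //     parts = Map("Bank" -> List("Customer", "Account")),
  //     rel = Map("owns" -> List("Customer", "Account")))
  //   m.wellformed  // true: all five axioms hold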
} | max-leuthaeuser/ScalaFormalCROM | src/main/scala/CROM.scala | Scala | gpl-3.0 | 2,085 |