ithinkicancode/shapesafe
core/src/test/scala/org/shapesafe/core/ProofSystemTest.scala
package org.shapesafe.core

import org.shapesafe.BaseSpec

object ProofSystemTest {

  trait Conj

  case class Simple(name: String) extends Conj

  object TestSys extends ProofSystem[Conj]
  import TestSys._
  import Factory._

  case class P0() extends Conj

  object P0 {

    implicit def axiom: P0 =>> Simple = forAll[P0].=>> { p =>
      Simple(p.getClass.toString)
    }
  }

  case class P1[T <: Conj, M](child: T, meta: M)

  object P1 {

    implicit def axiom[S <: Simple, M]: P1[S, M] =>> Simple = forAll[P1[S, M]].=>> { p =>
      Simple(s"${p.getClass.toString} -> ${p.child.name}")
    }

    implicit def theorem[
        T <: Conj,
        S <: Simple,
        M,
        O <: Conj
    ](
        implicit
        lemma1: T |-< S,
        lemma2: P1[S, M] |- O
    ): P1[T, M] =>> O = forAll[P1[T, M]].=>> { p =>
      lemma2.valueOf(
        p.copy(lemma1.valueOf(p.child))
      )
    }
  }
}

class ProofSystemTest extends BaseSpec {

  import ProofSystemTest._

  it("can prove P1") {

    val p1 = P1(P0(), 123)
    TestSys.forValue(p1).summon
  }
}
ithinkicancode/shapesafe
core/src/test/scala/org/shapesafe/core/shape/LeafShapeSpike.scala
package org.shapesafe.core.shape import org.scalatest.Ignore import org.shapesafe.BaseSpec import org.shapesafe.core.arity.Const import org.shapesafe.graph.commons.util.debug.print_@ import org.shapesafe.graph.commons.util.viz.TypeViz import shapeless.{HNil, Witness} @Ignore class LeafShapeSpike extends BaseSpec { import shapeless.record._ import shapeless.syntax.singleton.mkSingletonOps describe("records") { describe("example") { it("1") { val book = ("author" ->> "<NAME>") :: ("title" ->> "Types and Programming Languages") :: ("id" ->> 262162091) :: ("price" ->> 44.11) :: ("price" ->> 33.11) :: HNil print_@(TypeViz.infer(book).toString) { val rr = book.apply("author") // Note result type ... print_@(rr) print_@(TypeViz.infer(rr).toString) } { val rr = book.apply("price") // Note result type ... print_@(rr) print_@(TypeViz.infer(rr).toString) } { val values = book.values } } it("2") { val book = ("author" ->> "<NAME>") :: "Types and Programming Languages" :: ("id" ->> 262162091) :: ("price" ->> 44.11) :: ("price" ->> 33.11) :: HNil { val rr = book.apply("author") // Note result type ... print_@(rr) print_@(TypeViz.infer(rr).toString) } { // val values = book.values } } it("3") { val field = "id" ->> 262162091 print_@(TypeViz.infer(field).toString) } it("4") { { val fields = (Symbol("id").narrow -> 262162091) :: (Symbol("price").narrow -> 44.11) :: HNil val record = fields.record // print_@(record.price) } { val fields = (Symbol("id") ->> 262162091) :: (Symbol("price") ->> 44.11) :: HNil val record = fields.record print_@(record.price) } // TODO: only works in shapeless 3.x { val fields = ("id" ->> 262162091) :: ("price" ->> 44.11) :: HNil val record = fields.record // print_@(record.price) } } it("5") { val fields = ("id" ->> 262162091) :: ("price" ->> 44.11) :: HNil val keys = fields.keys print_@(TypeViz.infer(keys)) } } def asW_H(v: Witness.Lt[Symbol]*) = {} // it("infer keys") { // // val record = { // ("a" ->> 1) :: // ("b" ->> 2) :: // HNil // } // // def inferKeys[T <: HList](v: T)(implicit keys: shapeless.ops.record.Keys[T]) = keys // // { // val keys = record.keys // works // print(keys) // // inferKeys(record) // works // } // // { // val record2: record.type = record // val keys = record2.keys // works // print(keys) // // inferKeys(record2) // works //// inferKeys[record.type](record2) // compilation error! // } // // VizType[record.type].toString().shouldBe() // } it("zip") { val dims = { (Symbol("x") ->> Const.Literal(3)) :: (Symbol("y") ->> Const.Literal(4)) :: HNil } val x = dims.apply(Symbol("x")) { val fields = dims.fields // print_@(VizType.infer(dims)) // print_@(VizType.infer(fields)) } val values = dims.values val is = Symbol("i") :: Witness(Symbol("j")) :: HNil val reIndexed = is.zip(values).record // print_@(VizType.infer(reIndexed)) } // it("constraint") { // // def // } } }
ithinkicancode/shapesafe
core/src/main/scala/org/shapesafe/core/arity/Var.scala
package org.shapesafe.core.arity

case class Var(runtimeArity: Int) extends LeafArity {}

object Var {}
ithinkicancode/shapesafe
core/src/main/scala/org/shapesafe/core/debugging/Expressions_Imp0.scala
package org.shapesafe.core.debugging

import org.shapesafe.core.debugging.DebugSymbol.{On1, On2}

trait Expressions_Imp0 {

  type Expr[T <: CanPeek] = T#_AsExpr // with T

  trait :<<-[A, B]

  trait ><[A, B]

  // trait Infix[A, S, B] extends (A ~~ Only[S] ~~ B)
  // trait PrefixW1[S, A] extends (Only[S] ~~ A)
  // trait PrefixW2[S, A, B] extends (Only[S] ~~ A ~~ B)

  trait |<<-[A, B]

  // trait OuterProduct[A, B] use >< instead

  trait CheckDistinct[A]

  trait GetSubscript[A, B]

  trait Reorder[A, B]

  import singleton.ops.+

  trait AppendByName[O] extends On1 {
    type _AsOpStr = "AppendByName[" + O + "]"
  }

  trait SquashByName[O] extends On1 {
    type _AsOpStr = "SquashByName[" + O + "]"
  }

  trait DimensionWise[O] extends On2 {
    type _AsOpStr = "DimensionWise[" + O + "]"
  }
}
ithinkicancode/shapesafe
core/src/main/scala/org/shapesafe/core/shape/LeafShape.scala
<gh_stars>1-10 package org.shapesafe.core.shape import org.shapesafe.core.arity.Utils.NatAsOp import org.shapesafe.core.arity.{Arity, ArityAPI, Const, LeafArity} import org.shapesafe.core.axis.Axis import org.shapesafe.core.axis.Axis.{->>, :<<-} import org.shapesafe.core.tuple.{CanFromStatic, StaticTuples, TupleSystem} import shapeless.{::, HList, HNil, Nat, Witness} import scala.language.implicitConversions /** * a thin wrapper of HList that has all proofs of constraints included * this saves compiler burden and reduces error */ trait LeafShape extends Shape with LeafShape.Proto { type Record <: HList // name: String -> arity: Arity def record: Record type _Names <: Names val names: _Names type _Dimensions <: Dimensions.Tuple val dimensions: _Dimensions lazy val runtimeShape: List[Axis] = asList // TODO: merge with GetSubscript & becoming a special case // object IndexLookup extends Poly1 { // // implicit def name[S <: String]( // implicit // _selector: Selector[Record, S] { type Out <: Arity } // ) = at[Index.Name[S]] { name => // val core = _selector(record) // core.^ :<<- name.w // } // // implicit def ii[N <: Nat]( // implicit // _at: At[Static, N] { type Out <: Axis } // ): Case[Index.I_th[N]] { // type Result = _at.Out // } = at[Index.I_th[N]] { index => // _at(static) // } // } // type IndexLookup = IndexLookup.type // // object Sub { // // def apply[T <: Index](v: T): Sub1[T] = { // // Sub1(v) // } // // def apply(i: Nat)( // implicit // toIntN: ToInt[i.N] // ): Sub1[Index.I_th[i.N]] = { // // apply(Index.I_th(i)) // } // // def apply(w: Witness.Lt[String]): Sub1[Index.Name[w.T]] = { // // apply(Index.Name(w)) // } // } // // case class Sub1[T <: Index](index: T) { // // def axis( // implicit // byIndex: IndexLookup.Case[T] // ): byIndex.Result = { // byIndex.apply(index) // } // } final override def nodeString: String = this.toString } object LeafShape extends TupleSystem with CanFromStatic { import org.shapesafe.core.shape.ProveShape.Factory._ final type UpperBound = Axis object Proto extends StaticTuples[UpperBound] type Proto = Proto.Tuple final type Tuple = LeafShape // Cartesian product doesn't have eye but whatever class Eye extends Proto.Eye with LeafShape { final type Record = HNil override def record: Record = HNil final override type _Names = Names.Eye final override val names = Names.Eye final override type _Dimensions = Dimensions.Eye final override val dimensions = Dimensions.Eye } override lazy val Eye = new Eye // cartesian product symbol class ><[ TAIL <: Tuple, HEAD <: UpperBound ]( override val tail: TAIL, override val head: HEAD ) extends Proto.><[TAIL, HEAD](tail, head) with LeafShape { final type Field = head.Field final override type Record = Field :: tail.Record override lazy val record: Record = head.asField :: tail.record final override type _Names = Names.><[tail._Names, head.Name] final override val names = tail.names >< head.nameSingleton final override type _Dimensions = Dimensions.><[tail._Dimensions, head._Arity] final override val dimensions = new Dimensions.><(tail.dimensions, head.arity) override type PeekHead = Head } final type ><^[ TAIL <: Tuple, HEAD <: Arity ] = ><[TAIL, ArityAPI.^[HEAD]] trait FromArity extends AbstractFromHList { implicit def namelessInductive[ H_TAIL <: HList, TAIL <: Tuple, C <: Arity ]( implicit forTail: H_TAIL ==> TAIL ): (C :: H_TAIL) ==> (TAIL ><^ C) = { forAll[C :: H_TAIL].==> { v => val prev = apply(v.tail) val vHead = v.head: C val head: ArityAPI.^[C] = vHead.^ val result = prev.^ appendInner head result } } } 
object FromRecord extends FromArity { implicit def inductive[ H_TAIL <: HList, TAIL <: Tuple, N <: String, // CAUTION: cannot be reduced to w.T! Scala compiler is too dumb to figure it out C <: Arity ]( implicit forTail: H_TAIL ==> TAIL, w: Witness.Aux[N] ): ((N ->> C) :: H_TAIL) ==> (TAIL >< (C :<<- N)) = { forAll[(N ->> C) :: H_TAIL].==> { v => val prev = apply(v.tail) val vHead: C = v.head val head: C :<<- N = vHead.^ :<<- w val result = prev.^ appendInner head result } } } implicit def consAlways[TAIL <: Tuple, HEAD <: UpperBound]: Cons.FromFn2[TAIL, HEAD, TAIL >< HEAD] = { Cons.from[TAIL, HEAD].to { (tail, head) => new ><(tail, head) } } object FromLiterals extends AbstractFromHList { implicit def inductive[ H_TAIL <: HList, TAIL <: Tuple, HEAD <: Int with Singleton ]( implicit forTail: H_TAIL ==> TAIL, w: Witness.Aux[HEAD] ): (HEAD :: H_TAIL) ==> (TAIL ><^ Const.Literal[HEAD]) = { forAll[HEAD :: H_TAIL].==> { v => val prev = forTail(v.tail) val head = Arity(w) // Arity.Impl(Const.Literal(w)) prev.^ appendInner head } } } object FromNats extends AbstractFromHList { implicit def inductive[ H_TAIL <: HList, TAIL <: Tuple, HEAD <: Nat ]( implicit forTail: H_TAIL ==> TAIL, asOp: NatAsOp[HEAD] ) = { forAll[HEAD :: H_TAIL].==> { v => val prev = apply(v.tail) val head = Arity.FromNat(v.head) prev.^ appendInner head } } } implicit def endo[T <: LeafShape]: T =>> T = ProveShape.forAll[T].=>>(identity[T]) }
ithinkicancode/shapesafe
core/src/main/scala/org/shapesafe/core/shape/unary/UnaryIndexingFn.scala
package org.shapesafe.core.shape.unary

import org.shapesafe.core.Poly1Base
import org.shapesafe.core.axis.Axis.UB_->>
import org.shapesafe.core.axis.NewNameAppender
import org.shapesafe.core.shape.{LeafShape, Names, Shape}
import org.shapesafe.m.viz.TypeVizCT
import shapeless.{::, HList, HNil}

trait UnaryIndexingFn extends Poly1Base[HList, HList] {

  implicit val nil: HNil ==> HNil = forAll[HNil].==> { v =>
    HNil
  }

  // TODO: move to a more general 'AndThen' class
  object ToShape extends Poly1Base[HList, LeafShape] {

    val outer: UnaryIndexingFn.this.type = UnaryIndexingFn.this

    implicit def toShape[
        I <: HList,
        O <: HList
    ](
        implicit
        lemma1: outer.==>[I, O],
        lemma2: LeafShape.FromRecord.Case[O]
    ): I ==> lemma2.Out = {

      forAll[I].==> { i =>
        lemma2.apply(lemma1.apply(i))
      }
    }
  }
}

object UnaryIndexingFn {

  trait Distinct extends UnaryIndexingFn {

    implicit def consNewName[
        TI <: HList,
        TO <: HList,
        HI <: UB_->>
    ](
        implicit
        consTail: TI ==> TO,
        newName: NewNameAppender.Case[(TO, HI)]
    ): (HI :: TI) ==> newName.Out = {

      forAll[HI :: TI].==> { v =>
        val ti = v.tail
        val to = consTail(ti)
        newName(to -> v.head)
      }
    }
  }
}
ithinkicancode/shapesafe
core/src/main/scala/org/shapesafe/core/shape/unary/Conjecture1.scala
package org.shapesafe.core.shape.unary import org.shapesafe.core.shape.{Shape, ShapeConjecture} trait Conjecture1 extends ShapeConjecture { type SS1 <: Shape } object Conjecture1 { trait ^[S1 <: Shape] extends Conjecture1 { final type SS1 = S1 } // object Refute1 extends ShapeReporters.RefuteReporter[Conjecture1 with CanRefute] { // // override object Step1 extends Poly1Base[Iub, MsgBroker] { // // implicit def evalS1[ // SELF <: Iub, // M1 <: MsgBroker // ]( // implicit // forS1: ShapeReporters.PeekShape.Step1.Case.Aux[SELF#SS1, M1] // ): SELF ==> Aux[Refute1[SELF, M1#Out]] = { // // forAll[SELF].==> { _ => // MsgBroker[Refute1[SELF, M1#Out]] //// MsgBroker[M1#Out] //// MsgBroker[Peek[SELF]] //// MsgBroker[Peek[SELF#SS1]] // } // } // } // } // // implicit def refute1[ // S1 <: Conjecture1 with CanRefute // ]( // implicit // step1: Refute1.Case[S1] // ): S1 |- Shape = { // ??? // } }
ithinkicancode/shapesafe
core/src/main/scala/org/shapesafe/core/arity/Const.scala
package org.shapesafe.core.arity

import org.shapesafe.graph.commons.util.IDMixin
import org.shapesafe.core.arity.Utils.Op
import shapeless.Witness
import singleton.ops.{==, Require, ToString}

trait Const[S] extends LeafArity with IDMixin {

  type SS = S
  def singleton: S

  override type _AsOpStr = ToString[S]
  override type _AsExpr = S

  override lazy val _id: S = singleton

  def proveSameType[N2](
      implicit
      proof: S =:= N2
  ): Unit = {}

  def proveEqualType[N2](
      implicit
      proof: Require[S == N2]
  ): Unit = {}

  // TODO: should be named proofEqual, require should do everything in runtime?
  def requireEqual(w: Witness.Lt[Int])(
      implicit
      proof: Require[S == w.T]
  ): Unit = {
    proveEqualType[w.T]
    require(w.value == runtimeArity)
  }
}

object Const {

  import Witness._

  class Derived[OP <: Op, OUT <: Int](override val singleton: OUT) extends Const[OUT] {
    override lazy val runtimeArity: Int = singleton
  }

  object Derived {

    implicit def summon[S <: Op](
        implicit
        s: S
    ): Derived[S, s.OutInt] = {
      new Derived[S, s.OutInt](s.value.asInstanceOf[s.OutInt])
    }
  }

  // this makes it impossible to construct directly from Int type
  class Literal[S <: Int](val singleton: S) extends Const[S] {
    override def runtimeArity: Int = singleton
  }

  object Literal {

    implicit def summon[S <: Int](
        implicit
        w: Witness.Aux[S]
    ): Literal[S] = {
      new Literal[S](w.value)
    }

    def apply(w: Witness.Lt[Int]): Literal[w.T] = {
      Literal.summon[w.T](w)
    }
  }
}
ithinkicancode/shapesafe
core/src/main/scala/org/shapesafe/core/shape/Index.scala
package org.shapesafe.core.shape

import org.shapesafe.graph.commons.util.IDMixin
import org.shapesafe.graph.commons.util.reflect.format.FormatOvrd.Only
import org.shapesafe.core.debugging.CanPeek
import org.shapesafe.core.debugging.DebugUtil.StrOrRaw
import shapeless.ops.nat.ToInt
import shapeless.{Nat, Witness}
import singleton.ops.ToString

import scala.language.implicitConversions

trait Index extends IDMixin with CanPeek {

  override lazy val toString: String = s"${_id}:${getClass.getSimpleName}"
}

object Index {

  trait Name_<:[+KUB] extends Index {}

  type Str = Name_<:[String]

  class Name[S <: String](val w: Witness.Aux[S]) extends Name_<:[S] {

    def name: S = w.value

    type Name = S

    override protected def _id = w.value

    override type _AsOpStr = StrOrRaw[S]
    override type _AsExpr = S
  }

  object Name {

    def apply(w: Witness.Lt[String]): Name[w.T] = new Name(w)
  }

  class I_th[N <: Nat](val index: N, indexInt: Int) extends Name_<:[Nothing] {

    type Ordinal = N

    override protected def _id = indexInt

    // TODO: type string is too long
    override type _AsOpStr = StrOrRaw[ToString[N]]
    override type _AsExpr = ToString[N]
  }

  object I_th {

    def apply(i: Nat)(
        implicit
        toIntN: ToInt[i.N]
    ) = new I_th[i.N](i.asInstanceOf[i.N], toIntN.apply())
  }
}
ithinkicancode/shapesafe
verify/breeze/src/main/scala/org/shapesafe/breeze/tensor/DoubleVector.scala
<filename>verify/breeze/src/main/scala/org/shapesafe/breeze/tensor/DoubleVector.scala package org.shapesafe.breeze.tensor import breeze.linalg.DenseVector import breeze.signal import org.shapesafe.core.arity.Const.Literal import org.shapesafe.core.arity.ProveArity.{|-, |-<} import org.shapesafe.core.arity.nullary.SizeOf import org.shapesafe.core.arity.{Arity, ArityAPI, LeafArity, Var} import org.shapesafe.core.util.Constraint.ElementOfType import shapeless.{HList, ProductArgs, Witness} import scala.language.implicitConversions import scala.util.Random class DoubleVector[A1 <: Arity]( val arity: ArityAPI.^[A1], val data: Vec[Double] // should support sparse/lazy vector ) extends Serializable { import org.shapesafe.core.arity.ops.ArityOps._ // TODO: the format should be customisable override lazy val toString: String = { s"${arity.toString} \u00d7 1: Double" } def reify[O <: LeafArity]( implicit prove: A1 |- O ): DoubleVector[O] = { val evaled = arity.eval new DoubleVector(evaled, data) } def dot_*[A2 <: Arity](that: DoubleVector[A2])( implicit proof: A1 ==! A2 |-< _ ): Double = { val result: Double = this.data.dot(that.data) result } def concat[A2 <: Arity, O <: LeafArity](that: DoubleVector[A2])( implicit lemma: (A1 :+ A2) |- O ): DoubleVector[O] = { // TODO: always successful, can execute lazily without lemma val op = this.arity :+ that.arity val proof = lemma(op) val data = DenseVector.vertcat(this.data.toDenseVector, that.data.toDenseVector) new DoubleVector(proof.value.^, data) } def pad[O <: LeafArity](padding: Witness.Lt[Int])( implicit lemma: (A1 :+ (Literal[padding.T] :* Arity._2._Arity)) |- O ): DoubleVector[O] = { val _padding = Arity(padding) val op = this.arity :+ (_padding :* Arity._2) val proof = lemma(op) val out = proof.value.^ val fill = DenseVector.fill(out.runtimeArity)(0.0) val dOut = DenseVector.vertcat(fill, this.data.toDenseVector, fill) new DoubleVector(out, dOut) } def conv[ A2 <: Arity, O <: LeafArity ]( kernel: DoubleVector[A2], stride: Witness.Lt[Int] )( implicit lemma: ((A1 :- A2 :+ Arity._1._Arity) :/ Literal[stride.T]) |- O ): DoubleVector[O] = { val _stride = Arity(stride) val op = (this.arity :- kernel.arity :+ Arity._1) :/ _stride val proof = lemma(op) val out = proof.value.^ val range = 0.to(this.data.size - kernel.data.size, stride.value) // for (padding = 0.to(that.data.size - this.data.size)) val dOut: DenseVector[Double] = signal.convolve( this.data.toDenseVector, kernel.data.toDenseVector, range ) new DoubleVector(out, dOut) } def conv[ A2 <: Arity, O <: LeafArity ]( kernel: DoubleVector[A2] )( implicit lemma: ((A1 :- A2 :+ Arity._1._Arity) :/ Arity._1._Arity) |- O ): DoubleVector[O] = { conv(kernel, 1) } } object DoubleVector extends ProductArgs { def applyProduct[D <: HList, O <: LeafArity](data: D)( implicit proofOfSize: SizeOf[D] |- O, proofOfType: D ElementOfType Double ): DoubleVector[O] = { val list = data.runtimeList.map { v => v.asInstanceOf[Double] } val size = SizeOf(data) new DoubleVector(proofOfSize.valueOf(size).^, Vec.apply(list.toArray)) } @transient object from { def hList[D <: HList, O <: LeafArity](data: D)( implicit proofOfSize: SizeOf[D] |- O, proofOfType: D ElementOfType Double ): DoubleVector[O] = { applyProduct(data)(proofOfSize, proofOfType) } } def zeros(lit: Witness.Lt[Int]): DoubleVector[Literal[lit.T]] = { new DoubleVector(Arity(lit), DenseVector.fill(lit.value)(0.0)) } def random(lit: Witness.Lt[Int]): DoubleVector[Literal[lit.T]] = { val list = DenseVector.fill(lit.value) { Random.nextDouble() } new 
DoubleVector(Arity(lit), list) } @transient object unsafe { def zeros(number: Int): DoubleVector[Var] = { new DoubleVector(Var(number).^, DenseVector.fill(number)(0.0)) } } case class Reified[A1 <: LeafArity](self: DoubleVector[A1]) { val arity = self.arity def crossValidate(): Unit = { arity.runtimeTry foreach { n => n == self.data.size } } } implicit def asReified[A1 <: Arity, O <: LeafArity](v: DoubleVector[A1])( implicit prove: A1 |- O ): Reified[O] = { Reified(v.reify) } }
ithinkicancode/shapesafe
core/src/main/scala/org/shapesafe/core/shape/Dimensions.scala
package org.shapesafe.core.shape

import org.shapesafe.core.arity.Arity
import org.shapesafe.core.tuple.{CanInfix_><, StaticTuples}

import scala.language.implicitConversions

// should it be "Arities"?
object Dimensions extends StaticTuples.Total[Arity] with CanInfix_>< {}
ithinkicancode/shapesafe
core/src/main/scala/org/shapesafe/core/arity/VerifiedArity.scala
package org.shapesafe.core.arity

/**
  * always successful, no need to verify
  */
trait VerifiedArity extends Arity.Verifiable {

  // final def in: this.type = this
}

object VerifiedArity {

  import ProveArity.Factory._

  implicit def endo[T <: VerifiedArity]: T =>> T = ProveArity.forAll[T].=>>(identity[T])
}
ithinkicancode/shapesafe
core/src/main/scala/org/shapesafe/core/tuple/CanFromStatic.scala
package org.shapesafe.core.tuple

import shapeless.{::, HList}

trait CanFromStatic extends CanCons {
  _self: TupleSystem =>

  object FromStatic extends AbstractFromHList {

    implicit def inductive[
        H_TAIL <: HList,
        TAIL <: Tuple,
        HEAD <: UpperBound
    ](
        implicit
        forTail: H_TAIL ==> TAIL,
        cons: Cons[TAIL, HEAD]
    ): (HEAD :: H_TAIL) ==> cons.ConsResult = {

      forAll[HEAD :: H_TAIL].==> { v =>
        val prev = apply(v.tail)
        cons(prev, v.head)
      }
    }
  }
}
ithinkicancode/shapesafe
core/src/main/scala/org/shapesafe/core/ProofScope.scala
<gh_stars>1-10 package org.shapesafe.core import scala.annotation.implicitNotFound import scala.language.implicitConversions /** * If Poly1 works smoothly there will be no point in defining it, too bad the assumed compiler bug made it necessary * * @tparam OUB upper bound of output */ trait ProofScope { // TODO: no IUB? type OUB val root: ProofSystem[OUB] type Consequent = root.Term // TODO: this should potentially be merged with Refutation cases in MsgBroker // Such that successful proof can show reasoning at runtime. // At this moment, this feature is implemented in PeekReporter // which is too complex for its own good type Proof[-I, +P <: Consequent] <: root.Proof[I, P] /** * entailment, logical implication used only in existential proof summoning */ // TODO: how to override it in subclasses? @implicitNotFound( "[NO PROOF]\n${I}\n |-\n??? <: ${O}\n" ) type |-<[-I, O <: OUB] = Proof[I, root.Term.Lt[O]] @implicitNotFound( "[NO PROOF]\n${I}\n |-\n${O}\n" ) type |-[-I, O <: OUB] = Proof[I, root.Term.Aux[O]] def forAll[I]: root.Factory[I] def satisfying[OB <: OUB] = new Satisfying[OB]() class Satisfying[OB <: OUB]() { case class If[I](v: I) { implicit def findProof[O <: OB]( implicit prove: I |- O ): I |- O = prove implicit def canProve_^^[O <: OB]( implicit prove: I |- O ): root.Term.Aux[O] = prove.apply(v) implicit def canProve[O <: OB]( implicit prove: I |- O ): O = { canProve_^^(prove).value } } } } object ProofScope { case class ChildScope[O](root: ProofSystem[O]) extends ProofScope { type OUB = O trait Proof[-I, +P <: Consequent] extends root.Proof[I, P] override def forAll[I]: Factory[I] = new Factory[I] {} object Factory { trait =>>^^[-I, +P <: Consequent] extends Proof[I, P] with root.Factory.=>>^^[I, P] trait =>>[-I, O <: OUB] extends =>>^^[I, root.Term.^[O]] with root.Factory.=>>[I, O] } trait Factory[I] extends root.Factory[I] { import Factory._ override def =>>^^[P <: Consequent](_fn: I => P) = new (I =>>^^ P) { override def apply(v: I): P = _fn(v) } override def =>>[O <: OUB](_fn: I => O): I =>> O = new (I =>> O) { // override def valueOf(v: I): O = fn(v) override def apply(v: I): root.Term.^[O] = root.Term.^[O](_fn(v)) } } } }
ithinkicancode/shapesafe
core/src/main/scala/org/shapesafe/core/shape/args/ApplyLiterals.scala
package org.shapesafe.core.shape.args

import org.shapesafe.core.shape.ShapeAPI.^
import org.shapesafe.core.shape.{LeafShape, Names}
import shapeless.ops.hlist.Reverse
import shapeless.{HList, SingletonProductArgs}

trait ApplyLiterals extends SingletonProductArgs with ApplyArgs {

  // TODO: should the reverse be justified?
  def applyProduct[H1 <: HList, H2 <: HList](
      v: H1
  )(
      implicit
      reverse: Reverse.Aux[H1, H2],
      lemma: fromHList.Case[H2]
  ): Result[lemma.Out] = {

    val out = lemma.apply(v.reverse)
    toResult(out)
  }
}

object ApplyLiterals {

  trait ToNames extends ApplyLiterals {

    type OUB = Names
    override val fromHList: Names.FromLiterals.type = Names.FromLiterals

    override type Result[T <: OUB] = T
    override def toResult[T <: OUB](v: T): T = v
  }

  trait ToShape extends ApplyLiterals {

    type OUB = LeafShape
    override val fromHList: LeafShape.FromLiterals.type = LeafShape.FromLiterals

    override type Result[T <: OUB] = ^[T]
    override def toResult[T <: OUB](v: T) = v.^
  }
}
ithinkicancode/shapesafe
core/src/main/scala/org/shapesafe/core/tuple/CanCons.scala
package org.shapesafe.core.tuple

trait CanCons {
  _self: TupleSystem =>

  // TODO: too much boilerplate, switch to ~~> Proof pattern or Poly1/Poly2?
  trait Cons[-TAIL <: Tuple, -HEAD <: UpperBound] {

    type ConsResult <: Tuple
    def apply(tail: TAIL, head: HEAD): ConsResult
  }

  object Cons {

    def from[TAIL <: Tuple, HEAD <: UpperBound] = new Factory[TAIL, HEAD]

    class Factory[TAIL <: Tuple, HEAD <: UpperBound] {

      def to[O <: Tuple](fn: (TAIL, HEAD) => O) = new FromFn2[TAIL, HEAD, O](fn)
    }

    case class FromFn2[-TAIL <: Tuple, -HEAD <: UpperBound, O <: Tuple](
        fn: (TAIL, HEAD) => O
    ) extends Cons[TAIL, HEAD] {

      final type ConsResult = O

      final override def apply(tail: TAIL, head: HEAD): ConsResult = fn(tail, head)
    }

    def summonFor[TAIL <: Tuple, HEAD <: UpperBound](tail: TAIL, head: HEAD)(
        implicit ev: Cons[TAIL, HEAD]
    ): ev.type = ev

    def apply[TAIL <: Tuple, HEAD <: UpperBound](tail: TAIL, head: HEAD)(
        implicit ev: Cons[TAIL, HEAD]
    ): ev.ConsResult = {
      ev.apply(tail, head)
    }
  }
}
ithinkicancode/shapesafe
core/src/test/scala/org/shapesafe/core/Spike.scala
package org.shapesafe.core

import org.scalatest.Ignore
import org.shapesafe.BaseSpec
import shapeless.Witness

@Ignore
class Spike extends BaseSpec {}

object Spike {

  def adhocW = Witness("a")
  val singletonW = Witness("a")
}
ithinkicancode/shapesafe
core/src/main/scala/org/shapesafe/core/shape/Shape.scala
package org.shapesafe.core.shape

import org.shapesafe.graph.commons.util.{TreeFormat, TreeLike}
import org.shapesafe.core.debugging.CanPeek
import org.shapesafe.core.shape.LeafShape.Eye
import org.shapesafe.core.shape.args.{ApplyLiterals, ApplyNats}

import scala.language.implicitConversions

trait Shape extends TreeLike with CanPeek {

  override lazy val treeFormat: TreeFormat = TreeFormat.Indent2Minimal
}

object Shape extends ApplyLiterals.ToShape with ShapeAPI {

  def box[T <: Shape](self: T): ShapeAPI.^[T] = ShapeAPI.^(self)

  implicit class Converters[S <: Shape](self: S) {

    def ^ : ShapeAPI.^[S] = ShapeAPI.^(self)
  }

  object Nats extends ApplyNats.ToShape {}

  object Literals extends ApplyLiterals.ToShape {}

  override type _Shape = LeafShape.Eye

  override def shape: Eye = LeafShape.Eye
}
ithinkicancode/shapesafe
core/src/main/scala/org/shapesafe/core/arity/Unchecked.scala
package org.shapesafe.core.arity

trait Unchecked extends LeafArity {}

case object Unchecked extends Unchecked {

  override def runtimeArity: Int = throw new UnsupportedOperationException("<no runtime value>")
}
ithinkicancode/shapesafe
core/src/main/scala/org/shapesafe/core/tuple/TupleLambdaSystem.scala
//package org.shapesafe.core.tuple // ///** // * TODO: Not sure if this thing is ever going to be useful, maybe HType1 => HList => mapped => HType2 is sufficient // * // * @tparam UBI Upper bound type of input system // * @tparam UBO Upper bound type of output system // */ //trait TupleLambdaSystem[UBI, UBO] { // // val sysI: StaticTuples[UBI] // val sysO: StaticTuples[UBO] // // // Implementations of HTypedSystem MUST declare type classes for this trait // trait UnitLambda[ // -I <: UBI, // +O <: UBO // ] { // // def apply(in: I): O // } // // trait ~~>[-IN <: sysI.Impl, +OUT <: sysO.Impl] { // // def apply(in: IN): OUT // } // // object ~~> { // // implicit def eye2eye: sysI.Eye ~~> sysO.Eye = { _ => // sysO.Eye // } // // implicit def inductive[ // TAIL_I <: sysI.Impl, // HEAD_I <: UBI, // TAIL_O <: sysO.Impl, // HEAD_O <: UBO // ]( // implicit // forTail: TAIL_I ~~> TAIL_O, // forHead: HEAD_I UnitLambda HEAD_O // ): sysI.><[TAIL_I, HEAD_I] ~~> sysO.><[TAIL_O, HEAD_O] = { v => // val tailI = v.tail // val headI = v.head // // new sysO.><(forTail(tailI), forHead(headI)) // } // } //}
ithinkicancode/shapesafe
core/src/test/scala/org/shapesafe/core/shape/LeafShapeSpec.scala
<reponame>ithinkicancode/shapesafe package org.shapesafe.core.shape import org.shapesafe.BaseSpec import org.shapesafe.core.arity.Arity import org.shapesafe.core.shape.LeafShape.{FromRecord, FromStatic} import shapeless.HNil class LeafShapeSpec extends BaseSpec { import shapeless.record._ describe("create") { it("named") { val shape = Shape >|< Arity(2) :<<- "x" append Arity(3) :<<- "y" typeInferShort(shape.shape).shouldBe( """ |LeafShape.Eye >< (Const.Literal[Int(2)] :<<- String("x")) >< (Const.Literal[Int(3)] :<<- String("y")) |""".stripMargin ) // VizType.infer(shape.static).toString.shouldBe() assert(shape.record.apply("x").runtimeArity == 2) } it("nameless") { val shape = Shape >|< Arity(2) append Arity(3) typeInferShort(shape.shape).shouldBe( """ |LeafShape.Eye >< ArityAPI.^[Const.Literal[Int(2)]] >< ArityAPI.^[Const.Literal[Int(3)]] |""".stripMargin ) } it("mixed") { val shape = Shape >|< Arity(2) :<<- "x" >|< Arity(3) append Arity(4) :<<- "z" assert(shape.record.apply("x").runtimeArity == 2) assert(shape.record.apply("z").runtimeArity == 4) typeInferShort(shape.shape).shouldBe( """ |LeafShape.Eye >< (Const.Literal[Int(2)] :<<- String("x")) >< ArityAPI.^[Const.Literal[Int(3)]] >< (Const.Literal[Int(4)] :<<- String("z")) |""".stripMargin ) } } it("toString") { val shape = Shape >|< Arity(2) :<<- "x" >|< Arity(3) append Arity(4) :<<- "z" shape.toString.shouldBe( """ |➊ >< | 2:Literal :<<- x >< | 3:Literal >< | 4:Literal :<<- z |""".stripMargin ) } describe(FromStatic.getClass.getSimpleName) { it("from HNil") { val hh = HNil val shape = LeafShape.FromStatic(hh) assert(shape == LeafShape.Eye) } it("1") { val hh = (Arity(3) :<<- "x") :: HNil val shape = LeafShape.FromStatic(hh) // VizType.infer(shape).toString.shouldBe() assert(shape.static == hh) } it("2") { val hh = (Arity(3) :<<- "x") :: (Arity(4) :<<- "y") :: HNil val shape = LeafShape.FromStatic(hh) // VizType.infer(shape).toString.shouldBe() assert(shape.static == hh) assert(shape.static.head.nameless == Arity(3)) } } describe(FromRecord.getClass.getSimpleName) { import shapeless.syntax.singleton.mkSingletonOps it("from HNil") { val hh = HNil val shape = LeafShape.FromRecord(hh) assert(shape == LeafShape.Eye) } it("1") { val hh = ("x" ->> Arity(3).arity) :: HNil val shape = LeafShape.FromRecord(hh) // VizType.infer(shape).toString.shouldBe() assert(shape.dimensions.static == hh) } it("2") { val hh = ("x" ->> Arity(3).arity) :: ("y" ->> Arity(4).arity) :: HNil val shape = LeafShape.FromRecord(hh) // VizType.infer(shape).toString.shouldBe() assert(shape.dimensions.static == hh) assert(shape.static.head.nameless == Arity(3)) } } describe(Shape.Literals.getClass.getSimpleName) { it("1") { val ss = Shape.Literals(4) ss.dimensions.static.head.requireEqual(4) ss.dimensions.static.last.requireEqual(4) val nn = (ss |<<- (Names >< "i")).eval nn.toString.shouldBe( """ |➊ >< | 4:Literal :<<- i |""".stripMargin ) } it("2") { val ss = Shape.Literals(4, 3, 2) ss.dimensions.static.head.requireEqual(2) ss.dimensions.static.last.requireEqual(4) val nn = (ss |<<- (Names >< "i" >< "j" >< "k")).eval nn.toString.shouldBe( """ |➊ >< | 4:Literal :<<- i >< | 3:Literal :<<- j >< | 2:Literal :<<- k |""".stripMargin ) } } describe(Shape.Nats.getClass.getSimpleName) { it("1") { val ss = Shape.Nats(4) ss.dimensions.static.head.requireEqual(4) ss.dimensions.static.last.requireEqual(4) val nn = (ss |<<- (Names >< "i")).eval nn.toString.shouldBe( """ |➊ >< | 4:Derived :<<- i |""".stripMargin ) } it("2") { val ss = Shape.Nats(4, 3, 2) 
ss.dimensions.static.head.requireEqual(2) ss.dimensions.static.last.requireEqual(4) val nn = (ss |<<- (Names >< "i" >< "j" >< "k")).eval nn.toString.shouldBe( """ |➊ >< | 4:Derived :<<- i >< | 3:Derived :<<- j >< | 2:Derived :<<- k |""".stripMargin ) } // it("3") { // val ss = (Shape.Literals(4, 3, 2) |<<- (Names >< "i" >< "j" >< "k")).eval // // ss.dimensions.static.head.core.requireEqual(2) // ss.dimensions.static.last.core.requireEqual(4) // } } describe("index") { it("1") { val shape = Shape >|< Arity(2) :<<- "x" val record = shape.record // VizType.infer(record).treeString.shouldBe() typeInferShort(record.keys).shouldBe( """ |String("x") :: HNil""".stripMargin ) typeInferShort(record.values).shouldBe( """ |Const.Literal[Int(2)] :: HNil""".stripMargin ) assert(record.get("x") == Arity(2).arity) } it("2") { val shape = Shape >|< Arity(2) :<<- "x" >|< Arity(3) :<<- "y" val record = shape.record // VizType.infer(static).treeString.shouldBe() typeInferShort(record.keys).shouldBe( """ |String("y") :: String("x") :: HNil""".stripMargin ) typeInferShort(record.values).shouldBe( """ |Const.Literal[Int(3)] :: Const.Literal[Int(2)] :: HNil |""".stripMargin ) assert(record.get("x").^.nameless == Arity(2)) } } describe("record") { it("1") { val shape = Shape >|< Arity(2) :<<- "x" val record = shape.record // VizType.infer(record).treeString.shouldBe() typeInferShort(record.keys).shouldBe( """ |String("x") :: HNil""".stripMargin ) typeInferShort(record.values).shouldBe( """ |Const.Literal[Int(2)] :: HNil""".stripMargin ) assert(record.get("x") == Arity(2).arity) } it("2") { val shape = Shape >|< Arity(2) :<<- "x" >|< Arity(3) :<<- "y" val record = shape.record // VizType.infer(static).treeString.shouldBe() typeInferShort(record.keys).shouldBe( """ |String("y") :: String("x") :: HNil""".stripMargin ) typeInferShort(record.values).shouldBe( """ |Const.Literal[Int(3)] :: Const.Literal[Int(2)] :: HNil |""".stripMargin ) assert(record.get("x") == Arity(2).arity) } } describe("names") { val shape = Shape >|< Arity(2) :<<- "x" append Arity(3) :<<- "y" it("1") { val nn = shape.names assert(nn.head == "y") } } describe("values") { val shape = Shape >|< Arity(2) :<<- "x" >|< Arity(3) :<<- "y" it("1") { val vv = shape.dimensions assert(vv.head == Arity(3).arity) } } describe("peek & interrupt") { it("Eye") { shouldNotCompile( """Shape.interrupt""", """.*(➊).*""" ) } it("1") { val s = Shape(1, 2) shouldNotCompile( """s.interrupt""", """.*(1 >< 2).*""" ) } it("2") { val s = Shape(1, 2).|<<-*("a", "b").eval shouldNotCompile( """s.interrupt""", """.*(\Q1 :<<- a >< (2 :<<- b)\E).*""" ) } } }
ithinkicancode/shapesafe
core/src/main/scala/org/shapesafe/core/tuple/StaticTuples.scala
<reponame>ithinkicancode/shapesafe<filename>core/src/main/scala/org/shapesafe/core/tuple/StaticTuples.scala<gh_stars>1-10 package org.shapesafe.core.tuple import org.shapesafe.graph.commons.util.{IDMixin, TextBlock} import org.shapesafe.core.debugging.OpStrs.OpStr import org.shapesafe.core.debugging.Expressions.Expr import org.shapesafe.core.debugging.{CanPeek, Expressions} import org.shapesafe.core.util.RecordView import shapeless.{::, HList, HNil, Witness} import singleton.ops.+ import scala.language.implicitConversions trait StaticTuples[UB] extends TupleSystem with CanFromStatic { import StaticTuples._ final type UpperBound = UB trait Tuple extends IDMixin with CanPeek { // TODO: rename to `Tuple` type Static <: HList def static: Static lazy val staticView: RecordView[Static] = RecordView(static) def asList: List[UB] override protected def _id: Any = asList final type Cons[HH <: UB] = StaticTuples.this.><[this.type, UB] type _ConsExpr[PEEK <: CanPeek] } class Eye extends Tuple { override type Static = HNil override def static: HNil = HNil override def asList: List[UB] = Nil override lazy val toString: _AsOpStr = EYE.value final override type _AsOpStr = EYE.T final override type _ConsExpr[PEEK <: CanPeek] = Expr[PEEK] final override type _AsExpr = EYE.T } override lazy val Eye = new Eye // cartesian product symbol class ><[ TAIL <: Tuple, HEAD <: UB ]( val tail: TAIL, val head: HEAD ) extends Tuple { // in scala 3 these will be gone type Tail = TAIL type Head = HEAD override type Static = HEAD :: tail.Static override def static: Static = head :: tail.static override def asList: List[UB] = tail.asList ++ Seq(head) // override lazy val toString = s"${tail.toString} >< $head" override lazy val toString: String = { s"""${tail.toString} >< |${TextBlock(head.toString).indent(" ").build} | """.stripMargin.trim } type PeekHead <: CanPeek final override type _AsOpStr = OpStr[TAIL] + " >< " + OpStr[PeekHead] final override type _ConsExpr[PEEK <: CanPeek] = Expressions.><[Expr[this.type], Expr[PEEK]] // final override type _Expr = Expr.><[Expr[TAIL], Expr[PeekHead]] final override type _AsExpr = TAIL#_ConsExpr[PeekHead] } } object StaticTuples { val EYE = Witness("➊") // implicit def toEyeOps(s: TupleSystem[_]): s.Impl.InfixOps[s.Eye] = new s.Impl.InfixOps(s.Eye) trait Total[UB] extends StaticTuples[UB] { implicit def consAlways[TAIL <: Tuple, HEAD <: UB]: Cons.FromFn2[TAIL, HEAD, TAIL >< HEAD] = { Cons.from[TAIL, HEAD].to { (tail, head) => new ><(tail, head) } } } object W { final val eye = Witness("Eye") final val >< = Witness(" >< ") } }
ithinkicancode/shapesafe
core/src/main/scala/org/shapesafe/core/arity/ArityAPI.scala
package org.shapesafe.core.arity

import org.shapesafe.core.arity.ArityReporters.{InterruptArity, PeekArity}
import org.shapesafe.core.arity.ProveArity.|-
import org.shapesafe.core.arity.ops.ArityOpsLike
import org.shapesafe.core.axis.{Axis, NoName, NoNameW}
import org.shapesafe.core.debugging.OpStrs.OpStr
import shapeless.Witness
import shapeless.Witness.Aux

import scala.language.implicitConversions

trait ArityAPI extends ArityOpsLike with Axis {

  override type _Axis >: this.type <: ArityAPI

  final override def toString: String = arity.toString

  def verify[
      O <: Arity
  ](
      implicit
      prove: _Arity |- O
  ): ArityAPI.^[O] = prove.apply(arity).value.^

  def eval[
      O <: LeafArity
  ](
      implicit
      prove: _Arity |- O
  ): ArityAPI.^[O] = verify(prove)

  def peek(
      implicit
      reporter: PeekArity.Case[_Arity]
  ): this.type = this

  def interrupt(
      implicit
      reporter: InterruptArity.Case[_Arity]
  ): this.type = this

  def reason[
      O <: LeafArity
  ](
      implicit
      reporter: ArityReporters.PeekArity.Case[_Arity],
      prove: _Arity |- O
  ): ArityAPI.^[O] = eval(prove)

  final override val nameSingleton: Aux[NoName] = NoNameW
}

object ArityAPI {

  type Aux[A <: Arity] = ArityAPI { type _Arity = A }

  final case class ^[A <: Arity](arity: A) extends ArityAPI {

    override type _Arity = A
    type _Axis = ^[A]

    type _AsOpStr = OpStr[A]
    type _AsExpr = A#_AsExpr
  }

  implicit def unbox[A <: Arity](v: Aux[A]): A = v.arity

  // implicit def unbox[T <: ArityAPI](v: T): v._Arity = v._arity // TODO: why is it not effective?
  // implicit def box[T <: Arity](v: T): ^[T] = ArityAPI.^(v)

  // TODO: remove, type parameter is arbitrary
  implicit def fromIntS[T <: Int with Singleton](v: T)(
      implicit
      toW: Witness.Aux[T]
  ): ArityAPI.^[Const.Literal[T]] = {
    Arity(toW)
  }
}
ithinkicancode/shapesafe
core/src/main/scala/org/shapesafe/core/arity/Utils.scala
package org.shapesafe.core.arity

import shapeless.Nat
import singleton.ops.ToInt
import singleton.ops.impl.{std, OpId, OpMacro}
import singleton.twoface.impl.TwoFaceAny

object Utils {

  type Op = singleton.ops.impl.Op
  // TODO: enable after this has been fixed:
  //  https://github.com/fthomas/singleton-ops/issues/148
  // type Op = singleton.ops.impl.OpInt[_]

  type NatAsOp[N <: Nat] = ToInt[N]

  type RequireLike[S <: Op] = OpMacro[OpId.Require, S, _, _]

  // implicit def const[
  //     N1,
  //     N2,
  //     Fr[_, _]
  // ](
  //     implicit
  //     major: Consts[N1, N2],
  //     minor: Fr[N1, N2] with Op
  // ): Consts[N1, N2]#Op2Impl[Fr] = {
  //
  //   new major.Op2Impl[Fr]()
  // }

  type IntSh[??[_, _] <: Op] = TwoFaceAny.Int.Shell2[??, Int, std.Int, Int, std.Int]

  type BoolSh[??[_, _] <: Op] = TwoFaceAny.Boolean.Shell2[??, Int, std.Int, Int, std.Int]
}
ithinkicancode/shapesafe
core/src/main/scala/org/shapesafe/core/shape/binary/Conjecture2.scala
package org.shapesafe.core.shape.binary import org.shapesafe.core.shape.{Shape, ShapeConjecture} trait Conjecture2 extends ShapeConjecture { type SS1 <: Shape type SS2 <: Shape } object Conjecture2 { trait ^[S1 <: Shape, S2 <: Shape] extends Conjecture2 { final override type SS1 = S1 final override type SS2 = S2 } // object Refute2 extends ShapeReporters.RefuteReporter[Conjecture2 with CanRefute] { // // override object Step1 extends Poly1Base[Iub, MsgBroker] { // // implicit def evalS2[ // SELF <: Iub, // M1 <: MsgBroker, // M2 <: MsgBroker // ]( // implicit // forS1: ShapeReporters.PeekShape.Step1.Case.Aux[SELF#SS1, M1], // forS2: ShapeReporters.PeekShape.Step1.Case.Aux[SELF#SS1, M2] // ): SELF ==> Aux[Refute2[SELF, M1#Out, M2#Out]] = { // // forAll[SELF].==> { _ => // MsgBroker[Refute2[SELF, M1#Out, M2#Out]] // // MsgBroker[M1#Out] // // MsgBroker[Peek[SELF]] // // MsgBroker[Peek[SELF#SS1]] // } // } // } // } // TODO: disabled for causing slow performance. Need a good bypass // implicit def refute2[ // S1 <: Conjecture2 // ]( // implicit // step1: Refute2.Case[S1] // ): S1 |- S1 = { // ??? // } }
ithinkicancode/shapesafe
macro/src/main/scala/org/shapesafe/m/viz/TypeVizCT.scala
package org.shapesafe.m.viz

import org.shapesafe.graph.commons.util.reflect.format.{Formats, TypeFormat}
import org.shapesafe.graph.commons.util.viz.TypeVizFormat

import scala.language.experimental.macros
import scala.reflect.macros.whitebox

case object TypeVizCT extends VizCTSystem {

  override def format: TypeVizFormat = TypeFormat.Default
  override def useTree: Boolean = true

  implicit def infoOf[I]: InfoOf[I] = macro VizCTSystem.Macros.infoOf[I, this.type]

  // case object Stub extends VizCTSystem {
  //
  //   override def vizFormat: TypeVizFormat = Formats.TypeInfo.Short
  //   override def useTree: Boolean = false
  //
  //   implicit def infoOf[I]: InfoOf[I] = macro VizCTSystem.Macros.infoOf[I, this.type]
  // }
}
ithinkicancode/shapesafe
core/src/test/scala/org/shapesafe/core/shape/NamesSpec.scala
<filename>core/src/test/scala/org/shapesafe/core/shape/NamesSpec.scala package org.shapesafe.core.shape import org.shapesafe.graph.commons.util.viz.TypeViz import org.shapesafe.BaseSpec import org.shapesafe.core.shape.Index.Name import org.shapesafe.core.shape.Indices.Infix import shapeless.{HNil, Witness} class NamesSpec extends BaseSpec { import shapeless.syntax.singleton._ val names = Names >< "x" >< "y" >< "z" val hList = "z".narrow :: "y".narrow :: "x".narrow :: HNil describe("create") { it("1") { require(names.static == hList) val t1 = TypeViz.infer(names.static) val t2 = TypeViz.infer(hList) t1.===!(t2) } it("2") { val n2 = Names("x", "y", "z") val t1 = TypeViz.infer(names) val t2 = TypeViz.infer(n2) t1.===!(t2) } it("3") { import Names.Syntax._ val n2 = "x" >< "y" >< "z" val t1 = TypeViz.infer(names) val t2 = TypeViz.infer(n2) t1.===!(t2) } } it("cons") { // shouldNotCompile( // TODO : enable this after // """implicitly[Names.Cons[Names.Eye, String]]""" // ) val w = Witness("a") val hh = implicitly[Names.Cons[Names.Eye, w.T]] hh.apply(Names.Eye, w.value) .toString .shouldBe( """ |➊ >< | a |""".stripMargin ) } it("FromLiterals") { val names2 = Names.FromLiterals(hList) // TODO: runtime assertion? val t1 = TypeViz.infer(names) val t2 = TypeViz.infer(names2) t1.===!(t2) } it("as Indices") { TypeViz[Names.Eye.AsIndices].should_=:=(TypeViz[Indices.Eye]) val ii = Indices >< Name("x") >< Name("y") >< Name("z") TypeViz.infer(names.asIndices).===!(TypeViz.infer(ii)) type P = String <:< Int // implicitly[Names.Eye <:< Indices.Eye] // //// val namesT = WideTyped(names) // val namesT = WideTyped(Names >< "x") // // implicitly[namesT.Wide <:< Indices.Impl] // //// val indicesT = WideTyped(Indices >< Name("x") >< Name("y") >< Name("z")) // val indicesT = WideTyped(Indices >< Name("x")) // // VizType[indicesT.Wide].shouldBe() // // implicitly[namesT.Wide <:< indicesT.Wide] } }
ithinkicancode/shapesafe
core/src/test/scala/org/shapesafe/core/arity/binary/Require2Spec.scala
<reponame>ithinkicancode/shapesafe package org.shapesafe.core.arity.binary import org.shapesafe.core.arity.ops.ArityOpsLike.RequireEqual import org.shapesafe.core.arity.{Arity, ArityFixture, Unchecked} import scala.language.existentials class Require2Spec extends ArityFixture { describe("can prove") { describe("Arity.Const ==") { it("a") { val op = RequireEqual.on(a, a).^ op.eval.requireEqual(3) } it("a + b") { val op = RequireEqual.on(a :+ b, ab).^ op.eval.requireEqual(7) } it("a + b + c") { val op = RequireEqual.on(a :+ b :+ c, abc).^ op.eval.requireEqual(12) } } describe("Op2 ==") { it("a + b + c") { val op = RequireEqual.on(a :+ b :+ c, ab :+ c).^ op.eval.requireEqual(12) } } describe("Arity.Unchecked ==") { it("a") { val op = RequireEqual.on(Unchecked.^, a).^ val out = op.eval out.requireEqual(3) // op.asProof.out.core.requireEqual(3) } it("a + b") { val sum = a :+ b val op = RequireEqual.on(sum, Unchecked.^).^ val out = op.eval out.requireEqual(7) } } } describe("CANNOT prove") { describe("Unprovable == ?") { it("1") { val op = RequireEqual.on(Arity.Unprovable, a).^ shouldNotCompile( "op.eval", // ".*(Arity.Unprovable.type != 3)"// TODO: doesn't work until fallback mechanism is implemented ".*" ) } it("2") { val op = RequireEqual.on(Arity.Unprovable, a :+ b).^ shouldNotCompile( "op.eval", // ".*(Arity.Unprovable.type != 7)" // TODO: doesn't work until fallback mechanism is implemented ".*" ) } } describe("a == b if not") { it("1") { val op = RequireEqual.on(a, c).^ shouldNotCompile( "op.eval", """.*(3 != 5).*""" ) } it("2") { val op = RequireEqual.on(a, b :+ c).^ shouldNotCompile( "op.eval", """.*(3 != 9).*""" ) } } } }
ithinkicancode/shapesafe
core/src/test/scala/org/shapesafe/core/ProofSystemSpike1.scala
<reponame>ithinkicancode/shapesafe<gh_stars>1-10 //package org.shapesafe.core // //import org.shapesafe.BaseSpec // //class ProofSystemSpike1 extends BaseSpec {} // //object ProofSystemSpike1 { // // trait Boolean // trait True extends Boolean // // object Sys extends ProofSystem[Boolean] // import Sys._ // // trait NOT[-K] // implicit def !![S]: NOT[NOT[S]] =>> S = ??? // // trait Number // // trait Rational extends Number // object Two extends Rational // type Two = Two.type // // trait Irrational extends Number with NOT[Rational] // object SqrtTwo extends Irrational // type SqrtTwo = SqrtTwo.type // // trait ^^[A, B] // trait **[A, B] // // implicit def commutative[A, B]: (A ** B) =>> (B ** A) = ??? // // implicit def means[A]: (A ^^ Two) =>> (A ** A) = ??? // // implicit def chain[A, B, C]: ((A ^^ B) ^^ C) =>> (A ^^ (B ** C)) = ??? // implicit def tt: (SqrtTwo ** SqrtTwo) =>> Two = ??? // // implicit def r1[A, B, C]: ((A ** B) =:= (A ** C)) =>> B =:= C = ??? // implicit def r2[A, B, C]: ((A ^^ B) =:= (A ^^ C)) =>> B =:= C = ??? // // type V = (SqrtTwo ^^ SqrtTwo) ^^ SqrtTwo // // // becomes // type VV = (SqrtTwo ^^ Two) // // implicitly[(SqrtTwo ^^ SqrtTwo) ^^ SqrtTwo ~~> Number]( // from[SqrtTwo ^^ SqrtTwo ^^ SqrtTwo].=>> { v => // val c = chain[SqrtTwo, SqrtTwo, SqrtTwo] // c.valueOf(v) // } // ) // // //........... // //........... // // //target //// implicitly[(SqrtTwo ^^ SqrtTwo) ~~> Rational] // //}
LemurPwned/EMB_datastreaming
spark-scala/build.sbt
name := "CassandraInteg"

version := "1.0"

scalaVersion := "2.11.8"

resolvers += "Spark Packages Repo" at "https://dl.bintray.com/spark-packages/maven"

libraryDependencies ++= Seq(
  "org.apache.spark" %% "spark-sql" % "2.3.1",
  "datastax" % "spark-cassandra-connector" % "2.3.1-s_2.11",
  "com.github.servicenow.stl4j" % "stl-decomp-4j" % "1.0.3",
  "org.apache.commons" % "commons-math3" % "3.2",
  "org.apache.kafka" %% "kafka" % "1.1.1",
  "org.apache.kafka" % "kafka-clients" % "1.1.1",
  "org.apache.avro" % "avro" % "1.8.2",
  "io.confluent" % "kafka-avro-serializer" % "3.2.1")

mergeStrategy in assembly := {
  case PathList("META-INF", xs @ _*) => MergeStrategy.discard
  case x => MergeStrategy.first
}
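Note: the `mergeStrategy in assembly` setting above comes from the sbt-assembly plugin, which is not shown being declared in this dump. A hypothetical project/plugins.sbt would add it roughly as below; the plugin coordinates are real, but the version is illustrative and must match whichever release supports the `mergeStrategy in assembly` key style used in build.sbt.

// project/plugins.sbt (assumed; not part of the original repository dump)
addSbtPlugin("com.eed3si9n" % "sbt-assembly" % "0.14.6")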
LemurPwned/EMB_datastreaming
spark-scala/AnomalyData.scala
package anomalyStruct

class AnomalyData(tStart: String, tStop: String, per: Int, obs: Int) {

  val timestampStart = tStart
  val timestampStop = tStop
  val period = per
  val observations = obs

  override def toString = {
    s"Start: $timestampStart, Stop: $timestampStop, period: $period, observations: $observations"
  }
}
LemurPwned/EMB_datastreaming
spark-scala/spark_int.scala
package cassandra_job import org.apache.commons.math3.distribution.TDistribution import org.apache.spark.sql.types.DoubleType import org.apache.spark.sql.functions._ import org.apache.spark.rdd.RDD import org.apache.spark.sql.SparkSession import org.apache.spark.sql.DataFrame import com.datastax.spark.connector._ import org.apache.spark.sql.cassandra._ import com.datastax.spark.connector.cql.CassandraConnector import scala.collection.mutable.ListBuffer import com.github.servicenow.ds.stats.stl.SeasonalTrendLoess import java.sql.Timestamp import java.util.Properties import java.util.Collection import scala.collection.JavaConverters._ import org.apache.kafka.common.serialization.StringDeserializer import org.apache.kafka.clients.consumer.KafkaConsumer import org.apache.kafka.common.TopicPartition import org.apache.kafka.common.errors.TimeoutException import org.apache.kafka.common.serialization.{ByteArrayDeserializer, StringDeserializer} import org.apache.log4j.{LogManager, Level} import org.apache.avro.io.{Decoder, DecoderFactory, DatumReader} import org.apache.avro.specific.SpecificDatumReader import org.apache.avro.generic.GenericRecord import org.apache.avro.Schema import scala.io.Source import anomalyStruct.AnomalyData object CassandraInteg { // logger val log = LogManager.getRootLogger val schemaStr = Source.fromFile("cassandra_schema.avsc").mkString val schema = new Schema.Parser().parse(schemaStr) def main(args: Array[String]): Unit = { val spark = SparkSession.builder().appName("CassandraInteg") .config("spark.cassandra.connection.host", "localhost") .config("spart.cassandra.connection.port", "9042") .getOrCreate(); spark.sparkContext.setLogLevel("ERROR") log.setLevel(Level.WARN) val connector = CassandraConnector(spark.sparkContext.getConf) prepareDatabase(connector) kafkaConsumer(spark) //val tm = spark.read.format("csv").option("header", "true").load("emb.csv") //runADJob(spark, tm, 21814. 
30, 10000) spark.stop() } def runADJob(spark: SparkSession, dataframe: DataFrame, numObs: Int, period: Int, anomalies: Int): Unit ={ log.warn("Running an anomaly detection job") val tmSeries1 = dataframe.select("timestamp", "v1") val tmSeries = tmSeries1.withColumn("v1", tmSeries1("v1").cast(DoubleType)).na.fill(0.0, Seq("v1")) val anomaliesData = anomalyDetection(tmSeries, numObs, period, anomalies) log.warn("Saving anomaly data to Cassandra...") if (anomaliesData.size != 0){ val collection = spark.sparkContext.parallelize(anomaliesData) collection.saveToCassandra("anomal", "anomaly_data", SomeColumns("timestamp", "anomaly")) } log.warn("Finished Anomaly Detection Job...") } def prepareDatabase(connector: CassandraConnector):Unit ={ log.warn("Prepraing anomaly database...") connector.withSessionDo(session => session.execute(""" CREATE KEYSPACE IF NOT EXISTS anomal WITH REPLICATION = {'class': 'SimpleStrategy', 'replication_factor': 1}; """ )) connector.withSessionDo(session => session.execute("DROP TABLE IF EXISTS anomal.anomaly_data")) connector.withSessionDo(session => session.execute(""" CREATE TABLE IF NOT EXISTS anomal.anomaly_data( timestamp timestamp, anomaly float, PRIMARY KEY (timestamp)); """ )) // register the view for spark } def kafkaConsumer(spark: SparkSession): Unit = { val properties = new Properties() properties.put("bootstrap.servers", "localhost:9092") properties.put("group.id", "consumer") properties.put("key.deserializer", classOf[StringDeserializer]) properties.put("value.deserializer", classOf[ByteArrayDeserializer]) properties.put("auto.commit.interval.ms","8000") val kafkaConsumer = new KafkaConsumer[String, Array[Byte]](properties) kafkaConsumer.subscribe(Seq("spark_emb").asJava) log.warn("Boostrapping kafka consumption...") while (true) { val results = kafkaConsumer.poll(2000).asScala for (record <- results){ try{ val data = parseData(record.value()) val df = spark.read.format("org.apache.spark.sql.cassandra") .options(Map("table"->"emb_data", "keyspace"->"emb")) .load() .filter(s"timestamp >= '${data.timestampStart}' AND" +s" timestamp <= '${data.timestampStop}'") log.warn(data.toString) log.warn(df.rdd.isEmpty) if (!df.rdd.isEmpty()){ log.warn("Processing for: " + data.toString) val anomalUpperBound = 10 // Hardcoded, idk what it should be runADJob(spark, df, data.observations, data.period, anomalUpperBound) } kafkaConsumer.commitSync } catch { case timeOutEx: TimeoutException => println("Timeout") } } } } def parseData(msg: Array[Byte]): AnomalyData = { val reader = new SpecificDatumReader[GenericRecord](schema) val decoder = DecoderFactory.get().binaryDecoder(msg, null) val data = reader.read(null, decoder) new AnomalyData(data.get("timestamp_start").toString, data.get("timestamp_stop").toString, data.get("period").asInstanceOf[Int], data.get("observations").asInstanceOf[Int]) } /** * Performs Anomaly Detection on a timeseries * @dataset: Rdd of timeseries * @numberObs: number of observations i.e. 
length of Rdd * @numberObsPerPeriod: number observations in a given period, used during stl * @anomalyUpperBound: maximum number of expected anomalies, must be less * than 49% of observations * @significanceLevel: significance level for statistical test * */ def anomalyDetection(dataset: DataFrame, numberObs: Int, numberObsPerPeriod: Int, anomalyUpperBound: Int, significanceLevel: Double = 0.05): List[(Timestamp, Double)] = { var anomalyList = new ListBuffer[(Timestamp, Double)]() // at least 2 periods are needed if (numberObs < numberObsPerPeriod*2) throw new IllegalStateException("2 periods needed." + "Insufficient number of observations") val data = dataset.select("v1").rdd.map(r=>r(0)).collect().map(_.asInstanceOf[Double]) val dataTm = dataset.select("timestamp").rdd.map(r=>r(0)).collect().map(_.asInstanceOf[Timestamp]) println(data.size, dataTm.size, numberObs) assert(data.size == dataTm.size) assert(data.size == numberObs) val stlBuilder = new SeasonalTrendLoess.Builder val stlParam = stlBuilder .setPeriodLength(numberObsPerPeriod) .setPeriodic() .setRobust() .buildSmoother(data) val stl = stlParam.decompose() val seasonal = stl.getSeasonal() val trend = stl.getTrend() //remove seasonal component from data and median to create univariate remainder val med = dataset.stat.approxQuantile("v1", Array(0.5), 0.001)(0) // get median var univariateComponent = data.zip(seasonal).map(x => x._1 - x._2 - med) val smoothedSeasonal = data.zip(trend).map(x => x._1 + x._2) //smoothedSeasonal.foreach(println) var numAnom = 0 for (i <- 1 to anomalyUpperBound){ val med = median(univariateComponent) var residual = univariateComponent.map(x => Math.abs(x - med)) val mdev = medianAbsoluteDeviation(univariateComponent)*1.4826 if (mdev != 0){ residual = residual.map(x => x/mdev) val maxRes = residual.reduceLeft(_ max _) val idMax = residual.indexOf(maxRes) // mistake here -- changing size of an array var insertTuple = (dataTm(idMax), data(idMax)) anomalyList += insertTuple // remove the anomaly from the dataset univariateComponent = univariateComponent.take(idMax) ++ univariateComponent.drop(idMax+1) val pVal = 1 - significanceLevel/(2*(numberObs+1-i)) // get t-student distribution and compare against anomalyLevel val t = new TDistribution(numberObs-1-i).inverseCumulativeProbability(pVal) val thres = t*(numberObs-i)/Math.sqrt((numberObs-i-1+Math.pow(t, 2))*(numberObs-i+1)) if (maxRes > thres) numAnom = i } } log.warn("Finished detecting anomalies") log.warn(s"DETECTED ANOMALIES $numAnom") anomalyList.toList } def median(values: Array[Double]): Double = { val (low, up) = values.sortWith(_ < _).splitAt(values.size/2) if (values.size %2 == 0) (low.last + up.head)/2 else up.head } def medianAbsoluteDeviation(dataBuffer: Array[Double]): Double = { val t_median = median(dataBuffer) val absoluteMedian = dataBuffer.map(x => Math.abs(x - t_median)) median(absoluteMedian) } }
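The anomaly scoring above hinges on a median absolute deviation (MAD) statistic. Below is a minimal standalone sketch of that statistic, with the median and medianAbsoluteDeviation helpers copied from CassandraInteg; the sample values are illustrative only and are not taken from the EMB data.

// Standalone check of the MAD statistic used by anomalyDetection.
object MadSketch extends App {

  def median(values: Array[Double]): Double = {
    val (low, up) = values.sortWith(_ < _).splitAt(values.size / 2)
    if (values.size % 2 == 0) (low.last + up.head) / 2 else up.head
  }

  def medianAbsoluteDeviation(dataBuffer: Array[Double]): Double = {
    val tMedian = median(dataBuffer)
    median(dataBuffer.map(x => Math.abs(x - tMedian)))
  }

  val xs = Array(1.0, 2.0, 2.0, 3.0, 14.0) // illustrative values
  println(median(xs))                      // 2.0
  println(medianAbsoluteDeviation(xs))     // 1.0
}

In anomalyDetection, each candidate anomaly is the point whose scaled residual |x - median| / (1.4826 * MAD) is largest, compared against the t-distribution threshold computed for that iteration.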
bjfletcher/playframework
framework/src/play-server/src/main/scala/play/core/server/common/ServerResultUtils.scala
/* * Copyright (C) 2009-2016 Typesafe Inc. <http://www.typesafe.com> */ package play.core.server.common import akka.stream.Materializer import akka.stream.scaladsl.Sink import akka.util.ByteString import play.api.mvc._ import play.api.http.{ Status, HttpEntity, HttpProtocol } import play.api.http.HeaderNames._ object ServerResultUtils { /** * Determine whether the connection should be closed, and what header, if any, should be added to the response. */ def determineConnectionHeader(request: RequestHeader, result: Result): ConnectionHeader = { if (request.version == HttpProtocol.HTTP_1_1) { if (result.header.headers.get(CONNECTION).exists(_.equalsIgnoreCase(CLOSE))) { // Close connection, header already exists DefaultClose } else if ((result.body.isInstanceOf[HttpEntity.Streamed] && result.body.contentLength.isEmpty) || request.headers.get(CONNECTION).exists(_.equalsIgnoreCase(CLOSE))) { // We need to close the connection and set the header SendClose } else { DefaultKeepAlive } } else { if (result.header.headers.get(CONNECTION).exists(_.equalsIgnoreCase(CLOSE))) { DefaultClose } else if ((result.body.isInstanceOf[HttpEntity.Streamed] && result.body.contentLength.isEmpty) || request.headers.get(CONNECTION).forall(!_.equalsIgnoreCase(KEEP_ALIVE))) { DefaultClose } else { SendKeepAlive } } } /** * Validate the result. * * Returns the validated result, which may be an error result if validation failed. */ def validateResult(request: RequestHeader, result: Result)(implicit mat: Materializer): Result = { if (request.version == HttpProtocol.HTTP_1_0 && result.body.isInstanceOf[HttpEntity.Chunked]) { cancelEntity(result.body) Results.Status(Status.HTTP_VERSION_NOT_SUPPORTED) .apply("The response to this request is chunked and hence requires HTTP 1.1 to be sent, but this is a HTTP 1.0 request.") .withHeaders(CONNECTION -> CLOSE) } else if (!mayHaveEntity(result.header.status) && !result.body.isKnownEmpty) { cancelEntity(result.body) result.copy(body = HttpEntity.Strict(ByteString.empty, result.body.contentType)) } else { result } } private def mayHaveEntity(status: Int) = status != Status.NO_CONTENT && status != Status.NOT_MODIFIED /** * Cancel the entity. * * While theoretically, an Akka streams Source is not supposed to hold resources, in practice, this is very often not * the case, for example, the response from an Akka HTTP client may have an associated Source that must be consumed * (or cancelled) before the associated connection can be returned to the connection pool. */ def cancelEntity(entity: HttpEntity)(implicit mat: Materializer) = { entity match { case HttpEntity.Chunked(chunks, _) => chunks.runWith(Sink.cancelled) case HttpEntity.Streamed(data, _, _) => data.runWith(Sink.cancelled) case _ => } } /** * The connection header logic to use for the result. */ sealed trait ConnectionHeader { def willClose: Boolean def header: Option[String] } /** * A `Connection: keep-alive` header should be sent. Used to * force an HTTP 1.0 connection to remain open. */ case object SendKeepAlive extends ConnectionHeader { override def willClose = false override def header = Some(KEEP_ALIVE) } /** * A `Connection: close` header should be sent. Used to * force an HTTP 1.1 connection to close. */ case object SendClose extends ConnectionHeader { override def willClose = true override def header = Some(CLOSE) } /** * No `Connection` header should be sent. Used on an HTTP 1.0 * connection where the default behavior is to close the connection, * or when the response already has a Connection: close header. 
*/ case object DefaultClose extends ConnectionHeader { override def willClose = true override def header = None } /** * No `Connection` header should be sent. Used on an HTTP 1.1 * connection where the default behavior is to keep the connection * open. */ case object DefaultKeepAlive extends ConnectionHeader { override def willClose = false override def header = None } // Values for the Connection header private val KEEP_ALIVE = "keep-alive" private val CLOSE = "close" /** * Update the result's Set-Cookie header so that it removes any Flash cookies we received * in the incoming request. */ def cleanFlashCookie(requestHeader: RequestHeader, result: Result): Result = { val optResultFlashCookies: Option[_] = result.header.headers.get(SET_COOKIE).flatMap { setCookieValue: String => Cookies.decodeSetCookieHeader(setCookieValue).find(_.name == Flash.COOKIE_NAME) } if (optResultFlashCookies.isDefined) { // We're already setting a flash cookie in the result, just pass that // through unchanged result } else { val requestFlash: Flash = requestHeader.flash if (requestFlash.isEmpty) { // Neither incoming nor outgoing flash cookies; nothing to do result } else { // We got incoming flash cookies, but there are no outgoing flash cookies, // so we need to clear the cookies for the next request result.discardingCookies(Flash.discard) } } } /** * Given a map of headers, split it into a sequence of individual headers. * Most headers map into a single pair in the new sequence. The exception is * the `Set-Cookie` header which we split into a pair for each cookie it * contains. This allows us to work around issues with clients that can't * handle combined headers. (Also RFC6265 says multiple headers shouldn't * be folded together, which Play's API unfortunately does.) */ def splitSetCookieHeaders(headers: Map[String, String]): Iterable[(String, String)] = { if (headers.contains(SET_COOKIE)) { // Rewrite the headers with Set-Cookie split into separate headers headers.to[Seq].flatMap { case (SET_COOKIE, value) => val cookieParts = Cookies.SetCookieHeaderSeparatorRegex.split(value) cookieParts.map { cookiePart => SET_COOKIE -> cookiePart } case (name, value) => Seq((name, value)) } } else { // No Set-Cookie header so we can just use the headers as they are headers } } }
bjfletcher/playframework
framework/src/play-akka-http-server/src/main/scala/play/core/server/akkahttp/ModelConversion.scala
/* * Copyright (C) 2009-2016 Typesafe Inc. <http://www.typesafe.com> */ package play.core.server.akkahttp import akka.http.scaladsl.model._ import akka.http.scaladsl.model.headers._ import akka.stream.Materializer import akka.stream.scaladsl.Source import akka.util.ByteString import java.net.InetSocketAddress import play.api.Logger import play.api.http.{ HttpEntity => PlayHttpEntity, HttpChunk } import play.api.http.HeaderNames._ import play.api.libs.iteratee._ import play.api.mvc._ import play.core.server.common.{ ConnectionInfo, ForwardedHeaderHandler, ServerResultUtils } import scala.collection.immutable /** * Conversions between Akka's and Play's HTTP model objects. */ private[akkahttp] class ModelConversion(forwardedHeaderHandler: ForwardedHeaderHandler) { private val logger = Logger(getClass) /** * Convert an Akka `HttpRequest` to a `RequestHeader` and an `Enumerator` * for its body. */ def convertRequest( requestId: Long, remoteAddress: InetSocketAddress, secureProtocol: Boolean, request: HttpRequest)(implicit fm: Materializer): (RequestHeader, Option[Source[ByteString, Any]]) = { ( convertRequestHeader(requestId, remoteAddress, secureProtocol, request), convertRequestBody(request) ) } /** * Convert an Akka `HttpRequest` to a `RequestHeader`. */ private def convertRequestHeader( requestId: Long, remoteAddress: InetSocketAddress, secureProtocol: Boolean, request: HttpRequest): RequestHeader = { val remoteHostAddress = remoteAddress.getAddress.getHostAddress // Taken from PlayDefaultUpstreamHander // Avoid clash between method arg and RequestHeader field val remoteAddressArg = remoteAddress new RequestHeader { override val id = requestId // Send a tag so our tests can tell which kind of server we're using. // We could get NettyServer to send a similar tag, but for the moment // let's not, just in case it slows NettyServer down a bit. override val tags = Map("HTTP_SERVER" -> "akka-http") override def uri = request.uri.toString override def path = request.uri.path.toString override def method = request.method.name override def version = request.protocol.value override def queryString = request.uri.query().toMultiMap override val headers = convertRequestHeaders(request) private lazy val remoteConnection: ConnectionInfo = { forwardedHeaderHandler.remoteConnection(remoteAddressArg.getAddress, secureProtocol, headers) } override def remoteAddress = remoteConnection.address.getHostAddress override def secure = remoteConnection.secure } } /** * Convert the request headers of an Akka `HttpRequest` to a Play * `Headers` object. */ private def convertRequestHeaders(request: HttpRequest): Headers = { val entityHeaders: Seq[(String, String)] = request.entity match { case HttpEntity.Strict(contentType, _) => Seq((CONTENT_TYPE, contentType.value)) case HttpEntity.Default(contentType, contentLength, _) => Seq((CONTENT_TYPE, contentType.value), (CONTENT_LENGTH, contentLength.toString)) case HttpEntity.Chunked(contentType, _) => Seq((CONTENT_TYPE, contentType.value)) } val normalHeaders: Seq[(String, String)] = request.headers.map((rh: HttpHeader) => (rh.name, rh.value)) new Headers(entityHeaders ++ normalHeaders) } /** * Convert an Akka `HttpRequest` to an `Enumerator` of the request body. 
*/ private def convertRequestBody( request: HttpRequest)(implicit fm: Materializer): Option[Source[ByteString, Any]] = { request.entity match { case HttpEntity.Strict(_, data) if data.isEmpty => None case HttpEntity.Strict(_, data) => Some(Source.single(data)) case HttpEntity.Default(_, 0, _) => None case HttpEntity.Default(contentType, contentLength, pubr) => // FIXME: should do something with the content-length? Some(pubr) case HttpEntity.Chunked(contentType, chunks) => // FIXME: do something with trailing headers? Some(chunks.takeWhile(!_.isLastChunk).map(_.data())) } } /** * Convert a Play `Result` object into an Akka `HttpResponse` object. */ def convertResult( requestHeaders: RequestHeader, unvalidated: Result, protocol: HttpProtocol)(implicit mat: Materializer): HttpResponse = { val result = ServerResultUtils.validateResult(requestHeaders, unvalidated) val convertedHeaders: AkkaHttpHeaders = convertResponseHeaders(result.header.headers) val entity = convertResultBody(requestHeaders, convertedHeaders, result, protocol) val connectionHeader = ServerResultUtils.determineConnectionHeader(requestHeaders, result) val closeHeader = connectionHeader.header.map(Connection(_)) HttpResponse( status = result.header.status, headers = convertedHeaders.misc ++ closeHeader, entity = entity, protocol = protocol ) } def convertResultBody( requestHeaders: RequestHeader, convertedHeaders: AkkaHttpHeaders, result: Result, protocol: HttpProtocol): ResponseEntity = { val contentType = result.body.contentType.fold(ContentTypes.NoContentType: ContentType) { ct => HttpHeader.parse(CONTENT_TYPE, ct) match { case HttpHeader.ParsingResult.Ok(`Content-Type`(akkaCt), _) => akkaCt case _ => ContentTypes.NoContentType } } result.body match { case PlayHttpEntity.Strict(data, _) => HttpEntity.Strict(contentType, data) case PlayHttpEntity.Streamed(data, Some(contentLength), _) => HttpEntity.Default(contentType, contentLength, data) case PlayHttpEntity.Streamed(data, _, _) => HttpEntity.CloseDelimited(contentType, data) case PlayHttpEntity.Chunked(data, _) => val akkaChunks = data.map { case HttpChunk.Chunk(chunk) => HttpEntity.Chunk(chunk) case HttpChunk.LastChunk(trailers) if trailers.headers.isEmpty => HttpEntity.LastChunk case HttpChunk.LastChunk(trailers) => HttpEntity.LastChunk(trailer = convertHeaders(trailers.headers)) } HttpEntity.Chunked(contentType, akkaChunks) } } private def convertHeaders(headers: Iterable[(String, String)]): immutable.Seq[HttpHeader] = { headers.map { case (name, value) => HttpHeader.parse(name, value) match { case HttpHeader.ParsingResult.Ok(header, errors /* errors are ignored if Ok */ ) => header case HttpHeader.ParsingResult.Error(error) => sys.error(s"Error parsing header: $error") } }.to[immutable.Seq] } /** * A representation of Akka HTTP headers separate from an `HTTPMessage`. * Akka HTTP treats some headers specially and these are split out into * separate values. * * @param misc General headers. Guaranteed not to contain any of the special * headers stored in the other values. */ case class AkkaHttpHeaders( misc: immutable.Seq[HttpHeader], transferEncoding: Option[immutable.Seq[TransferEncoding]]) /** * Convert Play response headers into `HttpHeader` objects, then separate * out any special headers. 
*/ private def convertResponseHeaders( playHeaders: Map[String, String]): AkkaHttpHeaders = { val rawHeaders: Iterable[(String, String)] = ServerResultUtils.splitSetCookieHeaders(playHeaders) val convertedHeaders: Seq[HttpHeader] = convertHeaders(rawHeaders) val emptyHeaders = AkkaHttpHeaders(immutable.Seq.empty, None) convertedHeaders.foldLeft(emptyHeaders) { case (accum, te: `Transfer-Encoding`) => accum.copy(transferEncoding = Some(te.encodings)) case (accum, miscHeader) => accum.copy(misc = accum.misc :+ miscHeader) } } }
bjfletcher/playframework
framework/src/sbt-plugin/src/sbt-test/play-sbt-plugin/distribution/app/controllers/Application.scala
/*
 * Copyright (C) 2009-2016 Typesafe Inc. <http://www.typesafe.com>
 */
package controllers

import play.api._
import play.api.mvc._
import play.api.Play.current

import scala.collection.JavaConverters._

class Application extends Controller {

  def index = Action {
    Ok(views.html.index("Your new application is ready."))
  }

  def config = Action {
    Ok(Play.configuration.underlying.getString("some.config"))
  }

  def count = Action {
    val num = Play.classloader.getResources("application.conf").asScala.toSeq.size
    Ok(num.toString)
  }

}
bjfletcher/playframework
framework/src/play-ws/src/main/scala/play/api/libs/ws/ning/NingWS.scala
/*
 * Copyright (C) 2009-2016 Typesafe Inc. <http://www.typesafe.com>
 */
package play.api.libs.ws.ning

import akka.stream.Materializer
import org.asynchttpclient._
import play.api._
import play.api.inject.ApplicationLifecycle
import play.api.libs.ws._
import play.api.libs.ws.ahc.{ AhcWSClientConfigParser, AhcWSAPI, AhcWSClient, AhcWSRequest }
import play.api.libs.ws.ssl._

/**
 * A WS client backed by a Ning AsyncHttpClient.
 *
 * If you need to debug Ning, set logger.com.ning.http.client=DEBUG in your application.conf file.
 *
 * @param config a client configuration object
 */
@deprecated("Use AhcWSClient instead", "2.5")
case class NingWSClient(config: AsyncHttpClientConfig)(implicit materializer: Materializer) extends WSClient {

  private val ahcWsClient = AhcWSClient(config)

  def underlying[T]: T = ahcWsClient.underlying

  private[libs] def executeRequest[T](request: Request, handler: AsyncHandler[T]): ListenableFuture[T] =
    ahcWsClient.executeRequest(request, handler)

  def close(): Unit = ahcWsClient.close()

  def url(url: String): WSRequest =
    AhcWSRequest(ahcWsClient, url, "GET", EmptyBody, Map(), Map(), None, None, None, None, None, None, None)
}

@deprecated("Use AhcWSClient instead", "2.5")
object NingWSClient {

  /**
   * Convenient factory method that uses a [[WSClientConfig]] value for configuration instead of an
   * [[https://asynchttpclient.github.io/async-http-client/apidocs/com/ning/http/client/AsyncHttpClientConfig.html org.asynchttpclient.AsyncHttpClientConfig]].
   *
   * Typical usage:
   *
   * {{{
   *   val client = NingWSClient()
   *   val request = client.url(someUrl).get()
   *   request.foreach { response =>
   *     doSomething(response)
   *     client.close()
   *   }
   * }}}
   *
   * @param config configuration settings
   */
  def apply(config: NingWSClientConfig = NingWSClientConfig())(implicit materializer: Materializer): NingWSClient = {
    val client = new NingWSClient(new NingAsyncHttpClientConfigBuilder(config).build())
    new SystemConfiguration().configure(config.wsClientConfig)
    client
  }
}

/**
 * Ning WS API implementation components.
 */
@deprecated("Use AhcWSClient instead", "2.5")
trait NingWSComponents {

  def environment: Environment
  def configuration: Configuration
  def applicationLifecycle: ApplicationLifecycle
  def materializer: Materializer

  lazy val wsClientConfig: WSClientConfig = new WSConfigParser(configuration, environment).parse()

  private lazy val ahcWsClientConfig =
    new AhcWSClientConfigParser(wsClientConfig, configuration, environment).parse()

  lazy val ningWsClientConfig: NingWSClientConfig = NingWSClientConfig(
    wsClientConfig = wsClientConfig,
    maxConnectionsPerHost = ahcWsClientConfig.maxConnectionsPerHost,
    maxConnectionsTotal = ahcWsClientConfig.maxConnectionsTotal,
    maxConnectionLifetime = ahcWsClientConfig.maxConnectionLifetime,
    idleConnectionInPoolTimeout = ahcWsClientConfig.idleConnectionInPoolTimeout,
    maxNumberOfRedirects = ahcWsClientConfig.maxNumberOfRedirects,
    maxRequestRetry = ahcWsClientConfig.maxRequestRetry,
    disableUrlEncoding = ahcWsClientConfig.disableUrlEncoding,
    keepAlive = ahcWsClientConfig.keepAlive
  )

  lazy val wsApi: WSAPI = new AhcWSAPI(environment, ahcWsClientConfig, applicationLifecycle)(materializer)
  lazy val wsClient: WSClient = wsApi.client
}
bjfletcher/playframework
framework/src/sbt-fork-run-plugin/src/main/scala/play/sbt/forkrun/PlayForkProcess.scala
/* * Copyright (C) 2009-2016 Typesafe Inc. <http://www.typesafe.com> */ package play.sbt.forkrun import sbt._ import java.io.File import java.lang.{ Process => JProcess, ProcessBuilder => JProcessBuilder, Runtime => JRuntime } import java.util.concurrent.CountDownLatch import scala.concurrent.duration.FiniteDuration case class PlayForkOptions( workingDirectory: File, jvmOptions: Seq[String], classpath: Seq[File], baseDirectory: File, configKey: String, logLevel: Level.Value, logSbtEvents: Boolean, shutdownTimeout: FiniteDuration) /** * This differs from sbt's fork run mainly in the way that the process is stopped. * * When the (background job) thread is interrupted, or the sbt process exits while the fork * is still running, then the process is stopped by closing its input. * This is checked for by the fork run process and allows a graceful shutdown, rather than * forcibly terminating the process with `destroy`. */ object PlayForkProcess { def apply(options: PlayForkOptions, args: Seq[String], log: Logger): Unit = { val logProperties = Seq("-Dfork.run.log.level=" + options.logLevel.toString, "-Dfork.run.log.events=" + options.logSbtEvents) val jvmOptions = options.jvmOptions ++ logProperties val arguments = Seq(options.baseDirectory.getAbsolutePath, options.configKey) ++ args run(options.workingDirectory, jvmOptions, options.classpath, "play.forkrun.ForkRun", arguments, log, options.shutdownTimeout) } def run(workingDirectory: File, jvmOptions: Seq[String], classpath: Seq[File], mainClass: String, arguments: Seq[String], log: Logger, shutdownTimeout: FiniteDuration): Unit = { val java = (file(sys.props("java.home")) / "bin" / "java").absolutePath val (classpathEnv, options) = makeOptions(jvmOptions, classpath, mainClass, arguments) val command = (java +: options).toArray val builder = new JProcessBuilder(command: _*) builder.directory(workingDirectory) for (cp <- classpathEnv) builder.environment.put("CLASSPATH", cp) val process = builder.start() val stopLatch = new CountDownLatch(1) val inputThread = spawn { stopLatch.await(); process.getOutputStream.close() } val outputThread = spawn { BasicIO.processFully(logLine(log, Level.Info))(process.getInputStream) } val errorThread = spawn { BasicIO.processFully(logLine(log, Level.Error))(process.getErrorStream) } def stop(): Unit = { // counting down triggers closing stdinput stopLatch.countDown() // wait a bit for clean exit timedWaitFor(process, shutdownTimeout.toMillis) match { case None => log.info("Forked Play process did not exit on its own, terminating it") // fire-and-forget sigterm, may or may not work process.destroy() case Some(x) => log.info(s"Forked Play process exited with status: $x") } // now join our logging threads (process is supposed to be gone, so nothing to log) try process.getInputStream.close() catch { case _: Exception => } try process.getErrorStream.close() catch { case _: Exception => } outputThread.join() errorThread.join() } val shutdownHook = newThread { stop() } JRuntime.getRuntime.addShutdownHook(shutdownHook) try process.waitFor() catch { case _: InterruptedException => stop() } try JRuntime.getRuntime.removeShutdownHook(shutdownHook) catch { case _: IllegalStateException => } // thrown when already shutting down } def timedWaitFor(process: JProcess, millis: Long): Option[Int] = try { // exitValue throws if process hasn't exited Some(process.exitValue()) } catch { case _: IllegalThreadStateException => Thread.sleep(100) if (millis > 0) timedWaitFor(process, millis - 100) else None } def makeOptions(jvmOptions: 
Seq[String], classpath: Seq[File], mainClass: String, arguments: Seq[String]): (Option[String], Seq[String]) = { val classpathOption = Path.makeString(classpath) val options = jvmOptions ++ Seq("-classpath", classpathOption, mainClass) ++ arguments // if the options get too long for Windows, put the classpath in an environment variable if (optionsTooLong(options)) { val otherOptions = jvmOptions ++ Seq(mainClass) ++ arguments (Option(classpathOption), otherOptions) } else { (None, options) } } val isWindows: Boolean = sys.props("os.name").toLowerCase(java.util.Locale.ENGLISH).contains("windows") val MaxOptionsLength = 5000 def optionsTooLong(options: Seq[String]): Boolean = isWindows && (options.mkString(" ").length > MaxOptionsLength) val ansiCode = "(?:\\033\\[[0-9;]+m)?" val LogLine = s"^${ansiCode}\\[${ansiCode}([a-z]+)${ansiCode}\\] (.*)".r // detect log level from output lines and re-log at the same level def logLine(logger: Logger, defaultLevel: Level.Value): String => Unit = (line: String) => line match { case LogLine(level, message) => logger.log(Level(level).getOrElse(defaultLevel), message) case message => logger.log(defaultLevel, message) } def spawn(f: => Unit): Thread = { val thread = newThread(f) thread.start() thread } def newThread(f: => Unit): Thread = new Thread(new Runnable { def run(): Unit = f }) }
bjfletcher/playframework
framework/src/play-json/src/test/scala/play/api/libs/json/JsonExtensionSpec.scala
<filename>framework/src/play-json/src/test/scala/play/api/libs/json/JsonExtensionSpec.scala<gh_stars>0 /* * Copyright (C) 2009-2016 Typesafe Inc. <http://www.typesafe.com> */ package play.api.libs.json import org.specs2.mutable._ import play.api.libs.json._ import play.api.libs.json.Json._ case class User(age: Int, name: String) case class Dog(name: String, master: User) case class Cat(name: String) case class RecUser(name: String, cat: Option[Cat] = None, hobbies: List[String] = List(), friends: List[RecUser] = List()) case class User1(name: String, friend: Option[User1] = None) case class UserMap(name: String, friends: Map[String, UserMap] = Map()) case class Toto(name: String) case class Toto2(name: Option[String]) case class Toto3(name: List[Double]) case class Toto4(name: Set[Long]) case class Toto5(name: Map[String, Int]) case class Toto6(name: Seq[Dog]) case class UserFail(name: String, bd: Toto) case class Id[A](id: A) case class C1[A](id: Id[A], name: String) case class X( _1: String, _2: String, _3: String, _4: String, _5: String, _6: String, _7: String, _8: String, _9: String, _10: String, _11: String, _12: String, _13: String, _14: String, _15: String, _16: String, _17: String, _18: String, _19: String, _20: String, _21: String, _22: String) case class Program(id: Long, name: String, logoPath: Option[String], logoThumb: Option[String]) object Program { def programs = List.empty[Program] } case class Person(name: String, age: Int) object Person { implicit val personReads: Reads[Person] = Json.reads[Person] implicit val personWrites: OWrites[Person] = Json.writes[Person] } package foreign { case class Foreigner(name: String) } object ForeignTest { implicit val foreignerReads: Reads[foreign.Foreigner] = Json.reads[foreign.Foreigner] implicit val foreignerWrites: OWrites[foreign.Foreigner] = Json.writes[foreign.Foreigner] } import play.api.libs.json._ case class Person2(names: List[String]) case class GenericCaseClass[A](obj: A) case class GenericCaseClass2[A, B](obj1: A, obj2: B) case class WrappedGenericInt(int: GenericCaseClass[Int]) case class WrappedGenericIntString(intString: GenericCaseClass2[Int, String]) case class VarArgsOnly(ints: Int*) case class LastVarArg(name: String, ints: Int*) object Person2 { implicit val person2Fmt: OFormat[Person2] = Json.format[Person2] } case class CustomApply(a: Int, b: String) object CustomApply { def apply(): CustomApply = apply(10, "foo") } object JsonExtensionSpec extends Specification { "JsonExtension" should { "create a reads[User]" in { import play.api.libs.json.Json //object User {def apply(age:Int):User = User(age,"")} implicit val userReads = Json.reads[User] Json.fromJson[User](Json.obj("name" -> "toto", "age" -> 45)) must beEqualTo(JsSuccess(User(45, "toto"))) } "create a writes[User]" in { import play.api.libs.json.Json implicit val userWrites = Json.writes[User] Json.toJson(User(45, "toto")) must beEqualTo(Json.obj("name" -> "toto", "age" -> 45)) } "create a format[User]" in { import play.api.libs.json.Json implicit val userFormat = Json.format[User] Json.fromJson[User](Json.obj("name" -> "toto", "age" -> 45)) must beEqualTo(JsSuccess(User(45, "toto"))) Json.toJson(User(45, "toto")) must beEqualTo(Json.obj("name" -> "toto", "age" -> 45)) } "create a reads[Dog]" in { import play.api.libs.json.Json implicit val userReads = Json.reads[User] implicit val dogReads = Json.reads[Dog] Json.fromJson[Dog]( Json.obj( "name" -> "medor", "master" -> Json.obj("name" -> "toto", "age" -> 45) ) ) must beEqualTo(JsSuccess(Dog("medor", User(45, 
"toto")))) } "create a writes[Dog]" in { import play.api.libs.json.Json implicit val userWrites = Json.writes[User] implicit val dogWrites = Json.writes[Dog] Json.toJson(Dog("medor", User(45, "toto"))) must beEqualTo( Json.obj( "name" -> "medor", "master" -> Json.obj("name" -> "toto", "age" -> 45) ) ) } "create a format[Dog]" in { import play.api.libs.json.Json implicit val userFormat = Json.format[User] implicit val dogFormat = Json.format[Dog] Json.fromJson[Dog]( Json.obj( "name" -> "medor", "master" -> Json.obj("name" -> "toto", "age" -> 45) ) ) must beEqualTo(JsSuccess(Dog("medor", User(45, "toto")))) Json.toJson(Dog("medor", User(45, "toto"))) must beEqualTo( Json.obj( "name" -> "medor", "master" -> Json.obj("name" -> "toto", "age" -> 45) ) ) } "create a reads[RecUser]" in { import play.api.libs.json.Json implicit val catReads = Json.reads[Cat] implicit val recUserReads = Json.reads[RecUser] Json.fromJson[RecUser]( Json.obj( "name" -> "bob", "cat" -> Json.obj("name" -> "minou"), "hobbies" -> Json.arr("bobsleig", "manhunting"), "friends" -> Json.arr(Json.obj("name" -> "tom", "hobbies" -> Json.arr(), "friends" -> Json.arr())) ) ) must beEqualTo( JsSuccess( RecUser( "bob", Some(Cat("minou")), List("bobsleig", "manhunting"), List(RecUser("tom")) ) ) ) } "create a writes[RecUser]" in { import play.api.libs.json.Json implicit val catWrites = Json.writes[Cat] implicit val recUserWrites = Json.writes[RecUser] Json.toJson( RecUser( "bob", Some(Cat("minou")), List("bobsleig", "manhunting"), List(RecUser("tom")) ) ) must beEqualTo( Json.obj( "name" -> "bob", "cat" -> Json.obj("name" -> "minou"), "hobbies" -> Json.arr("bobsleig", "manhunting"), "friends" -> Json.arr(Json.obj("name" -> "tom", "hobbies" -> Json.arr(), "friends" -> Json.arr())) ) ) } "create a format[RecUser]" in { import play.api.libs.json.Json implicit val catFormat = Json.format[Cat] implicit val recUserFormat = Json.format[RecUser] Json.fromJson[RecUser]( Json.obj( "name" -> "bob", "cat" -> Json.obj("name" -> "minou"), "hobbies" -> Json.arr("bobsleig", "manhunting"), "friends" -> Json.arr(Json.obj("name" -> "tom", "hobbies" -> Json.arr(), "friends" -> Json.arr())) ) ) must beEqualTo( JsSuccess( RecUser( "bob", Some(Cat("minou")), List("bobsleig", "manhunting"), List(RecUser("tom")) ) ) ) Json.toJson( RecUser( "bob", Some(Cat("minou")), List("bobsleig", "manhunting"), List(RecUser("tom")) ) ) must beEqualTo( Json.obj( "name" -> "bob", "cat" -> Json.obj("name" -> "minou"), "hobbies" -> Json.arr("bobsleig", "manhunting"), "friends" -> Json.arr(Json.obj("name" -> "tom", "hobbies" -> Json.arr(), "friends" -> Json.arr())) ) ) } "create a reads[User1]" in { import play.api.libs.json.Json implicit val userReads = Json.reads[User1] Json.fromJson[User1]( Json.obj( "name" -> "bob", "friend" -> Json.obj("name" -> "tom") ) ) must beEqualTo( JsSuccess( User1( "bob", Some(User1("tom")) ) ) ) } "create a writes[User1]" in { import play.api.libs.json.Json implicit val userWrites = Json.writes[User1] Json.toJson( User1( "bob", Some(User1("tom")) ) ) must beEqualTo( Json.obj( "name" -> "bob", "friend" -> Json.obj("name" -> "tom") ) ) } "create a format[User1]" in { import play.api.libs.json.Json implicit val userFormat = Json.format[User1] Json.fromJson[User1]( Json.obj( "name" -> "bob", "friend" -> Json.obj("name" -> "tom") ) ) must beEqualTo( JsSuccess( User1( "bob", Some(User1("tom")) ) ) ) Json.toJson( User1( "bob", Some(User1("tom")) ) ) must beEqualTo( Json.obj( "name" -> "bob", "friend" -> Json.obj("name" -> "tom") ) ) } "create a 
format[WrappedGenericInt]" in { import play.api.libs.json.Json._ import play.api.libs.functional.syntax._ implicit def genericFormat[A: Format]: Format[GenericCaseClass[A]] = ( ( (__ \ "obj").format[A] ).inmap )(GenericCaseClass[A] _, unlift(GenericCaseClass.unapply[A])) implicit val wrappedGenericIntFormat = Json.format[WrappedGenericInt] val genericInt = GenericCaseClass(obj = 1) val wrapped = WrappedGenericInt(int = genericInt) val expectedJsObj = Json.obj( "int" -> Json.obj("obj" -> 1) ) Json.toJson(wrapped) must beEqualTo(expectedJsObj) Json.fromJson[WrappedGenericInt](expectedJsObj).get must beEqualTo(wrapped) } "create a format[WrappedGenericIntString]" in { import play.api.libs.json.Json._ import play.api.libs.functional.syntax._ implicit def genericEntityWrapperFormat[A: Format, B: Format]: Format[GenericCaseClass2[A, B]] = ( ( (__ \ "obj1").format[A] and (__ \ "obj2").format[B] ) )(GenericCaseClass2[A, B] _, unlift(GenericCaseClass2.unapply[A, B])) implicit val genericHolderFormat = Json.format[WrappedGenericIntString] val genericIntString = GenericCaseClass2(obj1 = 1, obj2 = "hello") val genericHolder = WrappedGenericIntString(intString = genericIntString) val expectedJsObj = Json.obj( "intString" -> Json.obj("obj1" -> 1, "obj2" -> "hello") ) Json.toJson(genericHolder) must beEqualTo(expectedJsObj) Json.fromJson[WrappedGenericIntString](expectedJsObj).get must beEqualTo(genericHolder) } "VarArgsOnly reads, writes, format" should { val reads = Json.reads[VarArgsOnly] val writes = Json.writes[VarArgsOnly] val format = Json.format[VarArgsOnly] val obj = VarArgsOnly(1, 2, 3) val jsObj = Json.obj("ints" -> Seq(1, 2, 3)) "formats should be able to read and write" in { Json.toJson(obj)(format) must beEqualTo(jsObj) jsObj.as[VarArgsOnly](format) must beEqualTo(obj) } "reads should be able to read valid Json and ignore invalid Json" in { jsObj.as[VarArgsOnly](reads) must beEqualTo(obj) Json.fromJson[VarArgsOnly](Json.obj("hello" -> "world"))(reads).isError must beTrue } "writes should be able to spit out valid json" in { Json.toJson(obj)(writes) must beEqualTo(jsObj) } } "LastVarArg reads, writes, format" should { val reads = Json.reads[LastVarArg] val writes = Json.writes[LastVarArg] val format = Json.format[LastVarArg] val obj = LastVarArg("hello", 1, 2, 3) val jsObj = Json.obj("name" -> "hello", "ints" -> Seq(1, 2, 3)) "formats should be able to read and write" in { Json.toJson(obj)(format) must beEqualTo(jsObj) jsObj.as[LastVarArg](format) must beEqualTo(obj) } "reads should be able to read valid Json and ignore invalid Json" in { jsObj.as[LastVarArg](reads) must beEqualTo(obj) Json.fromJson[LastVarArg](Json.obj("hello" -> "world"))(reads).isError must beTrue } "writes should be able to spit out valid json" in { Json.toJson(obj)(writes) must beEqualTo(jsObj) } } "manage Map[String, User]" in { import play.api.libs.json.Json implicit val userReads = Json.reads[UserMap] Json.fromJson[UserMap]( Json.obj("name" -> "toto", "friends" -> Json.obj("tutu" -> Json.obj("name" -> "tutu", "friends" -> Json.obj()))) ) must beEqualTo( JsSuccess(UserMap("toto", Map("tutu" -> UserMap("tutu")))) ) } "manage Boxed class" in { import play.api.libs.functional.syntax._ implicit def idReads[A](implicit rds: Reads[A]): Reads[Id[A]] = Reads[Id[A]] { js => rds.reads(js).map(Id[A](_)) } //val c2Reads1 = Json.reads[C2] implicit def c1Reads[A](implicit rds: Reads[Id[A]]) = { ( (__ \ 'id).read(rds) and (__ \ 'name).read[String] )((id, name) => C1[A](id, name)) } val js = Json.obj("id" -> 123L, "name" -> "toto") 
js.validate(c1Reads[Long]).get must beEqualTo(C1[Long](Id[Long](123L), "toto")) } /** * test to validate it doesn't compile if missing implicit * "fail if missing " in { * import play.api.libs.json.Json * * implicit val userReads = Json.reads[UserFail] * * success * } */ "test 21 fields" in { implicit val XReads = Json.reads[X] implicit val XWrites = Json.writes[X] implicit val XFormat = Json.format[X] success } "test inception with overriden object" in { implicit val programFormat = Json.reads[Program] success } "test case class 1 field" in { implicit val totoReads = Json.reads[Toto] implicit val totoWrites = Json.writes[Toto] implicit val totoFormat = Json.format[Toto] success } "test case class 1 field option" in { implicit val toto2Reads = Json.reads[Toto2] implicit val toto2Writes = Json.writes[Toto2] implicit val toto2Format = Json.format[Toto2] success } "test case class 1 field list" in { implicit val toto3Reads = Json.reads[Toto3] implicit val toto3Writes = Json.writes[Toto3] implicit val toto3Format = Json.format[Toto3] success } "test case class 1 field set" in { implicit val toto4Reads = Json.reads[Toto4] implicit val toto4Writes = Json.writes[Toto4] implicit val toto4Format = Json.format[Toto4] success } "test case class 1 field map" in { implicit val toto5Reads = Json.reads[Toto5] implicit val toto5Writes = Json.writes[Toto5] implicit val toto5Format = Json.format[Toto5] success } "test case class 1 field seq[Dog]" in { implicit val userFormat = Json.format[User] implicit val dogFormat = Json.format[Dog] implicit val toto6Reads = Json.reads[Toto6] implicit val toto6Writes = Json.writes[Toto6] implicit val toto6Format = Json.format[Toto6] val js = Json.obj("name" -> Json.arr( Json.obj( "name" -> "medor", "master" -> Json.obj("name" -> "toto", "age" -> 45) ), Json.obj( "name" -> "brutus", "master" -> Json.obj("name" -> "tata", "age" -> 23) ) )) Json.fromJson[Toto6](js).get must beEqualTo( Toto6(Seq( Dog("medor", User(45, "toto")), Dog("brutus", User(23, "tata")) )) ) } "test case reads in companion object" in { Json.fromJson[Person](Json.toJson(Person("bob", 15))).get must beEqualTo(Person("bob", 15)) } "test case single-field in companion object" in { Json.fromJson[Person2](Json.toJson(Person2(List("bob", "bobby")))).get must beEqualTo(Person2(List("bob", "bobby"))) } "test hygiene" in { val play = "" type LazyHelper = Any; val LazyHelper = () val scala = "" type String = Any; val String = "" type Unit = Any; val Unit = "" type Any = Nothing; val Any = "" type Int = String; val Int = "" implicit val toto2Reads = Json.reads[Toto2] implicit val toto2Writes = Json.writes[Toto2] implicit val toto2Format = Json.format[Toto2] success } "create a format[CustomApply]" in { import play.api.libs.json.Json implicit val fmt = Json.format[CustomApply] Json.fromJson[CustomApply](Json.obj("a" -> 5, "b" -> "foo")) must beEqualTo(JsSuccess(CustomApply(5, "foo"))) Json.toJson(CustomApply(5, "foo")) must beEqualTo(Json.obj("a" -> 5, "b" -> "foo")) Json.toJson(CustomApply()) must beEqualTo(Json.obj("a" -> 10, "b" -> "foo")) } } }
bjfletcher/playframework
framework/src/play/src/test/scala/views/js/helper/HelpersSpec.scala
/*
 * Copyright (C) 2009-2016 Typesafe Inc. <http://www.typesafe.com>
 */
package views.js.helper

import org.specs2.mutable.Specification

object HelpersSpec extends Specification {

  "@json" should {

    "Produce valid JavaScript strings" in {
      json("foo").toString must equalTo("\"foo\"")
    }

    "Properly escape quotes" in {
      json("fo\"o").toString must equalTo("\"fo\\\"o\"")
    }

    "Not escape HTML entities" in {
      json("fo&o").toString must equalTo("\"fo&o\"")
    }

    "Produce valid JavaScript literal objects" in {
      json(Map("foo" -> "bar")).toString must equalTo("{\"foo\":\"bar\"}")
    }

    "Produce valid JavaScript arrays" in {
      json(List("foo", "bar")).toString must equalTo("[\"foo\",\"bar\"]")
    }

  }
}
bjfletcher/playframework
framework/src/play-docs/src/main/scala/play/docs/DocServerStart.scala
/*
 * Copyright (C) 2009-2016 Typesafe Inc. <http://www.typesafe.com>
 */
package play.docs

import java.io.File
import java.util.concurrent.Callable
import play.api._
import play.api.mvc._
import play.api.routing.Router
import play.core._
import play.core.server._
import scala.concurrent.Future
import scala.util.Success

/**
 * Used to start the documentation server.
 */
class DocServerStart {

  def start(projectPath: File, buildDocHandler: BuildDocHandler, translationReport: Callable[File],
    forceTranslationReport: Callable[File], port: java.lang.Integer): ServerWithStop = {

    val application: Application = {
      val environment = Environment(projectPath, this.getClass.getClassLoader, Mode.Test)
      val context = ApplicationLoader.createContext(environment)
      val components = new BuiltInComponentsFromContext(context) {
        lazy val router = Router.empty
      }
      components.application
    }
    Play.start(application)

    val applicationProvider = new ApplicationProvider {
      override def get = Success(application)
      override def handleWebCommand(request: RequestHeader) =
        buildDocHandler.maybeHandleDocRequest(request).asInstanceOf[Option[Result]].orElse(
          if (request.path == "/@report") {
            if (request.getQueryString("force").isDefined) {
              forceTranslationReport.call()
              Some(Results.Redirect("/@report"))
            } else {
              Some(Results.Ok.sendFile(translationReport.call(), inline = true, fileName = _ => "report.html"))
            }
          } else None
        ).orElse(
            Some(Results.Redirect("/@documentation"))
          )
    }

    val config = ServerConfig(
      rootDir = projectPath,
      port = Some(port),
      mode = Mode.Test,
      properties = System.getProperties
    )
    val serverProvider: ServerProvider = ServerProvider.fromConfiguration(getClass.getClassLoader, config.configuration)
    val context = ServerProvider.Context(
      config,
      applicationProvider,
      application.actorSystem,
      application.materializer,
      stopHook = () => Future.successful(())
    )
    serverProvider.createServer(context)
  }

}
bjfletcher/playframework
documentation/manual/working/scalaGuide/main/forms/code/ScalaFieldConstructor.scala
/*
 * Copyright (C) 2009-2016 Typesafe Inc. <http://www.typesafe.com>
 */
package scalaguide.forms.scalafieldconstructor {

  import org.specs2.mutable.Specification
  import play.api.{Environment, Configuration}
  import play.api.i18n.{DefaultLangs, DefaultMessagesApi, Messages}

  object ScalaFieldConstructorSpec extends Specification {

    val conf = Configuration.reference
    implicit val messages: Messages = new DefaultMessagesApi(Environment.simple(), conf, new DefaultLangs(conf)).preferred(Seq.empty)

    "field constructors" should {
      "be possible to import" in {
        html.userImport(MyForm.form).body must contain("--foo--")
      }

      "be possible to declare" in {
        html.userDeclare(MyForm.form).body must contain("--foo--")
      }
    }
  }

  object MyForm {
    import play.api.data.Form
    import play.api.data.Forms._
    import html.models.User

    val form = Form(mapping(
      "username" -> text
    )(User.apply)(User.unapply))
  }

  package html {
    //#form-myfield-helper
    object MyHelpers {
      import views.html.helper.FieldConstructor
      implicit val myFields = FieldConstructor(html.myFieldConstructorTemplate.f)
    }
    //#form-myfield-helper
  }

  package html.models {
    case class User(username: String)
  }

}
bjfletcher/playframework
framework/src/play-java-ws/src/test/scala/play/libs/ws/ahc/AhcWSRequestSpec.scala
package play.libs.ws.ahc

import org.specs2.mock.Mockito
import org.specs2.mutable._

class AhcWSRequestSpec extends Specification with Mockito {

  "AhcWSRequest" should {

    "should respond to getMethod" in {
      val client = mock[AhcWSClient]
      val request = new AhcWSRequest(client, "http://example.com", /*materializer*/ null)
      request.buildRequest().getMethod must be_==("GET")
    }

    "should set virtualHost appropriately" in {
      val client = mock[AhcWSClient]
      val request = new AhcWSRequest(client, "http://example.com", /*materializer*/ null)
      request.setVirtualHost("foo.com")
      val actual = request.buildRequest().getVirtualHost()
      actual must beEqualTo("foo.com")
    }

    "should support setting a request timeout" in {
      requestWithTimeout(1000) must beEqualTo(1000)
    }

    "should support setting an infinite request timeout" in {
      requestWithTimeout(-1) must beEqualTo(-1)
    }

    "should not support setting a request timeout < -1" in {
      requestWithTimeout(-2) must throwA[IllegalArgumentException]
    }

    "should not support setting a request timeout > Integer.MAX_VALUE" in {
      requestWithTimeout(Int.MaxValue.toLong + 1) must throwA[IllegalArgumentException]
    }

    "Only send the first Content-Type header, and add charset=utf-8 when the header was set manually without a charset" in {
      import scala.collection.JavaConverters._
      val client = mock[AhcWSClient]
      val request = new AhcWSRequest(client, "http://example.com", /*materializer*/ null)
      request.setBody("HELLO WORLD")
      request.setHeader("Content-Type", "application/json")
      request.setHeader("Content-Type", "application/xml")
      val req = request.buildRequest()
      req.getHeaders.get("Content-Type") must be_==("application/json; charset=utf-8")
    }

    "Only send the first Content-Type header, and keep the charset when the header was set manually with a charset" in {
      import scala.collection.JavaConverters._
      val client = mock[AhcWSClient]
      val request = new AhcWSRequest(client, "http://example.com", /*materializer*/ null)
      request.setBody("HELLO WORLD")
      request.setHeader("Content-Type", "application/json; charset=US-ASCII")
      request.setHeader("Content-Type", "application/xml")
      val req = request.buildRequest()
      req.getHeaders.get("Content-Type") must be_==("application/json; charset=US-ASCII")
    }

  }

  def requestWithTimeout(timeout: Long) = {
    val client = mock[AhcWSClient]
    val request = new AhcWSRequest(client, "http://example.com", /*materializer*/ null)
    request.setRequestTimeout(timeout)
    request.buildRequest().getRequestTimeout()
  }

}
bjfletcher/playframework
framework/src/iteratees/src/test/scala/play/api/libs/iteratee/ExecutionSpec.scala
/* * Copyright (C) 2009-2016 Typesafe Inc. <http://www.typesafe.com> */ package play.api.libs.iteratee import scala.language.reflectiveCalls import org.specs2.mutable._ import java.io.OutputStream import java.util.concurrent.{ CountDownLatch, TimeUnit } import scala.concurrent.{ ExecutionContext, Promise, Future, Await } import scala.concurrent.duration.{ Duration, SECONDS } import scala.util.{ Failure, Success, Try } object ExecutionSpec extends Specification { import Execution.trampoline val waitTime = Duration(5, SECONDS) "trampoline" should { "execute code in the same thread" in { val f = Future(Thread.currentThread())(trampoline) Await.result(f, waitTime) must equalTo(Thread.currentThread()) } "not overflow the stack" in { def executeRecursively(ec: ExecutionContext, times: Int) { if (times > 0) { ec.execute(new Runnable { def run() = executeRecursively(ec, times - 1) }) } } // Work out how deep to go to cause an overflow val overflowingExecutionContext = new ExecutionContext { def execute(runnable: Runnable): Unit = { runnable.run() } def reportFailure(t: Throwable): Unit = t.printStackTrace() } var overflowTimes = 1 << 10 try { while (overflowTimes > 0) { executeRecursively(overflowingExecutionContext, overflowTimes) overflowTimes = overflowTimes << 1 } sys.error("Can't get the stack to overflow") } catch { case _: StackOverflowError => () } // Now verify that we don't overflow Try(executeRecursively(trampoline, overflowTimes)) must beSuccessfulTry[Unit] } "execute code in the order it was submitted" in { val runRecord = scala.collection.mutable.Buffer.empty[Int] case class TestRunnable(id: Int, children: Runnable*) extends Runnable { def run() = { runRecord += id for (c <- children) trampoline.execute(c) } } trampoline.execute( TestRunnable(0, TestRunnable(1), TestRunnable(2, TestRunnable(4, TestRunnable(6), TestRunnable(7)), TestRunnable(5, TestRunnable(8))), TestRunnable(3)) ) runRecord must equalTo(0 to 8) } } }
bjfletcher/playframework
framework/src/iteratees/src/test/scala/play/api/libs/iteratee/RunQueueSpec.scala
/*
 * Copyright (C) 2009-2016 Typesafe Inc. <http://www.typesafe.com>
 */
package play.api.libs.iteratee

import scala.language.reflectiveCalls

import org.specs2.mutable._

import java.util.concurrent.atomic.AtomicInteger
import scala.concurrent.{ ExecutionContext, Promise, Future, Await }
import scala.concurrent.duration.{ Duration, SECONDS }

object RunQueueSpec extends Specification with ExecutionSpecification {

  val waitTime = Duration(20, SECONDS)

  trait QueueTester {
    def schedule(body: => Future[Unit])(implicit ec: ExecutionContext): Unit
  }

  class RunQueueTester extends QueueTester {
    val rq = new RunQueue()
    def schedule(body: => Future[Unit])(implicit ec: ExecutionContext) = rq.schedule(body)
  }

  class NaiveQueueTester extends QueueTester {
    def schedule(body: => Future[Unit])(implicit ec: ExecutionContext) = Future(body)
  }

  def countOrderingErrors(runs: Int, queueTester: QueueTester)(implicit ec: ExecutionContext): Future[Int] = {
    val result = Promise[Int]()
    val runCount = new AtomicInteger(0)
    val orderingErrors = new AtomicInteger(0)

    for (i <- 0 until runs) {
      queueTester.schedule {
        val observedRunCount = runCount.getAndIncrement()

        // Introduce another Future just to make things complicated :)
        Future {
          // If we see observedRunCount != i then this task was run out of order
          if (observedRunCount != i) {
            orderingErrors.incrementAndGet() // Record the error
          }
          // If this is the last task, complete our result promise
          if ((observedRunCount + 1) >= runs) {
            result.success(orderingErrors.get)
          }
        }
      }
    }
    result.future
  }

  "RunQueue" should {

    "run code in order" in {
      import ExecutionContext.Implicits.global

      def percentageOfRunsWithOrderingErrors(runSize: Int, queueTester: QueueTester): Int = {
        val results: Seq[Future[Int]] = for (i <- 0 until 9) yield {
          countOrderingErrors(runSize, queueTester)
        }
        Await.result(Future.sequence(results), waitTime).filter(_ > 0).size * 10
      }

      // Iteratively increase the run size until we get observable errors 90% of the time.
      // We want a high error rate because we want to then use the RunQueueTester
      // on the same run size and know that it is fixing up some problems. If the run size
      // is too small then the RunQueueTester probably isn't doing anything. We use
      // dynamic run sizing because the actual size that produces errors will vary
      // depending on the environment in which this test is run.
      var runSize = 8 // This usually reaches 8192 on my dev machine with 10 simultaneous queues
      var errorPercentage = 0
      while (errorPercentage < 90 && runSize < 1000000) {
        runSize = runSize << 1
        errorPercentage = percentageOfRunsWithOrderingErrors(runSize, new NaiveQueueTester())
      }
      //println(s"Got $errorPercentage% ordering errors on run size of $runSize")

      // Now show that this run length works fine with the RunQueueTester
      percentageOfRunsWithOrderingErrors(runSize, new RunQueueTester()) must_== 0
    }

    "use the ExecutionContext exactly once per scheduled item" in {
      val rq = new RunQueue()
      mustExecute(1) { implicit runEC =>
        val runFinished = Promise[Unit]()
        rq.schedule {
          runFinished.success(())
          Future.successful(())
        }
        Await.result(runFinished.future, waitTime) must_== (())
      }
    }

  }
}
bjfletcher/playframework
documentation/manual/working/scalaGuide/main/tests/code-scalatestplus-play/playspec/ExampleSpec.scala
/*
 * Copyright (C) 2009-2016 Typesafe Inc. <http://www.typesafe.com>
 */
package scalaguide.tests.scalatest.playspec

import play.api.test._
import org.scalatest._
import org.scalatestplus.play._
import play.api.test.Helpers._
import play.api.libs.ws._
import play.api.mvc._
import Results._
import org.scalatest.concurrent.ScalaFutures
import org.scalatest.concurrent.IntegrationPatience

// #scalafunctionaltest-playspec
class ExampleSpec extends PlaySpec with OneServerPerSuite with ScalaFutures with IntegrationPatience {

  // Override app if you need a FakeApplication with other than
  // default parameters.
  implicit override lazy val app: FakeApplication =
    FakeApplication(
      additionalConfiguration = Map("ehcacheplugin" -> "disabled"),
      withRoutes = {
        case ("GET", "/testing") =>
          Action(
            Results.Ok(
              "<html>" +
                "<head><title>Test Page</title></head>" +
                "<body>" +
                "<input type='button' name='b' value='Click Me' onclick='document.title=\"scalatest\"' />" +
                "</body>" +
                "</html>"
            ).as("text/html")
          )
      }
    )

  "WsScalaTestClient's" must {

    "wsUrl works correctly" in {
      val futureResult = wsUrl("/testing").get
      val body = futureResult.futureValue.body
      val expectedBody =
        "<html>" +
          "<head><title>Test Page</title></head>" +
          "<body>" +
          "<input type='button' name='b' value='Click Me' onclick='document.title=\"scalatest\"' />" +
          "</body>" +
          "</html>"
      assert(body == expectedBody)
    }

    "wsCall works correctly" in {
      val futureResult = wsCall(Call("get", "/testing")).get
      val body = futureResult.futureValue.body
      val expectedBody =
        "<html>" +
          "<head><title>Test Page</title></head>" +
          "<body>" +
          "<input type='button' name='b' value='Click Me' onclick='document.title=\"scalatest\"' />" +
          "</body>" +
          "</html>"
      assert(body == expectedBody)
    }
  }
}
// #scalafunctionaltest-playspec
bjfletcher/playframework
templates/play-scala/test/ApplicationSpec.scala
import org.specs2.mutable._
import org.specs2.runner._
import org.junit.runner._

import play.api.test._
import play.api.test.Helpers._

/**
 * Add your spec here.
 * You can mock out a whole application including requests, plugins etc.
 * For more information, consult the wiki.
 */
@RunWith(classOf[JUnitRunner])
class ApplicationSpec extends Specification {

  "Routes" should {

    "send 404 for an unknown resource" in new WithApplication {
      route(app, FakeRequest(GET, "/boum")) must beSome.which (status(_) == NOT_FOUND)
    }

  }

  "HomeController" should {

    "render the index page" in new WithApplication {
      val home = route(app, FakeRequest(GET, "/")).get

      status(home) must equalTo(OK)
      contentType(home) must beSome.which(_ == "text/html")
      contentAsString(home) must contain ("Your new application is ready.")
    }

  }

  "CountController" should {

    "return an increasing count" in new WithApplication {
      contentAsString(route(FakeRequest(GET, "/count")).get) must_== ("0")
      contentAsString(route(FakeRequest(GET, "/count")).get) must_== ("1")
      contentAsString(route(FakeRequest(GET, "/count")).get) must_== ("2")
    }

  }
}
bjfletcher/playframework
framework/src/play-cache/src/main/scala/play/api/cache/Cached.scala
/* * Copyright (C) 2009-2016 Typesafe Inc. <http://www.typesafe.com> */ package play.api.cache import javax.inject.Inject import play.api._ import play.api.libs.streams.Accumulator import play.api.mvc._ import play.api.libs.Codecs import play.api.http.HeaderNames.{ IF_NONE_MATCH, ETAG, EXPIRES } import play.api.mvc.Results.NotModified import play.core.Execution.Implicits.internalContext import scala.concurrent.duration._ /** * A helper to add caching to an Action. */ class Cached @Inject() (cache: CacheApi) { /** * Cache an action. * * @param key Compute a key from the request header * @param caching Compute a cache duration from the respone header */ def apply( key: RequestHeader => String, caching: PartialFunction[ResponseHeader, Duration]): CachedBuilder = new CachedBuilder(cache, key, caching) /** * Cache an action. * * @param key Compute a key from the request header */ def apply(key: RequestHeader => String): CachedBuilder = { apply(key, duration = 0) } /** * Cache an action. * * @param key Cache key */ def apply(key: String): CachedBuilder = { apply(_ => key, duration = 0) } /** * Cache an action. * * @param key Cache key * @param duration Cache duration (in seconds) */ def apply(key: RequestHeader => String, duration: Int): CachedBuilder = { new CachedBuilder(cache, key, { case (_: ResponseHeader) => Duration(duration, SECONDS) }) } /** * A cached instance caching nothing * Useful for composition */ def empty(key: RequestHeader => String): CachedBuilder = new CachedBuilder(cache, key, PartialFunction.empty) /** * Caches everything, forever */ def everything(key: RequestHeader => String): CachedBuilder = empty(key).default(0) /** * Caches everything for the specified seconds */ def everything(key: RequestHeader => String, duration: Int): CachedBuilder = empty(key).default(duration) /** * Caches the specified status, for the specified number of seconds */ def status(key: RequestHeader => String, status: Int, duration: Int): CachedBuilder = empty(key).includeStatus(status, Duration(duration, SECONDS)) /** * Caches the specified status forever */ def status(key: RequestHeader => String, status: Int): CachedBuilder = empty(key).includeStatus(status) } /** * A helper to add caching to an Action. This helper uses the Application's default cache. * If you want to inject a custom cache, see the `Cached` class. */ object Cached { /** * Cache an action. * * @param key Compute a key from the request header * @param caching Compute a cache duration from the respone header */ def apply( key: RequestHeader => String, caching: PartialFunction[ResponseHeader, Duration]): UnboundCachedBuilder = new UnboundCachedBuilder(key, caching) /** * Cache an action. * * @param key Compute a key from the request header */ def apply(key: RequestHeader => String): UnboundCachedBuilder = { apply(key, duration = 0) } /** * Cache an action. * * @param key Cache key */ def apply(key: String): UnboundCachedBuilder = { apply(_ => key, duration = 0) } /** * Cache an action. 
* * @param key Cache key * @param duration Cache duration (in seconds) */ def apply(key: RequestHeader => String, duration: Int): UnboundCachedBuilder = { new UnboundCachedBuilder(key, { case (_: ResponseHeader) => Duration(duration, SECONDS) }) } /** * A cached instance caching nothing * Useful for composition */ def empty(key: RequestHeader => String): UnboundCachedBuilder = new UnboundCachedBuilder(key, PartialFunction.empty) /** * Caches everything, forever */ def everything(key: RequestHeader => String): UnboundCachedBuilder = empty(key).default(0) /** * Caches everything for the specified seconds */ def everything(key: RequestHeader => String, duration: Int): UnboundCachedBuilder = empty(key).default(duration) /** * Caches the specified status, for the specified number of seconds */ def status(key: RequestHeader => String, status: Int, duration: Int): UnboundCachedBuilder = empty(key).includeStatus(status, Duration(duration, SECONDS)) /** * Caches the specified status forever */ def status(key: RequestHeader => String, status: Int): UnboundCachedBuilder = empty(key).includeStatus(status) } /** * Builds an action with caching behavior. Typically created with one of the methods in the `Cached` * class. Uses both server and client caches: * * - Adds an `Expires` header to the response, so clients can cache response content ; * - Adds an `Etag` header to the response, so clients can cache response content and ask the server for freshness ; * - Cache the result on the server, so the underlying action is not computed at each call. * * @param cache The cache used for caching results * @param key Compute a key from the request header * @param caching A callback to get the number of seconds to cache results for */ final class CachedBuilder( cache: CacheApi, key: RequestHeader => String, caching: PartialFunction[ResponseHeader, Duration]) { /** * Compose the cache with an action */ def apply(action: EssentialAction): EssentialAction = build(action) /** * Compose the cache with an action */ def build(action: EssentialAction): EssentialAction = EssentialAction { request => val resultKey = key(request) val etagKey = s"$resultKey-etag" // Has the client a version of the resource as fresh as the last one we served? val notModified = for { requestEtag <- request.headers.get(IF_NONE_MATCH) etag <- cache.get[String](etagKey) if requestEtag == "*" || etag == requestEtag } yield Accumulator.done(NotModified) notModified.orElse { // Otherwise try to serve the resource from the cache, if it has not yet expired cache.get[SerializableResult](resultKey).map { sr: SerializableResult => Accumulator.done(sr.result) } }.getOrElse { // The resource was not in the cache, we have to run the underlying action val accumulatorResult = action(request) // Add cache information to the response, so clients can cache its content accumulatorResult.map(handleResult(_, etagKey, resultKey)) } } /** * Eternity is one year long. Duration zero means eternity. */ private val cachingWithEternity = caching.andThen { duration => // FIXME: Surely Duration.Inf is a better marker for eternity than 0? 
val zeroDuration: Boolean = duration.neg().equals(duration) if (zeroDuration) { Duration(60 * 60 * 24 * 365, SECONDS) } else { duration } } private def handleResult(result: Result, etagKey: String, resultKey: String): Result = { cachingWithEternity.andThen { duration => // Format expiration date according to http standard val expirationDate = http.dateFormat.print(System.currentTimeMillis() + duration.toMillis) // Generate a fresh ETAG for it // Use quoted sha1 hash of expiration date as ETAG val etag = s""""${Codecs.sha1(expirationDate)}"""" val resultWithHeaders = result.withHeaders(ETAG -> etag, EXPIRES -> expirationDate) // Cache the new ETAG of the resource cache.set(etagKey, etag, duration) // Cache the new Result of the resource cache.set(resultKey, new SerializableResult(resultWithHeaders), duration) resultWithHeaders }.applyOrElse(result.header, (_: ResponseHeader) => result) } /** * Whether this cache should cache the specified response if the status code match * This method will cache the result forever */ def includeStatus(status: Int): CachedBuilder = includeStatus(status, Duration.Zero) /** * Whether this cache should cache the specified response if the status code match * This method will cache the result for duration seconds * * @param status the status code to check * @param duration the number of seconds to cache the result for */ def includeStatus(status: Int, duration: Int): CachedBuilder = includeStatus(status, Duration(duration, SECONDS)) /** * Whether this cache should cache the specified response if the status code match * This method will cache the result for duration seconds * * @param status the status code to check * @param duration how long should we cache the result for */ def includeStatus(status: Int, duration: Duration): CachedBuilder = compose { case e if e.status == status => { duration } } /** * The returned cache will store all responses whatever they may contain * @param duration how long we should store responses */ def default(duration: Duration): CachedBuilder = compose(PartialFunction((_: ResponseHeader) => duration)) /** * The returned cache will store all responses whatever they may contain * @param duration the number of seconds we should store responses */ def default(duration: Int): CachedBuilder = default(Duration(duration, SECONDS)) /** * Compose the cache with new caching function * @param alternative a closure getting the reponseheader and returning the duration * we should cache for */ def compose(alternative: PartialFunction[ResponseHeader, Duration]): CachedBuilder = new CachedBuilder( cache = cache, key = key, caching = caching.orElse(alternative) ) } /** * Builds an action with caching behavior. Typically created with one of the methods in the `Cached` * companion object. Uses both server and client caches: * * - Adds an `Expires` header to the response, so clients can cache response content ; * - Adds an `Etag` header to the response, so clients can cache response content and ask the server for freshness ; * - Cache the result on the server, so the underlying action is not computed at each call. * * Unlike `CachedBuilder`, an `UnboundCachedBuilder` isn't bound to a particular * cache when it is created. It binds the default cache of the current application * when it builds an action. 
* * @param key Compute a key from the request header * @param caching A callback to get the number of seconds to cache results for */ class UnboundCachedBuilder(key: RequestHeader => String, caching: PartialFunction[ResponseHeader, Duration]) { import Cached._ /** * Compose the cache with an action */ def apply(action: EssentialAction)(implicit app: Application): EssentialAction = build(action) /** * Compose the cache with an action */ def build(action: EssentialAction)(implicit app: Application): EssentialAction = { new CachedBuilder(Cache.cacheApi, key, caching).build(action) } /** * Whether this cache should cache the specified response if the status code matches * This method will cache the result forever */ def includeStatus(status: Int): UnboundCachedBuilder = includeStatus(status, Duration.Zero) /** * Whether this cache should cache the specified response if the status code matches * This method will cache the result for duration seconds * * @param status the status code to check * @param duration the number of seconds to cache the result for */ def includeStatus(status: Int, duration: Int): UnboundCachedBuilder = includeStatus(status, Duration(duration, SECONDS)) /** * Whether this cache should cache the specified response if the status code matches * This method will cache the result for duration seconds * * @param status the status code to check * @param duration how long we should cache the result for */ def includeStatus(status: Int, duration: Duration): UnboundCachedBuilder = compose { case e if e.status == status => { duration } } /** * The returned cache will store all responses whatever they may contain * @param duration how long we should store responses */ def default(duration: Duration): UnboundCachedBuilder = compose(PartialFunction((_: ResponseHeader) => duration)) /** * The returned cache will store all responses whatever they may contain * @param duration the number of seconds we should store responses */ def default(duration: Int): UnboundCachedBuilder = default(Duration(duration, SECONDS)) /** * Compose the cache with a new caching function * @param alternative a closure getting the response header and returning the duration * we should cache for */ def compose(alternative: PartialFunction[ResponseHeader, Duration]): UnboundCachedBuilder = new UnboundCachedBuilder( key = key, caching = caching.orElse(alternative) ) }
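A brief usage sketch of the unbound builders above, for orientation only. The controller, route, and durations are hypothetical; it assumes this file is Play's `play.api.cache.Cached`, and that an implicit `Application` is in scope (here via the deprecated `play.api.Play.current`) so that `build` can resolve the default cache.

import play.api.Play.current // implicit Application required by UnboundCachedBuilder.build
import play.api.cache.Cached
import play.api.mvc._
import scala.concurrent.duration._

object ItemsController extends Controller {

  // Key cache entries on the request path; keep 200 responses for an hour and 404s for one minute.
  def index: EssentialAction =
    Cached
      .status((req: RequestHeader) => req.path, 200, 60 * 60)
      .includeStatus(404, 1.minute)
      .build(Action {
        Ok("items")
      })
}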
bjfletcher/playframework
framework/src/play/src/main/scala/play/core/j/JavaHttpErrorHandlerAdapter.scala
/* * Copyright (C) 2009-2016 Typesafe Inc. <http://www.typesafe.com> */ package play.core.j import javax.inject.Inject import play.api.http.HttpErrorHandler import play.api.mvc.RequestHeader import play.http.{ HttpErrorHandler => JHttpErrorHandler } /** * Adapter from a Java HttpErrorHandler to a Scala HttpErrorHandler */ class JavaHttpErrorHandlerAdapter @Inject() (underlying: JHttpErrorHandler) extends HttpErrorHandler { def onClientError(request: RequestHeader, statusCode: Int, message: String) = { JavaHelpers.invokeWithContext(request, req => underlying.onClientError(req, statusCode, message)) } def onServerError(request: RequestHeader, exception: Throwable) = { JavaHelpers.invokeWithContext(request, req => underlying.onServerError(req, exception)) } }
bjfletcher/playframework
framework/src/sbt-plugin/src/sbt-test/play-sbt-plugin/multiproject/transitive/app/Transitive.scala
/* * Copyright (C) 2009-2016 Typesafe Inc. <http://www.typesafe.com> */ object Transitive
bjfletcher/playframework
framework/src/play-ws/src/test/scala/play/api/libs/openid/RichUrl.scala
/* * Copyright (C) 2009-2016 Typesafe Inc. <http://www.typesafe.com> */ package play.api.libs.openid trait RichUrl[A] { def hostAndPath: String }
bjfletcher/playframework
framework/src/play-ws/src/test/scala/play/api/libs/openid/package.scala
/* * Copyright (C) 2009-2016 Typesafe Inc. <http://www.typesafe.com> */ package play.api.libs import scala.io.Source import org.jboss.netty.handler.codec.http.QueryStringDecoder import java.net.{ MalformedURLException, URL } import util.control.Exception._ import collection.JavaConverters._ import scala.language.implicitConversions package object openid { type Params = Map[String, Seq[String]] implicit def stringToSeq(s: String): Seq[String] = Seq(s) implicit def urlToRichUrl(url: URL) = new RichUrl[URL] { def hostAndPath = new URL(url.getProtocol, url.getHost, url.getPort, url.getPath).toExternalForm } def readFixture(filePath: String): String = this.synchronized { Source.fromInputStream(this.getClass.getResourceAsStream(filePath)).mkString } def parseQueryString(url: String): Params = { catching(classOf[MalformedURLException]) opt new URL(url) map { url => new QueryStringDecoder(url.toURI.getRawQuery, false).getParameters.asScala.mapValues(_.asScala.toSeq).toMap } getOrElse Map() } // See 10.1 - Positive Assertions // http://openid.net/specs/openid-authentication-2_0.html#positive_assertions def createDefaultResponse(claimedId: String, identity: String, defaultSigned: String = "op_endpoint,claimed_id,identity,return_to,response_nonce,assoc_handle"): Map[String, Seq[String]] = Map( "openid.ns" -> "http://specs.openid.net/auth/2.0", "openid.mode" -> "id_res", "openid.op_endpoint" -> "https://www.google.com/a/example.com/o8/ud?be=o8", "openid.claimed_id" -> claimedId, "openid.identity" -> identity, "openid.return_to" -> "https://example.com/openid?abc=false", "openid.response_nonce" -> "2012-05-25T06:47:55ZEJvRv76xQcWbTG", "openid.assoc_handle" -> "<KEY>", "openid.signed" -> defaultSigned, "openid.sig" -> "MWRsJZ/9AOMQt9gH6zTZIfIjk6g=" ) }
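A quick, hypothetical use of the `parseQueryString` helper above; the expected result in the comment is my reading of the implementation, not a documented guarantee.

import play.api.libs.openid._

object ParseQueryStringExample {
  def main(args: Array[String]): Unit = {
    val params: Params = parseQueryString("https://example.com/openid?abc=false&x=1&x=2")
    // Expected shape: Map("abc" -> Seq("false"), "x" -> Seq("1", "2")); a malformed URL yields Map()
    println(params)
  }
}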
bjfletcher/playframework
framework/src/play-jdbc-evolutions/src/test/scala/play/api/db/evolutions/EvolutionsReaderSpec.scala
/* * Copyright (C) 2009-2016 Typesafe Inc. <http://www.typesafe.com> */ package play.api.db.evolutions import java.io.File import org.specs2.mutable.Specification import play.api.{ Environment, Mode } object EvolutionsReaderSpec extends Specification { "EnvironmentEvolutionsReader" should { "read evolution files from classpath" in { val environment = Environment(new File("."), getClass.getClassLoader, Mode.Test) val reader = new EnvironmentEvolutionsReader(environment) reader.evolutions("test") must_== Seq( Evolution(1, "create table test (id bigint not null, name varchar(255));", "drop table if exists test;"), Evolution(2, "insert into test (id, name) values (1, 'alice');\ninsert into test (id, name) values (2, 'bob');", "delete from test;"), Evolution(3, "insert into test (id, name) values (3, 'charlie');\ninsert into test (id, name) values (4, 'dave');", ""), Evolution(4, "", "") ) } } }
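For context, a hedged sketch of how a reader like the one under test is typically paired with the evolutions runner. The `Databases.inMemory`, `applyEvolutions`, and `cleanupEvolutions` calls follow the public play.api.db / play.api.db.evolutions API as I recall it; treat the exact signatures as assumptions.

import java.io.File
import play.api.{ Environment, Mode }
import play.api.db.Databases
import play.api.db.evolutions.{ EnvironmentEvolutionsReader, Evolutions }

object ApplyTestEvolutions {
  def main(args: Array[String]): Unit = {
    // In-memory H2 database named "test", matching the datasource the spec reads evolutions for.
    val database = Databases.inMemory(name = "test")
    val reader = new EnvironmentEvolutionsReader(Environment(new File("."), getClass.getClassLoader, Mode.Test))
    try {
      Evolutions.applyEvolutions(database, reader) // runs the Ups scripts read from the classpath
      // ... exercise the schema here ...
      Evolutions.cleanupEvolutions(database) // runs the Downs scripts
    } finally {
      database.shutdown()
    }
  }
}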
bjfletcher/playframework
documentation/manual/working/scalaGuide/main/tests/code-scalatestplus-play/oneserverpertest/ExampleSpec.scala
<gh_stars>0 /* * Copyright (C) 2009-2016 Typesafe Inc. <http://www.typesafe.com> */ package scalaguide.tests.scalatest.oneserverpertest import play.api.test._ import org.scalatest._ import org.scalatestplus.play._ import play.api.test.Helpers._ import play.api.libs.ws._ import play.api.mvc._ import Results._ // #scalafunctionaltest-oneserverpertest class ExampleSpec extends PlaySpec with OneServerPerTest { // Override newAppForTest if you need a FakeApplication with other than // default parameters. override def newAppForTest(testData: TestData): FakeApplication = new FakeApplication( additionalConfiguration = Map("ehcacheplugin" -> "disabled"), withRoutes = { case ("GET", "/") => Action { Ok("ok") } } ) "The OneServerPerTest trait" must { "test server logic" in { val myPublicAddress = s"localhost:$port" val testPaymentGatewayURL = s"http://$myPublicAddress" // The test payment gateway requires a callback to this server before it returns a result... val callbackURL = s"http://$myPublicAddress/callback" // await is from play.api.test.FutureAwaits val response = await(WS.url(testPaymentGatewayURL).withQueryString("callbackURL" -> callbackURL).get()) response.status mustBe (OK) } } } // #scalafunctionaltest-oneserverpertest
bjfletcher/playframework
framework/src/play/src/test/scala/play/core/test/package.scala
/* * Copyright (C) 2009-2016 Typesafe Inc. <http://www.typesafe.com> */ package play.core import play.api.{ ApplicationLoader, BuiltInComponentsFromContext, Environment, Play } package object test { /** * Run the given block of code with an application. */ def withApplication[T](block: => T): T = { val app = new BuiltInComponentsFromContext(ApplicationLoader.createContext(Environment.simple())) { def router = play.api.routing.Router.empty }.application Play.start(app) try { block } finally { Play.stop(app) } } }
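A minimal, hypothetical use of the helper above, only to show its shape: the application is started before the block runs and stopped afterwards, even if the block throws.

import play.api.Play
import play.core.test._

object WithApplicationExample {
  def main(args: Array[String]): Unit = {
    val mode = withApplication {
      Play.current.mode // the started application is current while the block runs
    }
    println(mode)
  }
}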
bjfletcher/playframework
framework/src/play-ws/src/test/scala/play/api/libs/oauth/OAuthSpec.scala
/* * Copyright (C) 2009-2016 Typesafe Inc. <http://www.typesafe.com> */ package play.api.libs.oauth import akka.util.ByteString import play.api.Application import play.api.libs.ws.WS import play.api.mvc._ import play.api.test._ import scala.concurrent.{ Future, Promise } class OAuthSpec extends PlaySpecification { sequential val consumerKey = ConsumerKey("someConsumerKey", "someVerySecretConsumerSecret") val requestToken = RequestToken("someRequestToken", "someVerySecretRequestSecret") val oauthCalculator = OAuthCalculator(consumerKey, requestToken) "OAuth" should { "sign a simple get request" in { val (request, body, hostUrl) = receiveRequest { implicit app => hostUrl => WS.url(hostUrl + "/foo").sign(oauthCalculator).get() } OAuthRequestVerifier.verifyRequest(request, body, hostUrl, consumerKey, requestToken) } "sign a get request with query parameters" in { val (request, body, hostUrl) = receiveRequest { implicit app => hostUrl => WS.url(hostUrl + "/foo").withQueryString("param" -> "paramValue").sign(oauthCalculator).get() } OAuthRequestVerifier.verifyRequest(request, body, hostUrl, consumerKey, requestToken) } "sign a post request with a body" in { val (request, body, hostUrl) = receiveRequest { implicit app => hostUrl => WS.url(hostUrl + "/foo").sign(oauthCalculator).post(Map("param" -> Seq("paramValue"))) } OAuthRequestVerifier.verifyRequest(request, body, hostUrl, consumerKey, requestToken) } } def receiveRequest(makeRequest: Application => String => Future[_]): (RequestHeader, ByteString, String) = { val hostUrl = "http://localhost:" + testServerPort val promise = Promise[(RequestHeader, ByteString)]() val app = FakeApplication(withRoutes = { case _ => Action(BodyParsers.parse.raw) { request => promise.success((request, request.body.asBytes().getOrElse(ByteString.empty))) Results.Ok } }) running(TestServer(testServerPort, app)) { await(makeRequest(app)(hostUrl)) } val (request, body) = await(promise.future) (request, body, hostUrl) } }
bjfletcher/playframework
documentation/manual/working/scalaGuide/main/dependencyinjection/code/static.sbt
//#content routesGenerator := StaticRoutesGenerator //#content
bjfletcher/playframework
framework/src/play-jdbc/src/test/scala/play/api/db/ConnectionPoolConfigSpec.scala
/* * Copyright (C) 2009-2016 Typesafe Inc. <http://www.typesafe.com> */ package play.api.db import javax.inject.Inject import play.api.test._ class ConnectionPoolConfigSpec extends PlaySpecification { "DBModule bindings" should { "use HikariCP when default pool is default" in new WithApplication(FakeApplication( additionalConfiguration = Map( "db.default.url" -> "jdbc:h2:mem:default", "db.other.driver" -> "org.h2.Driver", "db.other.url" -> "jdbc:h2:mem:other" ) )) { val db = app.injector.instanceOf[DBApi] db.database("default").withConnection { c => c.getClass.getName must contain("hikari") } } "use HikariCP when default pool is 'hikaricp'" in new WithApplication(FakeApplication( additionalConfiguration = Map( "play.db.pool" -> "hikaricp", "db.default.url" -> "jdbc:h2:mem:default", "db.other.driver" -> "org.h2.Driver", "db.other.url" -> "jdbc:h2:mem:other" ) )) { val db = app.injector.instanceOf[DBApi] db.database("default").withConnection { c => c.getClass.getName must contain("hikari") } } "use BoneCP when default pool is 'bonecp'" in new WithApplication(FakeApplication( additionalConfiguration = Map( "play.db.pool" -> "bonecp", "db.default.url" -> "jdbc:h2:mem:default", "db.other.driver" -> "org.h2.Driver", "db.other.url" -> "jdbc:h2:mem:other" ) )) { val db = app.injector.instanceOf[DBApi] db.database("default").withConnection { c => c.getClass.getName must contain("bonecp") } } "use BoneCP when database-specific pool is 'bonecp'" in new WithApplication(FakeApplication( additionalConfiguration = Map( "db.default.pool" -> "bonecp", "db.default.url" -> "jdbc:h2:mem:default", "db.other.driver" -> "org.h2.Driver", "db.other.url" -> "jdbc:h2:mem:other" ) )) { val db = app.injector.instanceOf[DBApi] db.database("default").withConnection { c => c.getClass.getName must contain("bonecp") } } } }
bjfletcher/playframework
framework/src/play-netty-server/src/main/scala/play/core/server/netty/NettyModelConversion.scala
<reponame>bjfletcher/playframework /* * Copyright (C) 2009-2016 Typesafe Inc. <http://www.typesafe.com> */ package play.core.server.netty import java.net.{ URI, InetSocketAddress } import akka.stream.Materializer import akka.stream.scaladsl.{ Sink, Source } import akka.util.ByteString import com.typesafe.netty.http.{ DefaultStreamedHttpResponse, StreamedHttpRequest } import io.netty.buffer.{ ByteBuf, Unpooled } import io.netty.handler.codec.http._ import io.netty.util.ReferenceCountUtil import play.api.Logger import play.api.http.HeaderNames._ import play.api.http.{ Status, HttpChunk, HttpEntity } import play.api.mvc._ import play.core.server.common.{ ConnectionInfo, ServerResultUtils, ForwardedHeaderHandler } import scala.collection.JavaConverters._ import scala.util.{ Failure, Try } import scala.util.control.NonFatal private[server] class NettyModelConversion(forwardedHeaderHandler: ForwardedHeaderHandler) { private val logger = Logger(classOf[NettyModelConversion]) /** * Convert a Netty request to a Play RequestHeader. * * Will return a failure if there's a protocol error or some other error in the header. */ def convertRequest(requestId: Long, remoteAddress: InetSocketAddress, secureProtocol: Boolean, request: HttpRequest): Try[RequestHeader] = { if (request.getDecoderResult.isFailure) { Failure(request.getDecoderResult.cause()) } else { tryToCreateRequest(request, requestId, remoteAddress, secureProtocol) } } /** Try to create the request. May fail if the path is invalid */ private def tryToCreateRequest(request: HttpRequest, requestId: Long, remoteAddress: InetSocketAddress, secureProtocol: Boolean): Try[RequestHeader] = { Try { val uri = new QueryStringDecoder(request.getUri) val parameters: Map[String, Seq[String]] = { val decodedParameters = uri.parameters() if (decodedParameters.isEmpty) Map.empty else { decodedParameters.asScala.mapValues(_.asScala).toMap } } // wrapping into URI to handle absoluteURI val path = new URI(uri.path()).getRawPath createRequestHeader(request, requestId, path, parameters, remoteAddress, secureProtocol) } } /** Create the request header */ private def createRequestHeader(request: HttpRequest, requestId: Long, parsedPath: String, parameters: Map[String, Seq[String]], _remoteAddress: InetSocketAddress, secureProtocol: Boolean): RequestHeader = { new RequestHeader { override val id = requestId override val tags = Map.empty[String, String] override def uri = request.getUri override def path = parsedPath override def method = request.getMethod.name() override def version = request.getProtocolVersion.text() override def queryString = parameters override val headers = new NettyHeadersWrapper(request.headers) private lazy val remoteConnection: ConnectionInfo = { forwardedHeaderHandler.remoteConnection(_remoteAddress.getAddress, secureProtocol, headers) } override def remoteAddress = remoteConnection.address.getHostAddress override def secure = remoteConnection.secure } } /** Create an unparsed request header. Used when even Netty couldn't parse the request. */ def createUnparsedRequestHeader(requestId: Long, request: HttpRequest, _remoteAddress: InetSocketAddress, secureProtocol: Boolean) = { new RequestHeader { override def id = requestId override def tags = Map.empty[String, String] override def uri = request.getUri override lazy val path = { // The URI may be invalid, so instead, do a crude heuristic to drop the host and query string from it to get the // path, and don't decode. 
val withoutHost = request.getUri.dropWhile(_ != '/') val withoutQueryString = withoutHost.split('?').head if (withoutQueryString.isEmpty) "/" else withoutQueryString } override def method = request.getMethod.name() override def version = request.getProtocolVersion.text() override lazy val queryString: Map[String, Seq[String]] = { // Very rough parse of query string that doesn't decode if (request.getUri.contains("?")) { request.getUri.split("\\?", 2)(1).split('&').map { keyPair => keyPair.split("=", 2) match { case Array(key) => key -> "" case Array(key, value) => key -> value } }.groupBy(_._1).map { case (name, values) => name -> values.map(_._2).toSeq } } else { Map.empty } } override val headers = new NettyHeadersWrapper(request.headers) override def remoteAddress = _remoteAddress.getAddress.toString override def secure = secureProtocol } } /** Create the source for the request body */ def convertRequestBody(request: HttpRequest)(implicit mat: Materializer): Option[Source[ByteString, Any]] = { request match { case full: FullHttpRequest => val content = httpContentToByteString(full) if (content.isEmpty) { None } else { Some(Source.single(content)) } case streamed: StreamedHttpRequest => Some(Source.fromPublisher(SynchronousMappedStreams.map(streamed, httpContentToByteString))) } } /** Convert an HttpContent object to a ByteString */ private def httpContentToByteString(content: HttpContent): ByteString = { val builder = ByteString.newBuilder content.content().readBytes(builder.asOutputStream, content.content().readableBytes()) val bytes = builder.result() ReferenceCountUtil.release(content) bytes } /** Create a Netty response from the result */ def convertResult(result: Result, requestHeader: RequestHeader, httpVersion: HttpVersion)(implicit mat: Materializer): HttpResponse = { val responseStatus = result.header.reasonPhrase match { case Some(phrase) => new HttpResponseStatus(result.header.status, phrase) case None => HttpResponseStatus.valueOf(result.header.status) } val connectionHeader = ServerResultUtils.determineConnectionHeader(requestHeader, result) val skipEntity = requestHeader.method == HttpMethod.HEAD.name() val response: HttpResponse = result.body match { case any if skipEntity => ServerResultUtils.cancelEntity(any) new DefaultFullHttpResponse(httpVersion, responseStatus, Unpooled.EMPTY_BUFFER) case HttpEntity.Strict(data, _) => new DefaultFullHttpResponse(httpVersion, responseStatus, byteStringToByteBuf(data)) case HttpEntity.Streamed(stream, _, _) => createStreamedResponse(stream, httpVersion, responseStatus) case HttpEntity.Chunked(chunks, _) => createChunkedResponse(chunks, httpVersion, responseStatus) } // Set response headers val headers = ServerResultUtils.splitSetCookieHeaders(result.header.headers) try { headers foreach { case (name, value) => response.headers().add(name, value) } // Content type and length if (mayHaveContentLength(result.header.status)) { result.body.contentLength.foreach { contentLength => if (HttpHeaders.isContentLengthSet(response)) { logger.warn("Content-Length header was set manually in the header, ignoring manual header") } HttpHeaders.setContentLength(response, contentLength) } } result.body.contentType.foreach { contentType => if (response.headers().contains(CONTENT_TYPE)) { logger.warn(s"Content-Type set both in header (${response.headers().get(CONTENT_TYPE)}) and attached to entity ($contentType), ignoring content type from entity. To remove this warning, use Result.as(...) 
to set the content type, rather than setting the header manually.") } else { response.headers().add(CONTENT_TYPE, contentType) } } connectionHeader.header.foreach { headerValue => response.headers().set(CONNECTION, headerValue) } // Netty doesn't add the required Date header for us, so make sure there is one here if (!response.headers().contains(DATE)) { response.headers().add(DATE, dateHeader) } response } catch { case NonFatal(e) => if (logger.isErrorEnabled) { val prettyHeaders = headers.map { case (name, value) => s"$name -> $value" }.mkString("[", ",", "]") val msg = s"Exception occurred while setting response's headers to $prettyHeaders. Action taken is to set the response's status to ${HttpResponseStatus.INTERNAL_SERVER_ERROR} and discard all headers." logger.error(msg, e) } val response = new DefaultFullHttpResponse(httpVersion, HttpResponseStatus.INTERNAL_SERVER_ERROR, Unpooled.EMPTY_BUFFER) HttpHeaders.setContentLength(response, 0) response.headers().add(DATE, dateHeader) response.headers().add(CONNECTION, "close") response } } /** Create a Netty streamed response. */ private def createStreamedResponse(stream: Source[ByteString, _], httpVersion: HttpVersion, responseStatus: HttpResponseStatus)(implicit mat: Materializer) = { val publisher = SynchronousMappedStreams.map(stream.runWith(Sink.asPublisher(false)), byteStringToHttpContent) new DefaultStreamedHttpResponse(httpVersion, responseStatus, publisher) } /** Create a Netty chunked response. */ private def createChunkedResponse(chunks: Source[HttpChunk, _], httpVersion: HttpVersion, responseStatus: HttpResponseStatus)(implicit mat: Materializer) = { val publisher = chunks.runWith(Sink.asPublisher(false)) val httpContentPublisher = SynchronousMappedStreams.map[HttpChunk, HttpContent](publisher, { case HttpChunk.Chunk(bytes) => new DefaultHttpContent(byteStringToByteBuf(bytes)) case HttpChunk.LastChunk(trailers) => val lastChunk = new DefaultLastHttpContent() trailers.headers.foreach { case (name, value) => lastChunk.trailingHeaders().add(name, value) } lastChunk }) val response = new DefaultStreamedHttpResponse(httpVersion, responseStatus, httpContentPublisher) HttpHeaders.setTransferEncodingChunked(response) response } /** Whether the the given status may have a content length header or not. */ private def mayHaveContentLength(status: Int) = status != Status.NO_CONTENT && status != Status.NOT_MODIFIED /** Convert a ByteString to a Netty ByteBuf. */ private def byteStringToByteBuf(bytes: ByteString): ByteBuf = { if (bytes.isEmpty) { Unpooled.EMPTY_BUFFER } else { Unpooled.wrappedBuffer(bytes.asByteBuffer) } } private def byteStringToHttpContent(bytes: ByteString): HttpContent = { new DefaultHttpContent(byteStringToByteBuf(bytes)) } // cache the date header of the last response so we only need to compute it every second private var cachedDateHeader: (Long, String) = (Long.MinValue, null) private def dateHeader: String = { val currentTimeMillis = System.currentTimeMillis() val currentTimeSeconds = currentTimeMillis / 1000 cachedDateHeader match { case (cachedSeconds, dateHeaderString) if cachedSeconds == currentTimeSeconds => dateHeaderString case _ => val dateHeaderString = ResponseHeader.httpDateFormat.print(currentTimeMillis) cachedDateHeader = currentTimeSeconds -> dateHeaderString dateHeaderString } } }
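The `dateHeader` helper above memoises the formatted date at one-second granularity so the formatter only runs once per second under load. A standalone sketch of the same idea, using `java.time` instead of the Joda-based formatter in the file; it is illustrative, not the actual implementation.

import java.time.format.DateTimeFormatter
import java.time.{ Instant, ZoneOffset }

object SecondGranularityDateHeader {
  private val httpDate = DateTimeFormatter.RFC_1123_DATE_TIME.withZone(ZoneOffset.UTC)

  // (epoch seconds, formatted header) of the last value produced
  @volatile private var cached: (Long, String) = (Long.MinValue, null)

  def apply(): String = {
    val nowMillis = System.currentTimeMillis()
    val nowSeconds = nowMillis / 1000
    cached match {
      case (seconds, header) if seconds == nowSeconds => header
      case _ =>
        val fresh = httpDate.format(Instant.ofEpochMilli(nowMillis))
        cached = nowSeconds -> fresh
        fresh
    }
  }
}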
bjfletcher/playframework
framework/src/play-test/src/main/scala/play/api/test/package.scala
/* * Copyright (C) 2009-2016 Typesafe Inc. <http://www.typesafe.com> */ package play.api /** * Contains test helpers. */ package object test { /** * Provided as an implicit by WithServer and WithBrowser. */ type Port = Int }
bjfletcher/playframework
framework/src/play-java/src/main/scala/play/core/ObjectMapperModule.scala
/* * Copyright (C) 2009-2016 Typesafe Inc. <http://www.typesafe.com> */ package play.core import com.fasterxml.jackson.databind.ObjectMapper import play.api._ import play.api.inject._ import play.libs.Json import javax.inject._ import scala.concurrent.Future /** * Module that injects an object mapper to the JSON library on start and on stop. * * This solves the issue of the ObjectMapper cache from holding references to the application class loader between * reloads. */ class ObjectMapperModule extends Module { def bindings(environment: Environment, configuration: Configuration) = Seq( bind[ObjectMapper].toProvider[ObjectMapperProvider].eagerly() ) } @Singleton class ObjectMapperProvider @Inject() (lifecycle: ApplicationLifecycle) extends Provider[ObjectMapper] { lazy val get = { val objectMapper = Json.newDefaultMapper() Json.setObjectMapper(objectMapper) lifecycle.addStopHook { () => Future.successful(Json.setObjectMapper(null)) } objectMapper } }
bjfletcher/playframework
documentation/manual/working/scalaGuide/main/async/code/ScalaWebSockets.scala
<filename>documentation/manual/working/scalaGuide/main/async/code/ScalaWebSockets.scala<gh_stars>0 /* * Copyright (C) 2009-2016 Typesafe Inc. <http://www.typesafe.com> */ package scalaguide.async.websockets import akka.stream.Materializer import akka.stream.scaladsl._ import play.api.http.websocket.{ TextMessage, Message } import play.api.mvc.WebSocket.MessageFlowTransformer import play.api.test._ import scala.concurrent.{ Future, Promise } object ScalaWebSockets extends PlaySpecification { import java.io.Closeable import play.api.mvc.{Result, WebSocket} import play.api.libs.json.Json "Scala WebSockets" should { def runWebSocket[In, Out](webSocket: WebSocket, in: Source[Message, _], expectOut: Int)(implicit mat: Materializer): Either[Result, List[Message]] = { await(webSocket(FakeRequest())).right.map { flow => // When running in the real world, if the flow cancels upstream, Play's WebSocket protocol implementation will // handle this and close the WebSocket, but here, that won't happen, so we redeem the future when we receive // enough. val promise = Promise[List[Message]]() if (expectOut == 0) promise.success(Nil) val flowResult = in via flow runWith Sink.fold[(List[Message], Int), Message]((Nil, expectOut)) { (state, out) => val (result, remaining) = state if (remaining == 1) { promise.success(result :+ out) } (result :+ out, remaining - 1) } import play.api.libs.iteratee.Execution.Implicits.trampoline await(Future.firstCompletedOf(Seq(promise.future, flowResult.map(_._1)))) } } "support actors" in { import akka.actor._ "allow creating a simple echoing actor" in new WithApplication() { runWebSocket(Samples.Controller1.socket, Source.single(TextMessage("foo")), 1) must beRight.like { case list => list must_== List(TextMessage("I received your message: foo")) } } "allow cleaning up" in new WithApplication() { val closed = Promise[Unit]() val someResource = new Closeable() { def close() = closed.success(()) } class MyActor extends Actor { def receive = PartialFunction.empty //#actor-post-stop override def postStop() = { someResource.close() } //#actor-post-stop } runWebSocket( WebSocket.acceptWithActor[String, String](req => out => Props(new MyActor)), Source.empty, 0 ) must beRight[List[Message]] await(closed.future) must_== () } "allow closing the WebSocket" in new WithApplication() { class MyActor extends Actor { def receive = PartialFunction.empty //#actor-stop import akka.actor.PoisonPill self ! 
PoisonPill //#actor-stop } runWebSocket( WebSocket.acceptWithActor[String, String](req => out => Props(new MyActor)), Source.maybe, 0 ) must beRight[List[Message]] } "allow rejecting the WebSocket" in new WithApplication() { runWebSocket(Samples.Controller2.socket, Source.empty, 0) must beLeft.which { result => result.header.status must_== FORBIDDEN } } "allow creating a json actor" in new WithApplication() { val json = Json.obj("foo" -> "bar") runWebSocket(Samples.Controller4.socket, Source.single(TextMessage(Json.stringify(json))), 1) must beRight.which { out => out must_== List(TextMessage(Json.stringify(json))) } } "allow creating a higher level object actor" in new WithApplication() { runWebSocket( Samples.Controller5.socket, Source.single(TextMessage(Json.stringify(Json.toJson(Samples.Controller5.InEvent("blah"))))), 1 ) must beRight.which { out => out must_== List(TextMessage(Json.stringify(Json.toJson(Samples.Controller5.OutEvent("blah"))))) } } } "support iteratees" in { "iteratee1" in new WithApplication() { runWebSocket(Samples.Controller6.socket, Source.empty, 1) must beRight.which { out => out must_== List(TextMessage("Hello!")) } } "iteratee2" in new WithApplication() { runWebSocket(Samples.Controller7.socket, Source.maybe, 1) must beRight.which { out => out must_== List(TextMessage("Hello!")) } } "iteratee3" in new WithApplication() { runWebSocket(Samples.Controller8.socket, Source.single(TextMessage("foo")), 1) must beRight.which { out => out must_== List(TextMessage("I received your message: foo")) } } } } /** * The default await timeout. Override this to change it. */ import scala.concurrent.duration._ override implicit def defaultAwaitTimeout = 2.seconds } object Samples { object Controller1 { import Actor1.MyWebSocketActor //#actor-accept import play.api.mvc._ import play.api.Play.current import play.api.Play.materializer def socket = WebSocket.acceptWithActor[String, String] { request => out => MyWebSocketActor.props(out) } //#actor-accept } object Actor1 { //#example-actor import akka.actor._ object MyWebSocketActor { def props(out: ActorRef) = Props(new MyWebSocketActor(out)) } class MyWebSocketActor(out: ActorRef) extends Actor { def receive = { case msg: String => out ! ("I received your message: " + msg) } } //#example-actor } object Controller2 extends play.api.mvc.Controller { import Actor1.MyWebSocketActor //#actor-try-accept import scala.concurrent.Future import play.api.mvc._ import play.api.Play.current import play.api.Play.materializer def socket = WebSocket.tryAcceptWithActor[String, String] { request => Future.successful(request.session.get("user") match { case None => Left(Forbidden) case Some(_) => Right(MyWebSocketActor.props) }) } //#actor-try-accept } object Controller4 { import akka.actor._ class MyWebSocketActor(out: ActorRef) extends Actor { import play.api.libs.json.JsValue def receive = { case msg: JsValue => out ! msg } } object MyWebSocketActor { def props(out: ActorRef) = Props(new MyWebSocketActor(out)) } //#actor-json import play.api.mvc._ import play.api.libs.json._ import play.api.Play.current import play.api.Play.materializer def socket = WebSocket.acceptWithActor[JsValue, JsValue] { request => out => MyWebSocketActor.props(out) } //#actor-json } object Controller5 { import akka.actor._ case class InEvent(foo: String) case class OutEvent(bar: String) class MyWebSocketActor(out: ActorRef) extends Actor { def receive = { case InEvent(foo) => out ! 
OutEvent(foo) } } object MyWebSocketActor { def props(out: ActorRef) = Props(new MyWebSocketActor(out)) } //#actor-json-formats import play.api.libs.json._ implicit val inEventFormat = Json.format[InEvent] implicit val outEventFormat = Json.format[OutEvent] //#actor-json-formats //#actor-json-frames import play.api.mvc.WebSocket.FrameFormatter implicit val messageFlowTransformer = MessageFlowTransformer.jsonMessageFlowTransformer[InEvent, OutEvent] //#actor-json-frames //#actor-json-in-out import play.api.mvc._ import play.api.Play.current import play.api.Play.materializer def socket = WebSocket.acceptWithActor[InEvent, OutEvent] { request => out => MyWebSocketActor.props(out) } //#actor-json-in-out } object Controller6 { //#iteratee1 import play.api.mvc._ import play.api.libs.iteratee._ import play.api.libs.concurrent.Execution.Implicits.defaultContext def socket = WebSocket.using[String] { request => // Log events to the console val in = Iteratee.foreach[String](println).map { _ => println("Disconnected") } // Send a single 'Hello!' message val out = Enumerator("Hello!") (in, out) } //#iteratee1 } object Controller7 { //#iteratee2 import play.api.mvc._ import play.api.libs.iteratee._ def socket = WebSocket.using[String] { request => // Just ignore the input val in = Iteratee.ignore[String] // Send a single 'Hello!' message and close val out = Enumerator("Hello!").andThen(Enumerator.eof) (in, out) } //#iteratee2 } object Controller8 { //#iteratee3 import play.api.mvc._ import play.api.libs.iteratee._ import play.api.libs.concurrent.Execution.Implicits.defaultContext def socket = WebSocket.using[String] { request => // Concurrent.broadcast returns (Enumerator, Concurrent.Channel) val (out, channel) = Concurrent.broadcast[String] // log the message to stdout and send response back to client val in = Iteratee.foreach[String] { msg => println(msg) // the Enumerator returned by Concurrent.broadcast subscribes to the channel and will // receive the pushed messages channel push("I received your message: " + msg) } (in,out) } //#iteratee3 } }
bjfletcher/playframework
framework/src/play-jdbc/src/main/scala/play/api/db/package.scala
/* * Copyright (C) 2009-2016 Typesafe Inc. <http://www.typesafe.com> */ package play.api /** * Contains the JDBC database access API. * * Example, retrieving a connection from the 'customers' datasource: * {{{ * val conn = DB.getConnection("customers") * }}} */ package object db { type NamedDatabase = play.db.NamedDatabase }
bjfletcher/playframework
framework/src/fork-run/src/main/scala/play/forkrun/Logger.scala
/* * Copyright (C) 2009-2016 Typesafe Inc. <http://www.typesafe.com> */ package play.forkrun import java.io.{ PrintStream, PrintWriter, StringWriter } import play.runsupport.{ Colors, LoggerProxy } object Logger { case class Level(value: Int, name: String, label: String) { override def toString = name } object Level { val Debug = Level(1, "debug", Label.debug) val Info = Level(2, "info", Label.info) val Warn = Level(3, "warn", Label.warn) val Error = Level(4, "error", Label.error) val levels = Seq(Debug, Info, Warn, Error) def apply(value: Int): Option[Level] = levels find (_.value == value) def apply(name: String): Option[Level] = levels find (_.name == name) } object Label { val debug = "[debug] " val info = "[info] " val warn = "[" + Colors.yellow("warn") + "] " val error = "[" + Colors.red("error") + "] " val success = "[" + Colors.green("success") + "] " } val NewLine = sys.props("line.separator") def apply(level: Level): Logger = new Logger(level) def apply(level: String): Logger = new Logger(Level(level).getOrElse(Level.Info)) } class Logger(out: PrintStream, logLevel: Logger.Level) extends LoggerProxy { import Logger._ def this(logLevel: Logger.Level) = this(System.out, logLevel) def level: Logger.Level = logLevel def verbose(message: => String): Unit = debug(message) def debug(message: => String): Unit = log(Level.Debug, message) def info(message: => String): Unit = log(Level.Info, message) def warn(message: => String): Unit = log(Level.Warn, message) def error(message: => String): Unit = log(Level.Error, message) def trace(t: => Throwable): Unit = { val stackTrace = new StringWriter t.printStackTrace(new PrintWriter(stackTrace)) log(Level.Error, stackTrace.toString) } def success(message: => String): Unit = printLog(Label.success, message) def log(level: Level, message: => String): Unit = { if (level.value >= logLevel.value) printLog(level.label, message) } def log(level: String, message: => String): Unit = { for (logLevel <- Logger.Level(level)) log(logLevel, message) } def printLog(label: String, message: String, separator: String = NewLine): Unit = out.synchronized { for (line <- message.split(separator)) { out.print(label) out.println(line) } } }
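A short, hypothetical use of the fork-run logger above:

import play.forkrun.Logger

object LoggerExample {
  def main(args: Array[String]): Unit = {
    val log = Logger("info") // unknown level names fall back to Info
    log.debug("not printed at info level")
    log.info("starting fork run")
    log.warn("something looks off")
    log.success("done") // success messages bypass the level check
  }
}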
bjfletcher/playframework
framework/src/play/src/main/scala/play/core/parsers/FormUrlEncodedParser.scala
/* * Copyright (C) 2009-2016 Typesafe Inc. <http://www.typesafe.com> */ package play.core.parsers /** An object for parsing application/x-www-form-urlencoded data */ object FormUrlEncodedParser { /** * Parse the content type "application/x-www-form-urlencoded" which consists of a bunch of & separated key=value * pairs, both of which are URL encoded. * @param data The body content of the request, or whatever needs to be so parsed * @param encoding The character encoding of data * @return A ListMap of keys to the sequence of values for that key */ def parseNotPreservingOrder(data: String, encoding: String = "utf-8"): Map[String, Seq[String]] = { // Generate the pairs of values from the string. parseToPairs(data, encoding).groupBy(_._1).map(param => param._1 -> param._2.map(_._2)).toMap } /** * Parse the content type "application/x-www-form-urlencoded" which consists of a bunch of & separated key=value * pairs, both of which are URL encoded. We are careful in this parser to maintain the original order of the * keys by using OrderPreserving.groupBy as some applications depend on the original browser ordering. * @param data The body content of the request, or whatever needs to be so parsed * @param encoding The character encoding of data * @return A ListMap of keys to the sequence of values for that key */ def parse(data: String, encoding: String = "utf-8"): Map[String, Seq[String]] = { // Generate the pairs of values from the string. val pairs: Seq[(String, String)] = parseToPairs(data, encoding) // Group the pairs by the key (first item of the pair) being sure to preserve insertion order play.utils.OrderPreserving.groupBy(pairs)(_._1) } /** * Parse the content type "application/x-www-form-urlencoded", mapping to a Java compatible format. * @param data The body content of the request, or whatever needs to be so parsed * @param encoding The character encoding of data * @return A Map of keys to the sequence of values for that key */ def parseAsJava(data: String, encoding: String): java.util.Map[String, java.util.List[String]] = { import scala.collection.JavaConverters._ parse(data, encoding).map { case (key, values) => key -> values.asJava }.asJava } /** * Parse the content type "application/x-www-form-urlencoded", mapping to a Java compatible format. * @param data The body content of the request, or whatever needs to be so parsed * @param encoding The character encoding of data * @return A Map of keys to the sequence of array values for that key */ def parseAsJavaArrayValues(data: String, encoding: String): java.util.Map[String, Array[String]] = { import scala.collection.JavaConverters._ parse(data, encoding).map { case (key, values) => key -> values.toArray }.asJava } /** * Do the basic parsing into a sequence of key/value pairs * @param data The data to parse * @param encoding The encoding to use for interpreting the data * @return The sequence of key/value pairs */ private def parseToPairs(data: String, encoding: String): Seq[(String, String)] = { import java.net._ // Generate all the pairs, with potentially redundant key values, by parsing the body content. data.split('&').flatMap { param => if (param.contains("=") && !param.startsWith("=")) { val parts = param.split("=") val key = URLDecoder.decode(parts.head, encoding) val value = URLDecoder.decode(parts.tail.headOption.getOrElse(""), encoding) Seq(key -> value) } else { Nil } }.toSeq } }
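An illustrative call to the parser above; the expected output in the comment is my reading of the implementation (values grouped per key, key order of first appearance preserved by `OrderPreserving.groupBy`), not a documented guarantee.

import play.core.parsers.FormUrlEncodedParser

object FormParsingExample {
  def main(args: Array[String]): Unit = {
    val parsed = FormUrlEncodedParser.parse("name=Alice&tag=a&tag=b%20c")
    // Expected shape: Map("name" -> Seq("Alice"), "tag" -> Seq("a", "b c"))
    parsed.foreach { case (key, values) => println(s"$key -> ${values.mkString(", ")}") }
  }
}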
bjfletcher/playframework
framework/src/play/src/main/scala/play/utils/Colors.scala
/* * Copyright (C) 2009-2016 Typesafe Inc. <http://www.typesafe.com> */ package play.utils object Colors { import scala.Console._ lazy val isANSISupported = { Option(System.getProperty("sbt.log.noformat")).map(_ != "true").orElse { Option(System.getProperty("os.name")) .map(_.toLowerCase(java.util.Locale.ENGLISH)) .filter(_.contains("windows")) .map(_ => false) }.getOrElse(true) } def red(str: String): String = if (isANSISupported) (RED + str + RESET) else str def blue(str: String): String = if (isANSISupported) (BLUE + str + RESET) else str def cyan(str: String): String = if (isANSISupported) (CYAN + str + RESET) else str def green(str: String): String = if (isANSISupported) (GREEN + str + RESET) else str def magenta(str: String): String = if (isANSISupported) (MAGENTA + str + RESET) else str def white(str: String): String = if (isANSISupported) (WHITE + str + RESET) else str def black(str: String): String = if (isANSISupported) (BLACK + str + RESET) else str def yellow(str: String): String = if (isANSISupported) (YELLOW + str + RESET) else str }
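A tiny, hypothetical example of the helper above; output is coloured only when ANSI is supported (see `isANSISupported`).

import play.utils.Colors

object ColorsExample {
  def main(args: Array[String]): Unit = {
    println(Colors.green("compiled successfully"))
    println(Colors.red("compilation failed"))
  }
}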
bjfletcher/playframework
framework/src/sbt-fork-run-plugin/src/sbt-test/fork-run/dev-mode/project/plugins.sbt
// // Copyright (C) 2009-2016 Typesafe Inc. <http://www.typesafe.com> // addSbtPlugin("com.typesafe.play" % "sbt-fork-run-plugin" % playVersion) addSbtPlugin("com.typesafe.sbt" % "sbt-less" % "1.0.0") // get the play version from a system property or otherwise the run.properties file (for sbt server) def playVersion: String = { sys.props.get("project.version") orElse { val properties = new java.util.Properties sbt.IO.load(properties, file("run.properties")) Option(properties.getProperty("project.version")) } getOrElse { sys.error("No play version specified") } }
bjfletcher/playframework
framework/src/sbt-plugin/src/main/scala/play/sbt/run/PlayRun.scala
/* * Copyright (C) 2009-2016 Typesafe Inc. <http://www.typesafe.com> */ package play.sbt.run import annotation.tailrec import collection.JavaConverters._ import sbt._ import sbt.Keys._ import play.sbt._ import play.sbt.PlayImport._ import play.sbt.PlayImport.PlayKeys._ import play.sbt.PlayInternalKeys._ import play.sbt.Colors import play.core.{ Build, BuildLink, BuildDocHandler } import play.runsupport.classloader._ import play.runsupport.{ AssetsClassLoader, FileWatchService, Reloader } import com.typesafe.sbt.packager.universal.UniversalPlugin.autoImport._ import com.typesafe.sbt.packager.Keys.executableScriptName import com.typesafe.sbt.web.SbtWeb.autoImport._ /** * Provides mechanisms for running a Play application in SBT */ object PlayRun { /** * Configuration for the Play docs application's dependencies. Used to build a classloader for * that application. Hidden so that it isn't exposed when the user application is published. */ val DocsApplication = config("docs").hide val createURLClassLoader: ClassLoaderCreator = Reloader.createURLClassLoader val createDelegatedResourcesClassLoader: ClassLoaderCreator = Reloader.createDelegatedResourcesClassLoader val playDefaultRunTask = playRunTask(playRunHooks, playDependencyClasspath, playDependencyClassLoader, playReloaderClasspath, playReloaderClassLoader, playAssetsClassLoader) /** * This method is public API, used by sbt-echo, which is used by Activator: * * https://github.com/typesafehub/sbt-echo/blob/v0.1.3/play/src/main/scala-sbt-0.13/com/typesafe/sbt/echo/EchoPlaySpecific.scala#L20 * * Do not change its signature without first consulting the Activator team. Do not change its signature in a minor * release. */ def playRunTask(runHooks: TaskKey[Seq[play.sbt.PlayRunHook]], dependencyClasspath: TaskKey[Classpath], dependencyClassLoader: TaskKey[ClassLoaderCreator], reloaderClasspath: TaskKey[Classpath], reloaderClassLoader: TaskKey[ClassLoaderCreator], assetsClassLoader: TaskKey[ClassLoader => ClassLoader]): Def.Initialize[InputTask[Unit]] = Def.inputTask { val args = Def.spaceDelimited().parsed val state = Keys.state.value val scope = resolvedScoped.value.scope val interaction = playInteractionMode.value val reloadCompile = () => PlayReload.compile( () => Project.runTask(playReload in scope, state).map(_._2).get, () => Project.runTask(reloaderClasspath in scope, state).map(_._2).get, () => Project.runTask(streamsManager in scope, state).map(_._2).get.toEither.right.toOption ) val runSbtTask: String => AnyRef = (task: String) => { val parser = Act.scopedKeyParser(state) val Right(sk) = complete.DefaultParsers.result(parser, task) val result = Project.runTask(sk.asInstanceOf[Def.ScopedKey[Task[AnyRef]]], state).map(_._2) result.flatMap(_.toEither.right.toOption).orNull } lazy val devModeServer = Reloader.startDevMode( runHooks.value, (javaOptions in Runtime).value, dependencyClasspath.value.files, dependencyClassLoader.value, reloadCompile, reloaderClassLoader.value, assetsClassLoader.value, playCommonClassloader.value, playMonitoredFiles.value, fileWatchService.value, (managedClasspath in DocsApplication).value.files, playDocsJar.value, playDefaultPort.value, playDefaultAddress.value, baseDirectory.value, devSettings.value, args, runSbtTask, (mainClass in (Compile, Keys.run)).value.get ) interaction match { case nonBlocking: PlayNonBlockingInteractionMode => nonBlocking.start(devModeServer) case blocking => devModeServer println() println(Colors.green("(Server started, use Ctrl+D to stop and go back to the console...)")) println() // If we 
have both Watched.Configuration and Watched.ContinuousState // attributes and if Watched.ContinuousState.count is 1 then we assume // we're in ~ run mode val maybeContinuous = for { watched <- state.get(Watched.Configuration) watchState <- state.get(Watched.ContinuousState) if watchState.count == 1 } yield watched maybeContinuous match { case Some(watched) => // ~ run mode interaction doWithoutEcho { twiddleRunMonitor(watched, state, devModeServer.buildLink, Some(WatchState.empty)) } case None => // run mode interaction.waitForCancel() } devModeServer.close() println() } } /** * Monitor changes in ~run mode. */ @tailrec private def twiddleRunMonitor(watched: Watched, state: State, reloader: BuildLink, ws: Option[WatchState] = None): Unit = { val ContinuousState = AttributeKey[WatchState]("watch state", "Internal: tracks state for continuous execution.") def isEOF(c: Int): Boolean = c == 4 @tailrec def shouldTerminate: Boolean = (System.in.available > 0) && (isEOF(System.in.read()) || shouldTerminate) val sourcesFinder = PathFinder { watched watchPaths state } val watchState = ws.getOrElse(state get ContinuousState getOrElse WatchState.empty) val (triggered, newWatchState, newState) = try { val (triggered, newWatchState) = SourceModificationWatch.watch(sourcesFinder, watched.pollInterval, watchState)(shouldTerminate) (triggered, newWatchState, state) } catch { case e: Exception => val log = state.log log.error("Error occurred obtaining files to watch. Terminating continuous execution...") (false, watchState, state.fail) } if (triggered) { //Then launch compile Project.synchronized { val start = System.currentTimeMillis Project.runTask(compile in Compile, newState).get._2.toEither.right.map { _ => val duration = System.currentTimeMillis - start val formatted = duration match { case ms if ms < 1000 => ms + "ms" case seconds => (seconds / 1000) + "s" } println("[" + Colors.green("success") + "] Compiled in " + formatted) } } // Avoid launching too much compilation Thread.sleep(Watched.PollDelayMillis) // Call back myself twiddleRunMonitor(watched, newState, reloader, Some(newWatchState)) } else { () } } val playPrefixAndAssetsSetting = playPrefixAndAssets := { assetsPrefix.value -> (WebKeys.public in Assets).value } val playAllAssetsSetting = playAllAssets := Seq(playPrefixAndAssets.value) val playAssetsClassLoaderSetting = playAssetsClassLoader := { parent => new AssetsClassLoader(parent, playAllAssets.value) } val playTestProdCommand = Command.args("testProd", "<port>")(testProd) val playStartCommand = Command.args("start", "<port>") { (state: State, args: Seq[String]) => state.log.warn("The start command is deprecated, and will be removed in a future version of Play.") state.log.warn("To run Play in production mode, run 'stage' instead, and then execute the generated start script in target/universal/stage/bin.") state.log.warn("To test your application using production mode, run 'testProd' instead.") testProd(state, args) } private def testProd(state: State, args: Seq[String]): State = { val extracted = Project.extract(state) val interaction = extracted.get(playInteractionMode) val noExitSbt = args.contains("--no-exit-sbt") val filter = Set("--no-exit-sbt") val filtered = args.filterNot(filter) val devSettings = Seq.empty[(String, String)] // there are no dev settings in a prod website // Parse HTTP port argument val (properties, httpPort, httpsPort, httpAddress) = Reloader.filterArgs(filtered, extracted.get(playDefaultPort), extracted.get(playDefaultAddress), devSettings) 
require(httpPort.isDefined || httpsPort.isDefined, "You have to specify https.port when http.port is disabled") Project.runTask(stage, state).get._2.toEither match { case Left(_) => println() println("Cannot start with errors.") println() state.fail case Right(_) => val stagingBin = Some(extracted.get(stagingDirectory in Universal) / "bin" / extracted.get(executableScriptName)).map { f => if (System.getProperty("os.name").toLowerCase(java.util.Locale.ENGLISH).contains("win")) f.getAbsolutePath + ".bat" else f.getAbsolutePath }.get val javaProductionOptions = Project.runTask(javaOptions in Production, state).get._2.toEither.right.getOrElse(Seq[String]()) // Note that I'm unable to pass system properties along with properties... if I do then I receive: // java.nio.charset.IllegalCharsetNameException: "UTF-8" // Things are working without passing system properties, and I'm unsure that they need to be passed explicitly. If def main(args: Array[String]){ // problem occurs in this area then at least we know what to look at. val args = Seq(stagingBin) ++ properties.map { case (key, value) => s"-D$key=$value" } ++ javaProductionOptions ++ Seq("-Dhttp.port=" + httpPort.getOrElse("disabled")) val builder = new java.lang.ProcessBuilder(args.asJava) new Thread { override def run() { if (noExitSbt) { Process(builder).! } else { System.exit(Process(builder).!) } } }.start() println(Colors.green( """| |(Starting server. Type Ctrl+D to exit logs, the server will remain in background) | """.stripMargin)) interaction.waitForCancel() println() if (noExitSbt) { state } else { state.copy(remainingCommands = Seq.empty) } } } val playStopProdCommand = Command.args("stopProd", "") { (state: State, args: Seq[String]) => val extracted = Project.extract(state) val pidFile = extracted.get(stagingDirectory in Universal) / "RUNNING_PID" if (!pidFile.exists) { println("No PID file found. Are you sure the app is running?") } else { val pid = IO.read(pidFile) s"kill $pid".! // PID file will be deleted by a shutdown hook attached on start in ServerStart.scala println(s"Stopped application with process ID $pid") } println() if (args.contains("--no-exit-sbt")) { state } else { state.copy(remainingCommands = Seq.empty) } } }
bjfletcher/playframework
framework/src/play/src/main/scala/play/core/Execution.scala
/* * Copyright (C) 2009-2016 Typesafe Inc. <http://www.typesafe.com> */ package play.core import java.util.concurrent.ForkJoinPool import play.api.{ Application, Play } import scala.concurrent.{ ExecutionContext, ExecutionContextExecutor } /** * Provides access to Play's internal ExecutionContext. */ private[play] object Execution { def internalContext: ExecutionContextExecutor = { val appOrNull: Application = Play._currentApp appOrNull match { case null => common case app: Application => app.actorSystem.dispatcher } } object Implicits { implicit def internalContext = Execution.internalContext } /** * Use this as a fallback when the application is unavailable. * The ForkJoinPool implementation promises to create threads on-demand * and clean them up when not in use (standard is when idle for 2 * seconds). */ private val common = ExecutionContext.fromExecutor(new ForkJoinPool()) }
bjfletcher/playframework
framework/src/play-integration-test/src/test/scala/play/it/tools/HttpBin.scala
<reponame>bjfletcher/playframework<filename>framework/src/play-integration-test/src/test/scala/play/it/tools/HttpBin.scala /* * Copyright (C) 2009-2016 Typesafe Inc. <http://www.typesafe.com> */ package play.it.tools import akka.stream.Materializer import akka.stream.scaladsl.Source import play.api.libs.ws.ahc.AhcWSComponents import play.api.routing.SimpleRouter import play.api.routing.Router.Routes import play.api.routing.sird._ import play.api.{ Environment, ApplicationLoader, BuiltInComponentsFromContext } import play.api.mvc._ import play.api.mvc.Results._ import play.api.libs.json._ import play.filters.gzip.GzipFilter /** * This is a reimplementation of the excellent httpbin.org service * by <NAME> * * Motivation: We couldn't use httpbin.org directly for our CI. */ object HttpBinApplication { private val requestHeaderWriter = new Writes[RequestHeader] { def writes(r: RequestHeader): JsValue = Json.obj( "origin" -> r.remoteAddress, "url" -> "", "args" -> r.queryString.mapValues(_.head), "headers" -> r.headers.toSimpleMap ) } private def requestWriter[A] = new Writes[Request[A]] { def writes(r: Request[A]): JsValue = requestHeaderWriter.writes(r).as[JsObject] ++ Json.obj( "json" -> JsNull, "data" -> "", "form" -> JsObject(Nil) ) ++ (r.body match { // Json Body case e: JsValue => Json.obj("json" -> e) // X-WWW-Form-Encoded case f: Map[String, Seq[String]] @unchecked => Json.obj("form" -> JsObject(f.mapValues(x => JsString(x.mkString(", "))).toSeq)) // Anything else case b => Json.obj("data" -> JsString(b.toString)) }) } val getIp: Routes = { case GET(p"/ip") => Action { request => Ok(Json.obj("origin" -> request.remoteAddress)) } } val getUserAgent: Routes = { case GET(p"/user-agent") => Action { request => Ok(Json.obj("user-agent" -> request.headers.get("User-Agent"))) } } val getHeaders: Routes = { case GET(p"/headers") => Action { request => Ok(Json.obj("headers" -> request.headers.toSimpleMap)) } } val get: Routes = { case GET(p"/get") => Action { request => Ok(requestHeaderWriter.writes(request)) } } val patch: Routes = { case PATCH(p"/patch") => Action { request => Ok(requestWriter.writes(request)) } } val post: Routes = { case POST(p"/post") => Action { request => Ok(requestWriter.writes(request)) } } val put: Routes = { case PUT(p"/put") => Action { request => Ok(requestWriter.writes(request)) } } val delete: Routes = { case DELETE(p"/delete") => Action { request => Ok(requestHeaderWriter.writes(request)) } } private def gzipFilter(mat: Materializer) = new GzipFilter()(mat) def gzip(implicit mat: Materializer) = Seq("GET", "PATCH", "POST", "PUT", "DELETE").map { method => val route: Routes = { case r @ p"/gzip" if r.method == method => gzipFilter(mat)(Action { request => Ok(requestHeaderWriter.writes(request).as[JsObject] ++ Json.obj("gzipped" -> true, "method" -> method)) }) } route }.reduceLeft((a, b) => a.orElse(b)) val status: Routes = { case GET(p"/status/$status<[0-9]+>") => Action { val code = status.toInt Results.Status(code) } } val responseHeaders: Routes = { case GET(p"/response-header") => Action { request => Ok("").withHeaders(request.queryString.mapValues(_.mkString(",")).toSeq: _*) } } val redirect: Routes = { case GET(p"/redirect/0") => Action { Redirect("/get") } case GET(p"/redirect/$param<([0-9]+)>") => Action { Redirect("redirect/" + param) } } val redirectTo: Routes = { case GET(p"/redirect-to") => Action { request => request.queryString.get("url").map { u => Redirect(u.head) }.getOrElse { BadRequest("") } } } val cookies: Routes = { case GET(p"/cookies") => 
Action { request => Ok(Json.obj("cookies" -> JsObject(request.cookies.toSeq.map(x => x.name -> JsString(x.value))))) } } val cookiesSet: Routes = { case GET(p"/cookies/set") => Action { request => Redirect("/cookies").withCookies(request.queryString.mapValues(_.head).toSeq.map { case (k, v) => Cookie(k, v) }: _*) } } val cookiesDelete: Routes = { case GET(p"/cookies/delete") => Action { request => Redirect("/cookies").discardingCookies(request.queryString.keys.toSeq.map(DiscardingCookie(_)): _*) } } val basicAuth: Routes = { case GET(p"/basic-auth/$username/$password") => Action { request => request.headers.get("Authorization").flatMap { authorization => authorization.split(" ").drop(1).headOption.filter { encoded => new String(org.apache.commons.codec.binary.Base64.decodeBase64(encoded.getBytes)).split(":").toList match { case u :: p :: Nil if u == username && password == p => true case _ => false } }.map(_ => Ok(Json.obj("authenticated" -> true))) }.getOrElse { Unauthorized.withHeaders("WWW-Authenticate" -> """Basic realm="Secured"""") } } } val stream: Routes = { case GET(p"/stream/$param<[0-9]+>") => Action { request => val body = requestHeaderWriter.writes(request).as[JsObject] val content = 0.to(param.toInt).map { index => body ++ Json.obj("id" -> index) } Ok.chunked(Source(content)).as("application/json") } } val delay: Routes = { case GET(p"/delay/$duration<[0-9+]") => Action.async { request => import scala.concurrent.Await import scala.concurrent.Promise import scala.concurrent.Future import scala.concurrent.ExecutionContext.Implicits.global import scala.concurrent.duration._ import scala.util.Try val p = Promise[Result]() Future { Try { Await.result(p.future, Duration(duration.toLong, SECONDS)) }.getOrElse { p.success(Ok(requestWriter.writes(request))) } } p.future } } val html: Routes = { case GET(p"/html") => Action { Ok(""" <!DOCTYPE html> <html> <head> </head> <body> <h1><NAME> - Moby-Dick</h1> <div> <p> Availing himself of the mild, summer-cool weather that now reigned in these latitudes, and in preparation for the peculiarly active pursuits shortly to be anticipated, Perth, the begrimed, blistered old blacksmith, had not removed his portable forge to the hold again, after concluding his contributory work for Ahab's leg, but still retained it on deck, fast lashed to ringbolts by the foremast; being now almost incessantly invoked by the headsmen, and harpooneers, and bowsmen to do some little job for them; altering, or repairing, or new shaping their various weapons and boat furniture. Often he would be surrounded by an eager circle, all waiting to be served; holding boat-spades, pike-heads, harpoons, and lances, and jealously watching his every sooty movement, as he toiled. Nevertheless, this old man's was a patient hammer wielded by a patient arm. No murmur, no impatience, no petulance did come from him. Silent, slow, and solemn; bowing over still further his chronically broken back, he toiled away, as if toil were life itself, and the heavy beating of his hammer the heavy beating of his heart. And so it was.β€”Most miserable! A peculiar walk in this old man, a certain slight but painful appearing yawing in his gait, had at an early period of the voyage excited the curiosity of the mariners. And to the importunity of their persisted questionings he had finally given in; and so it came to pass that every one now knew the shameful story of his wretched fate. 
Belated, and not innocently, one bitter winter's midnight, on the road running between two country towns, the blacksmith half-stupidly felt the deadly numbness stealing over him, and sought refuge in a leaning, dilapidated barn. The issue was, the loss of the extremities of both feet. Out of this revelation, part by part, at last came out the four acts of the gladness, and the one long, and as yet uncatastrophied fifth act of the grief of his life's drama. He was an old man, who, at the age of nearly sixty, had postponedly encountered that thing in sorrow's technicals called ruin. He had been an artisan of famed excellence, and with plenty to do; owned a house and garden; embraced a youthful, daughter-like, loving wife, and three blithe, ruddy children; every Sunday went to a cheerful-looking church, planted in a grove. But one night, under cover of darkness, and further concealed in a most cunning disguisement, a desperate burglar slid into his happy home, and robbed them all of everything. And darker yet to tell, the blacksmith himself did ignorantly conduct this burglar into his family's heart. It was the Bottle Conjuror! Upon the opening of that fatal cork, forth flew the fiend, and shrivelled up his home. Now, for prudent, most wise, and economic reasons, the blacksmith's shop was in the basement of his dwelling, but with a separate entrance to it; so that always had the young and loving healthy wife listened with no unhappy nervousness, but with vigorous pleasure, to the stout ringing of her young-armed old husband's hammer; whose reverberations, muffled by passing through the floors and walls, came up to her, not unsweetly, in her nursery; and so, to stout Labor's iron lullaby, the blacksmith's infants were rocked to slumber. Oh, woe on woe! Oh, Death, why canst thou not sometimes be timely? Hadst thou taken this old blacksmith to thyself ere his full ruin came upon him, then had the young widow had a delicious grief, and her orphans a truly venerable, legendary sire to dream of in their after years; and all of them a care-killing competency. </p> </div> </body> </html>""").as("text/html") } } val robots: Routes = { case GET(p"/robots.txt") => Action { Ok("User-agent: *\nDisallow: /deny") } case GET(p"deny") => Action { Ok(""" .-''''''-. .' _ _ '. / O O \ : : | | : __ : \ .-"` `"-. / '. .' '-......-' YOU SHOUDN'T BE HERE """) } } def app = { new BuiltInComponentsFromContext(ApplicationLoader.createContext(Environment.simple())) with AhcWSComponents { def router = SimpleRouter( PartialFunction.empty .orElse(getIp) .orElse(getUserAgent) .orElse(getHeaders) .orElse(get) .orElse(patch) .orElse(post) .orElse(put) .orElse(delete) .orElse(gzip) .orElse(status) .orElse(responseHeaders) .orElse(redirect) .orElse(redirectTo) .orElse(cookies) .orElse(cookiesSet) .orElse(cookiesDelete) .orElse(basicAuth) .orElse(stream) .orElse(delay) .orElse(html) .orElse(robots) ) }.application } }
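// --- Hedged usage sketch (not part of the original file above) ---
// A minimal illustration of how HttpBinApplication.app might be started locally,
// assuming play.api.test.TestServer is on the classpath (it is used elsewhere in
// this repository). The port number is an arbitrary choice for the sketch.
import play.api.test.TestServer
import play.it.tools.HttpBinApplication

object HttpBinUsageSketch {
  def main(args: Array[String]): Unit = {
    val server = TestServer(19001, HttpBinApplication.app)
    server.start()
    try {
      // Any of the routes above can now be exercised, e.g. /ip, /get or /status/418.
      val src = scala.io.Source.fromURL("http://localhost:19001/ip")
      try println(src.mkString) finally src.close()
    } finally {
      server.stop()
    }
  }
}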
bjfletcher/playframework
framework/src/play-java/src/test/scala/play/data/Task.scala
/*
 * Copyright (C) 2009-2016 Typesafe Inc. <http://www.typesafe.com>
 */
package play.data.models

import java.util.Date

import scala.beans.BeanProperty
import scala.annotation.meta.field

class Task {

  type Min = play.data.validation.Constraints.Min @field
  type Required = play.data.validation.Constraints.Required @field
  type DateTime = play.data.format.Formats.DateTime @field

  @Min(10)
  @BeanProperty
  var id: Long = _

  @Required
  @BeanProperty
  var name: String = _

  @BeanProperty
  var done: Boolean = true

  @BeanProperty
  @Required
  @DateTime(pattern = "dd/MM/yyyy")
  var dueDate: Date = _
}
bjfletcher/playframework
framework/src/play-ws/src/main/scala/play/api/libs/ws/ssl/debug/FixInternalDebugLogging.scala
/* * * * Copyright (C) 2009-2016 Typesafe Inc. <http://www.typesafe.com> * */ package play.api.libs.ws.ssl.debug import play.api.libs.ws.ssl._ import java.security.AccessController import scala.util.control.NonFatal /** * This fixes logging for the SSL Debug class. It will worth for both Java 1.6 and Java 1.7 VMs. */ object FixInternalDebugLogging { private val logger = org.slf4j.LoggerFactory.getLogger("play.api.libs.ws.ssl.debug.FixInternalDebugLogging") class MonkeyPatchInternalSslDebugAction(val newOptions: String) extends FixLoggingAction { val logger = org.slf4j.LoggerFactory.getLogger("play.api.libs.ws.ssl.debug.FixInternalDebugLogging.MonkeyPatchInternalSslDebugAction") val initialResource = foldRuntime( older = "/javax/net/ssl/SSLContext.class", // in 1.6 the JSSE classes are in rt.jar newer = "/sun/security/ssl/Debug.class" // in 1.7 the JSSE classes are in jsse.jar ) val debugClassName = foldRuntime( older = "com.sun.net.ssl.internal.ssl.Debug", newer = "sun.security.ssl.Debug" ) /** * Returns true if this class has an instance of the class returned by debugClassName, false otherwise. * * @param className the name of the class. * @return true if this class should be returned in the set of findClasses, false otherwise. */ def isValidClass(className: String): Boolean = { if (className.startsWith("com.sun.net.ssl.internal.ssl")) return true if (className.startsWith("sun.security.ssl")) return true false } /** * Returns true if newOptions is not null and newOptions is not empty. If false, then debug values * @return */ def isUsingDebug: Boolean = (newOptions != null) && (!newOptions.isEmpty) def run() { System.setProperty("javax.net.debug", newOptions) val debugType: Class[_] = Thread.currentThread().getContextClassLoader.loadClass(debugClassName) val newDebug: AnyRef = debugType.newInstance().asInstanceOf[AnyRef] logger.debug(s"run: debugType = $debugType") val debugValue = if (isUsingDebug) newDebug else null var isPatched = false for ( debugClass <- findClasses; debugField <- debugClass.getDeclaredFields ) { if (isValidField(debugField, debugType)) { logger.debug(s"run: patching $debugClass with $debugValue") monkeyPatchField(debugField, debugValue) isPatched = true } } // Add an assertion here in case the class location changes, so the tests fail... if (!isPatched) { throw new IllegalStateException("No debug classes found!") } // Switch out the args (for certpath loggers that AREN'T static and final) // This will result in those classes using the base Debug class which will write to System.out, but // I don't know how to switch out the Debug.getInstance method itself without using a java agent. val argsField = debugType.getDeclaredField("args") monkeyPatchField(argsField, newOptions) } } def apply(newOptions: String) { logger.trace(s"apply: newOptions = ${newOptions}") try { val action = new MonkeyPatchInternalSslDebugAction(newOptions) AccessController.doPrivileged(action) } catch { case NonFatal(e) => throw new IllegalStateException("InternalDebug configuration error", e) } } }
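// --- Hedged usage sketch (not part of the original file above) ---
// FixInternalDebugLogging.apply takes the same option string that would normally be
// set as the `javax.net.debug` system property. The value below is only an example;
// any valid javax.net.debug string could be passed instead.
object FixInternalDebugLoggingSketch {
  def main(args: Array[String]): Unit = {
    // Patches the static Debug fields inside the JSSE classes so that subsequent
    // SSL debug output honours the new options (an IllegalStateException is thrown
    // if no debug classes are found on this JVM).
    play.api.libs.ws.ssl.debug.FixInternalDebugLogging("ssl:handshake")
  }
}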
bjfletcher/playframework
framework/src/play-integration-test/src/test/scala/play/it/bindings/GlobalSettingsSpec.scala
<reponame>bjfletcher/playframework /* * Copyright (C) 2009-2016 Typesafe Inc. <http://www.typesafe.com> */ package play.it.bindings import java.lang.reflect.Method import java.util.concurrent.CompletableFuture import play.api.inject.guice.GuiceApplicationBuilder import play.api.routing.Router import play.api.Application import play.api.mvc._ import play.api.mvc.Results._ import play.api.test._ import play.it._ import play.it.http.{ MockController, JAction } import play.mvc.Http import play.mvc.Http.Context object NettyGlobalSettingsSpec extends GlobalSettingsSpec with NettyIntegrationSpecification trait GlobalSettingsSpec extends PlaySpecification with WsTestClient with ServerIntegrationSpecification { sequential def withServer[T](applicationGlobal: Option[String])(uri: String)(block: String => T) = { implicit val port = testServerPort val additionalSettings = applicationGlobal.fold(Map.empty[String, String]) { s: String => Map("application.global" -> s"play.it.bindings.$s") } import play.api.inject._ import play.api.routing.sird._ lazy val app: Application = new GuiceApplicationBuilder() .configure(additionalSettings) .overrides(bind[Router].to(Router.from { case p"/scala" => Action { request => Ok(request.headers.get("X-Foo").getOrElse("null")) } case p"/java" => JAction(app, JavaAction) })).build() running(TestServer(port, app)) { val response = await(wsUrl(uri).get()) block(response.body) } } "GlobalSettings filters" should { "not have X-Foo header when no Global is configured" in withServer(None)("/scala") { body => body must_== "null" } "have X-Foo header when Scala Global with filters is configured" in withServer(Some("FooFilteringScalaGlobal"))("/scala") { body => body must_== "filter-constructor-called-by-scala-global" } "have X-Foo header when Java Global with filters is configured" in withServer(Some("FooFilteringJavaGlobal"))("/scala") { body => body must_== "filter-default-constructor" } "allow intercepting by Java GlobalSettings.onRequest" in withServer(Some("OnRequestJavaGlobal"))("/java") { body => body must_== "intercepted" } } } /** Inserts an X-Foo header with a custom value. */ class FooFilter(headerValue: String) extends EssentialFilter { def this() = this("filter-default-constructor") def apply(next: EssentialAction) = EssentialAction { request => val fooBarHeaders = request.copy(headers = request.headers.add("X-Foo" -> headerValue)) next(fooBarHeaders) } } /** Scala GlobalSettings object that uses a filter */ object FooFilteringScalaGlobal extends play.api.GlobalSettings { override def doFilter(next: EssentialAction): EssentialAction = { Filters(super.doFilter(next), new FooFilter("filter-constructor-called-by-scala-global")) } } /** Java GlobalSettings class that uses a filter */ class FooFilteringJavaGlobal extends play.GlobalSettings { override def filters[T]() = Array[Class[T]](classOf[FooFilter].asInstanceOf[Class[T]]) } class OnRequestJavaGlobal extends play.GlobalSettings { override def onRequest(request: Http.Request, actionMethod: Method) = { new play.mvc.Action.Simple { def call(ctx: Context) = CompletableFuture.completedFuture(play.mvc.Results.ok("intercepted")) } } } object JavaAction extends MockController { def action = play.mvc.Results.ok(Option(request.getHeader("X-Foo")).getOrElse("null")) }
bjfletcher/playframework
documentation/manual/working/javaGuide/main/forms/code/javaguide/forms/html/routes/package.scala
/*
 * Copyright (C) 2009-2016 Typesafe Inc. <http://www.typesafe.com>
 */
package javaguide.forms.html

package object routes {
  val Application = javaguide.forms.controllers.routes.Application
}
bjfletcher/playframework
framework/src/play-ws/src/test/scala/play/api/libs/ws/ssl/AlgorithmsSpec.scala
/*
 *
 * * Copyright (C) 2009-2016 Typesafe Inc. <http://www.typesafe.com>
 *
 */
package play.api.libs.ws.ssl

import org.specs2.mutable._

import java.security.{ SecureRandom, KeyPairGenerator }
import org.joda.time.Instant
import play.core.server.ssl.CertificateGenerator
import sun.security.x509.AlgorithmId

object AlgorithmsSpec extends Specification {

  import Algorithms._

  "keySize" should {

    "show a keysize of 1024 for RSA" in {
      val dn = "cn=Common Name, ou=engineering, o=company, c=US"
      val from = Instant.now
      val to = from.plus(5000000)

      // Use RSA with a SHA1 certificate signing algorithm.
      val keyGen = KeyPairGenerator.getInstance("RSA")
      keyGen.initialize(1024, new SecureRandom())
      val pair = keyGen.generateKeyPair()
      val cert = CertificateGenerator.generateCertificate(dn, pair, from.toDate, to.toDate, "SHA1WithRSA", AlgorithmId.sha1WithRSAEncryption_oid)

      // RSA is getModulus.bitLength
      keySize(cert.getPublicKey) must_== Some(1024)
    }

    "show a keysize of 1024 for DSA" in {
      val dn = "cn=Common Name, ou=engineering, o=company, c=US"
      val from = Instant.now
      val to = from.plus(5000000)

      // Use DSA with a SHA1 certificate signing algorithm.
      val keyGen = KeyPairGenerator.getInstance("DSA")
      keyGen.initialize(1024, new SecureRandom())
      val pair = keyGen.generateKeyPair()
      val cert = CertificateGenerator.generateCertificate(dn, pair, from.toDate, to.toDate, "SHA1WithDSA", AlgorithmId.sha1WithDSA_oid)

      // DSA is getP.bitLength
      keySize(cert.getPublicKey) must_== Some(1024)
    }
  }

  "decompose" should {

    "decompose MD5" in {
      decomposes("MD5WithRSA") must containTheSameElementsAs(Seq("MD5", "RSA"))
    }

    "decompose MD2" in {
      decomposes("MD2WithRSA") must containTheSameElementsAs(Seq("MD2", "RSA"))
    }

    "decompose SHA1" in {
      decomposes("SHA1WithRSA") must containTheSameElementsAs(Seq("SHA1", "SHA-1", "RSA"))
    }

    "map SHA-1 to SHA1" in {
      decomposes("SHA-1WithRSA") must containTheSameElementsAs(Seq("SHA1", "SHA-1", "RSA"))
    }

    "decompose SHA256" in {
      decomposes("SHA256WithRSA") must containTheSameElementsAs(Seq("SHA256", "RSA"))
    }
  }
}
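// --- Hedged usage sketch (not part of the original file above) ---
// The helpers under test can also be called directly. The sketch sits in the same
// package as the spec in case the Algorithms members have restricted visibility.
package play.api.libs.ws.ssl

object AlgorithmsSketch {
  import Algorithms._

  def main(args: Array[String]): Unit = {
    // Splits a signature algorithm name into its digest and key-algorithm parts.
    println(decomposes("SHA256WithRSA")) // expected to contain "SHA256" and "RSA"
  }
}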
bjfletcher/playframework
documentation/manual/working/scalaGuide/main/tests/code/specs2/ExamplePlaySpecificationSpec.scala
/*
 * Copyright (C) 2009-2016 Typesafe Inc. <http://www.typesafe.com>
 */
package scalaguide.tests.specs2

import play.api.test._

// #scalafunctionaltest-playspecification
object ExamplePlaySpecificationSpec extends PlaySpecification {

  "The specification" should {

    "have access to HeaderNames" in {
      USER_AGENT must be_===("User-Agent")
    }

    "have access to Status" in {
      OK must be_===(200)
    }
  }
}
// #scalafunctionaltest-playspecification
bjfletcher/playframework
framework/src/sbt-plugin/src/main/scala/play/sbt/routes/RoutesCompiler.scala
<reponame>bjfletcher/playframework /* * Copyright (C) 2009-2016 Typesafe Inc. <http://www.typesafe.com> */ package play.sbt.routes import play.core.PlayVersion import play.routes.compiler.{ RoutesGenerator, RoutesCompilationError } import play.routes.compiler.RoutesCompiler.{ RoutesCompilerTask, GeneratedSource } import sbt._ import sbt.Keys._ import com.typesafe.sbt.web.incremental._ import play.api.PlayException import sbt.plugins.JvmPlugin import xsbti.Position import scala.language.implicitConversions object RoutesKeys { val routesCompilerTasks = TaskKey[Seq[RoutesCompilerTask]]("playRoutesTasks", "The routes files to compile") val routes = TaskKey[Seq[File]]("playRoutes", "Compile the routes files") val routesImport = SettingKey[Seq[String]]("playRoutesImports", "Imports for the router") val routesGenerator = SettingKey[RoutesGenerator]("playRoutesGenerator", "The routes generator") val generateReverseRouter = SettingKey[Boolean]("playGenerateReverseRouter", "Whether the reverse router should be generated. Setting to false may reduce compile times if it's not needed.") val namespaceReverseRouter = SettingKey[Boolean]("playNamespaceReverseRouter", "Whether the reverse router should be namespaced. Useful if you have many routers that use the same actions.") /** * This class is used to avoid infinite recursions when configuring aggregateReverseRoutes, since it makes the * ProjectReference a thunk. */ class LazyProjectReference(ref: => ProjectReference) { def project: ProjectReference = ref } object LazyProjectReference { implicit def fromProjectReference(ref: => ProjectReference): LazyProjectReference = new LazyProjectReference(ref) implicit def fromProject(project: => Project): LazyProjectReference = new LazyProjectReference(project) } val aggregateReverseRoutes = SettingKey[Seq[LazyProjectReference]]("playAggregateReverseRoutes", "A list of projects that reverse routes should be aggregated from.") val InjectedRoutesGenerator = play.routes.compiler.InjectedRoutesGenerator val StaticRoutesGenerator = play.routes.compiler.StaticRoutesGenerator } object RoutesCompiler extends AutoPlugin { import RoutesKeys._ override def trigger = noTrigger override def requires = JvmPlugin val autoImport = RoutesKeys override def projectSettings = defaultSettings ++ inConfig(Compile)(routesSettings) ++ inConfig(Test)(routesSettings) def routesSettings = Seq( sources in routes := Nil, routesCompilerTasks <<= Def.taskDyn { // Aggregate all the routes file tasks that we want to compile the reverse routers for. aggregateReverseRoutes.value.map { agg => routesCompilerTasks in (agg.project, configuration.value) }.join.map { aggTasks: Seq[Seq[RoutesCompilerTask]] => // Aggregated tasks need to have forwards router compilation disabled and reverse router compilation enabled. 
val reverseRouterTasks = aggTasks.flatten.map { task => task.copy(forwardsRouter = false, reverseRouter = true) } // Find the routes compile tasks for this project val thisProjectTasks = (sources in routes).value.map { file => RoutesCompilerTask(file, routesImport.value, forwardsRouter = true, reverseRouter = generateReverseRouter.value, namespaceReverseRouter = namespaceReverseRouter.value) } thisProjectTasks ++ reverseRouterTasks } }, watchSources in Defaults.ConfigGlobal <++= sources in routes, target in routes := crossTarget.value / "routes" / Defaults.nameForSrc(configuration.value.name), routes <<= compileRoutesFiles, sourceGenerators <+= routes, managedSourceDirectories <+= target in routes ) def defaultSettings = Seq( routesImport := Nil, aggregateReverseRoutes := Nil, // Generate reverse router defaults to true if this project is not aggregated by any of the projects it depends on // aggregateReverseRoutes projects. Otherwise, it will be false, since another project will be generating the // reverse router for it. generateReverseRouter <<= Def.settingDyn { val projectRef = thisProjectRef.value val dependencies = buildDependencies.value.classpathTransitiveRefs(projectRef) // Go through each dependency of this project dependencies.map { dep => // Get the aggregated reverse routes projects for the dependency, if defined Def.optional(aggregateReverseRoutes in dep)(_.map(_.map(_.project)).getOrElse(Nil)) }.join.apply { aggregated: Seq[Seq[ProjectReference]] => val localProject = LocalProject(projectRef.project) // Return false if this project is aggregated by one of our dependencies !aggregated.flatten.contains(localProject) } }, namespaceReverseRouter := false, routesGenerator := InjectedRoutesGenerator, // changed from StaticRoutesGenerator in 2.5.0 sourcePositionMappers += routesPositionMapper ) private val compileRoutesFiles = Def.task[Seq[File]] { compileRoutes(routesCompilerTasks.value, routesGenerator.value, (target in routes).value, streams.value.cacheDirectory, state.value.log) } def compileRoutes(tasks: Seq[RoutesCompilerTask], generator: RoutesGenerator, generatedDir: File, cacheDirectory: File, log: Logger): Seq[File] = { val ops = tasks.map(task => RoutesCompilerOp(task, generator.id, PlayVersion.current)) val (products, errors) = syncIncremental(cacheDirectory, ops) { opsToRun: Seq[RoutesCompilerOp] => val results = opsToRun.map { op => op -> play.routes.compiler.RoutesCompiler.compile(op.task, generator, generatedDir) } val opResults = results.map { case (op, Right(inputs)) => op -> OpSuccess(Set(op.task.file), inputs.toSet) case (op, Left(_)) => op -> OpFailure }.toMap val errors = results.collect { case (_, Left(e)) => e }.flatten (opResults, errors) } if (errors.nonEmpty) { val exceptions = errors.map { case RoutesCompilationError(source, message, line, column) => reportCompilationError(log, RoutesCompilationException(source, message, line, column.map(_ - 1))) } throw exceptions.head } products.to[Seq] } private def reportCompilationError(log: Logger, error: PlayException.ExceptionSource) = { // log the source file and line number with the error message log.error(Option(error.sourceName).getOrElse("") + Option(error.line).map(":" + _).getOrElse("") + ": " + error.getMessage) Option(error.interestingLines(0)).map(_.focus).flatMap(_.headOption) map { line => // log the line log.error(line) Option(error.position).map { pos => // print a carat under the offending character val spaces = (line: Seq[Char]).take(pos).map { case '\t' => '\t' case x => ' ' } 
log.error(spaces.mkString + "^") } } error } val routesPositionMapper: Position => Option[Position] = position => { position.sourceFile collect { case GeneratedSource(generatedSource) => { new xsbti.Position { lazy val line = { position.line.flatMap(l => generatedSource.mapLine(l.asInstanceOf[Int])).map(l => xsbti.Maybe.just(l.asInstanceOf[java.lang.Integer])).getOrElse(xsbti.Maybe.nothing[java.lang.Integer]) } lazy val lineContent = { line flatMap { lineNo => sourceFile.flatMap { file => IO.read(file).split('\n').lift(lineNo - 1) } } getOrElse "" } val offset = xsbti.Maybe.nothing[java.lang.Integer] val pointer = xsbti.Maybe.nothing[java.lang.Integer] val pointerSpace = xsbti.Maybe.nothing[String] val sourceFile = xsbti.Maybe.just(generatedSource.source.get) val sourcePath = xsbti.Maybe.just(sourceFile.get.getCanonicalPath) } } } } } private case class RoutesCompilerOp(task: RoutesCompilerTask, generatorId: String, playVersion: String) case class RoutesCompilationException(source: File, message: String, atLine: Option[Int], column: Option[Int]) extends PlayException.ExceptionSource( "Compilation error", message) with FeedbackProvidedException { def line = atLine.map(_.asInstanceOf[java.lang.Integer]).orNull def position = column.map(_.asInstanceOf[java.lang.Integer]).orNull def input = IO.read(source) def sourceName = source.getAbsolutePath }
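// --- Hedged configuration sketch (not part of the original file above) ---
// The setting keys defined in RoutesKeys above are what a Play project's build.sbt
// would adjust. This is an illustrative sbt fragment, not a compilable Scala object;
// the values shown are assumptions.
//
//   routesGenerator := InjectedRoutesGenerator   // the 2.5 default, as noted above
//   routesImport += "binders._"                  // hypothetical extra import for the generated router
//   namespaceReverseRouter := true               // namespace reverse routers per project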
bjfletcher/playframework
framework/src/play/src/test/scala/play/libs/FSpec.scala
/* * Copyright (C) 2009-2016 Typesafe Inc. <http://www.typesafe.com> */ package play.libs import java.util.Arrays import java.util.concurrent.{ LinkedBlockingQueue, TimeoutException } import java.util.concurrent.TimeUnit.{ MILLISECONDS, SECONDS } import org.specs2.mutable._ import play.api.libs.iteratee.ExecutionSpecification import scala.collection.JavaConverters import scala.concurrent.{ Future, Promise } import java.util.function.{ Consumer, Function, Predicate, Supplier } object FSpec extends Specification with ExecutionSpecification { sequential "An F.Promise" should { "wrap a Scala Future" in { val f = Promise.successful(1).future val fp = F.Promise.wrap(f) fp.wrapped() must equalTo(f) } "yield its value" in { val fp = F.Promise.pure(1) fp.get(5000) must equalTo(1) fp.get(5, SECONDS) must equalTo(1) } "throw its exception" in { val e = new RuntimeException("x") val fp = F.Promise.throwing[Int](e) fp.get(5000) must throwA(e) fp.get(5, SECONDS) must throwA(e) fp.get(5, SECONDS) must throwA(e) } "be able to be created from a function (with default ExecutionContext)" in { F.Promise.promise(new Supplier[Int] { def get() = 1 }).get(5, SECONDS) must equalTo(1) } "be able to be created from a function (with explicit ExecutionContext)" in { mustExecute(1) { ec => F.Promise.promise(new Supplier[Int] { def get() = 1 }, ec).get(5, SECONDS) must equalTo(1) } } "be able to be created after a delay (with default ExecutionContext)" in { F.Promise.delayed(new Supplier[Int] { def get() = 1 }, 1, MILLISECONDS).get(5, SECONDS) must equalTo(1) } "be able to be created after a delay (with explicit ExecutionContext)" in { mustExecute(1) { ec => F.Promise.delayed(new Supplier[Int] { def get() = 1 }, 1, MILLISECONDS, ec).get(5, SECONDS) must equalTo(1) } } "redeem with its value (with default ExecutionContext)" in { val p = Promise[Int]() val fp = F.Promise.wrap(p.future) val invocations = new LinkedBlockingQueue[Int]() fp.onRedeem(new Consumer[Int] { def accept(x: Int) { invocations.offer(x) } }) p.success(99) invocations.poll(5, SECONDS) must equalTo(99) } "redeem with its value (with explicit ExecutionContext)" in { val p = Promise[Int]() val fp = F.Promise.wrap(p.future) mustExecute(1) { ec => val invocations = new LinkedBlockingQueue[Int]() fp.onRedeem(new Consumer[Int] { def accept(x: Int) { invocations.offer(x) } }, ec) p.success(99) invocations.poll(5, SECONDS) must equalTo(99) } } "map its value (with default ExecutionContext)" in { val p = Promise[Int]() val fp = F.Promise.wrap(p.future) val mapped = fp.map(new Function[Int, Int] { def apply(x: Int) = 2 * x }) p.success(111) mapped.get(5, SECONDS) must equalTo(222) } "map its value (with explicit ExecutionContext)" in { val p = Promise[Int]() val fp = F.Promise.wrap(p.future) mustExecute(1) { ec => val mapped = fp.map(new Function[Int, Int] { def apply(x: Int) = 2 * x }, ec) p.success(111) mapped.get(5, SECONDS) must equalTo(222) } } "recover from a thrown exception (with default ExecutionContext)" in { val p = Promise[Int]() val fp = F.Promise.wrap(p.future) val recovered = fp.recover(new Function[Throwable, Int] { def apply(x: Throwable): Int = 99 }) p.failure(new RuntimeException("x")) recovered.get(5, SECONDS) must equalTo(99) } "recover from a thrown exception (with explicit ExecutionContext)" in { val p = Promise[Int]() val fp = F.Promise.wrap(p.future) mustExecute(1) { ec => val recovered = fp.recover(new Function[Throwable, Int] { def apply(x: Throwable): Int = 99 }, ec) p.failure(new RuntimeException("x")) recovered.get(5, SECONDS) must 
equalTo(99) } } "recoverWith from a thrown exception (with default ExecutionContext)" in { val p = Promise[Int]() val fp = F.Promise.wrap(p.future) val recovered = fp.recoverWith(new Function[Throwable, F.Promise[Int]] { def apply(x: Throwable) = F.Promise.pure(99) }) p.failure(new RuntimeException("x")) recovered.get(5, SECONDS) must equalTo(99) } "recoverWith from a thrown exception (with explicit ExecutionContext)" in { val p = Promise[Int]() val fp = F.Promise.wrap(p.future) mustExecute(1) { ec => val recovered = fp.recoverWith(new Function[Throwable, F.Promise[Int]] { def apply(x: Throwable) = F.Promise.pure(99) }, ec) p.failure(new RuntimeException("x")) recovered.get(5, SECONDS) must equalTo(99) } } "fallbackTo another promise" in { val p1 = F.Promise.throwing[Int](new RuntimeException("x")) val p2 = p1.fallbackTo(F.Promise.pure(42)) p2.get(5, SECONDS) must equalTo(42) } "don't fallbackTo on success" in { val p1 = F.Promise.pure(1) val p2 = p1.fallbackTo(F.Promise.pure(2)) p2.get(5, SECONDS) must equalTo(1) } "keep first failure when fallbackTo also fails" in { val p1 = F.Promise.throwing[Int](new RuntimeException("1")) val p2 = p1.fallbackTo(F.Promise.throwing[Int](new RuntimeException("2"))) p2.get(5, SECONDS) must throwA[RuntimeException]("1") } "flatMap its value (with default ExecutionContext)" in { val p = Promise[Int]() val fp = F.Promise.wrap(p.future) val flatMapped = fp.flatMap(new Function[Int, F.Promise[Int]] { def apply(x: Int) = F.Promise.wrap(Future.successful(2 * x)) }) p.success(111) flatMapped.get(5, SECONDS) must equalTo(222) } "flatMap its value (with explicit ExecutionContext)" in { val p = Promise[Int]() val fp = F.Promise.wrap(p.future) mustExecute(1) { ec => val flatMapped = fp.flatMap(new Function[Int, F.Promise[Int]] { def apply(x: Int) = F.Promise.wrap(Future.successful(2 * x)) }, ec) p.success(111) flatMapped.get(5, SECONDS) must equalTo(222) } } "filter its value (with default ExecutionContext)" in { val p = Promise[Int]() val fp = F.Promise.wrap(p.future) val filtered = fp.filter(new Predicate[Int] { def test(x: Int) = x > 0 }) p.success(1) filtered.get(5, SECONDS) must equalTo(1) } "filter its value (with explicit ExecutionContext)" in { val p = Promise[Int]() val fp = F.Promise.wrap(p.future) mustExecute(1) { ec => val filtered = fp.filter(new Predicate[Int] { def test(x: Int) = x > 0 }, ec) p.success(1) filtered.get(5, SECONDS) must equalTo(1) } } "filter to failure (with default ExecutionContext)" in { val p = Promise[Int]() val fp = F.Promise.wrap(p.future) val filtered = fp.filter(new Predicate[Int] { def test(x: Int) = x > 0 }) p.success(-1) filtered.get(5, SECONDS) must throwA[NoSuchElementException] } "filter to failure (with explicit ExecutionContext)" in { val p = Promise[Int]() val fp = F.Promise.wrap(p.future) mustExecute(1) { ec => val filtered = fp.filter(new Predicate[Int] { def test(x: Int) = x > 0 }, ec) p.success(-1) filtered.get(5, SECONDS) must throwA[NoSuchElementException] } } "transform its successful value (with default ExecutionContext)" in { val p = F.Promise.pure(1) val mapped = p.transform( new Function[Int, Int] { def apply(x: Int) = 2 * x }, new Function[Throwable, Throwable] { def apply(t: Throwable) = t } ) mapped.get(5, SECONDS) must equalTo(2) } "transform its successful value (with explicit ExecutionContext)" in { val p = F.Promise.pure(1) mustExecute(1) { ec => val mapped = p.transform( new Function[Int, Int] { def apply(x: Int) = 2 * x }, new Function[Throwable, Throwable] { def apply(t: Throwable) = t }, ec ) 
mapped.get(5, SECONDS) must equalTo(2) } } "transform its failed throwable (with default ExecutionContext)" in { val p = F.Promise.throwing[Int](new RuntimeException("1")) val mapped = p.transform( new Function[Int, Int] { def apply(x: Int) = x }, new Function[Throwable, Throwable] { def apply(t: Throwable) = new RuntimeException("2") } ) mapped.get(5, SECONDS) must throwA[RuntimeException]("2") } "transform its failed throwable (with explicit ExecutionContext)" in { val p = F.Promise.throwing[Int](new RuntimeException("1")) mustExecute(1) { ec => val mapped = p.transform( new Function[Int, Int] { def apply(x: Int) = x }, new Function[Throwable, Throwable] { def apply(t: Throwable) = new RuntimeException("2") }, ec ) mapped.get(5, SECONDS) must throwA[RuntimeException]("2") } } "yield a timeout value" in { F.Promise.timeout(1, 2).get(1, SECONDS) must equalTo(1) F.Promise.timeout(1, 2, MILLISECONDS).get(1, SECONDS) must equalTo(1) } "throw a promise timeout exception" in { //F.Promise.timeout().get(15, SECONDS) must throwA[TimeoutException] // Too slow to run for normal testing F.Promise.timeout(2).get(1, SECONDS) must throwA[F.PromiseTimeoutException] F.Promise.timeout(2, MILLISECONDS).get(1, SECONDS) must throwA[F.PromiseTimeoutException] } "combine a sequence of promises from a vararg" in { mustExecute(3) { ec => import F.Promise.pure F.Promise.sequence[Int](ec, pure(1), pure(2), pure(3)).get(5, SECONDS) must equalTo(Arrays.asList(1, 2, 3)) } } "combine a sequence of promises from an iterable" in { mustExecute(3) { ec => import F.Promise.pure F.Promise.sequence[Int](Arrays.asList(pure(1), pure(2), pure(3)), ec).get(5, SECONDS) must equalTo(Arrays.asList(1, 2, 3)) } } "zip with another promise" in { val pa = F.Promise.pure(1) val pb = F.Promise.pure("hello") val tup = pa.zip(pb).get(1, SECONDS) tup._1 must equalTo(1) tup._2 must equalTo("hello") } def orDriver(): (Promise[Int], Promise[String], F.Promise[F.Either[Int, String]]) = { val pl = Promise[Int]() val pr = Promise[String]() val por = F.Promise.wrap(pl.future).or(F.Promise.wrap(pr.future)) (pl, pr, por) } "combine with another promise with 'or'" in { val (pl, pr, por) = orDriver() por.wrapped.isCompleted must beFalse pl.success(1) val result = por.get(1, SECONDS) result.left.get must equalTo(1) result.right.isPresent must beFalse } "combine with another promise with 'or'" in { val (pl, pr, por) = orDriver() por.wrapped.isCompleted must beFalse pr.success("x") val result = por.get(1, SECONDS) result.left.isPresent must beFalse result.right.get must equalTo("x") } } }
bjfletcher/playframework
documentation/manual/working/scalaGuide/main/json/code/ScalaJsonAutomatedSpec.scala
/* * Copyright (C) 2009-2016 Typesafe Inc. <http://www.typesafe.com> */ package scalaguide.json import play.api.data.validation.ValidationError import play.api.libs.json.Json import org.junit.runner.RunWith import org.specs2.mutable.Specification import org.specs2.runner.JUnitRunner @RunWith(classOf[JUnitRunner]) class ScalaJsonAutomatedSpec extends Specification { //#model case class Resident(name: String, age: Int, role: Option[String]) //#model val sampleJson = Json.parse( """{ "name" : "Fiver", "age" : 4 }""" ) val sampleData = Resident("Fiver", 4, None) "Scala JSON automated" should { "produce a working Reads" in { //#auto-reads import play.api.libs.json._ implicit val residentReads = Json.reads[Resident] //#auto-reads sampleJson.as[Resident] must_=== sampleData } "do the same thing as a manual Reads" in { //#manual-reads import play.api.libs.json._ import play.api.libs.functional.syntax._ implicit val residentReads = ( (__ \ "name").read[String] and (__ \ "age").read[Int] and (__ \ "role").readNullable[String] )(Resident) //#manual-reads sampleJson.as[Resident] must_=== sampleData } "produce a working Writes" in { //#auto-writes import play.api.libs.json._ implicit val residentWrites = Json.writes[Resident] //#auto-writes Json.toJson(sampleData) must_=== sampleJson } "produce a working Format" in { //#auto-format import play.api.libs.json._ implicit val residentFormat = Json.format[Resident] //#auto-format sampleJson.as[Resident] must_=== sampleData Json.toJson(sampleData) must_=== sampleJson } } }
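// --- Hedged usage sketch (not part of the original file above) ---
// The same Json.format macro demonstrated in the spec, used outside a test. The Cat
// case class and its fields are hypothetical, chosen purely for illustration.
object ScalaJsonAutomatedSketch {
  import play.api.libs.json._

  case class Cat(name: String, lives: Int)
  implicit val catFormat: Format[Cat] = Json.format[Cat]

  def main(args: Array[String]): Unit = {
    val js = Json.toJson(Cat("Whiskers", 9))
    println(js)         // {"name":"Whiskers","lives":9}
    println(js.as[Cat]) // Cat(Whiskers,9)
  }
}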
bjfletcher/playframework
documentation/manual/working/javaGuide/main/ws/code/javaguide/ws/JavaWSSpec.scala
/* * Copyright (C) 2009-2016 Typesafe Inc. <http://www.typesafe.com> */ package javaguide.ws import org.specs2.mutable._ import play.api.test._ import play.api.mvc._ import play.api.libs.json._ import play.test.Helpers._ import play.api.test.FakeApplication import play.api.libs.json.JsObject import javaguide.testhelpers.MockJavaActionHelper import play.api.http.Status object JavaWSSpec extends Specification with Results with Status { // It's much easier to test this in Scala because we need to set up a // fake application with routes. def fakeApplication = FakeApplication(withRoutes = { case ("GET", "/feed") => Action { val obj: JsObject = Json.obj( "title" -> "foo", "commentsUrl" -> "http://localhost:3333/comments" ) Ok(obj) } case ("GET", "/comments") => Action { val obj: JsObject = Json.obj( "count" -> "10" ) Ok(obj) } case (_, _) => Action { BadRequest("no binding found") } }) "The Java WS class" should { "call WS correctly" in new WithServer(app = fakeApplication, port = 3333) { val result = MockJavaActionHelper.call(app.injector.instanceOf[JavaWS.Controller1], fakeRequest()) result.status() must equalTo(OK) } "compose WS calls successfully" in new WithServer(app = fakeApplication, port = 3333) { val result = MockJavaActionHelper.call(app.injector.instanceOf[JavaWS.Controller2], fakeRequest()) result.status() must equalTo(OK) contentAsString(result) must beEqualTo("Number of comments: 10") } } }
bjfletcher/playframework
framework/src/play-ws/src/main/scala/play/api/libs/ws/ahc/Streamed.scala
package play.api.libs.ws.ahc import akka.stream.scaladsl.Source import akka.util.ByteString import org.asynchttpclient.AsyncHandler.State import org.asynchttpclient.{ AsyncHttpClient, HttpResponseBodyPart, HttpResponseHeaders, HttpResponseStatus, Request } import org.asynchttpclient.handler.StreamedAsyncHandler import org.reactivestreams.{ Publisher, Subscriber, Subscription } import play.api.libs.iteratee.Enumerator import play.api.libs.streams.Streams import play.api.libs.ws.{ DefaultWSResponseHeaders, StreamedResponse, WSResponseHeaders } import scala.concurrent.{ Future, Promise } private[play] object Streamed { def execute(client: AsyncHttpClient, request: Request): Future[StreamedResponse] = { val promise = Promise[(WSResponseHeaders, Publisher[HttpResponseBodyPart])]() client.executeRequest(request, new DefaultStreamedAsyncHandler(promise)) import play.api.libs.iteratee.Execution.Implicits.trampoline promise.future.map { case (headers, publisher) => // this transformation is not part of `DefaultStreamedAsyncHandler.onCompleted` because // a reactive-streams `Publisher` needs to be returned to implement `execute2`. Though, // once `execute2` is removed, we should move the code here inside // `DefaultStreamedAsyncHandler.onCompleted`. val source = Source.fromPublisher(publisher).map(bodyPart => ByteString(bodyPart.getBodyPartBytes)) StreamedResponse(headers, source) } } // This method was introduced because in Play we have utilities that makes it easy to convert a `Publisher` into an `Enumerator`, // while it's not as easy to convert an akka-stream Source to a reactive-streams `Publisher` (as it requires materialization of // the stream). This is why `DefaultStreamedAsyncHandler`'s constructor takes a `Promise[(WSResponseHeaders, Publisher[HttpResponseBodyPart])]` // and not a `Promise[(WSResponseHeaders, Source[ByteString])]`. In fact, the moment this method is removed, we should refactor the // `DefaultStreamedAsyncHandler`' constructor parameter's type to the latter. // This method is `deprecated` because we should remember to remove it together with `AhcWSRequest.streamWithEnumerator`. 
@deprecated("2.5", "Use `execute()` instead.") def execute2(client: AsyncHttpClient, request: Request): Future[(WSResponseHeaders, Enumerator[Array[Byte]])] = { val promise = Promise[(WSResponseHeaders, Publisher[HttpResponseBodyPart])]() client.executeRequest(request, new DefaultStreamedAsyncHandler(promise)) import play.api.libs.iteratee.Execution.Implicits.trampoline promise.future.map { case (headers, publisher) => val enumerator = Streams.publisherToEnumerator(publisher).map(_.getBodyPartBytes) (headers, enumerator) } } private class DefaultStreamedAsyncHandler(promise: Promise[(WSResponseHeaders, Publisher[HttpResponseBodyPart])]) extends StreamedAsyncHandler[Unit] { private var statusCode: Int = _ private var responseHeaders: WSResponseHeaders = _ private var publisher: Publisher[HttpResponseBodyPart] = _ def onStream(publisher: Publisher[HttpResponseBodyPart]): State = { if (this.publisher != null) State.ABORT else { this.publisher = publisher promise.success((responseHeaders, publisher)) State.CONTINUE } } override def onStatusReceived(status: HttpResponseStatus): State = { if (this.publisher != null) State.ABORT else { statusCode = status.getStatusCode State.CONTINUE } } override def onHeadersReceived(h: HttpResponseHeaders): State = { if (this.publisher != null) State.ABORT else { val headers = h.getHeaders responseHeaders = DefaultWSResponseHeaders(statusCode, AhcWSRequest.ahcHeadersToMap(headers)) State.CONTINUE } } override def onBodyPartReceived(bodyPart: HttpResponseBodyPart): State = throw new IllegalStateException("Should not have received body part") override def onCompleted(): Unit = { // EmptyPublisher can be replaces with `Source.empty` when we carry out the refactoring // mentioned in the `execute2` method. promise.trySuccess((responseHeaders, EmptyPublisher)) } override def onThrowable(t: Throwable): Unit = promise.tryFailure(t) } private case object EmptyPublisher extends Publisher[HttpResponseBodyPart] { def subscribe(s: Subscriber[_ >: HttpResponseBodyPart]): Unit = { if (s eq null) throw new NullPointerException("Subscriber must not be null, rule 1.9") s.onSubscribe(CancelledSubscription) s.onComplete() } private case object CancelledSubscription extends Subscription { override def request(elements: Long): Unit = () override def cancel(): Unit = () } } }
bjfletcher/playframework
documentation/manual/working/scalaGuide/main/tests/code-scalatestplus-play/allbrowserspersuite/ExampleSpec.scala
/*
 * Copyright (C) 2009-2016 Typesafe Inc. <http://www.typesafe.com>
 */
package scalaguide.tests.scalatest.allbrowserspersuite

import play.api.test._
import org.scalatest._
import org.scalatestplus.play._
import play.api.test.Helpers._
import play.api.libs.ws._
import play.api.mvc._
import Results._

// #scalafunctionaltest-allbrowserspersuite
class ExampleSpec extends PlaySpec with OneServerPerSuite with AllBrowsersPerSuite {

  // Override app if you need a FakeApplication with other than
  // default parameters.
  implicit override lazy val app: FakeApplication =
    FakeApplication(
      additionalConfiguration = Map("ehcacheplugin" -> "disabled"),
      withRoutes = {
        case ("GET", "/testing") =>
          Action(
            Results.Ok(
              "<html>" +
                "<head><title>Test Page</title></head>" +
                "<body>" +
                "<input type='button' name='b' value='Click Me' onclick='document.title=\"scalatest\"' />" +
                "</body>" +
                "</html>"
            ).as("text/html")
          )
      }
    )

  def sharedTests(browser: BrowserInfo) = {
    "The AllBrowsersPerSuite trait" must {
      "provide a web driver " + browser.name in {
        go to (s"http://localhost:$port/testing")
        pageTitle mustBe "Test Page"
        click on find(name("b")).value
        eventually { pageTitle mustBe "scalatest" }
      }
    }
  }
}
// #scalafunctionaltest-allbrowserspersuite
bjfletcher/playframework
framework/src/play/src/main/scala/play/utils/InlineCache.scala
/* * Copyright (C) 2009-2016 Typesafe Inc. <http://www.typesafe.com> */ package play.utils import java.lang.ref.WeakReference /** * Creates a wrapper for a function that uses an inline cache to * optimize calls to the function. The function's result for a * a *single* input will be cached. If the input changes, the * function will be called again and its new output will be cached. * * Even though this function only caches a single value, it can * be useful for functions where the input only changes occasionally. * Because it only caches a single value, the cached value can * be accessed very quickly. * * This class will improve performance when the function input changes * infrequently and the function is somewhat expensive to call. * Even when the input changes, this class will still function * correctly and return the correct value, although it will be less * efficient than an unwrapped function because it will update * the cache. * * The cached input and output will be wrapped by a WeakReference * so that they can be garbage collected. This may mean that the * cache needs to be repopulated after garbage collection has * been run. * * The function may sometimes be called again for the same input. * In the current implementation this happens in order to avoid * synchronizing the cached value across threads. It may also * happen when the weakly-referenced cache is cleared by the * garbage collector. * * Reference equality is used to compare inputs, for speed and * to be conservative. */ private[play] final class InlineCache[A <: AnyRef, B](f: A => B) extends (A => B) { /** * For performance, don't synchronize this value. Instead, let * the cache be updated on different threads. If the input value * is stable then the value of this variable will eventually * reach the same value. If the input value is different, then * there's no point sharing the value across threads anyway. */ var cache: WeakReference[(A, B)] = null override def apply(a: A): B = { // Get the current value of the cache into a local variable. // If it's null then this is our first call to the function // (on this thread) so get a fresh value. val cacheSnapshot = cache if (cacheSnapshot == null) return fresh(a) // Get cached input/output pair out of the WeakReference. // If the pair is null then the reference has been collected // and we need a fresh value. val inputOutput = cacheSnapshot.get if (inputOutput == null) return fresh(a) // If the inputs don't match then we need a fresh value. if (inputOutput._1 ne a) return fresh(a) // We got the cached value, return it. inputOutput._2 } /** Get a fresh value and update the cache with it. */ private def fresh(a: A): B = { val b = f(a) cache = new WeakReference((a, b)) b } }
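// --- Hedged usage sketch (not part of the original file above) ---
// InlineCache caches the result for the single most recent input, compared by
// reference. The sketch lives in the play.utils package because the class is
// private[play]; expensiveLookup is a hypothetical stand-in for illustration.
package play.utils

object InlineCacheSketch {
  val expensiveLookup: String => Int = { key =>
    println(s"computing for $key")
    key.length
  }

  val cached = new InlineCache(expensiveLookup)

  def demo(): Unit = {
    val key = "alpha"
    cached(key)                 // first call: computes and populates the cache
    cached(key)                 // same reference: answered from the cached pair
    cached(new String("alpha")) // different reference: recomputed, by design
  }
}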
bjfletcher/playframework
framework/src/play-cache/src/main/scala/play/api/cache/SerializableResult.scala
<filename>framework/src/play-cache/src/main/scala/play/api/cache/SerializableResult.scala<gh_stars>0 /* * Copyright (C) 2009-2016 Typesafe Inc. <http://www.typesafe.com> */ package play.api.cache import java.io._ import akka.util.ByteString import play.api.http.HttpEntity import play.api.mvc._ import scala.annotation.tailrec /** * Wraps a Result to make it Serializable. */ private[play] final class SerializableResult(constructorResult: Result) extends Externalizable { assert(Option(constructorResult).forall(_.body.isInstanceOf[HttpEntity.Strict]), "Only strict entities can be cached, streamed entities cannot be cached") /** * Create an empty object. Must call `readExternal` after calling * this method. This constructor is invoked by the Java * deserialization code. */ def this() = this(null) /** * Hold the Result. Will either be supplied by the constructor or * set by `readExternal`. */ private var cachedResult: Result = constructorResult def result: Result = { assert(cachedResult != null, "Result should have been provided in constructor or when deserializing") cachedResult } override def readExternal(in: ObjectInput): Unit = { assert(in.readByte() == SerializableResult.encodingVersion, "Result was serialised from a different version of Play") val status = in.readInt() val headerMap = { val headerLength = in.readInt() val mapBuilder = Map.newBuilder[String, String] for (_ <- 0 until headerLength) { val name = in.readUTF() val value = in.readUTF() mapBuilder += ((name, value)) } mapBuilder.result() } val body = { val hasContentType = in.readBoolean() val contentType = if (hasContentType) { Some(in.readUTF()) } else { None } val sizeOfBody: Int = in.readInt() val buffer = new Array[Byte](sizeOfBody) @tailrec def readBytes(offset: Int, length: Int): Unit = { if (length > 0) { val readLength = in.read(buffer, offset, length) readBytes(offset + readLength, length - readLength) } } readBytes(0, sizeOfBody) HttpEntity.Strict(ByteString(buffer), contentType) } cachedResult = Result( header = ResponseHeader(status, headerMap), body = body ) } override def writeExternal(out: ObjectOutput): Unit = { out.writeByte(SerializableResult.encodingVersion) out.writeInt(cachedResult.header.status) { val headerMap = cachedResult.header.headers out.writeInt(headerMap.size) for ((name, value) <- headerMap) { out.writeUTF(name) out.writeUTF(value) } } { out.writeBoolean(cachedResult.body.contentType.nonEmpty) cachedResult.body.contentType.foreach { ct => out.writeUTF(ct) } val body = cachedResult.body match { case HttpEntity.Strict(data, _) => data case other => throw new IllegalStateException("Non strict body cannot be materialized") } out.writeInt(body.length) out.write(body.toArray) } } } private[play] object SerializableResult { val encodingVersion = 2.toByte }
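// --- Hedged usage sketch (not part of the original file above) ---
// Because only strict bodies are supported, a Result built with Ok("...") can be
// round-tripped through plain Java serialization. The sketch lives under the play
// package because SerializableResult is private[play]; everything else is standard JDK I/O.
package play.api.cache

import java.io._

import play.api.mvc.Results

object SerializableResultSketch {
  def main(args: Array[String]): Unit = {
    val bytes = new ByteArrayOutputStream()
    val out = new ObjectOutputStream(bytes)
    out.writeObject(new SerializableResult(Results.Ok("cached body")))
    out.close()

    val in = new ObjectInputStream(new ByteArrayInputStream(bytes.toByteArray))
    val restored = in.readObject().asInstanceOf[SerializableResult].result
    println(restored.header.status) // 200
  }
}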
bjfletcher/playframework
framework/src/iteratees/src/test/scala/play/api/libs/concurrent/NonBlockingMutexSpec.scala
<reponame>bjfletcher/playframework /* * Copyright (C) 2009-2016 Typesafe Inc. <http://www.typesafe.com> */ package play.api.libs.concurrent import scala.language.reflectiveCalls import org.specs2.mutable._ import java.io.OutputStream import java.util.concurrent.{ CountDownLatch, Executors, TimeUnit } import java.util.concurrent.atomic.AtomicInteger import scala.concurrent.{ ExecutionContext, Promise, Future, Await } import scala.concurrent.duration.{ Duration, SECONDS } import scala.util.{ Failure, Success, Try } object NonBlockingMutexSpec extends Specification { val waitTime = Duration(2, SECONDS) trait Tester { def run(body: => Unit): Unit } class MutexTester extends Tester { val mutex = new NonBlockingMutex() def run(body: => Unit) = mutex.exclusive(body) } class NaiveTester extends Tester { def run(body: => Unit) = body } def countOrderingErrors(runs: Int, tester: Tester)(implicit ec: ExecutionContext): Future[Int] = { val result = Promise[Int]() val runCount = new AtomicInteger(0) val orderingErrors = new AtomicInteger(0) for (i <- 0 until runs) { tester.run { val observedRunCount = runCount.getAndIncrement() // We see observedRunCount != i then this task was run out of order if (observedRunCount != i) { orderingErrors.incrementAndGet() // Record the error } // If this is the last task, complete our result promise if ((observedRunCount + 1) >= runs) { result.success(orderingErrors.get) } } } result.future } "NonBlockingMutex" should { "run a single operation" in { val p = Promise[Int]() val mutex = new NonBlockingMutex() mutex.exclusive { p.success(1) } Await.result(p.future, waitTime) must_== (1) } "run two operations" in { val p1 = Promise[Unit]() val p2 = Promise[Unit]() val mutex = new NonBlockingMutex() mutex.exclusive { p1.success(()) } mutex.exclusive { p2.success(()) } Await.result(p1.future, waitTime) must_== (()) Await.result(p2.future, waitTime) must_== (()) } "run code in order" in { import ExecutionContext.Implicits.global def percentageOfRunsWithOrderingErrors(runSize: Int, tester: Tester): Int = { val results: Seq[Future[Int]] = for (i <- 0 until 9) yield { countOrderingErrors(runSize, tester) } Await.result(Future.sequence(results), waitTime).filter(_ > 0).size * 10 } // Iteratively increase the run size until we get observable errors 90% of the time // We want a high error rate because we want to then use the MutexTester // on the same run size and know that it is fixing up some problems. If the run size // is too small then the MutexTester probably isn't doing anything. We use // dynamic run sizing because the actual size that produces errors will vary // depending on the environment in which this test is run. var runSize = 8 // This usually reaches 8192 on my dev machine with 10 simultaneous queues var errorPercentage = 0 while (errorPercentage < 90 && runSize < 1000000) { runSize = runSize << 1 errorPercentage = percentageOfRunsWithOrderingErrors(runSize, new NaiveTester()) } //println(s"Got $errorPercentage% ordering errors on run size of $runSize") // Now show that this run length works fine with the MutexTester percentageOfRunsWithOrderingErrors(runSize, new MutexTester()) must_== 0 } } }
bjfletcher/playframework
framework/src/play-ws/src/test/scala/play/api/libs/ws/WSConfigParserSpec.scala
/* * Copyright (C) 2009-2016 Typesafe Inc. <http://www.typesafe.com> */ package play.api.libs.ws import org.specs2.mutable._ import com.typesafe.config.ConfigFactory import play.api.Environment import play.api.test.WithApplication import scala.concurrent.duration._ object WSConfigParserSpec extends Specification { "WSConfigParser" should { def parseThis(input: String)(implicit app: play.api.Application) = { val config = play.api.Configuration(ConfigFactory.parseString(input).withFallback(ConfigFactory.defaultReference())) val parser = new WSConfigParser(config, app.injector.instanceOf[Environment]) parser.parse() } "parse ws base section" in new WithApplication { val actual = parseThis(""" |play.ws.timeout.connection = 9999 ms |play.ws.timeout.idle = 666 ms |play.ws.timeout.request = 1234 ms |play.ws.followRedirects = false |play.ws.useProxyProperties = false |play.ws.useragent = "FakeUserAgent" """.stripMargin) actual.connectionTimeout must_== 9999.millis actual.idleTimeout must_== 666.millis actual.requestTimeout must_== 1234.millis // default: true actual.followRedirects must beFalse // default: true actual.useProxyProperties must beFalse actual.userAgent must beSome.which(_ must_== "FakeUserAgent") } } }
bjfletcher/playframework
framework/src/play-streams/src/test/scala/play/api/libs/streams/StreamsSpec.scala
/*
 * Copyright (C) 2009-2016 Typesafe Inc. <http://www.typesafe.com>
 */
package play.api.libs.streams

import play.api.libs.streams.impl._
import org.specs2.mutable.Specification

import scala.concurrent.Future

class StreamsSpec extends Specification {

  "Streams helper interface" should {

    // TODO: Better tests needed, these are only here to ensure Streams compiles
    "create a Publisher from a Future" in {
      val pubr = Streams.futureToPublisher(Future.successful(1))
      pubr must haveClass[FuturePublisher[Int]]
    }
  }
}
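// --- Hedged usage sketch (not part of the original file above) ---
// The Publisher produced by Streams.futureToPublisher is a plain Reactive Streams
// Publisher, so it can be fed straight into an Akka Streams Source. The actor
// system set-up below is assumed boilerplate for the sketch.
import akka.actor.ActorSystem
import akka.stream.ActorMaterializer
import akka.stream.scaladsl.{ Sink, Source }

import scala.concurrent.Future

object FuturePublisherSketch {
  def main(args: Array[String]): Unit = {
    implicit val system = ActorSystem("streams-sketch")
    implicit val materializer = ActorMaterializer()

    val publisher = play.api.libs.streams.Streams.futureToPublisher(Future.successful(42))
    Source.fromPublisher(publisher).runWith(Sink.foreach(println)) // prints 42, then completes
  }
}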