streamline-eu/dynamic-flink
flink-libraries/flink-table/src/main/scala/org/apache/flink/table/runtime/aggregate/TimeWindowPropertyCollector.scala
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.flink.table.runtime.aggregate

import org.apache.calcite.runtime.SqlFunctions
import org.apache.flink.types.Row
import org.apache.flink.util.Collector

/**
 * Adds TimeWindow properties to specified fields of a row before it emits the row to a wrapped
 * collector.
 */
class TimeWindowPropertyCollector(windowStartOffset: Option[Int], windowEndOffset: Option[Int])
    extends Collector[Row] {

  var wrappedCollector: Collector[Row] = _
  var windowStart: Long = _
  var windowEnd: Long = _

  override def collect(record: Row): Unit = {

    val lastFieldPos = record.getArity - 1

    if (windowStartOffset.isDefined) {
      record.setField(
        lastFieldPos + windowStartOffset.get,
        SqlFunctions.internalToTimestamp(windowStart))
    }
    if (windowEndOffset.isDefined) {
      record.setField(
        lastFieldPos + windowEndOffset.get,
        SqlFunctions.internalToTimestamp(windowEnd))
    }

    wrappedCollector.collect(record)
  }

  override def close(): Unit = wrappedCollector.close()
}
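As a reading aid, a minimal sketch of how a caller drives this collector (the reduce function in the next file does exactly this in doEvaluateAndCollect); the offsets, `row`, and `out` values below are illustrative, not part of the original file:

// Illustrative wiring only: decorate a Row with window start/end, then forward it.
val collector = new TimeWindowPropertyCollector(Some(1), Some(2))
collector.wrappedCollector = out   // `out`: the downstream Collector[Row], assumed in scope
collector.windowStart = 1000L      // window start, epoch millis
collector.windowEnd = 6000L        // window end, epoch millis
collector.collect(row)             // writes timestamps at lastFieldPos + offset (the row must
                                   // have been created with room for them), then forwards `row`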
streamline-eu/dynamic-flink
flink-libraries/flink-table/src/main/scala/org/apache/flink/table/runtime/aggregate/DataSetSessionWindowAggReduceGroupFunction.scala
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.flink.table.runtime.aggregate

import java.lang.Iterable

import org.apache.flink.api.common.functions.RichGroupReduceFunction
import org.apache.flink.types.Row
import org.apache.flink.configuration.Configuration
import org.apache.flink.table.codegen.{Compiler, GeneratedAggregationsFunction}
import org.apache.flink.util.Collector
import org.slf4j.LoggerFactory

/**
 * Wraps the aggregate logic inside of
 * [[org.apache.flink.api.java.operators.GroupReduceOperator]]. It is used for session
 * time-windows on batch.
 *
 * Note: this can handle two input types (depending on whether the input is combined or not):
 *
 * 1. when partial aggregation is not supported, the input data structure of reduce is
 *    |groupKey1|groupKey2|sum1|count1|sum2|count2|rowTime|
 * 2. when partial aggregation is supported, the input data structure of reduce is
 *    |groupKey1|groupKey2|sum1|count1|sum2|count2|windowStart|windowEnd|
 *
 * @param genAggregations        Code-generated [[GeneratedAggregations]]
 * @param keysAndAggregatesArity The total arity of keys and aggregates
 * @param finalRowWindowStartPos The relative window-start field position.
 * @param finalRowWindowEndPos   The relative window-end field position.
 * @param gap                    Session time-window gap.
 * @param isInputCombined        Whether the input has already been partially aggregated.
 */
class DataSetSessionWindowAggReduceGroupFunction(
    genAggregations: GeneratedAggregationsFunction,
    keysAndAggregatesArity: Int,
    finalRowWindowStartPos: Option[Int],
    finalRowWindowEndPos: Option[Int],
    gap: Long,
    isInputCombined: Boolean)
  extends RichGroupReduceFunction[Row, Row]
    with Compiler[GeneratedAggregations] {

  private var collector: TimeWindowPropertyCollector = _
  private val intermediateRowWindowStartPos = keysAndAggregatesArity
  private val intermediateRowWindowEndPos = keysAndAggregatesArity + 1

  private var output: Row = _
  private var accumulators: Row = _

  val LOG = LoggerFactory.getLogger(this.getClass)
  private var function: GeneratedAggregations = _

  override def open(config: Configuration) {
    LOG.debug(s"Compiling AggregateHelper: ${genAggregations.name} \n\n " +
      s"Code:\n${genAggregations.code}")
    val clazz = compile(
      getClass.getClassLoader,
      genAggregations.name,
      genAggregations.code)
    LOG.debug("Instantiating AggregateHelper.")
    function = clazz.newInstance()

    output = function.createOutputRow()
    accumulators = function.createAccumulators()
    collector = new TimeWindowPropertyCollector(finalRowWindowStartPos, finalRowWindowEndPos)
  }

  /**
   * For grouped intermediate aggregate rows, divide the windows according to window-start
   * and window-end, merge the data within a unified window into an aggregate buffer, calculate
   * the aggregated values from the aggregate buffer, and set them into the output
   * row based on the mapping between intermediate aggregate data and output data.
   *
   * @param records Grouped intermediate aggregate rows iterator.
   * @param out     The collector to hand results to.
   */
  override def reduce(records: Iterable[Row], out: Collector[Row]): Unit = {

    var windowStart: java.lang.Long = null
    var windowEnd: java.lang.Long = null
    var currentRowTime: java.lang.Long = null

    // reset accumulator
    function.resetAccumulator(accumulators)

    val iterator = records.iterator()

    while (iterator.hasNext) {
      val record = iterator.next()
      currentRowTime = record.getField(intermediateRowWindowStartPos).asInstanceOf[Long]
      // initial traversal or the current row starts past the window end: open a new window
      if (null == windowEnd || currentRowTime > windowEnd) {
        if (null != windowEnd) {
          // evaluate and emit the current window's result.
          doEvaluateAndCollect(out, windowStart, windowEnd)
          // reset accumulator
          function.resetAccumulator(accumulators)
        } else {
          // set keys to output
          function.setForwardedFields(record, output)
        }
        windowStart = record.getField(intermediateRowWindowStartPos).asInstanceOf[Long]
      }

      function.mergeAccumulatorsPair(accumulators, record)

      windowEnd = if (isInputCombined) {
        // partial aggregation is supported
        record.getField(intermediateRowWindowEndPos).asInstanceOf[Long]
      } else {
        // partial aggregation is not supported; window-end equals rowtime + gap
        currentRowTime + gap
      }
    }
    // evaluate and emit the last window's result.
    doEvaluateAndCollect(out, windowStart, windowEnd)
  }

  /**
   * Evaluate and emit the data of the current window.
   *
   * @param out         the collector for the aggregate results
   * @param windowStart the window's start attribute value: the min(rowtime) of all rows
   *                    in the window.
   * @param windowEnd   the window's end property value: max(rowtime) + gap of all rows
   *                    in the window.
   */
  def doEvaluateAndCollect(
      out: Collector[Row],
      windowStart: Long,
      windowEnd: Long): Unit = {

    // set value for the final output
    function.setAggregationResults(accumulators, output)

    // add TimeWindow properties to the output, then emit it
    if (finalRowWindowStartPos.isDefined || finalRowWindowEndPos.isDefined) {
      collector.wrappedCollector = out
      collector.windowStart = windowStart
      collector.windowEnd = windowEnd
      collector.collect(output)
    } else {
      out.collect(output)
    }
  }
}
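The heart of the reduce loop is the gap rule: a row whose rowtime lies beyond the current window's end opens a new session, and (when the input is not combined) every row pushes the window end out to rowtime + gap. A self-contained sketch of that rule on bare timestamps — the names here are illustrative, not Flink API:

// Group sorted event timestamps into (start, end) session windows with the given gap.
def sessionize(rowTimes: Seq[Long], gap: Long): Seq[(Long, Long)] =
  rowTimes.sorted.foldLeft(List.empty[(Long, Long)]) {
    case ((start, end) :: rest, t) if t <= end => (start, t + gap) :: rest // extend current session
    case (sessions, t)                         => (t, t + gap) :: sessions // open a new session
  }.reverse

// sessionize(Seq(1L, 2L, 10L), gap = 3) == Seq((1L, 5L), (10L, 13L)):
// the row at t=2 falls inside [1, 4] and extends it to 5; the row at t=10 starts a new window.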
streamline-eu/dynamic-flink
flink-libraries/flink-table/src/test/scala/org/apache/flink/table/api/scala/stream/sql/WindowAggregateTest.scala
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.flink.table.api.scala.stream.sql

import org.apache.flink.api.scala._
import org.apache.flink.table.api.TableException
import org.apache.flink.table.api.scala._
import org.apache.flink.table.plan.logical.{EventTimeTumblingGroupWindow, ProcessingTimeSessionGroupWindow, ProcessingTimeSlidingGroupWindow}
import org.apache.flink.table.utils.TableTestUtil._
import org.apache.flink.table.utils.{StreamTableTestUtil, TableTestBase}
import org.junit.Test

class WindowAggregateTest extends TableTestBase {
  private val streamUtil: StreamTableTestUtil = streamTestUtil()
  streamUtil.addTable[(Int, String, Long)]("MyTable", 'a, 'b, 'c)

  @Test
  def testNonPartitionedProcessingTimeBoundedWindow() = {

    val sqlQuery =
      "SELECT a, Count(c) OVER (ORDER BY procTime() " +
      "RANGE BETWEEN INTERVAL '10' SECOND PRECEDING AND CURRENT ROW) AS countA " +
      "FROM MyTable"

    val expected =
      unaryNode(
        "DataStreamCalc",
        unaryNode(
          "DataStreamOverAggregate",
          unaryNode(
            "DataStreamCalc",
            streamTableNode(0),
            term("select", "a", "c", "PROCTIME() AS $2")
          ),
          term("orderBy", "PROCTIME"),
          term("range", "BETWEEN 10000 PRECEDING AND CURRENT ROW"),
          term("select", "a", "c", "PROCTIME", "COUNT(c) AS w0$o0")
        ),
        term("select", "a", "w0$o0 AS $1")
      )

    streamUtil.verifySql(sqlQuery, expected)
  }

  @Test
  def testPartitionedProcessingTimeBoundedWindow() = {

    val sqlQuery =
      "SELECT a, AVG(c) OVER (PARTITION BY a ORDER BY procTime() " +
      "RANGE BETWEEN INTERVAL '2' HOUR PRECEDING AND CURRENT ROW) AS avgA " +
      "FROM MyTable"

    val expected =
      unaryNode(
        "DataStreamCalc",
        unaryNode(
          "DataStreamOverAggregate",
          unaryNode(
            "DataStreamCalc",
            streamTableNode(0),
            term("select", "a", "c", "PROCTIME() AS $2")
          ),
          term("partitionBy", "a"),
          term("orderBy", "PROCTIME"),
          term("range", "BETWEEN 7200000 PRECEDING AND CURRENT ROW"),
          term("select", "a", "c", "PROCTIME", "COUNT(c) AS w0$o0", "$SUM0(c) AS w0$o1")
        ),
        term("select", "a", "/(CASE(>(w0$o0, 0)", "CAST(w0$o1), null), w0$o0) AS avgA")
      )

    streamUtil.verifySql(sqlQuery, expected)
  }

  @Test
  def testTumbleFunction() = {

    val sql =
      "SELECT " +
      "  COUNT(*), " +
      "  TUMBLE_START(rowtime(), INTERVAL '15' MINUTE), " +
      "  TUMBLE_END(rowtime(), INTERVAL '15' MINUTE) " +
      "FROM MyTable " +
      "GROUP BY TUMBLE(rowtime(), INTERVAL '15' MINUTE)"

    val expected =
      unaryNode(
        "DataStreamCalc",
        unaryNode(
          "DataStreamAggregate",
          unaryNode(
            "DataStreamCalc",
            streamTableNode(0),
            term("select", "1970-01-01 00:00:00 AS $f0")
          ),
          term("window", EventTimeTumblingGroupWindow(Some('w$), 'rowtime, 900000.millis)),
          term("select", "COUNT(*) AS EXPR$0, start('w$) AS w$start, end('w$) AS w$end")
        ),
        term("select", "EXPR$0, CAST(w$start) AS w$start, CAST(w$end) AS w$end")
      )

    streamUtil.verifySql(sql, expected)
  }

  @Test
  def testHoppingFunction() = {

    val sql =
      "SELECT COUNT(*), " +
      "  HOP_START(proctime(), INTERVAL '15' MINUTE, INTERVAL '1' HOUR), " +
      "  HOP_END(proctime(), INTERVAL '15' MINUTE, INTERVAL '1' HOUR) " +
      "FROM MyTable " +
      "GROUP BY HOP(proctime(), INTERVAL '15' MINUTE, INTERVAL '1' HOUR)"

    val expected =
      unaryNode(
        "DataStreamCalc",
        unaryNode(
          "DataStreamAggregate",
          unaryNode(
            "DataStreamCalc",
            streamTableNode(0),
            term("select", "1970-01-01 00:00:00 AS $f0")
          ),
          term("window",
            ProcessingTimeSlidingGroupWindow(Some('w$), 3600000.millis, 900000.millis)),
          term("select", "COUNT(*) AS EXPR$0, start('w$) AS w$start, end('w$) AS w$end")
        ),
        term("select", "EXPR$0, CAST(w$start) AS w$start, CAST(w$end) AS w$end")
      )

    streamUtil.verifySql(sql, expected)
  }

  @Test
  def testSessionFunction() = {

    val sql =
      "SELECT " +
      "  COUNT(*), " +
      "  SESSION_START(proctime(), INTERVAL '15' MINUTE), " +
      "  SESSION_END(proctime(), INTERVAL '15' MINUTE) " +
      "FROM MyTable " +
      "GROUP BY SESSION(proctime(), INTERVAL '15' MINUTE)"

    val expected =
      unaryNode(
        "DataStreamCalc",
        unaryNode(
          "DataStreamAggregate",
          unaryNode(
            "DataStreamCalc",
            streamTableNode(0),
            term("select", "1970-01-01 00:00:00 AS $f0")
          ),
          term("window", ProcessingTimeSessionGroupWindow(Some('w$), 900000.millis)),
          term("select", "COUNT(*) AS EXPR$0, start('w$) AS w$start, end('w$) AS w$end")
        ),
        term("select", "EXPR$0, CAST(w$start) AS w$start, CAST(w$end) AS w$end")
      )

    streamUtil.verifySql(sql, expected)
  }

  @Test(expected = classOf[TableException])
  def testTumbleWindowNoOffset(): Unit = {
    val sqlQuery =
      "SELECT SUM(a) AS sumA, COUNT(b) AS cntB " +
      "FROM MyTable " +
      "GROUP BY TUMBLE(proctime(), INTERVAL '2' HOUR, TIME '10:00:00')"

    streamUtil.verifySql(sqlQuery, "n/a")
  }

  @Test(expected = classOf[TableException])
  def testHopWindowNoOffset(): Unit = {
    val sqlQuery =
      "SELECT SUM(a) AS sumA, COUNT(b) AS cntB " +
      "FROM MyTable " +
      "GROUP BY HOP(proctime(), INTERVAL '1' HOUR, INTERVAL '2' HOUR, TIME '10:00:00')"

    streamUtil.verifySql(sqlQuery, "n/a")
  }

  @Test(expected = classOf[TableException])
  def testSessionWindowNoOffset(): Unit = {
    val sqlQuery =
      "SELECT SUM(a) AS sumA, COUNT(b) AS cntB " +
      "FROM MyTable " +
      "GROUP BY SESSION(proctime(), INTERVAL '2' HOUR, TIME '10:00:00')"

    streamUtil.verifySql(sqlQuery, "n/a")
  }

  @Test(expected = classOf[TableException])
  def testVariableWindowSize() = {
    val sql = "SELECT COUNT(*) FROM MyTable GROUP BY TUMBLE(proctime(), c * INTERVAL '1' MINUTE)"
    streamUtil.verifySql(sql, "n/a")
  }

  @Test(expected = classOf[TableException])
  def testMultiWindow() = {
    val sql = "SELECT COUNT(*) FROM MyTable GROUP BY " +
      "FLOOR(rowtime() TO HOUR), FLOOR(rowtime() TO MINUTE)"
    val expected = ""
    streamUtil.verifySql(sql, expected)
  }

  @Test(expected = classOf[TableException])
  def testInvalidWindowExpression() = {
    val sql = "SELECT COUNT(*) FROM MyTable GROUP BY FLOOR(localTimestamp TO HOUR)"
    val expected = ""
    streamUtil.verifySql(sql, expected)
  }

  @Test
  def testUnboundPartitionedProcessingWindowWithRange() = {
    val sql = "SELECT " +
      "c, " +
      "count(a) OVER (PARTITION BY c ORDER BY ProcTime() RANGE UNBOUNDED preceding) as cnt1, " +
      "sum(a) OVER (PARTITION BY c ORDER BY ProcTime() RANGE UNBOUNDED preceding) as cnt2 " +
      "from MyTable"

    val expected =
      unaryNode(
        "DataStreamCalc",
        unaryNode(
          "DataStreamOverAggregate",
          unaryNode(
            "DataStreamCalc",
            streamTableNode(0),
            term("select", "a", "c", "PROCTIME() AS $2")
          ),
          term("partitionBy", "c"),
          term("orderBy", "PROCTIME"),
          term("range", "BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW"),
          term("select", "a", "c", "PROCTIME", "COUNT(a) AS w0$o0", "$SUM0(a) AS w0$o1")
        ),
        term("select", "c", "w0$o0 AS cnt1", "CASE(>(w0$o0, 0)", "CAST(w0$o1), null) AS cnt2")
      )

    streamUtil.verifySql(sql, expected)
  }

  @Test
  def testUnboundPartitionedProcessingWindowWithRow() = {
    val sql = "SELECT " +
      "c, " +
      "count(a) OVER (PARTITION BY c ORDER BY ProcTime() ROWS BETWEEN UNBOUNDED preceding AND " +
      "CURRENT ROW) as cnt1 " +
      "from MyTable"

    val expected =
      unaryNode(
        "DataStreamCalc",
        unaryNode(
          "DataStreamOverAggregate",
          unaryNode(
            "DataStreamCalc",
            streamTableNode(0),
            term("select", "a", "c", "PROCTIME() AS $2")
          ),
          term("partitionBy", "c"),
          term("orderBy", "PROCTIME"),
          term("rows", "BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW"),
          term("select", "a", "c", "PROCTIME", "COUNT(a) AS w0$o0")
        ),
        term("select", "c", "w0$o0 AS $1")
      )

    streamUtil.verifySql(sql, expected)
  }

  @Test
  def testUnboundNonPartitionedProcessingWindowWithRange() = {
    val sql = "SELECT " +
      "c, " +
      "count(a) OVER (ORDER BY ProcTime() RANGE UNBOUNDED preceding) as cnt1, " +
      "sum(a) OVER (ORDER BY ProcTime() RANGE UNBOUNDED preceding) as cnt2 " +
      "from MyTable"

    val expected =
      unaryNode(
        "DataStreamCalc",
        unaryNode(
          "DataStreamOverAggregate",
          unaryNode(
            "DataStreamCalc",
            streamTableNode(0),
            term("select", "a", "c", "PROCTIME() AS $2")
          ),
          term("orderBy", "PROCTIME"),
          term("range", "BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW"),
          term("select", "a", "c", "PROCTIME", "COUNT(a) AS w0$o0", "$SUM0(a) AS w0$o1")
        ),
        term("select", "c", "w0$o0 AS cnt1", "CASE(>(w0$o0, 0)", "CAST(w0$o1), null) AS cnt2")
      )

    streamUtil.verifySql(sql, expected)
  }

  @Test
  def testUnboundNonPartitionedProcessingWindowWithRow() = {
    val sql = "SELECT " +
      "c, " +
      "count(a) OVER (ORDER BY ProcTime() ROWS BETWEEN UNBOUNDED preceding AND " +
      "CURRENT ROW) as cnt1 " +
      "from MyTable"

    val expected =
      unaryNode(
        "DataStreamCalc",
        unaryNode(
          "DataStreamOverAggregate",
          unaryNode(
            "DataStreamCalc",
            streamTableNode(0),
            term("select", "a", "c", "PROCTIME() AS $2")
          ),
          term("orderBy", "PROCTIME"),
          term("rows", "BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW"),
          term("select", "a", "c", "PROCTIME", "COUNT(a) AS w0$o0")
        ),
        term("select", "c", "w0$o0 AS $1")
      )

    streamUtil.verifySql(sql, expected)
  }

  @Test
  def testUnboundNonPartitionedEventTimeWindowWithRange() = {
    val sql = "SELECT " +
      "c, " +
      "count(a) OVER (ORDER BY RowTime() RANGE UNBOUNDED preceding) as cnt1, " +
      "sum(a) OVER (ORDER BY RowTime() RANGE UNBOUNDED preceding) as cnt2 " +
      "from MyTable"

    val expected =
      unaryNode(
        "DataStreamCalc",
        unaryNode(
          "DataStreamOverAggregate",
          unaryNode(
            "DataStreamCalc",
            streamTableNode(0),
            term("select", "a", "c", "ROWTIME() AS $2")
          ),
          term("orderBy", "ROWTIME"),
          term("range", "BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW"),
          term("select", "a", "c", "ROWTIME", "COUNT(a) AS w0$o0", "$SUM0(a) AS w0$o1")
        ),
        term("select", "c", "w0$o0 AS cnt1", "CASE(>(w0$o0, 0)", "CAST(w0$o1), null) AS cnt2")
      )

    streamUtil.verifySql(sql, expected)
  }

  @Test
  def testUnboundPartitionedEventTimeWindowWithRange() = {
    val sql = "SELECT " +
      "c, " +
      "count(a) OVER (PARTITION BY c ORDER BY RowTime() RANGE UNBOUNDED preceding) as cnt1, " +
      "sum(a) OVER (PARTITION BY c ORDER BY RowTime() RANGE UNBOUNDED preceding) as cnt2 " +
      "from MyTable"

    val expected =
      unaryNode(
        "DataStreamCalc",
        unaryNode(
          "DataStreamOverAggregate",
          unaryNode(
            "DataStreamCalc",
            streamTableNode(0),
            term("select", "a", "c", "ROWTIME() AS $2")
          ),
          term("partitionBy", "c"),
          term("orderBy", "ROWTIME"),
          term("range", "BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW"),
          term("select", "a", "c", "ROWTIME", "COUNT(a) AS w0$o0", "$SUM0(a) AS w0$o1")
        ),
        term("select", "c", "w0$o0 AS cnt1", "CASE(>(w0$o0, 0)", "CAST(w0$o1), null) AS cnt2")
      )

    streamUtil.verifySql(sql, expected)
  }

  @Test
  def testBoundPartitionedRowTimeWindowWithRow() = {
    val sql = "SELECT " +
      "c, " +
      "count(a) OVER (PARTITION BY c ORDER BY RowTime() ROWS BETWEEN 5 preceding AND " +
      "CURRENT ROW) as cnt1 " +
      "from MyTable"

    val expected =
      unaryNode(
        "DataStreamCalc",
        unaryNode(
          "DataStreamOverAggregate",
          unaryNode(
            "DataStreamCalc",
            streamTableNode(0),
            term("select", "a", "c", "ROWTIME() AS $2")
          ),
          term("partitionBy", "c"),
          term("orderBy", "ROWTIME"),
          term("rows", "BETWEEN 5 PRECEDING AND CURRENT ROW"),
          term("select", "a", "c", "ROWTIME", "COUNT(a) AS w0$o0")
        ),
        term("select", "c", "w0$o0 AS $1")
      )

    streamUtil.verifySql(sql, expected)
  }

  @Test
  def testBoundNonPartitionedRowTimeWindowWithRow() = {
    val sql = "SELECT " +
      "c, " +
      "count(a) OVER (ORDER BY RowTime() ROWS BETWEEN 5 preceding AND " +
      "CURRENT ROW) as cnt1 " +
      "from MyTable"

    val expected =
      unaryNode(
        "DataStreamCalc",
        unaryNode(
          "DataStreamOverAggregate",
          unaryNode(
            "DataStreamCalc",
            streamTableNode(0),
            term("select", "a", "c", "ROWTIME() AS $2")
          ),
          term("orderBy", "ROWTIME"),
          term("rows", "BETWEEN 5 PRECEDING AND CURRENT ROW"),
          term("select", "a", "c", "ROWTIME", "COUNT(a) AS w0$o0")
        ),
        term("select", "c", "w0$o0 AS $1")
      )

    streamUtil.verifySql(sql, expected)
  }

  @Test
  def testBoundPartitionedRowTimeWindowWithRange() = {
    val sql = "SELECT " +
      "c, " +
      "count(a) OVER (PARTITION BY c ORDER BY RowTime() " +
      "RANGE BETWEEN INTERVAL '1' SECOND preceding AND CURRENT ROW) as cnt1 " +
      "from MyTable"

    val expected =
      unaryNode(
        "DataStreamCalc",
        unaryNode(
          "DataStreamOverAggregate",
          unaryNode(
            "DataStreamCalc",
            streamTableNode(0),
            term("select", "a", "c", "ROWTIME() AS $2")
          ),
          term("partitionBy", "c"),
          term("orderBy", "ROWTIME"),
          term("range", "BETWEEN 1000 PRECEDING AND CURRENT ROW"),
          term("select", "a", "c", "ROWTIME", "COUNT(a) AS w0$o0")
        ),
        term("select", "c", "w0$o0 AS $1")
      )

    streamUtil.verifySql(sql, expected)
  }

  @Test
  def testBoundNonPartitionedRowTimeWindowWithRange() = {
    val sql = "SELECT " +
      "c, " +
      "count(a) OVER (ORDER BY RowTime() " +
      "RANGE BETWEEN INTERVAL '1' SECOND preceding AND CURRENT ROW) as cnt1 " +
      "from MyTable"

    val expected =
      unaryNode(
        "DataStreamCalc",
        unaryNode(
          "DataStreamOverAggregate",
          unaryNode(
            "DataStreamCalc",
            streamTableNode(0),
            term("select", "a", "c", "ROWTIME() AS $2")
          ),
          term("orderBy", "ROWTIME"),
          term("range", "BETWEEN 1000 PRECEDING AND CURRENT ROW"),
          term("select", "a", "c", "ROWTIME", "COUNT(a) AS w0$o0")
        ),
        term("select", "c", "w0$o0 AS $1")
      )

    streamUtil.verifySql(sql, expected)
  }

  @Test
  def testBoundNonPartitionedProcTimeWindowWithRowRange() = {
    val sql = "SELECT " +
      "c, " +
      "count(a) OVER (ORDER BY procTime() ROWS BETWEEN 2 preceding AND " +
      "CURRENT ROW) as cnt1 " +
      "from MyTable"

    val expected =
      unaryNode(
        "DataStreamCalc",
        unaryNode(
          "DataStreamOverAggregate",
          unaryNode(
            "DataStreamCalc",
            streamTableNode(0),
            term("select", "a", "c", "PROCTIME() AS $2")
          ),
          term("orderBy", "PROCTIME"),
          term("rows", "BETWEEN 2 PRECEDING AND CURRENT ROW"),
          term("select", "a", "c", "PROCTIME", "COUNT(a) AS w0$o0")
        ),
        term("select", "c", "w0$o0 AS $1")
      )

    streamUtil.verifySql(sql, expected)
  }

  @Test
  def testBoundPartitionedProcTimeWindowWithRowRange() = {
    val sql = "SELECT " +
      "c, " +
      "count(a) OVER (PARTITION BY c ORDER BY procTime() ROWS BETWEEN 2 preceding AND " +
      "CURRENT ROW) as cnt1 " +
      "from MyTable"

    val expected =
      unaryNode(
        "DataStreamCalc",
        unaryNode(
          "DataStreamOverAggregate",
          unaryNode(
            "DataStreamCalc",
            streamTableNode(0),
            term("select", "a", "c", "PROCTIME() AS $2")
          ),
          term("partitionBy", "c"),
          term("orderBy", "PROCTIME"),
          term("rows", "BETWEEN 2 PRECEDING AND CURRENT ROW"),
          term("select", "a", "c", "PROCTIME", "COUNT(a) AS w0$o0")
        ),
        term("select", "c", "w0$o0 AS $1")
      )

    streamUtil.verifySql(sql, expected)
  }
}
muchas/reactive-scala
src/main/scala/shop/ProductCatalogRouter.scala
package shop

import akka.actor.{Actor, Props, Terminated}
import akka.routing.{ActorRefRoutee, RoundRobinRoutingLogic, Router}
import shop.ProductCatalogManager.SearchRequest

class ProductCatalogRouter extends Actor {

  private def createCatalogManager() =
    context.actorOf(Props(new ProductCatalogManager("./query_result")))

  var router = {
    val routees = Vector.fill(3) {
      val r = createCatalogManager()
      context watch r
      ActorRefRoutee(r)
    }
    Router(RoundRobinRoutingLogic(), routees)
  }

  def receive = {
    case w: SearchRequest ⇒
      router.route(w, sender())
    case Terminated(a) ⇒
      // replace a failed routee with a fresh catalog manager
      router = router.removeRoutee(a)
      val r = createCatalogManager()
      context watch r
      router = router.addRoutee(r)
  }
}
muchas/reactive-scala
src/main/scala/shop/WebServer.scala
package shop

import akka.actor.{ActorSystem, Props}
import akka.http.scaladsl.Http
import akka.http.scaladsl.marshallers.sprayjson.SprayJsonSupport
import akka.http.scaladsl.server.Directives
import akka.pattern.ask
import akka.stream.ActorMaterializer
import akka.util.Timeout
import spray.json._

import scala.concurrent.duration._
import scala.concurrent.{Await, ExecutionContextExecutor}
import scala.io.StdIn

final case class ItemsEnvelope(items: List[Item])

trait JsonSupport extends SprayJsonSupport with DefaultJsonProtocol {
  import ItemJsonProtocol._

  implicit val itemsFormat: RootJsonFormat[Item] = jsonFormat4(Item)
  implicit val envelopeFormat: RootJsonFormat[ItemsEnvelope] = jsonFormat1(ItemsEnvelope)
}

object WebServer extends Directives with JsonSupport {
  import ProductCatalogManager._

  def main(args: Array[String]) {
    implicit val system: ActorSystem = ActorSystem()
    implicit val materializer: ActorMaterializer = ActorMaterializer()
    // needed for the future flatMap/onComplete at the end
    implicit val executionContext: ExecutionContextExecutor = system.dispatcher
    implicit val timeout = Timeout(120 seconds)

    val catalog = system.actorOf(Props[ProductCatalogRouter])

    val route =
      path("products") {
        parameters('search) { search =>
          get {
            val futureItems = catalog ? SearchRequest(search)
            val response = Await.result(futureItems, timeout.duration).asInstanceOf[SearchResponse]
            complete(ItemsEnvelope(response.items))
          }
        }
      }

    val bindingFuture = Http().bindAndHandle(route, "localhost", 8080)

    println(s"Server online at http://localhost:8080/\nPress RETURN to stop...")
    StdIn.readLine() // let it run until user presses return
    bindingFuture
      .flatMap(_.unbind())                 // trigger unbinding from the port
      .onComplete(_ => system.terminate()) // and shutdown when done
  }
}
muchas/reactive-scala
src/main/scala/shop/ProductCatalog.scala
package shop

import java.net.URI

case class ProductCatalog(items: Map[URI, (Item, Int)]) {

  def addItem(it: Item, count: Int): ProductCatalog = {
    val currentCount = if (items contains it.id) items(it.id)._2 else 0
    copy(items = items.updated(it.id, (it, currentCount + count)))
  }

  def removeItem(id: URI, count: Int): ProductCatalog = {
    val item = if (items contains id) items(id)._1 else null
    val newCount = if (items contains id) Math.max(items(id)._2 - count, 0) else 0

    if (newCount <= 0) {
      copy(items = items - id)
    } else if (item != null) {
      copy(items = items.updated(id, (item, newCount)))
    } else {
      copy(items)
    }
  }

  def itemsCount(): Int = items.values.map(x => x._2).sum

  def itemsList(): Seq[Item] = items.values.map(x => x._1).toList

  // score an item: 2 points per query word matching both name and brand, 1 point for either
  private def countInKeywords(item: Item, query: String): Int = {
    val words = query.split(" ").map(_.toLowerCase)
    words
      .toStream
      .map(x => (item.name contains x, item.brand contains x))
      .map(x => if (x._1 && x._2) 2 else if (x._1 || x._2) 1 else 0)
      .sum
  }

  def search(query: String, limit: Int): List[Item] =
    items.values
      .toStream
      .filter(_._2 > 0)
      .map(_._1)
      .sortBy(-countInKeywords(_, query))
      .take(limit)
      .toList
}

object ProductCatalog {
  val empty = ProductCatalog(Map.empty)

  def fromCSV(path: String): ProductCatalog = {
    var catalog = ProductCatalog.empty
    val items = scala.io.Source.fromFile(path)
      .getLines
      .toStream
      .tail // skip the CSV header line
      .map(_.split(",").map(_.replace("\"", "")))
      .filter(_.length > 2)
      .map(x => Item(URI.create(x(0)), x(1).trim, 10, x(2).trim))
      .filterNot(x => x.brand contains "NULL")

    for (item <- items) catalog = catalog.addItem(item, 1)
    catalog
  }
}
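A small illustrative use of the catalog and its keyword scoring; the items and query below are made up for the example, not taken from the repository's data:

// Illustrative only: build a tiny catalog and search it.
val catalog = ProductCatalog.empty
  .addItem(Item(URI.create("1"), "gouda cheese", BigDecimal(10), "Brand A"), 5)
  .addItem(Item(URI.create("2"), "milk", BigDecimal(3), "Brand B"), 2)

// "cheese" matches the first item's name (score 1) and nothing in the second (score 0),
// so the first item sorts to the front of the result list.
val hits = catalog.search("cheese", limit = 10)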
muchas/reactive-scala
src/test/scala/shopTest/CartSpec.scala
package shopTest

import java.net.URI

import akka.actor.ActorSystem
import akka.testkit.TestKit
import org.scalatest.{BeforeAndAfterAll, BeforeAndAfterEach, WordSpecLike}
import shop._

class CartSpec extends TestKit(ActorSystem("CartSpec"))
  with WordSpecLike with BeforeAndAfterAll with BeforeAndAfterEach {

  var item: Item = _
  var cart: Cart = Cart.empty

  override def beforeEach(): Unit = {
    cart = Cart.empty
    item = Item(URI.create("example"), "name", 1)
  }

  override def afterAll(): Unit = {
    system.terminate
  }

  "A Cart" must {

    "add item" in {
      cart = cart.addItem(item, 1)
      assert(cart.items.size == 1)
      assert(cart.itemsCount() == 1)
      assert(cart.items contains item.id)
    }

    "remove item" in {
      cart = cart.addItem(item, 1)
      cart = cart.removeItem(item.id, 1)
      assert(cart.items.size == 0)
      assert(cart.itemsCount() == 0)
      assert(!(cart.items contains item.id))
    }

    "be empty at start" in {
      assert(cart.itemsCount() == 0)
    }

    "add multiple items" in {
      val item2 = Item(URI.create("example3"), "name", 1)
      val item3 = Item(URI.create("example4"), "name", 1)
      cart = cart.addItem(item, 1)
      cart = cart.addItem(item2, 1)
      cart = cart.addItem(item3, 1)
      assert(cart.items.size == 3)
      assert(cart.itemsCount() == 3)
    }

    "remove multiple items" in {
      val item2 = Item(URI.create("example3"), "name", 1)
      val item3 = Item(URI.create("example4"), "name", 1)
      cart = cart.addItem(item, 1)
      cart = cart.addItem(item2, 1)
      cart = cart.addItem(item3, 1)
      cart = cart.removeItem(item.id, 1)
      cart = cart.removeItem(item2.id, 1)
      assert(cart.items.size == 1)
      assert(cart.itemsCount() == 1)
      assert(!(cart.items contains item.id))
      assert(!(cart.items contains item2.id))
    }

    "add same item with count > 0" in {
      cart = cart.addItem(item, 4)
      assert(cart.items.size == 1)
      assert(cart.itemsCount() == 4)
    }

    "add same item multiple times" in {
      cart = cart.addItem(item, 4)
      cart = cart.addItem(item, 3)
      cart = cart.addItem(item, 2)
      assert(cart.items.size == 1)
      assert(cart.itemsCount() == 9)
    }

    "add many items multiple times" in {
      val item2 = Item(URI.create("example3"), "name", 1)
      val item3 = Item(URI.create("example4"), "name", 1)
      cart = cart.addItem(item, 4)
      cart = cart.addItem(item2, 3)
      cart = cart.addItem(item3, 2)
      assert(cart.items.size == 3)
      assert(cart.itemsCount() == 9)
    }

    "remove only added number of items" in {
      cart = cart.addItem(item, 1)
      cart = cart.removeItem(item.id, 5)
      assert(cart.items.size == 0)
      assert(cart.itemsCount() == 0)
    }

    "tolerate removal of non-existent item" in {
      cart = cart.addItem(item, 1)
      cart = cart.removeItem(URI.create("unknownId"), 10)
      assert(cart.items.size == 1)
      assert(cart.itemsCount() == 1)
    }
  }
}
muchas/reactive-scala
src/test/scala/shopTest/CartManagerAsyncSpec.scala
package shopTest

import java.net.URI

import akka.actor.{ActorRef, ActorSystem, PoisonPill, Props}
import akka.testkit.{ImplicitSender, TestKit, TestProbe}
import org.scalatest.{BeforeAndAfterAll, BeforeAndAfterEach, WordSpecLike}
import shop.CartManager.GetItemsResponse
import shop._

class CartManagerAsyncSpec extends TestKit(ActorSystem("CartManagerAsyncSpec"))
  with WordSpecLike with BeforeAndAfterAll with BeforeAndAfterEach with ImplicitSender {

  var customer: TestProbe = _
  var checkout: ActorRef = _
  var item: Item = _
  var cart: ActorRef = _

  private def createCartManagerActor(id: String): ActorRef = {
    system.actorOf(Props(new CartManager(customer.ref, id) {
      override def createCheckout(): ActorRef = checkout
    }))
  }

  override def beforeEach(): Unit = {
    customer = TestProbe()
    checkout = TestProbe().ref
    item = Item(URI.create("predefinedItem"), "name", 1)
  }

  override def afterAll(): Unit = {
    TestKit.shutdownActorSystem(system)
  }

  "A Cart" must {

    "add item" in {
      cart = createCartManagerActor("ct-001")
      cart ! CartManager.AddItem(item, 1)
      expectMsg(CartManager.ItemAdded(item, 1))
    }

    "remove item" in {
      cart = createCartManagerActor("ct-002")
      cart ! CartManager.AddItem(item, 1)
      expectMsg(CartManager.ItemAdded(item, 1))
      cart ! CartManager.RemoveItem(item, 1)
      expectMsg(CartManager.ItemRemoved(item, 1))
    }

    "inform about checkout start" in {
      cart = createCartManagerActor("ct-003")
      cart ! CartManager.AddItem(item, 1)
      expectMsg(CartManager.ItemAdded(item, 1))
      cart ! CartManager.StartCheckout
      customer.expectMsg(CartManager.CheckoutStarted(checkout))
    }

    "inform about empty cart" in {
      cart = createCartManagerActor("ct-004")
      cart ! CartManager.AddItem(item, 1)
      expectMsg(CartManager.ItemAdded(item, 1))
      cart ! CartManager.RemoveItem(item, 1)
      expectMsg(CartManager.ItemRemoved(item, 1))
      customer.expectMsg(CartManager.CartEmpty)
    }

    "inform about empty cart when checkout closed" in {
      cart = createCartManagerActor("ct-005")
      cart ! CartManager.AddItem(item, 1)
      expectMsg(CartManager.ItemAdded(item, 1))
      cart ! CartManager.StartCheckout
      customer.expectMsg(CartManager.CheckoutStarted(checkout))
      cart ! CartManager.CheckoutClosed
      customer.expectMsg(CartManager.CartEmpty)
    }

    "inform about empty cart when timer expired" in {
      cart = createCartManagerActor("ct-006")
      cart ! CartManager.AddItem(item, 1)
      expectMsg(CartManager.ItemAdded(item, 1))
      cart ! CartManager.CartTimeExpired
      customer.expectMsg(CartManager.CartEmpty)
    }

    "add an item to the shopping cart and preserve it after restart" in {
      val cartManagerId = "ct-007"
      cart = createCartManagerActor(cartManagerId)
      cart ! CartManager.AddItem(item, 1)
      expectMsg(CartManager.ItemAdded(item, 1))
      cart ! PoisonPill

      val cart2 = createCartManagerActor(cartManagerId)
      cart2 ! CartManager.GetItemsRequest
      expectMsg(GetItemsResponse(Seq(item)))
    }
  }
}
muchas/reactive-scala
src/main/scala/reactive2/fsm/Cart.scala
package reactive2.fsm

import akka.actor.{ActorRef, FSM, Props}

import scala.collection.mutable.ListBuffer
import scala.concurrent.duration._

final case class ItemAdded(item: Item)
final case class ItemRemoved(item: Item)
final case class CheckoutCreated(checkout: ActorRef)
case object CheckoutStarted
case object CheckoutClosed
case object CheckoutCancelled

sealed trait CartState
case object Empty extends CartState
case object NonEmpty extends CartState
case object InCheckout extends CartState

sealed trait CartData
case object None extends CartData
final case class ItemStore(checkout: ActorRef, items: ListBuffer[Item]) extends CartData

case class Item(name: String)

class Cart extends FSM[CartState, CartData] {

  var checkout: ActorRef = null

  startWith(Empty, None)

  when(Empty) {
    case Event(ItemAdded(item), None) =>
      goto(NonEmpty) using ItemStore(null, ListBuffer(item))
  }

  when(NonEmpty, stateTimeout = 5 seconds) {
    case Event(ItemAdded(item), store @ ItemStore(_, items)) =>
      items += item
      stay using ItemStore(null, items)
    case Event(ItemRemoved(item), store @ ItemStore(_, items)) if items.length > 1 =>
      items -= item
      stay using ItemStore(null, items)
    case Event(ItemRemoved(item), store @ ItemStore(_, items)) if items.length == 1 =>
      goto(Empty) using None
    case Event(CheckoutStarted, store @ ItemStore(_, items)) =>
      checkout = context.actorOf(Props(new Checkout(self)), "checkout")
      sender ! CheckoutCreated(checkout)
      goto(InCheckout) using ItemStore(checkout, items)
    case Event(StateTimeout, _) =>
      goto(Empty) using None
  }

  when(InCheckout) {
    case Event(CheckoutCancelled, store: ItemStore) =>
      goto(NonEmpty) using store
    case Event(CheckoutClosed, _) =>
      goto(Empty) using None
  }

  initialize()
}
muchas/reactive-scala
src/main/scala/shop/PaymentService.scala
package shop

import akka.actor.SupervisorStrategy.{Restart, Stop}
import akka.actor.{Actor, ActorLogging, ActorRef, OneForOneStrategy, Props, Terminated}
import akka.event.LoggingReceive
import akka.http.scaladsl.Http
import akka.http.scaladsl.model.{HttpRequest, HttpResponse, StatusCodes}
import akka.stream.{ActorMaterializer, ActorMaterializerSettings}
import akka.util.ByteString

import scala.concurrent.duration._
import scala.concurrent.{Await, Future}

object PaymentService {
  case class DoPayment(paymentType: String)
  case object PaymentConfirmed
  case object PaymentReceived
}

class PaymentService(customer: ActorRef, checkout: ActorRef) extends Actor with ActorLogging {

  val MAX_RETRIES = 5
  var current_retry = 1

  override val supervisorStrategy: OneForOneStrategy =
    OneForOneStrategy(loggingEnabled = false) {
      case _: BadRequestException =>
        log.warning("Bad request")
        if (current_retry <= MAX_RETRIES) {
          current_retry += 1
          Restart
        } else {
          Stop
        }
      case _: InvalidPaymentException =>
        log.warning("Payment Invalid")
        if (current_retry <= MAX_RETRIES) {
          current_retry += 1
          Restart
        } else {
          Stop
        }
      case e =>
        log.error("Unexpected failure: {}", e.getMessage)
        Stop
    }

  def receive = LoggingReceive {
    case PaymentService.DoPayment(paymentType: String) =>
      context.actorOf(Props(new PaymentHTTPWorker(paymentType, customer, checkout)))
  }
}

object PaymentHTTPWorker {
  case object PaymentConfirmed
}

class PaymentHTTPWorker(paymentType: String, customer: ActorRef, checkout: ActorRef)
  extends Actor with ActorLogging {

  import PaymentService._

  final implicit val materializer: ActorMaterializer =
    ActorMaterializer(ActorMaterializerSettings(context.system))

  import akka.pattern.pipe
  import context.dispatcher

  val http = Http(context.system)

  override def preStart(): Unit = {
    http.singleRequest(HttpRequest(uri = "http://httpbin.org/status/500"))
      .pipeTo(self)
  }

  def receive: Receive = LoggingReceive {
    case resp @ HttpResponse(StatusCodes.OK, headers, entity, _) =>
      entity.dataBytes.runFold(ByteString(""))(_ ++ _).foreach { body =>
        println("Got response, body: " + body.utf8String)
        resp.discardEntityBytes()
        println("Success! Confirming payment!")
        customer ! PaymentConfirmed
        checkout ! Checkout.PaymentReceived
        shutdown()
      }
    case resp @ HttpResponse(StatusCodes.BadRequest, _, _, _) =>
      println("Bad request")
      resp.discardEntityBytes()
      // the supervisor strategy in PaymentService decides whether to restart this worker
      throw new BadRequestException
    case resp @ HttpResponse(code, _, _, _) =>
      println("Request failed, response code: " + code)
      resp.discardEntityBytes()
      // the supervisor strategy in PaymentService decides whether to restart this worker
      throw new InvalidPaymentException
  }

  def shutdown(): Future[Terminated] = {
    Await.result(http.shutdownAllConnectionPools(), Duration.Inf)
    context.system.terminate()
  }
}

class BadRequestException extends Exception("Bad request")
class InvalidPaymentException extends Exception("Repeat")
muchas/reactive-scala
src/main/scala/shop/Checkout.scala
package shop

import akka.actor.{ActorRef, Props, Timers}
import akka.event.LoggingReceive
import akka.persistence.PersistentActor

import scala.concurrent.duration._

object Checkout {
  // Protocol
  case object DeliveryMethodRequest
  case class DeliveryMethodResponse(method: String)
  case object PaymentMethodRequest
  case class PaymentMethodResponse(method: String)
  case object StateRequest
  case class StateResponse(state: CheckoutState)
  case object Cancelled
  case object CheckoutTimerExpired
  case object PaymentTimerExpired
  case object PaymentReceived
  case class PaymentServiceStarted(payment: ActorRef)

  // States
  sealed trait CheckoutState
  case class SelectingDelivery(timestamp: Long) extends CheckoutState
  case class SelectingPaymentMethod(timestamp: Long) extends CheckoutState
  case class ProcessingPayment(timestamp: Long) extends CheckoutState
  case object CheckoutCancelled extends CheckoutState
  case object CheckoutClosed extends CheckoutState

  // Events
  sealed trait Event
  case class PaymentSelected(method: String) extends Event
  case class DeliveryMethodSelected(method: String) extends Event
  private case class StateChanged(state: CheckoutState) extends Event
}

class Checkout(customer: ActorRef, cart: ActorRef, id: String)
  extends PersistentActor with Timers {

  import Checkout._

  val checkoutTimeout: FiniteDuration = 120 seconds
  val paymentTimeout: FiniteDuration = 120 seconds

  var paymentMethod = "default"
  var deliveryMethod = "default"

  override def persistenceId: String = id

  def this(customer: ActorRef, cart: ActorRef) = {
    this(customer, cart, "persistent-checkout-id-1")
  }

  private def startCheckoutTimer(timestamp: Long, time: FiniteDuration): Unit = {
    timers.startSingleTimer("checkout-timer-" + timestamp, CheckoutTimerExpired, time)
  }

  private def startPaymentTimer(timestamp: Long, time: FiniteDuration): Unit = {
    timers.startSingleTimer("payment-timer-" + timestamp, PaymentTimerExpired, time)
  }

  private def cancelTimers(): Unit = {
    timers.cancelAll()
  }

  private def calculateElapsedTime(timestamp: Long): FiniteDuration = {
    val now = System.currentTimeMillis()
    val diff = Math.max((now - timestamp) / 1000.0, 0)
    diff.seconds
  }

  private def updateState(event: Event): Unit = {
    event match {
      case DeliveryMethodSelected(method) => deliveryMethod = method
      case PaymentSelected(method) => paymentMethod = method
      case StateChanged(state) => state match {
        case SelectingDelivery(timestamp) =>
          cancelTimers()
          startCheckoutTimer(timestamp, checkoutTimeout - calculateElapsedTime(timestamp))
          context become selectingDelivery
        case SelectingPaymentMethod(timestamp) =>
          cancelTimers()
          startCheckoutTimer(timestamp, checkoutTimeout - calculateElapsedTime(timestamp))
          context become selectingPaymentMethod
        case ProcessingPayment(timestamp) =>
          cancelTimers()
          startPaymentTimer(timestamp, paymentTimeout - calculateElapsedTime(timestamp))
          context become processingPayment
        case CheckoutCancelled =>
          context.stop(self)
        case _ =>
      }
    }
  }

  override def preStart(): Unit = {
    super.preStart()
    startCheckoutTimer(System.currentTimeMillis(), checkoutTimeout)
  }

  def selectingDelivery: Receive = LoggingReceive {
    case DeliveryMethodSelected(method) =>
      val now = System.currentTimeMillis()
      persist(DeliveryMethodSelected(method)) { event =>
        updateState(event)
        persist(StateChanged(SelectingPaymentMethod(now))) { event =>
          updateState(event)
          customer ! DeliveryMethodSelected(method)
        }
      }
    case Cancelled | CheckoutTimerExpired =>
      persist(StateChanged(CheckoutCancelled)) { event =>
        cart ! CartManager.CheckoutCancelled
        updateState(event)
      }
    case StateRequest =>
      sender ! StateResponse(SelectingDelivery(0))
  }

  def selectingPaymentMethod: Receive = LoggingReceive {
    case PaymentSelected(method) =>
      val now = System.currentTimeMillis()
      persist(PaymentSelected(method)) { event =>
        updateState(event)
        persist(StateChanged(ProcessingPayment(now))) { event =>
          updateState(event)
          customer ! PaymentServiceStarted(createPayment())
        }
      }
    case Cancelled | CheckoutTimerExpired =>
      persist(StateChanged(CheckoutCancelled)) { event =>
        cart ! CartManager.CheckoutCancelled
        updateState(event)
      }
    case DeliveryMethodRequest =>
      sender ! DeliveryMethodResponse(deliveryMethod)
    case StateRequest =>
      sender ! StateResponse(SelectingPaymentMethod(0))
  }

  def createPayment(): ActorRef = {
    context.actorOf(Props(new PaymentService(customer, self)), "paymentsService")
  }

  def processingPayment: Receive = LoggingReceive {
    case PaymentReceived =>
      persist(StateChanged(CheckoutClosed)) { event =>
        customer ! CartManager.CheckoutClosed
        cart ! CartManager.CheckoutClosed
        updateState(event)
      }
    case Cancelled | PaymentTimerExpired =>
      persist(StateChanged(CheckoutCancelled)) { event =>
        cart ! CartManager.CheckoutCancelled
        updateState(event)
      }
    case DeliveryMethodRequest =>
      sender ! DeliveryMethodResponse(deliveryMethod)
    case PaymentMethodRequest =>
      sender ! PaymentMethodResponse(paymentMethod)
    case StateRequest =>
      sender ! StateResponse(ProcessingPayment(0))
  }

  override def receiveCommand: Receive = selectingDelivery

  override def receiveRecover: Receive = LoggingReceive {
    case event: Event => updateState(event)
  }
}
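One detail in the file above worth spelling out: on recovery, replaying a StateChanged event re-arms the matching timer with only the time remaining, not a fresh budget. A worked instance of that arithmetic with illustrative numbers:

// Illustrative: suppose StateChanged(SelectingDelivery(t0)) was persisted 30 seconds
// before a restart. During replay, updateState computes:
//   calculateElapsedTime(t0)                    == 30.seconds  (wall clock minus t0)
//   checkoutTimeout - calculateElapsedTime(t0)  == 90.seconds  (120s budget, 30s used)
// so the re-armed checkout timer fires 90 seconds after recovery, not 120.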
muchas/reactive-scala
src/main/scala/shop/Item.scala
package shop

import java.net.URI

case class Item(id: URI, name: String, price: BigDecimal, brand: String = "")
muchas/reactive-scala
src/main/scala/shop/CartManager.scala
package shop

import akka.actor.{ActorRef, Props, Timers}
import akka.event.LoggingReceive
import akka.persistence.{PersistentActor, SnapshotOffer}

import scala.concurrent.duration._

object CartManager {
  sealed trait CartState
  case object Empty extends CartState
  case class NonEmpty(timestamp: Long) extends CartState
  case object InCheckout extends CartState

  case object StartCheckout
  case class CheckoutStarted(checkout: ActorRef)
  case object CheckoutCancelled
  case object CheckoutClosed
  case object CartTimeExpired
  case object CartEmpty

  sealed trait Event

  // Protocol
  case object GetItemsRequest
  case class GetItemsResponse(items: Seq[Item])

  // Events
  case class AddItem(item: Item, count: Int) extends Event
  case class ItemAdded(item: Item, count: Int)
  case class RemoveItem(item: Item, count: Int) extends Event
  case class ItemRemoved(item: Item, count: Int)
  case object ClearCart extends Event
  private case class ChangeState(state: CartState) extends Event
}

class CartManager(customer: ActorRef, id: String, var shoppingCart: Cart)
  extends PersistentActor with Timers {

  import CartManager._

  val cartTimeout: FiniteDuration = 120 seconds

  override def persistenceId: String = id

  def this(customer: ActorRef) = {
    this(customer, "persistent-cart-manager-id-1", Cart.empty)
  }

  def this(customer: ActorRef, id: String) = {
    this(customer, id, Cart.empty)
  }

  private def startTimer(timestamp: Long, time: FiniteDuration): Unit = {
    timers.startSingleTimer("cart-timer-" + timestamp, CartTimeExpired, time)
  }

  private def cancelTimer(): Unit = {
    timers.cancelAll()
  }

  private def updateState(event: Event): Unit = {
    event match {
      case ClearCart => shoppingCart = Cart.empty
      case AddItem(item, count) => shoppingCart = shoppingCart.addItem(item, count)
      case RemoveItem(item, count) => shoppingCart = shoppingCart.removeItem(item.id, count)
      case ChangeState(state) => state match {
        case Empty =>
          cancelTimer()
          context become empty
        case NonEmpty(timestamp) =>
          val now = System.currentTimeMillis()
          val diff = Math.max((now - timestamp) / 1000.0, 0)
          startTimer(timestamp, cartTimeout - diff.seconds)
          context become nonEmpty
        case InCheckout =>
          cancelTimer()
          context become inCheckout
      }
    }
  }

  private def becomeEmpty(): Unit = {
    persist(ClearCart) { event =>
      updateState(event)
      customer ! CartEmpty
      persist(ChangeState(Empty)) { event => updateState(event) }
    }
  }

  private def becomeNonEmpty(): Unit = {
    val now = System.currentTimeMillis()
    persist(ChangeState(NonEmpty(now))) { event => updateState(event) }
  }

  def empty: Receive = LoggingReceive {
    case AddItem(item, count) =>
      persist(AddItem(item, count)) { event =>
        updateState(event)
        sender ! ItemAdded(item, count)
        becomeNonEmpty()
      }
    case GetItemsRequest =>
      sender ! GetItemsResponse(items = shoppingCart.itemsList())
  }

  def nonEmpty: Receive = LoggingReceive {
    case CartTimeExpired => becomeEmpty()
    case ClearCart => becomeEmpty()
    case AddItem(item, count) =>
      persist(AddItem(item, count)) { event =>
        updateState(event)
        sender ! ItemAdded(item, count)
      }
    case RemoveItem(item, count) if shoppingCart.items contains item.id =>
      persist(RemoveItem(item, count)) { event =>
        updateState(event)
        sender ! ItemRemoved(item, count)
        if (shoppingCart.itemsCount() == 0) becomeEmpty()
      }
    case StartCheckout =>
      customer ! CheckoutStarted(createCheckout())
      persist(ChangeState(InCheckout)) { event => updateState(event) }
    case GetItemsRequest =>
      sender ! GetItemsResponse(items = shoppingCart.itemsList())
  }

  def createCheckout(): ActorRef = {
    context.actorOf(Props(new Checkout(customer, self)), "checkout")
  }

  def inCheckout: Receive = LoggingReceive {
    case CheckoutClosed => becomeEmpty()
    case CheckoutCancelled => becomeNonEmpty()
    case GetItemsRequest =>
      sender ! GetItemsResponse(items = shoppingCart.itemsList())
  }

  override def receiveCommand: Receive = empty

  override def receiveRecover: Receive = LoggingReceive {
    case event: Event => updateState(event)
    case SnapshotOffer(_, snapshot: Cart) => shoppingCart = snapshot
  }
}
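Note that receiveRecover above accepts a SnapshotOffer, but nothing in this file ever calls saveSnapshot, so that branch only fires if a snapshot was written by some other means. A sketch of where such a call could plausibly go — this is an assumption, not part of the original source:

// Hypothetical addition inside CartManager's nonEmpty behavior (not in the original):
case AddItem(item, count) =>
  persist(AddItem(item, count)) { event =>
    updateState(event)
    saveSnapshot(shoppingCart) // standard PersistentActor API; Cart is an immutable case class
    sender ! ItemAdded(item, count)
  }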
muchas/reactive-scala
src/test/scala/shopTest/CheckoutSpec.scala
package shopTest

import akka.actor.{ActorRef, ActorSystem, PoisonPill, Props}
import akka.testkit.{ImplicitSender, TestKit, TestProbe}
import org.scalatest.{BeforeAndAfterAll, BeforeAndAfterEach, WordSpecLike}
import shop._

class CheckoutSpec extends TestKit(ActorSystem("CheckoutSpec"))
  with WordSpecLike with BeforeAndAfterAll with BeforeAndAfterEach with ImplicitSender {

  var customer: TestProbe = _
  var cart: TestProbe = _
  var paymentService: ActorRef = _
  var checkout: ActorRef = _

  private def createCheckoutActor(id: String): ActorRef = {
    system.actorOf(Props(new Checkout(customer.ref, cart.ref, id) {
      override def createPayment(): ActorRef = paymentService
    }))
  }

  override def beforeEach(): Unit = {
    customer = TestProbe()
    cart = TestProbe()
    paymentService = TestProbe().ref
  }

  override def afterAll(): Unit = {
    system.terminate
  }

  "A Checkout" must {

    "inform about cart close" in {
      checkout = createCheckoutActor("checkout-test-01")
      val deliveryMethod = "dhl"
      customer.send(checkout, Checkout.DeliveryMethodSelected(deliveryMethod))
      customer.expectMsg(Checkout.DeliveryMethodSelected(deliveryMethod))
      customer.send(checkout, Checkout.PaymentSelected("paypal"))
      customer.expectMsg(Checkout.PaymentServiceStarted(paymentService))
      checkout ! Checkout.PaymentReceived
      cart.expectMsg(CartManager.CheckoutClosed)
    }

    "inform about cart cancel when checkout cancelled" in {
      checkout = createCheckoutActor("checkout-test-02")
      val deliveryMethod = "dhl"
      customer.send(checkout, Checkout.DeliveryMethodSelected(deliveryMethod))
      customer.expectMsg(Checkout.DeliveryMethodSelected(deliveryMethod))
      customer.send(checkout, Checkout.PaymentSelected("paypal"))
      customer.expectMsg(Checkout.PaymentServiceStarted(paymentService))
      checkout ! Checkout.Cancelled
      cart.expectMsg(CartManager.CheckoutCancelled)
    }

    "inform about cart cancel when payment expired" in {
      checkout = createCheckoutActor("checkout-test-03")
      val deliveryMethod = "dhl"
      customer.send(checkout, Checkout.DeliveryMethodSelected(deliveryMethod))
      customer.expectMsg(Checkout.DeliveryMethodSelected(deliveryMethod))
      customer.send(checkout, Checkout.PaymentSelected("paypal"))
      customer.expectMsg(Checkout.PaymentServiceStarted(paymentService))
      checkout ! Checkout.PaymentTimerExpired
      cart.expectMsg(CartManager.CheckoutCancelled)
    }

    "inform about cart cancel when checkout expired" in {
      checkout = createCheckoutActor("checkout-test-04")
      val deliveryMethod = "dhl"
      customer.send(checkout, Checkout.DeliveryMethodSelected(deliveryMethod))
      customer.expectMsg(Checkout.DeliveryMethodSelected(deliveryMethod))
      checkout ! Checkout.CheckoutTimerExpired
      cart.expectMsg(CartManager.CheckoutCancelled)
    }

    "select delivery method and preserve it after restart" in {
      val checkoutId = "checkout-test-010"
      val deliveryMethod = "dhl"
      checkout = createCheckoutActor(checkoutId)
      customer.send(checkout, Checkout.DeliveryMethodSelected(deliveryMethod))
      customer.expectMsg(Checkout.DeliveryMethodSelected(deliveryMethod))
      checkout ! PoisonPill

      val checkout2 = createCheckoutActor(checkoutId)
      checkout2 ! Checkout.DeliveryMethodRequest
      expectMsg(Checkout.DeliveryMethodResponse(deliveryMethod))
      checkout2 ! Checkout.StateRequest
      expectMsg(Checkout.StateResponse(Checkout.SelectingPaymentMethod(0)))
    }

    "select payment method and preserve it after restart" in {
      val checkoutId = "checkout-test-011"
      val deliveryMethod = "dhl"
      val paymentMethod = "stripe"
      checkout = createCheckoutActor(checkoutId)
      customer.send(checkout, Checkout.DeliveryMethodSelected(deliveryMethod))
      customer.expectMsg(Checkout.DeliveryMethodSelected(deliveryMethod))
      customer.send(checkout, Checkout.PaymentSelected(paymentMethod))
      customer.expectMsg(Checkout.PaymentServiceStarted(paymentService))
      checkout ! PoisonPill

      val checkout2 = createCheckoutActor(checkoutId)
      checkout2 ! Checkout.DeliveryMethodRequest
      expectMsg(Checkout.DeliveryMethodResponse(deliveryMethod))
      checkout2 ! Checkout.PaymentMethodRequest
      expectMsg(Checkout.PaymentMethodResponse(paymentMethod))
      checkout2 ! Checkout.StateRequest
      expectMsg(Checkout.StateResponse(Checkout.ProcessingPayment(0)))
    }
  }
}
muchas/reactive-scala
src/main/scala/shop/ProductCatalogManager.scala
package shop

import akka.actor.Actor
import akka.event.LoggingReceive

object ProductCatalogManager {
  // Protocol
  case class AddItemRequest(item: Item, quantity: Int = 1)
  case class AddItemResponse(item: Item, quantity: Int = 1)
  case class RemoveItemRequest(item: Item, quantity: Int = 1)
  case class RemoveItemResponse(item: Item, quantity: Int = 1)
  case class SearchRequest(query: String, limit: Int = 10)
  case class SearchResponse(items: List[Item])
}

class ProductCatalogManager(var productCatalog: ProductCatalog) extends Actor {

  import ProductCatalogManager._

  def this(path: String) {
    this(ProductCatalog.fromCSV(path))
  }

  override def receive: Receive = LoggingReceive {
    case AddItemRequest(item, quantity) =>
      productCatalog = productCatalog.addItem(item, quantity)
      sender ! AddItemResponse(item, quantity)
    case RemoveItemRequest(item, quantity) =>
      productCatalog = productCatalog.removeItem(item.id, quantity)
      sender ! RemoveItemResponse(item, quantity)
    case SearchRequest(query, limit) =>
      val items = productCatalog.search(query, limit)
      sender ! SearchResponse(items)
  }
}
muchas/reactive-scala
src/main/scala/reactive2/ToggleActor.scala
package reactive2

////////////////////
// Actor context  //
////////////////////

/*
 * trait ActorContext {
 *   def become(behavior: Receive, discardOld: Boolean = true): Unit
 *   def unbecome(): Unit
 *   def actorOf(p: Props, name: String): ActorRef
 *   def stop(a: ActorRef): Unit
 *   ...
 * }
 *
 * trait Actor {
 *   implicit val context: ActorContext
 *   ...
 * }
 */

import akka.actor._
import akka.event.LoggingReceive

import scala.concurrent.Await
import scala.concurrent.duration._

class Toggle extends Actor {

  def happy: Receive = LoggingReceive {
    case "How are you?" =>
      sender ! "happy"
      context become sad
    case "Done" =>
      sender ! "Done"
      context.stop(self)
  }

  def sad: Receive = LoggingReceive {
    case "How are you?" =>
      sender ! "sad"
      context become happy
    case "Done" =>
      sender ! "Done"
      context.stop(self)
  }

  def receive = happy
}

class ToggleMain extends Actor {

  val toggle = context.actorOf(Props[Toggle], "toggle")

  def receive = LoggingReceive {
    case "StartShopping" =>
      toggle ! "How are you?"
      toggle ! "How are you?"
      toggle ! "How are you?"
      toggle ! "Done"
    case "Done" =>
      context.system.terminate
    case msg: String =>
      println(s" received: $msg")
  }
}

object ToggleApp extends App {
  val system = ActorSystem("Reactive2")
  val mainActor = system.actorOf(Props[ToggleMain], "mainActor")

  mainActor ! "StartShopping"

  Await.result(system.whenTerminated, Duration.Inf)
}
muchas/reactive-scala
src/main/scala/shop/ItemJsonProtocol.scala
package shop

import java.net.URI

import spray.json.{DefaultJsonProtocol, JsString, JsValue, RootJsonFormat, deserializationError}

object ItemJsonProtocol extends DefaultJsonProtocol {

  implicit object URIJsonFormat extends RootJsonFormat[URI] {
    def write(uri: URI) = JsString(uri.toString)

    def read(value: JsValue) = value match {
      case JsString(uri) => URI.create(uri)
      case _ => deserializationError("URI expected")
    }
  }
}
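A quick round-trip sketch of the URI format above; the URI value is illustrative, and this assumes spray-json on the classpath as in build.sbt:

import java.net.URI
import spray.json._
import shop.ItemJsonProtocol._

// URI -> JsString -> URI round trip through the custom format.
val uri = URI.create("http://example.org/item/1")
val js: JsValue = URIJsonFormat.write(uri) // JsString("http://example.org/item/1")
val back: URI = URIJsonFormat.read(js)     // parses the string back into a URI
assert(uri == back)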
muchas/reactive-scala
src/main/scala/shop/ShopApp.scala
package shop

import akka.actor.{ActorSystem, Props}
import com.typesafe.config.ConfigFactory

import scala.concurrent.Await
import scala.concurrent.duration.Duration

object ShopApp extends App {
  val config = ConfigFactory.load()

  val catalogSystem = ActorSystem("catalogSystem", config.getConfig("catalog_app").withFallback(config))
  val system = ActorSystem("shopSystem", config.getConfig("shop_app").withFallback(config))

  val catalogActor = catalogSystem.actorOf(Props(new ProductCatalogManager("./query_result")), "productCatalogManager")
  val mainActor = system.actorOf(Props[Customer], "mainActor")

  // mainActor ! Customer.Search
  mainActor ! Customer.Init

  Await.result(system.whenTerminated, Duration.Inf)
}
muchas/reactive-scala
src/main/scala/reactive2/fsm/ShopApp.scala
package reactive2.fsm

import akka.actor.{Actor, ActorRef, ActorSystem, Props}
import akka.event.LoggingReceive

import scala.concurrent.Await
import scala.concurrent.duration.Duration

object Customer {
  case object StartShopping
}

class Customer extends Actor {

  import Customer._

  val cart = context.actorOf(Props[Cart], "cart")

  def receive: Receive = LoggingReceive {
    case StartShopping =>
      val watch = new Item("zegarek")
      val shoes = new Item("buty")
      val game = new Item("game")

      cart ! ItemAdded(watch)
      cart ! ItemRemoved(watch)
      cart ! ItemAdded(shoes)
      cart ! ItemAdded(watch)
      cart ! ItemRemoved(watch)
      cart ! ItemAdded(game)
      cart ! CheckoutStarted
    case CheckoutCreated(checkout: ActorRef) =>
      checkout ! DeliveryMethodSelected("dhl")
      checkout ! PaymentSelected("visa")
      checkout ! PaymentReceived
  }
}

object ShopApp extends App {
  val system = ActorSystem("Reactive2")
  val mainActor = system.actorOf(Props[Customer], "mainActor")

  mainActor ! Customer.StartShopping

  Await.result(system.whenTerminated, Duration.Inf)
}
muchas/reactive-scala
build.sbt
name := """reactive-lab2"""

version := "1.1"

scalaVersion := "2.12.3"

libraryDependencies ++= Seq(
  "com.typesafe.akka" %% "akka-actor" % "2.5.6",
  "com.typesafe.akka" %% "akka-persistence" % "2.5.4",
  "com.typesafe.akka" %% "akka-remote" % "2.5.4",
  "com.typesafe.akka" %% "akka-http" % "10.0.10",
  "com.typesafe.akka" %% "akka-http-spray-json" % "10.0.10",
  "com.typesafe.akka" %% "akka-stream" % "2.5.6",
  "org.iq80.leveldb" % "leveldb" % "0.9",
  "org.fusesource.leveldbjni" % "leveldbjni-all" % "1.8",
  "com.typesafe.akka" %% "akka-testkit" % "2.5.6" % "test",
  "org.scalatest" %% "scalatest" % "3.0.1" % "test")
muchas/reactive-scala
src/main/scala/shop/Cart.scala
package shop

import java.net.URI

import scala.collection.mutable.ListBuffer

case class Cart(items: Map[URI, (Item, Int)]) {

  def addItem(it: Item, count: Int): Cart = {
    val currentCount = if (items contains it.id) items(it.id)._2 else 0
    copy(items = items.updated(it.id, (it, currentCount + count)))
  }

  def removeItem(id: URI, count: Int): Cart = {
    val item = if (items contains id) items(id)._1 else null
    val newCount = if (items contains id) Math.max(items(id)._2 - count, 0) else 0

    if (newCount <= 0) {
      copy(items = items - id)
    } else if (item != null) {
      copy(items = items.updated(id, (item, newCount)))
    } else {
      copy(items)
    }
  }

  def itemsCount(): Int = {
    var sum = 0
    for ((_, (_, count)) <- items) sum += count
    sum
  }

  def itemsList(): Seq[Item] = {
    val list = new ListBuffer[Item]
    for ((_, (item, _)) <- items) list += item
    list
  }
}

object Cart {
  val empty = Cart(Map.empty)
}
muchas/reactive-scala
src/main/scala/reactive2/fsm/Checkout.scala
<filename>src/main/scala/reactive2/fsm/Checkout.scala package reactive2.fsm import akka.actor.{ActorRef, FSM} import shop.Item import scala.collection.mutable.ListBuffer import scala.concurrent.duration._ case class DeliveryMethodSelected(method: String) case object Cancelled case object CheckoutTimerExpired case object PaymentTimerExpired case class PaymentSelected(method: String) case object PaymentReceived sealed trait CheckoutState case object ProcessingPayment extends CheckoutState case object SelectingPaymentMethod extends CheckoutState case object SelectingDelivery extends CheckoutState sealed trait CheckoutData case object Uninitialized extends CheckoutData case class CheckoutInfo(paymentMethod: String, deliveryMethod: String) extends CheckoutData class Checkout(cart: ActorRef) extends FSM[CheckoutState, CheckoutData] { startWith(SelectingDelivery, Uninitialized) when(SelectingDelivery) { case Event(DeliveryMethodSelected(method), Uninitialized) => print("Selecting Delivery" + method + " \n") goto(SelectingPaymentMethod) using CheckoutInfo("", method) } when(SelectingPaymentMethod) { case Event(PaymentSelected(method), info @ CheckoutInfo(_, delivery)) => print("Selecting Payment Method" + method + " \n") goto(ProcessingPayment) using CheckoutInfo(method, delivery) } when(ProcessingPayment) { case Event(PaymentReceived, info: CheckoutInfo) => print("Payment Received\n") cart ! CheckoutClosed stop() case Event(PaymentTimerExpired, _) => cart ! CheckoutCancelled stop() } whenUnhandled { case Event(Cancelled, _) => print("Checkout cancelled") cart ! CheckoutCancelled stop() case Event(CheckoutTimerExpired, _) => cart ! CheckoutCancelled stop() } onTransition { case _ -> SelectingDelivery => setTimer("checkoutTimer", CheckoutTimerExpired, 3 seconds) case SelectingPaymentMethod -> ProcessingPayment => cancelTimer("checkoutTimer") setTimer("paymentTimer", PaymentTimerExpired, 3 seconds) } initialize() }
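A minimal driver sketch for the FSM above. CheckoutClosed and CheckoutCancelled are the replies Checkout sends back to its cart (they are defined elsewhere in this package); the stub cart here only logs them:

package reactive2.fsm

import akka.actor.{Actor, ActorSystem, Props}

object CheckoutDemo extends App {
  val system = ActorSystem("CheckoutDemo")

  // stand-in cart that prints whatever Checkout reports back
  val cart = system.actorOf(Props(new Actor {
    def receive: Receive = { case msg => println(s"cart received: $msg") }
  }), "cart")

  val checkout = system.actorOf(Props(new Checkout(cart)), "checkout")
  checkout ! DeliveryMethodSelected("dhl")
  checkout ! PaymentSelected("visa")
  checkout ! PaymentReceived   // cart should receive CheckoutClosed
}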
muchas/reactive-scala
src/main/scala/shop/Customer.scala
package shop import java.net.URI import akka.actor.{Actor, ActorRef, ActorSelection, Props} import akka.event.LoggingReceive object Customer { case object Init case object Search } class Customer extends Actor { import Customer._ val cart: ActorRef = context.actorOf(Props(new CartManager(self)), "cart") val catalog: ActorSelection = context.system.actorSelection("akka.tcp://catalogSystem@127.0.0.1:2553/user/productCatalogManager") def receive: Receive = LoggingReceive { // Search Queries case Search => catalog ! ProductCatalogManager.SearchRequest("cheese", 3) case ProductCatalogManager.SearchResponse(items) => items.foreach(println) // Shop Queries case Init => val watch = Item(URI.create("1"), "zegarek", 5.12) val shoes = Item(URI.create("2"), "buty", 1.12) val game = Item(URI.create("3"), "kapcie", 6.20) cart ! CartManager.AddItem(watch, 1) cart ! CartManager.RemoveItem(watch, 1) cart ! CartManager.AddItem(shoes, 1) cart ! CartManager.AddItem(watch, 1) cart ! CartManager.RemoveItem(watch, 1) cart ! CartManager.AddItem(game, 1) cart ! CartManager.StartCheckout case CartManager.CheckoutStarted(checkout: ActorRef) => checkout ! Checkout.DeliveryMethodSelected("dhl") checkout ! Checkout.PaymentSelected("visa") case CartManager.CheckoutClosed => print("Customer: checkout closed!") case CartManager.CartEmpty => print("Customer: cart is empty!") case Checkout.PaymentServiceStarted(payment: ActorRef) => payment ! PaymentService.DoPayment("paypal") case PaymentService.PaymentConfirmed => print("Customer: payment confirmed!") } }
reid-spencer/hotspot-profiler
project/plugins.sbt
<reponame>reid-spencer/hotspot-profiler
resolvers += "Sonatype repository" at "https://oss.sonatype.org/content/repositories/releases/"

libraryDependencies += "org.slf4j" % "slf4j-simple" % "1.7.25"

addSbtPlugin("com.reactific" % "sbt-reactific" % "2.1.0")
reid-spencer/hotspot-profiler
build.sbt
/* * Copyright © 2015 Reactific Software LLC. All Rights Reserved. * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. */ import sbt._ import sbt.Keys._ import scala.language.postfixOps val classesIgnoredByScoverage : String = Seq[String]( "<empty>", // Avoids warnings from scoverage "EmptyTree$/null" ).mkString(";") lazy val root = sbt.Project("hotspot-profiler", file(".")). enablePlugins(ReactificPlugin/*, ScoverageSbtPlugin */). settings( organization := "com.reactific", copyrightHolder := "Reactific Software LLC", startYear := Some(2015), developerUrl := url("http://reactific.com/"), titleForDocs := "Hot Spot Profiler", codePackage := "com.reactific.hsp", libraryDependencies += "org.slf4j" % "slf4j-api" % "1.7.25", libraryDependencies += "ch.qos.logback" % "logback-classic" % "1.2.3" % "test", libraryDependencies += "org.specs2" %% "specs2-core" % "3.9.5" % "test", scalastyleConfig := file("project/scalastyle-config.xml"), // coverageFailOnMinimum := true, // coverageExcludedPackages := classesIgnoredByScoverage, // coverageMinimum := 85, // coverallsToken := Some("<KEY>"), logLevel := Level.Info )
reid-spencer/hotspot-profiler
src/test/scala/com/reactific/hsp/ProfilerSpec.scala
<gh_stars>0 package com.reactific.hsp import org.slf4j.LoggerFactory import org.specs2.mutable.Specification import scala.concurrent.{Await, Future} import scala.concurrent.ExecutionContext.Implicits.global import scala.concurrent.duration._ import org.slf4j.Logger class ProfilerSpec extends Specification with ProfilerTestTools { val logger: Logger = LoggerFactory.getLogger("com.reactific.hsp.ProfilerSpec") logger.debug("ProfilerSpec instantiated") sequential "Profiler" should { "report less than 50 microseconds for empty function" in ifSuitableForTimingTest( "50 µs maximum" ) { () ⇒ val profiler = Profiler("empty function") val attempts = 1000 for (i ← 1 to attempts) { profiler.profile("empty") {} } profiler.log_profile_summary(logger, "empty function") val (count, sum) = profiler.get_one_item("empty") count must beEqualTo(attempts) (sum must beLessThan(attempts * 50000.0)).toResult } "have profile overhead < 10 μs" in ifSuitableForTimingTest("10 μs max") { () ⇒ val profiler = Profiler("overhead") val attempts = 1000 var sum = 0.0D for (_ ← 1 to attempts) { val t0 = System.nanoTime() profiler.profile("a") {} val t1 = System.nanoTime() sum += t1 - t0 } val avg = sum / attempts val microseconds = avg / 1000.0D logger.debug(s"Average profiler overhead = $microseconds μs") (microseconds must beLessThan(10.0)).toResult } "nested profilers should not block" in { val profiler = Profiler("non-blocking") val fourty_two = profiler.profile("a") { profiler.profile("b") { 42 } } fourty_two must beEqualTo(42) } "reset_profile_data works" in { val profiler = Profiler("reset_profile_data") val fourty_two = profiler.profile("forty-two") { 42 } fourty_two must beEqualTo(42) profiler.format_one_item("forty-two").contains("count=1") must beTrue profiler.reset_profile_data() profiler.format_one_item("forty-two").contains("count=0") must beTrue } "format_one_item on disabled Profiler should return empty" in { val profiler = Profiler("format_one_item", enabled = false) val fourty_two = profiler.profile("item") { 42 } fourty_two must beEqualTo(42) profiler.format_one_item("item").isEmpty must beTrue } "get_one_item on disabled Profiler should return zeros" in { val profiler = Profiler("get_one_item", enabled = false) val fourty_two = profiler.profile("item") { 42 } fourty_two must beEqualTo(42) profiler.get_one_item("item") must beEqualTo(0 → 0.0) } "log_profile_summary works" in { val profiler = Profiler("log_profile_summary", enabled = false) val fourty_two = profiler.profile("item") { 42 } fourty_two must beEqualTo(42) profiler.log_profile_summary(logger) success } "format_profile_data works" in { val profiler = Profiler("format_profile_data") val fourty_two = profiler.profile("a") { profiler.profile("b") { 42 } } fourty_two must beEqualTo(42) val msg = profiler.format_profile_data.toString logger.info(s"format_profile_data yielded:\n$msg") msg.contains("a\n") must beTrue msg.contains("b\n") must beTrue } "log_profile_data works" in { val profiler = Profiler("log_profile_data") val fourty_two = profiler.profile("a") { profiler.profile("b") { 42 } } fourty_two must beEqualTo(42) profiler.log_profile_data(logger) success } "print_profile_data works" in { val profiler = Profiler("print_profile_data") val fourty_two = profiler.profile("a") { profiler.profile("b") { 42 } } fourty_two must beEqualTo(42) profiler.print_profile_data(System.out) success } "Extracting In Profiler.profile context is not permitted" in { val profiler = Profiler() profiler.profile("oops") { profiler.format_profile_summary } must throwA[ 
IllegalStateException ] } "capture wait and processing time of a Future.map" in { val profiler = Profiler() val attempts = 1000 for (i ← 1 to attempts) { val future: Future[Double] = profiler.futureMap("test", Future.successful(42)) { x ⇒ x.toDouble * 2.0 } val double = Await.result(future, 1.second) double must beEqualTo(84.0) } val (wait_count, _) = profiler.get_one_item("test.wait") val (count, sum) = profiler.get_one_item("test") count must beEqualTo(attempts) wait_count must beEqualTo(attempts) profiler.log_profile_summary(logger) sum must beLessThan(100000.0 * attempts) } "process a Future.map with profiling disabled" in { val profiler = Profiler(enabled = false) val future: Future[Double] = profiler.futureMap("test", Future.successful(42)) { x ⇒ x.toDouble * 2.0 } val double = Await.result(future, 1.second) double must beEqualTo(84.0) } "handle async timing" in { val profiler = Profiler("async") val start = profiler.asyncStart start must beGreaterThan(0L) profiler.asyncEnd("must", start) val (count, sum) = profiler.get_one_item("must") count must beEqualTo(1) sum must beGreaterThan(0.0D) } "handle a Future" in { val profiler = Profiler("of a Future") val future = profiler.profileF("future") { Future[Unit] { Thread.sleep(42) () } } Await.result(future, 2.second) val (count, sum) = profiler.get_one_item("future") profiler.log_profile_summary(logger) count must beEqualTo(1) sum must beGreaterThan(4.0E+7) sum must beLessThan(4.9E+7) } "timedTest with logging works" in { val profiler = Profiler("timedTest With Logging") timedTest(100000000, "timedTest", profiler, Some(logger)) { (profiler) ⇒ Thread.sleep(10) success } } "timedTest with printing works" in { val profiler = Profiler("timedTest With Printing") timedTest(100000000, "timedTest", profiler, print = true) { (profiler) ⇒ Thread.sleep(10) success } } } }
reid-spencer/hotspot-profiler
src/test/scala/com/reactific/hsp/ProfilerTestTools.scala
package com.reactific.hsp

import java.lang.management.ManagementFactory

import org.slf4j.Logger
import org.specs2.execute.{Failure, Skipped, Result}

/** Test Tools For Profiler.
  * This trait can be mixed into test case software to obtain some tools that are useful for working with
  * the Profiler in timing-sensitive test cases.
  */
trait ProfilerTestTools {

  /** Determine if the system is viable for doing timing tests */
  private lazy val (suitableForTimingTests: Boolean, unsuitabilityReason: String) = {
    if (System.getenv("TRAVIS") != null) {
      false → "TRAVIS environment variable is present (Travis CI execution)"
    } else {
      val os = ManagementFactory.getOperatingSystemMXBean
      val processors = os.getAvailableProcessors
      val avg = os.getSystemLoadAverage
      val limit = processors / 2
      (avg < limit) → s"CPU Average ($avg) must be less than $limit (less than 1/2 the machine's CPUs)"
    }
  }

  def ifSuitableForTimingTest(name: String)(func: () ⇒ Result): Result = {
    if (suitableForTimingTests) func()
    else
      Skipped(
        s"Test '$name' not run because system is unsuitable for timing tests because $unsuitabilityReason")
  }

  /** Profiler wrapper for doing a timed test */
  def timedTest(
    maxNanoSeconds: Double,
    name: String,
    profiler: Profiler = Profiler,
    logger: Option[Logger] = None,
    print: Boolean = false
  )(func: (Profiler) ⇒ Result): Result = {
    if (suitableForTimingTests) {
      val result: Result = profiler.profile(name) { func(profiler) }
      // read the timing from the profiler that recorded it, not the global default Profiler
      val (_, time) = profiler.get_one_item(name)
      logger match {
        case Some(log) ⇒ profiler.log_profile_summary(log, name)
        case _ ⇒
      }
      if (print) {
        profiler.print_profile_summary(System.out)
        println()
      }
      if (time > maxNanoSeconds) {
        Failure(
          s"Test '$name' took ${time}ns which exceeds the limit of ${maxNanoSeconds}ns")
      } else {
        result
      }
    } else {
      Skipped(
        s"Test '$name' not run because system is unsuitable for timing tests because $unsuitabilityReason")
    }
  }
}
reid-spencer/hotspot-profiler
src/main/scala/com/reactific/hsp/Profiler.scala
/*
 * Copyright 2015-2017 Reactific Software LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.reactific.hsp

import java.io.PrintStream

import org.slf4j.Logger

import scala.collection.mutable
import scala.concurrent.{ExecutionContext, Future}

object Profiler extends Profiler("Profiler", true)

/** Profiler Module For Manual Code Instrumentation
  * This module provides thread aware profiling at microsecond level
  * granularity with manually inserted code instrumentation. You can
  * instrument a block of code by wrapping it in Profiler.profile("any name you like").
  * For example, like this:
  * {{{
  *   Profiler.profile("Example Code") { "example code" }
  * }}}
  * The result of the Profiler.profile call will be whatever the block returns
  * ("example code" in this case).
  * You can print the results out with print_profile_data or log it with
  * log_profile_data. Once printed or logged, the data is reset and a new
  * capture starts. The printout is grouped by threads and nested
  * Profiler.profile calls are accounted for. Note that when `enabled`
  * is false, there is almost zero overhead. In particular, expressions
  * involving computing the argument to the profile method will not be computed
  * because it is a functional by-name argument that is not evaluated if
  * profiling is disabled.
*/ case class Profiler(name: String = "Default", enabled: Boolean = true) { private case class ProfileItem( id: Int, t0: Long, t1: Long, what: String, depth: Int) private class ThreadInfo { var profile_data: mutable.Queue[ProfileItem] = { // scalastyle:ignore mutable.Queue.empty[ProfileItem] } var depth_tracker: Int = 0 // scalastyle:ignore def record(id: Int, t0: Long, t1: Long, what: String, depth: Int): Unit = { require(depth_tracker > 0) depth_tracker -= 1 profile_data.enqueue(ProfileItem(id, t0, t1, what, depth)) } } private val thread_infos = mutable.Map.empty[Thread, ThreadInfo] private final val tinfo = new ThreadLocal[ThreadInfo] { override def initialValue(): ThreadInfo = { val result = new ThreadInfo() thread_infos.put(Thread.currentThread(), result) result } override def remove(): Unit = { super.remove() val _ = thread_infos.remove(Thread.currentThread()) } } final def in_profile_context: Boolean = { val ti = tinfo.get() ti.depth_tracker > 0 } private final def require_non_profile_context(where: String): Unit = { if (in_profile_context) { throw new IllegalStateException( s"$where cannot be called from profile context" ) } } private var id_counter: Int = 0 // scalastyle:ignore @inline private def nextThreadInfo(): (ThreadInfo, Int, Int) = { val ti = tinfo.get() Profiler.synchronized { id_counter += 1 } val depth = ti.depth_tracker ti.depth_tracker += 1 (ti, id_counter, depth) } def profileF[R]( what: => String )(block: => Future[R] )(implicit ec: ExecutionContext ): Future[R] = { if (enabled) { val (ti, id, depth) = nextThreadInfo() val t0 = System.nanoTime() block.map { r => ti.record(id, t0, System.nanoTime(), what, depth) r }.recover { case x: Throwable => ti.record(id, t0, System.nanoTime(), what, depth) throw x } } else { block } } def profile[R](what: ⇒ String)(block: ⇒ R): R = { if (enabled) { val (ti, id, depth) = nextThreadInfo() var t0 = 0L // scalastyle:ignore var t1 = 0L // scalastyle:ignore try { t0 = System.nanoTime() val r: R = block // call-by-name t1 = System.nanoTime() r } finally { ti.record(id, t0, t1, what, depth) } } else { block } } /* trait FilterMonadic[+A, +Repr] extends Any { def map[B, That](f: A => B)(implicit bf: CanBuildFrom[Repr, B, That]): That def map[A,Repr,B,That](what : ⇒ String, coll: FilterMonadic[A,Repr]) ( block : A ⇒ B) (implicit bf: CanBuildFrom[Repr, B, That]) : That = { if (profiling_enabled) { val (ti, id, depth) = nextThreadInfo() } else { coll.map(block) } } */ def futureMap[S, B]( what: ⇒ String, future: ⇒ Future[S] )(block: S ⇒ B )(implicit ec: ExecutionContext ): Future[B] = { if (enabled) { val t0 = System.nanoTime() future.map { x ⇒ val t1 = System.nanoTime() val r = block(x) val t2 = System.nanoTime() val (ti1, id, depth) = nextThreadInfo() ti1.record(id, t0, t1, what + ".wait", depth) val (ti2, id2, depth2) = nextThreadInfo() ti2.record(id2, t1, t2, what, depth2) r } } else { future.map(block) } } def asyncStart: Long = { System.nanoTime() } def asyncEnd(what: ⇒ String, t0: Long): Unit = { if (enabled) { val t1 = System.nanoTime() val (ti, id, depth) = nextThreadInfo() ti.record(id, t0, t1, what, depth) } } def get_one_item(itemName: String): (Int, Double) = { require_non_profile_context("get_one_item") if (enabled) { var count: Int = 0 // scalastyle:ignore var sum: Double = 0.0 // scalastyle:ignore for { (_, ti) ← thread_infos } { for { info ← ti.profile_data if info.what == itemName } { count += 1 sum += (info.t1 - info.t0) } } (count, sum) } else { (0, 0.0) } } def format_one_item(itemName: String): String = { 
require_non_profile_context("format_one_item") if (enabled) { val (count, sum) = get_one_item(itemName) val normalized_sum = sum / 1000000.0D val sumF = normalized_sum.formatted("%1$ 10.3f") val avgF = (normalized_sum / count).formatted("%1$ 10.3f") s"count=$count, sum=$sumF, avg=$avgF (" + itemName + ")" } else { "" } } type SummaryMap = Seq[(String, Int, Int, Double, Double, Double)] def summarize_profile_data: SummaryMap = { require_non_profile_context("summarize_profile_data") val mb = new mutable.HashMap[(Int, String), (Int, Int, Double, Double, Double)]() for { (_, ti) ← thread_infos } { for { info ← ti.profile_data.sortBy(_.id) } { val time_len: Double = (info.t1 - info.t0).toDouble mb.get(info.depth → info.what) match { case Some((_id, count, sum, min, max)) ⇒ mb.put( info.depth → info.what, ( _id, count + 1, sum + time_len, Math.min(min, time_len), Math.max(max, time_len) ) ) case None ⇒ mb.put( info.depth → info.what, (info.id, 1, time_len, time_len, time_len) ) } } } mb.view.toSeq.sortBy { case ((_, _), (id, _, _, _, _)) ⇒ id }.map { case ((depth, msg), (_, count, sum, min, max)) ⇒ (msg, depth, count, sum, min, max) } } def format_profile_summary: String = { require_non_profile_context("format_profile_summary") val initialSize: Int = 4096 val sb = new StringBuilder(initialSize) for { (msg, depth, count, sum, min, max) ← summarize_profile_data } { sb.append((sum / 1000000.0D).formatted("%1$ 12.3f")) .append(" ms / ") .append(count.formatted("%1$ 7d")) .append(" = ") .append((sum / 1000000.0D / count).formatted("%1$ 10.3f")) .append(", min=") .append((min / 1000000.0D).formatted("%1$ 10.3f")) .append(", max=") .append((max / 1000000.0D).formatted("%1$ 10.3f")) .append(" - ") for (_ ← 1 to depth) sb.append(".") sb.append(msg).append("\n") } sb.toString() } def print_profile_summary(out: PrintStream): Unit = { require_non_profile_context("print_profile_summary") out.print(s"Profiling Summary of $name:\n$format_profile_summary") } def log_profile_summary(log: Logger): Unit = { require_non_profile_context("log_profile_summary") log.debug(s"Profiling Summary of $name:\n$format_profile_summary") } def log_profile_summary(log: Logger, summary_name: String): Unit = { require_non_profile_context("log_profile_summary") log.debug( s"Profiling Summary of $summary_name($name):\n$format_profile_summary" ) } def format_profile_data: StringBuilder = { require_non_profile_context("format_profile_data") val initialSize: Int = 4096 val str = new StringBuilder(initialSize) if (enabled) { for { (thread, ti) ← thread_infos } { str .append("\nTHREAD(") .append(thread.getId) .append("): ") .append(thread.getName) str.append("[").append(thread.getThreadGroup).append("]\n") for (info ← ti.profile_data.sortBy(_.id)) { val time_len: Double = (info.t1 - info.t0) / 1000000.0D str .append((info.t0 / 1000000000.0D).formatted("%1$ 12.3f")) .append(" - ") .append(time_len.formatted("%1$ 10.3f")) .append(" ") for (_ ← 1 to info.depth) str.append(".") str.append(info.what).append("\n") } } } reset_profile_data() str } def reset_profile_data(): Unit = { thread_infos.clear() } def log_profile_data(logger: Logger): Unit = { require_non_profile_context("log_profile_data") if (enabled) { logger.debug(s"Profiling Data For $name:\n$format_profile_data") } } def print_profile_data(out: PrintStream): Unit = { require_non_profile_context("print_profile_data") if (enabled) { out.print(s"Profiling Data For $name:\n$format_profile_data") } } }
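A short usage sketch against the API defined above (profile, get_one_item, log_profile_summary); the profiler name, item names, and logger name are illustrative:

import com.reactific.hsp.Profiler
import org.slf4j.LoggerFactory

object ProfilerDemo extends App {
  private val logger = LoggerFactory.getLogger("ProfilerDemo")
  val profiler = Profiler("demo")

  // nested profile calls are tracked per thread, with depth
  val answer = profiler.profile("outer") {
    profiler.profile("inner") { (1 to 1000).sum }
  }
  println(s"answer = $answer")

  val (count, nanos) = profiler.get_one_item("outer")
  println(s"outer ran $count time(s), $nanos ns total")
  profiler.log_profile_summary(logger)  // unlike format_profile_data, this does not reset the captured data
}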
poslegm/monix-kafka
kafka-1.0.x/src/main/scala/monix/kafka/config/SSLProtocol.scala
/* * Copyright (c) 2014-2019 by The Monix Project Developers. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package monix.kafka.config import java.security.NoSuchAlgorithmException import javax.net.ssl.SSLContext import com.typesafe.config.ConfigException.BadValue /** Represents the available protocols to use for * SSL connections. * * Available values: * * - [[SSLProtocol.TLSv12]] * - [[SSLProtocol.TLSv11]] * - [[SSLProtocol.TLSv1]] * - [[SSLProtocol.TLS]] * - [[SSLProtocol.SSLv3]] (prefer only for older JVMs) * - [[SSLProtocol.SSLv2]] (prefer only for older JVMs, no longer available for Java 8) * - [[SSLProtocol.SSL]] (prefer only for older JVMs) */ sealed trait SSLProtocol extends Serializable { def id: String def getInstance(): Option[SSLContext] = try Some(SSLContext.getInstance(id)) catch { case _: NoSuchAlgorithmException => None } } object SSLProtocol { @throws(classOf[BadValue]) def apply(id: String): SSLProtocol = { val algorithm = id match { case TLSv12.id => TLSv12 case TLSv11.id => TLSv11 case TLSv1.id => TLSv1 case TLS.id => TLS case SSLv3.id => SSLv3 case SSLv2.id => SSLv2 case SSL.id => SSL case _ => throw new BadValue("kafka.ssl.enabled.protocols", s"Invalid value: $id") } algorithm.getInstance() match { case Some(_) => algorithm case None => throw new BadValue("kafka.ssl.enabled.protocols", s"Unsupported SSL protocol: $id") } } case object TLSv12 extends SSLProtocol { val id = "TLSv1.2" } case object TLSv11 extends SSLProtocol { val id = "TLSv1.1" } case object TLSv1 extends SSLProtocol { val id = "TLSv1" } case object TLS extends SSLProtocol { val id = "TLS" } /** WARNING: deprecated, might not work on recent versions * of the JVM. Prefer TLS. */ case object SSLv3 extends SSLProtocol { val id = "SSLv3" } /** WARNING: deprecated, might not work on recent versions * of the JVM. Prefer TLS. */ case object SSLv2 extends SSLProtocol { val id = "SSLv2" } /** WARNING: deprecated, might not work on recent versions * of the JVM. Prefer TLS. */ case object SSL extends SSLProtocol { val id = "SSL" } }
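A brief sketch of the intended decoding path: apply resolves the configured string and verifies the JVM supports it, throwing ConfigException.BadValue otherwise:

import monix.kafka.config.SSLProtocol

object SSLProtocolDemo extends App {
  val tls = SSLProtocol("TLSv1.2")       // SSLProtocol.TLSv12
  println(tls.getInstance().isDefined)   // true on any standard JVM
  // SSLProtocol("TLSv9") would throw BadValue("kafka.ssl.enabled.protocols", ...)
}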
poslegm/monix-kafka
kafka-1.0.x/src/main/scala/monix/kafka/config/AutoOffsetReset.scala
<reponame>poslegm/monix-kafka /* * Copyright (c) 2014-2019 by The Monix Project Developers. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package monix.kafka.config import com.typesafe.config.ConfigException.BadValue /** What to do when there is no initial offset in Kafka or if the * current offset does not exist any more on the server * (e.g. because that data has been deleted). * * Available choices: * * - [[AutoOffsetReset.Earliest]] * - [[AutoOffsetReset.Latest]] * - [[AutoOffsetReset.Throw]] */ sealed trait AutoOffsetReset extends Serializable { def id: String } object AutoOffsetReset { @throws(classOf[BadValue]) def apply(id: String): AutoOffsetReset = id.trim.toLowerCase match { case Earliest.id => Earliest case Latest.id => Latest case Throw.id => Throw case _ => throw new BadValue("kafka.auto.offset.reset", s"Invalid value: $id") } /** Automatically reset the offset to the earliest offset. */ case object Earliest extends AutoOffsetReset { val id = "earliest" } /** Automatically reset the offset to the latest offset. */ case object Latest extends AutoOffsetReset { val id = "latest" } /** Throw exception to the consumer if no previous offset * is found for the consumer's group. */ case object Throw extends AutoOffsetReset { val id = "none" } }
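The companion's apply follows the same decode-or-BadValue pattern; a quick sketch (note the input is trimmed and lowercased before matching):

import monix.kafka.config.AutoOffsetReset

object AutoOffsetResetDemo extends App {
  assert(AutoOffsetReset(" EARLIEST ") == AutoOffsetReset.Earliest)
  assert(AutoOffsetReset("none") == AutoOffsetReset.Throw)
  // AutoOffsetReset("oldest") would throw BadValue("kafka.auto.offset.reset", ...)
}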
poslegm/monix-kafka
kafka-1.0.x/src/main/scala/monix/kafka/config/ClassName.scala
/* * Copyright (c) 2014-2019 by The Monix Project Developers. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package monix.kafka.config import scala.reflect.ClassTag abstract class ClassName[T](implicit T: ClassTag[T]) extends Serializable { def className: String val classType: Class[_ <: T] = Class.forName(className).asInstanceOf[Class[_ <: T]] require( findClass(classType :: Nil, T.runtimeClass), s"Given type $className does not implement ${T.runtimeClass}" ) private def findClass(stack: List[Class[_]], searched: Class[_]): Boolean = stack match { case Nil => false case x :: xs => if (x == searched) true else { val superClass: List[Class[_]] = Option(x.getSuperclass).toList val rest = superClass ::: x.getInterfaces.toList ::: xs findClass(rest, searched) } } }
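A hypothetical subtype illustrating what the reflective require in ClassName checks; SerializableName is not part of the library:

import monix.kafka.config.ClassName

// accepts only names of classes that implement java.io.Serializable
final case class SerializableName(className: String)
  extends ClassName[java.io.Serializable]

object ClassNameDemo extends App {
  SerializableName("java.lang.String")  // ok: String implements Serializable
  // SerializableName("java.lang.Object") would fail the require at construction
}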
poslegm/monix-kafka
project/plugins.sbt
addSbtPlugin("com.jsuereth" % "sbt-pgp" % "1.1.2") addSbtPlugin("de.heikoseeberger" % "sbt-header" % "5.2.0") addSbtPlugin("org.xerial.sbt" % "sbt-sonatype" % "2.5") addSbtPlugin("com.typesafe.sbt" % "sbt-git" % "1.0.0") addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "0.3.0") addSbtPlugin("org.scoverage" % "sbt-scoverage" % "1.6.0")
poslegm/monix-kafka
kafka-1.0.x/src/test/scala/monix/kafka/ConfigTest.scala
package monix.kafka import org.scalatest.FunSuite class ConfigTest extends FunSuite { test("overwrite properties with values from producer config") { val config = KafkaProducerConfig.default .copy(bootstrapServers = List("localhost:9092"), properties = Map("bootstrap.servers" -> "127.0.0.1:9092")) assert( config.toProperties.getProperty("bootstrap.servers") == "localhost:9092" ) } test("overwrite properties with values from consumer config") { val config = KafkaConsumerConfig.default .copy(bootstrapServers = List("localhost:9092"), properties = Map("bootstrap.servers" -> "127.0.0.1:9092")) assert( config.toProperties.getProperty("bootstrap.servers") == "localhost:9092" ) } test("convert to Java map from producer config and filter null values") { val config = KafkaProducerConfig.default.toJavaMap assert(!config.containsValue(null)) } test("convert to Java map from consumer config and filter null values") { val config = KafkaConsumerConfig.default.toJavaMap assert(!config.containsValue(null)) } }
poslegm/monix-kafka
kafka-1.0.x/src/main/scala/monix/kafka/config/ObservableCommitOrder.scala
<gh_stars>1-10 /* * Copyright (c) 2014-2019 by The Monix Project Developers. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package monix.kafka.config import com.typesafe.config.ConfigException.BadValue /** Specifies the consumer commit order, to use by the * [[monix.kafka.KafkaConsumerObservable KafkaConsumerObservable]] * in case `kafka.enable.auto.commit` is set to `false`. * * Available options: * * - [[ObservableCommitOrder.BeforeAck]] specifies to do a commit * before acknowledgement is received from downstream * - [[ObservableCommitOrder.AfterAck]] specifies to do a commit * after acknowledgement is received from downstream * - [[ObservableCommitOrder.NoAck]] specifies to skip committing */ sealed trait ObservableCommitOrder extends Serializable { def id: String def isBefore: Boolean = this match { case ObservableCommitOrder.BeforeAck => true case _ => false } def isAfter: Boolean = this match { case ObservableCommitOrder.AfterAck => true case _ => false } } object ObservableCommitOrder { @throws(classOf[BadValue]) def apply(id: String): ObservableCommitOrder = id match { case BeforeAck.id => BeforeAck case AfterAck.id => AfterAck case NoAck.id => NoAck case _ => throw new BadValue("kafka.monix.observable.commit.order", s"Invalid value: $id") } /** Do a `commit` in the Kafka Consumer before * receiving an acknowledgement from downstream. */ case object BeforeAck extends ObservableCommitOrder { val id = "before-ack" } /** Do a `commit` in the Kafka Consumer after * receiving an acknowledgement from downstream. */ case object AfterAck extends ObservableCommitOrder { val id = "after-ack" } /** Do not `commit` in the Kafka Consumer. */ case object NoAck extends ObservableCommitOrder { val id = "no-ack" } }
poslegm/monix-kafka
kafka-0.11.x/src/test/scala/monix/kafka/KafkaTestKit.scala
package monix.kafka import net.manub.embeddedkafka.EmbeddedKafka import org.scalatest.Suite trait KafkaTestKit extends EmbeddedKafka { self: Suite => sys.addShutdownHook { EmbeddedKafka.stop() } }
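A usage sketch following embedded-kafka's documented helpers (withRunningKafka, publishStringMessageToKafka, consumeFirstStringMessageFrom), assuming the library's default EmbeddedKafkaConfig is in implicit scope; the topic name is illustrative:

package monix.kafka

import org.scalatest.FunSuite

class RoundTripSpec extends FunSuite with KafkaTestKit {
  test("a message round-trips through the embedded broker") {
    withRunningKafka {
      publishStringMessageToKafka("demo-topic", "hello")
      assert(consumeFirstStringMessageFrom("demo-topic") == "hello")
    }
  }
}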
poslegm/monix-kafka
kafka-1.0.x/src/main/scala/monix/kafka/config/SecurityProtocol.scala
<reponame>poslegm/monix-kafka<gh_stars>1-10 /* * Copyright (c) 2014-2019 by The Monix Project Developers. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package monix.kafka.config import com.typesafe.config.ConfigException.BadValue /** The `security.protocol` setting for the Kafka Producer. * * Represents the protocol used to communicate with brokers. * * Valid values are: * * - [[SecurityProtocol.PLAINTEXT]] * - [[SecurityProtocol.SSL]] * - [[SecurityProtocol.SASL_PLAINTEXT]] * - [[SecurityProtocol.SASL_SSL]] */ sealed trait SecurityProtocol extends Serializable { def id: String } object SecurityProtocol { @throws(classOf[BadValue]) def apply(id: String): SecurityProtocol = id match { case PLAINTEXT.id => PLAINTEXT case SSL.id => SSL case SASL_PLAINTEXT.id => SASL_PLAINTEXT case SASL_SSL.id => SASL_SSL case _ => throw new BadValue("kafka.security.protocol", s"Invalid value: $id") } case object PLAINTEXT extends SecurityProtocol { val id = "PLAINTEXT" } case object SSL extends SecurityProtocol { val id = "SSL" } case object SASL_PLAINTEXT extends SecurityProtocol { val id = "SASL_PLAINTEXT" } case object SASL_SSL extends SecurityProtocol { val id = "SASL_SSL" } }
poslegm/monix-kafka
kafka-1.0.x/src/main/scala/monix/kafka/config/ObservableCommitType.scala
<gh_stars>1-10 /* * Copyright (c) 2014-2019 by The Monix Project Developers. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package monix.kafka.config import com.typesafe.config.ConfigException.BadValue /** Specifies the consumer commit type, to use by the * [[monix.kafka.KafkaConsumerObservable KafkaConsumerObservable]] * in case `kafka.enable.auto.commit` is set to `false`. * * Available options: * * - [[ObservableCommitType.Sync]] * - [[ObservableCommitType.Async]] */ sealed trait ObservableCommitType extends Serializable { def id: String } object ObservableCommitType { @throws(classOf[BadValue]) def apply(id: String): ObservableCommitType = id match { case Sync.id => Sync case Async.id => Async case _ => throw new BadValue("kafka.monix.observable.commit.type", s"Invalid value: $id") } /** Uses `consumer.commitSync()` after each batch * if `enable.auto.commit` is `false`. */ case object Sync extends ObservableCommitType { val id = "sync" } /** Uses `consumer.commitAsync()` after each batch * if `enable.auto.commit` is `false`. */ case object Async extends ObservableCommitType { val id = "async" } }
morgen-peschke/sql2json
src/test/scala/sql2json/cat/EqInstancesTest.scala
<reponame>morgen-peschke/sql2json<filename>src/test/scala/sql2json/cat/EqInstancesTest.scala package sql2json package cat import testing.laws.EqLaws final class EqStringInstancesTest extends EqLaws[String] final class EqIntInstancesTest extends EqLaws[Int] final class EqLongInstancesTest extends EqLaws[Long] final class EqBooleanInstancesTest extends EqLaws[Boolean] final class EqEitherInstancesTest extends EqLaws[Either[Int,String]] final class EqListInstancesTest extends EqLaws[List[Boolean]]
morgen-peschke/sql2json
src/main/scala/sql2json/jdbc/Database.scala
<gh_stars>0
package sql2json
package jdbc

import cat.Applicative.given
import cat.MonadError, MonadError.given
import cat.ApplicativeError, ApplicativeError.given
import types.Convertible.given
import types.validation.Errors, Errors.given
import types.validation.FailFast, FailFast.Validated.given

opaque type Database = String
object Database
  def apply[C[_]](raw: String)(given AE: ApplicativeError[C, Errors]): C[Database] =
    raw.trim.pure[FailFast.Validated]
      .ensure("Database label cannot be empty".as[Errors])(_.nonEmpty)
      .prevent("Database label cannot start with '_'".as[Errors])(_.startsWith("_"))
      .as[C[Database]]

  given cat.Show[Database] = identity(_)
  given Ordering[Database] = _ compare _
morgen-peschke/sql2json
src/test/scala/sql2json/testing/laws/MonadErrorLaws.scala
package sql2json package testing package laws import cat.{ApplicativeError, Eq, Show, Functor, Monad, MonadError} import cat.ApplicativeError.given import cat.Functor.given import cat.Monad.given import cat.MonadError.given import cat.Applicative.{~, given} import testing.Arbitrary import testing.Arbitrary.forAll import testing.Result.given import org.junit.Test abstract class MonadErrorLaws[F[_], E, A, B](given ME: MonadErrorLaws.Givens[F, E, A, B]) @Test def monadErrorLeftZero(): Unit = ME.run { forAll[E ~ (A => F[B])]("monadError left zero") { case e ~ f => e.raise[F, A].flatMap(f) <-> e.raise[F, B] } } @Test def monadErrorEnsureConsistency(): Unit = ME.run { forAll[F[A] ~ E ~ (A => Boolean)]("monadError ensure consistency") { case fa ~ e ~ p => fa.ensure(e)(p) <-> fa.flatMap(a => if (p(a)) a.pure[F] else e.raise[F,A]) } } @Test def monadErrorEnsureOrConsistency(): Unit = ME.run { forAll[F[A] ~ (A => E) ~ (A => Boolean)]("monadError ensureOr consistency") { case fa ~ ae ~ p => fa.ensureOr(ae)(p) <-> fa.flatMap(a => if (p(a)) a.pure[F] else ae(a).raise[F,A]) } } object MonadErrorLaws class Givens[F[_], E, A, B]( given MonadError[F,E], Eq[F[A]], Show[F[A]], Eq[F[B]], Show[F[B]], Arbitrary[E], Arbitrary[F[A]], Arbitrary[A => E], Arbitrary[E => E], Arbitrary[A => Boolean], Arbitrary[A => F[B]] ) with def run(body: ( given MonadError[F, E], Eq[F[A]], Show[F[A]], Eq[F[B]], Show[F[B]], Arbitrary[E], Arbitrary[F[A]], Arbitrary[A => E], Arbitrary[E => E], Arbitrary[A => Boolean], Arbitrary[A => F[B]] ) => Unit ): Unit = body.apply given[F[_], E, A, B]( given MonadError[F,E], Eq[F[A]], Show[F[A]], Eq[F[B]], Show[F[B]], Arbitrary[E], Arbitrary[F[A]], Arbitrary[A => E], Arbitrary[E => E], Arbitrary[A => Boolean], Arbitrary[A => F[B]] ): Givens[F,E,A,B]
morgen-peschke/sql2json
src/main/scala/sql2json/Main.scala
<filename>src/main/scala/sql2json/Main.scala package sql2json import java.nio.file.Path import cat.ApplicativeError.given import cat.Show, Show.show import cli.Arguments import types.Convertible, Convertible.given import types.validation.Accumulate, Accumulate.{Invalid, given} import types.validation.FailFast, FailFast.given import types.Done, Done.given import types.Generator import types.Generator.Result.given import types.Generator.Action.given import jdbc.Sql, Sql.executeAll import java.io.{BufferedReader, InputStreamReader} import java.util.stream.Collector given Show[Path] def show(a: Path): String = a.getFileName.toString object Main @main def run(args: String*): Unit = Arguments .parse[Accumulate.Validated](args) .toEither match case Left(helpText) => helpText.toList.foreach(System.out.println) System.exit(1) case Right(config) => val sqlReader = new BufferedReader(new InputStreamReader(System.in)) try sqlReader.lines.collect(Sql.collector) .executeAll()(given config.dbConfig, config.format) .foreach { result => println(result.show).done.continue } .as[Accumulate.Validated[Done]] match case Invalid(errors) => errors.toList.foreach(System.err.println(_)) System.exit(10) case _ => () finally sqlReader.close()
morgen-peschke/sql2json
project/plugins.sbt
addSbtPlugin("ch.epfl.lamp" % "sbt-dotty" % "0.3.4") addSbtPlugin("org.xerial.sbt" % "sbt-pack" % "0.12")
morgen-peschke/sql2json
src/test/scala/sql2json/testing/laws/EqLaws.scala
package sql2json
package testing
package laws

import cat.Eq
import cat.Eq.given
import cat.Show
import cat.Applicative.~
import testing.Arbitrary
import testing.Arbitrary.{forAll, given}
import testing.Result.given

import org.junit.Test

abstract class EqLaws[A](given givensForEqLaws: EqLaws.Givens[A])
  @Test def reflexivityLaw(): Unit =
    givensForEqLaws.run {
      forAll[A]("reflexive equality")(x => x <-> x)
    }

  @Test def symmetryLaw(): Unit =
    givensForEqLaws.run {
      forAll[A ~ A]("symmetric equality") {
        case x ~ y => (x === y) <-> (y === x)
      }
    }

  @Test def transitivityLaw(): Unit =
    givensForEqLaws.run {
      forAll[A ~ A ~ A]("transitive equality"){
        case x ~ y ~ z =>
          ((x =!= y || y =!= z) || ((x === y) && (y === z) && (x === z))) <-> true
      }
    }

object EqLaws
  class Givens[A](given Eq[A], Show[A], Arbitrary[A])
    def run(body: (given Eq[A], Show[A], Arbitrary[A]) => Unit): Unit = body.apply

  given [A: Eq: Show: Arbitrary]: Givens[A]
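Concrete suites are declared the same way as in EqInstancesTest earlier in this repo; a sketch, assuming Eq, Show and Arbitrary instances for the element type are in scope (as they evidently are for List[Boolean]):

package sql2json
package cat

import testing.laws.EqLaws

final class EqListIntInstancesTest extends EqLaws[List[Int]]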
morgen-peschke/sql2json
src/main/scala/sql2json/cat/Semigroup.scala
<reponame>morgen-peschke/sql2json package sql2json package cat trait Semigroup[A] def combine (a: A, b: A): A trait SemigroupProviders given[C[_], A] (given S: SemigroupK[C]): Semigroup[C[A]] = S.semigroup[A] given[A] (given M: Monoid[A]): Semigroup[A] = M.semigroup object Semigroup extends SemigroupProviders given ops[A]: AnyRef def (a: A) combine (b: A)(given S: Semigroup[A]): A = S.combine(a,b) given forInt: Semigroup[Int] = _ + _ given forLong: Semigroup[Long] = _ + _
morgen-peschke/sql2json
src/main/scala/sql2json/cat/Eq.scala
<reponame>morgen-peschke/sql2json package sql2json package cat import scala.annotation.tailrec trait Eq[A] def equiv(a: A, b: A): Boolean object Eq given ops[A]: AnyRef def (a: A) === (b: A)(given E: Eq[A]): Boolean = E.equiv(a, b) def (a: A) =!= (b: A)(given E: Eq[A]): Boolean = !E.equiv(a, b) given Eq[String] = _ == _ given Eq[Int] = _ == _ given Eq[Long] = _ == _ given Eq[Boolean] = _ == _ given [L: Eq, R: Eq]: Eq[Either[L,R]] = (_, _) match case (Left(l1), Left(l2)) => l1 === l2 case (Right(r1), Right(r2)) => r1 === r2 case _ => false given[A] (given EA: Eq[A]): Eq[List[A]] = @tailrec def loop(l: List[A], r: List[A]): Boolean = l match case Nil => r.isEmpty case lh :: lt => r match case Nil => false case rh :: rt => lh === rh && loop(lt, rt) loop(_, _) given[A: Eq]: Eq[Option[A]] = (_, _) match case (None, None) => true case (Some(a), Some(b)) => a === b case _ => false
morgen-peschke/sql2json
src/test/scala/sql2json/testing/laws/MonoidLaws.scala
<reponame>morgen-peschke/sql2json<gh_stars>0 package sql2json package testing package laws import cat.{Show, Eq, Monoid} import cat.Applicative.{~, given} import cat.Semigroup.given import cat.Monoid.empty import testing.Arbitrary import testing.Arbitrary.forAll import testing.Result.given import org.junit.Test abstract class MonoidLaws[A](given MG: MonoidLaws.Givens[A]) @Test def leftIdentity(): Unit = MG.run { forAll[A]("combine left identity check") { a => (empty[A] combine a) <-> a } } @Test def rightIdentity(): Unit = MG.run { forAll[A]("combine right identity check") { a => (a combine empty[A]) <-> a } } object MonoidLaws class Givens[A](given Monoid[A], Arbitrary[A], Eq[A], Show[A] ) with def run(body: (given Monoid[A], Arbitrary[A], Eq[A], Show[A] ) => Unit): Unit = body.apply given[A]( given Monoid[A], Arbitrary[A], Eq[A], Show[A] ): Givens[A]
morgen-peschke/sql2json
src/main/scala/sql2json/cat/Applicative.scala
<reponame>morgen-peschke/sql2json package sql2json package cat import Applicative.~ import Functor.given import scala.compiletime.summonFrom trait Applicative[C[_]](given val functor: Functor[C]) def pure[A](a: A): C[A] def ap[A, B](cf: C[A => B], ca: C[A]): C[B] def product[A,B](ca: C[A], cb: C[B]): C[A ~ B] = ap(ca.map(a => (b: B) => (a,b)),cb) def productR[A,B](ca: C[A], cb: C[B]): C[B] = ap(ca.map(_ => identity(_: B)), cb) def productL[A,B](ca: C[A], cb: C[B]): C[A] = ap(ca.map(a => (_: B) => a), cb) trait MonadAlsoProvidesApplicative given[F[_]] (given M: Monad[F]): Applicative[F] = M.applicative trait ApplicativeErrorProvidesApplicative extends MonadAlsoProvidesApplicative given[F[_]] (given AE: ApplicativeError[F, ?]): Applicative[F] = AE.applicative object Applicative extends ApplicativeErrorProvidesApplicative type ~[A,B] = (A,B) object ~ def unapply[A,B](ab: (A,B)): (A,B) = ab given lifts[A]: AnyRef def[C[_]](a: A) pure (given C: Applicative[C]): C[A] = C.pure(a) given ops[C[_], A]: AnyRef def[B](cf: C[A => B]) ap (ca: C[A])(given AP: Applicative[C]): C[B] = AP.ap(cf, ca) def[B](cf: C[A => B]) <*> (ca: C[A])(given AP: Applicative[C]): C[B] = AP.ap(cf, ca) def[B](ca: C[A]) |@| (cb: C[B])(given AP: Applicative[C]): C[A ~ B] = AP.product(ca, cb) def[B](ca: C[A]) *> (cb: C[B])(given AP: Applicative[C]): C[B] = AP.productR(ca, cb) def[B](ca: C[A]) <* (cb: C[B])(given AP: Applicative[C]): C[A] = AP.productL(ca, cb) given Applicative[List] def pure[A](a: A): List[A] = a :: Nil def ap[A, B](cf: List[A => B], ca: List[A]): List[B] = cf.flatMap(ca.map(_))
morgen-peschke/sql2json
src/main/scala/sql2json/cat/MonadError.scala
package sql2json package cat import Applicative.given import ApplicativeError.given import Monad.given trait MonadError[C[_], E](given val monad: Monad[C], val applicativeError: ApplicativeError[C, E]) def ensure[A](fa: C[A], error: => E, predicate: A => Boolean): C[A] = fa.flatMap(a => if (predicate(a)) a.pure[C] else error.raise) def ensureOr[A](fa: C[A], error: A => E, predicate: A => Boolean): C[A] = fa.flatMap(a => if (predicate(a)) a.pure[C] else error(a).raise) def prevent[A](fa: C[A], error: => E, predicate: A => Boolean): C[A] = ensure(fa, error, a => !predicate(a)) def preventOr[A](fa: C[A], error: A => E, predicate: A => Boolean): C[A] = ensureOr(fa, error, a => !predicate(a)) def catchOnly[T <: Throwable]: ApplicativeError.CatchOnlyPartiallyApplied[C,E,T] = applicativeError.catchOnly[T] object MonadError class DerivedMonadError[C[_], E](given Monad[C], ApplicativeError[C,E]) extends MonadError[C,E] def derived[C[_], E](given Monad[C], ApplicativeError[C,E]): MonadError[C, E] = new DerivedMonadError[C,E] given ops[C[_],A]: AnyRef def[E] (fa: C[A]) ensure (error: => E)(predicate: A => Boolean)(given ME: MonadError[C,E]): C[A] = ME.ensure(fa, error, predicate) def[E] (fa: C[A]) ensureOr(error: A => E)(predicate: A => Boolean)(given ME: MonadError[C,E]): C[A] = ME.ensureOr(fa, error, predicate) def[E] (fa: C[A]) prevent (error: => E)(predicate: A => Boolean)(given ME: MonadError[C,E]): C[A] = ME.prevent(fa, error, predicate) def[E] (fa: C[A]) preventOr(error: A => E)(predicate: A => Boolean)(given ME: MonadError[C,E]): C[A] = ME.preventOr(fa, error, predicate)
morgen-peschke/sql2json
src/main/scala/sql2json/cat/ApplicativeError.scala
package sql2json
package cat

import Applicative.given
import types.Convertible, Convertible.given

import scala.quoted.{Type, Expr, QuoteContext}
import scala.reflect.ClassTag

trait ApplicativeError[C[_], E](given val applicative: Applicative[C])
  def raise[A](error: E): C[A]
  def recover[A](ca: C[A], f: E => A): C[A]
  def fold[A, B] (ca: C[A], fe: E => B, fa: A => B): B

  def mapError[A, EO <: E](cae: C[A], fe: E => EO): C[A] = fold(cae, e => raise(fe(e)), _.pure)

  def toEither[A](ca: C[A]): Either[E, A] = fold(ca, Left(_), Right(_))

  def fromEither[A](either: Either[E,A]): C[A] =
    either match
      case Left(e) => raise[A](e)
      case Right(a) => a.pure[C]

  def catchOnly[T <: Throwable]: ApplicativeError.CatchOnlyPartiallyApplied[C,E,T] =
    new ApplicativeError.CatchOnlyPartiallyApplied[C,E,T](given this)

trait MonadErrorProvidesApplicativeError
  given [F[_], E] (given ME: MonadError[F, E]): ApplicativeError[F, E] = ME.applicativeError

object ApplicativeError extends MonadErrorProvidesApplicativeError
  final class CatchOnlyPartiallyApplied[C[_], E, T <: Throwable](given AE: ApplicativeError[C,E]) extends AnyVal
    def apply[A] (body: => A)(given CTE: Convertible[T,E], CT: ClassTag[T]): C[A] =
      try AE.applicative.pure(body)
      catch
        case CT(ex) => AE.raise(CTE.cast(ex))

  given lifts[E]: AnyRef
    def[C[_], A] (error: E) raise (given AE: ApplicativeError[C,E]): C[A] = AE.raise[A](error)

  given eitherLifts[E,A]: AnyRef
    def[C[_]] (either: Either[E,A]) liftToError (given AE: ApplicativeError[C, E]): C[A] = AE.fromEither(either)

  given ops[C[_],A]: AnyRef
    def[E] (ca: C[A]) recover (f: E => A)(given AE: ApplicativeError[C,E]): C[A] = AE.recover(ca, f)
    def[B, E] (ca: C[A]) fold(fe: E => B, fa: A => B)(given AE: ApplicativeError[C,E]): B = AE.fold(ca, fe, fa)
    def[E] (cae: C[A]) mapError (fe: E => E)(given AE: ApplicativeError[C,E]): C[A] = AE.mapError[A, E](cae, fe)
    def[E] (cae: C[A]) toEither (given AE: ApplicativeError[C,E]): Either[E, A] = AE.toEither(cae)

  given[C0[_], C1[_], E](given AE1: ApplicativeError[C0, E], AE2: ApplicativeError[C1, E]): types.ConvertibleK[C0, C1]
    def castK[A](a: C0[A]): C1[A] = a.toEither.liftToError[C1]
morgen-peschke/sql2json
src/main/scala/sql2json/types/Convertible.scala
package sql2json package types import cat.Functor import cat.Cofunctor, Cofunctor.given trait Convertible[A,B] def cast(a: A): B def map[B1](f: B => B1): Convertible[A,B1] = a => f(cast(a)) def comap[A0](f: A0 => A): Convertible[A0, B] = a0 => cast(f(a0)) trait ConvertibleKProvidesConvertible given[A[_],B[_],C](given CAB: ConvertibleK[A,B]): Convertible[A[C],B[C]] = CAB.convertible[C] object Convertible extends ConvertibleKProvidesConvertible def instance[A,B](f: A => B): Convertible[A,B] = f(_) given ops[A]: AnyRef def[B](a: A) as (given CAB: Convertible[A,B]): B = CAB.cast(a) given [A]: Convertible[A,A] = identity(_) trait ConvertibleK[A[_], B[_]] def castK[C](a: A[C]): B[C] def convertible[C]: Convertible[A[C], B[C]] = castK[C](_) object ConvertibleK given ops[A[_], C]: AnyRef def[B[_]](a: A[C]) asKind (given CAB: ConvertibleK[A,B]): B[C] = CAB.castK(a) given[A[_]]: ConvertibleK[A,A] def castK[C](a: A[C]): A[C] = a
morgen-peschke/sql2json
src/test/scala/sql2json/testing/laws/FunctorLaws.scala
package sql2json
package testing
package laws

import cat.Functor
import cat.Functor.given
import cat.Eq
import cat.Show
import cat.Applicative.~
import testing.Arbitrary
import testing.Arbitrary.forAll
import testing.Result.given

import org.junit.Test

abstract class FunctorLaws[F[_], A, B, C](given FG: FunctorLaws.Givens[F, A, B, C])
  @Test def identityLaw(): Unit =
    FG.run {
      forAll[F[A]]("identity over map") { fa =>
        fa <-> fa.map(identity)
      }
    }

  @Test def compositionLaw(): Unit =
    FG.run {
      forAll[F[A] ~ (A => B) ~ (B => C)]("composition over map") {
        case fa ~ a2b ~ b2c => fa.map(a2b).map(b2c) <-> fa.map(a2b andThen b2c)
      }
    }

object FunctorLaws
  class Givens[F[_], A, B, C](given
    Functor[F],
    Show[F[A]],
    Show[F[C]],
    Eq[F[A]],
    Eq[F[C]],
    Arbitrary[F[A]],
    Arbitrary[A => B],
    Arbitrary[B => C]
  ) with
    def run(body: (given Functor[F], Show[F[A]], Show[F[C]], Eq[F[A]], Eq[F[C]], Arbitrary[F[A]], Arbitrary[A => B], Arbitrary[B => C]) => Unit): Unit =
      body.apply

  given[F[_], A, B, C](given
    Functor[F],
    Show[F[A]],
    Show[F[C]],
    Eq[F[A]],
    Eq[F[C]],
    Arbitrary[F[A]],
    Arbitrary[A => B],
    Arbitrary[B => C]
  ): Givens[F,A,B,C]
morgen-peschke/sql2json
src/test/scala/sql2json/testing/laws/MonadLaws.scala
<filename>src/test/scala/sql2json/testing/laws/MonadLaws.scala<gh_stars>0 package sql2json package testing package laws import cat.{Applicative, Functor, Monad, Show, Eq} import cat.Applicative.{~, given} import cat.Functor.given import cat.Monad.given import testing.Arbitrary import testing.Arbitrary.forAll import testing.Result.given import org.junit.Test abstract class MonadLaws[F[_], A, B](given MG: MonadLaws.Givens[F, A, B]) @Test def monadLeftIdentity(): Unit = MG.run { forAll[A ~ (A => F[B])]("monad left identity") { case a ~ f => a.pure[F].flatMap(f) <-> f(a) } } @Test def monadRightIdentity(): Unit = MG.run { forAll[F[A]]("monad right identity") { fa => fa.flatMap(_.pure[F]) <-> fa } } @Test def mapFlatMapCoherence(): Unit = MG.run { forAll[F[A] ~ (A => B)]("monad flatMap is coherent with map") { case fa ~ f => fa.flatMap(a => f(a).pure[F]) <-> fa.map(f) } } object MonadLaws class Givens[F[_], A, B]( given Monad[F], Eq[F[A]], Show[F[A]], Eq[F[B]], Show[F[B]], Arbitrary[A], Arbitrary[F[A]], Arbitrary[A => B], Arbitrary[A => F[B]] ) with def run(body: ( given Monad[F], Applicative[F], Functor[F], Eq[F[A]], Show[F[A]], Eq[F[B]], Show[F[B]], Arbitrary[A], Arbitrary[F[A]], Arbitrary[A => B], Arbitrary[A => F[B]] ) => Unit): Unit = body.apply given[F[_], A, B]( given Monad[F], Eq[F[A]], Show[F[A]], Eq[F[B]], Show[F[B]], Arbitrary[A], Arbitrary[F[A]], Arbitrary[A => B], Arbitrary[A => F[B]] ): Givens[F, A, B]
morgen-peschke/sql2json
src/main/scala/sql2json/cat/macros/ShowMacros.scala
package sql2json package cat package macros import scala.quoted._ import scala.quoted.matching._ trait ShowMacros inline def (sc: StringContext) show(args: =>Any*): String = ${ ShowMacros.showMacroImpl('sc, 'args) } object ShowMacros inline def (sc: StringContext) show(args: =>Any*): String = ${ ShowMacros.showMacroImpl('sc, 'args) } def showMacroImpl(sc: Expr[StringContext], argsExpr: Expr[Seq[Any]])(given qctx: QuoteContext): Expr[String] = def fail[A](msg: String, expr: Expr[?]): Expr[A] = qctx.error(msg, expr) '{???} argsExpr match { case ExprSeq(argExprs) => val newArgsExpr = Expr.ofSeq(argExprs.map { case '{ $arg: $tp } => summonExpr[Show[$tp]] match case Some(showExpr) => '{ $showExpr.show($arg) } case None => fail(s"could not find implicit for Show[${tp.show}]", arg) case arg => fail(s"unexpected format: ${arg.show}", arg) }) '{ $sc.s($newArgsExpr: _*) } case _ => fail(s"Args must be explicit", argsExpr) }
morgen-peschke/sql2json
src/main/scala/sql2json/cat/Empty.scala
<reponame>morgen-peschke/sql2json package sql2json package cat trait Empty[A] def empty: A object Empty def instance[A](zero: A): Empty[A] = new Empty[A] with def empty: A = zero given Empty[Long] = instance[Long](0L)
morgen-peschke/sql2json
src/main/scala/sql2json/cat/Show.scala
package sql2json package cat trait Show[-A] def show(a: A): String object Show extends macros.ShowMacros def[A] (a: A) show(given S: Show[A]): String = S.show(a) //export macros.ShowMacros.show given Show[Nothing] = _ => ??? // Should never be used, but needed for stuff like Nil.show to compile given Show[String] = str => s""""$str"""" given Show[Int] = _.toString given Show[Long] = l => s"${l}L" given Show[Boolean] = _.toString given [L: Show]: Show[Left[L, ?]] = left => show"Left(${left.value})" given [R: Show]: Show[Right[?, R]] = right => show"Right(${right.value})" given [L: Show, R: Show]: Show[Either[L,R]] = _ match case Left(l) => show"Left($l)" case Right(r) => show"Right($r)" given [A: Show]: Show[List[A]] = _.map(_.show).mkString("[", ",", "]")
morgen-peschke/sql2json
src/main/scala/sql2json/jdbc/JdbcUrl.scala
<reponame>morgen-peschke/sql2json<gh_stars>0 package sql2json package jdbc import cat.Show import cat.Applicative.given import cat.MonadError, MonadError.given import cat.ApplicativeError, ApplicativeError.given import types.Convertible.given import types.validation.Errors, Errors.given import types.validation.FailFast, FailFast.Validated.given import java.sql.{Connection, DriverManager, Driver} import java.util.Properties opaque type JdbcUrl = String object JdbcUrl def apply[C[_]](raw: String)(given ApplicativeError[C, Errors]): C[JdbcUrl] = raw.trim.pure[FailFast.Validated] .ensure("JDBC url cannot be empty".as[Errors])(_.nonEmpty) .ensure("Expected JDBC url to start with 'jdbc:'".as[Errors])(_.startsWith("jdbc:")) .as[C[JdbcUrl]] given Show[JdbcUrl] = identity(_) given ops: AnyRef def (driver: Driver) connect (url: JdbcUrl)(props: Properties): Connection = driver.connect(url, props)
morgen-peschke/sql2json
src/main/scala/sql2json/jdbc/Sql.scala
<reponame>morgen-peschke/sql2json package sql2json package jdbc import cat.ApplicativeError import cat.SemigroupK.given import cat.Show, Show.show import types.validation.Errors, Errors.given import types.Generator, Generator.Action.{halt,given} import types.Done, Done.given import types.Convertible.given import types.Json import Username.given import Password.given import config.DBConfig import JdbcUrl.given import Driver.load import SqlResult.{OutputType, Row, given} import java.util.Properties import java.util.stream.Collector import java.sql.{Connection, DriverManager, Statement, ResultSet, ResultSetMetaData, SQLException} given Show[ResultSet] = _.toString /** * Very, very minimal wrapper around JDBC. * * The primary reason this exists is so I don't have to think about mananging * the lifecycle of the various JDBC objects. */ opaque type Sql = String object Sql def apply(q: String): Sql = q given Show[Sql] = identity(_) class SqlCollectorState(var buffer: List[Sql], var builder: StringBuilder) override def toString(): String = s"SqlCollectorState($buffer, $builder)" def append(line: String): Unit = { line match case ";" if builder.isEmpty => () case ";" => buffer = Sql(builder.toString) :: buffer builder = new StringBuilder case notDelim => builder.append(notDelim).append(' ') } def combine(other: SqlCollectorState): SqlCollectorState = throw new IllegalStateException("Combining SqlCollectorState is undefined") def finish: List[Sql] = (Sql(builder.toString) :: buffer).reverse object SqlCollectorState def init: SqlCollectorState = SqlCollectorState(Nil, new StringBuilder) val collector: Collector[String, SqlCollectorState, List[Sql]] = Collector.of[String, SqlCollectorState, List[Sql]]( () => SqlCollectorState.init, _ append _, _ combine _, _.finish ) def (statements: List[Sql]) executeAll() (given DBConfig, OutputType): Generator[Json] = def loop(connection: Connection, remaining: List[Sql]): Generator[Json] = remaining match case Nil => Generator.empty[Json] case last :: Nil => for stmt <- statement(connection) rs <- query(stmt, last) json <- columnInfo(rs).combineK(results(rs)) yield json case notLast :: rest => val genResult = for stmt <- statement(connection) yield try stmt.execute(notLast) catch case ex: Exception => throw new RuntimeException(s"Failed to execute: $notLast", ex) Json.nil genResult combineK loop(connection, rest) connection().flatMap(loop(_, statements)).dropWhile(Json.nil) def (sql: Sql) query ()(given DBConfig, OutputType): Generator[Json] = for conn <- connection() stmt <- statement(conn) rs <- query(stmt, sql) json <- columnInfo(rs).combineK(results(rs)) yield json def connection()(given dbConfig: DBConfig): Generator[Connection] = Generator.ofResource( dbConfig.show, () => { val connection = dbConfig.driver.load.connect(dbConfig.jdbcURL) { new Properties() .username(dbConfig.username) .password(<PASSWORD>) } connection.setAutoCommit(false) connection }, _.close().done ) def statement(connection: Connection): Generator[Statement] = Generator.ofResource( "SqlQuery", () => connection.createStatement(), _.close().done ) def execute[C[_]](statement: Statement, sql: String)(given AE: ApplicativeError[C, Errors]): C[Done] = AE.catchOnly[SQLException](statement.execute(sql).done) def query(statement: Statement, sql: String): Generator[ResultSet] = Generator.ofResource( "Query:ResultSet", () => { try statement.executeQuery(sql) catch case ex: Exception => throw new RuntimeException(s"Failed to run: $sql", ex) }, _.close().done ) def columnInfo(rs: ResultSet)(given 
outputType: OutputType): Generator[Json] = outputType match case ot @ OutputType.ArrayWithHeader(_) => Generator.one[Json](rs.header.as[Json]) case _ => Generator.empty[Json] def results(rs: ResultSet)(given OutputType): Generator[Json] = Generator.unfold(rs, resultSet => { if resultSet.next() then resultSet.row.as[Json].continue else halt })
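// Added illustration (hypothetical sketch, not part of the original file): exercising the
// statement collector on its own, assuming `collector` is a member of `object Sql` as the
// definitions above suggest. Lines are joined with a trailing space, and a line holding
// exactly ";" closes the current statement.
object SqlCollectorDemo
  def main(args: Array[String]): Unit =
    val lines = java.util.stream.Stream.of("SELECT 1", ";", "SELECT 2")
    val statements: List[Sql] = lines.collect(Sql.collector)
    statements.foreach(s => println(s"<$s>")) // <SELECT 1 > and <SELECT 2 >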
morgen-peschke/sql2json
src/test/scala/sql2json/types/NonEmptyListTest.scala
package sql2json package types import testing.laws.{ApplicativeLaws, EqLaws, FunctorLaws, MonadLaws, SemigroupLaws} final class NonEmptyListEqLaws extends EqLaws[NonEmptyList[Long]] final class NonEmptyListFunctorLaws extends FunctorLaws[NonEmptyList, Int, String, Long] final class NonEmptyListApplicativeLaws extends ApplicativeLaws[NonEmptyList, Int, String, Long] final class NonEmptyListMonadLaws extends MonadLaws[NonEmptyList, Int, String]
morgen-peschke/sql2json
src/main/scala/sql2json/types/Json.scala
package sql2json package types import cat.{Show, Eq} import Show.show import Eq.given import Convertible.given /** * Very simple JSON representation. * * Currently supports serialization of arbitrary types to [[Json]] using the * [[sql2json.types.Convertible]] typeclass, and serialization of [[Json]] * to [[scala.String]] (sorry, no pretty-printing). * * Does not support parsing or mapping [[Json]] to arbitrary types, as that facility isn't * needed for this project. */ enum Json case Nil case Number(value: BigDecimal) case Bool(value: Boolean) case Text(value: String) case Obj(fields: Map[String, Json]) case Arr(elements: Vector[Json]) object Json def obj(fields: (String, Json)*): Json = Json.Obj(fields.toMap) def arr(elements: Json*): Json = Json.Arr(elements.toVector) def nil: Json = Json.Nil private def (c: Char) escaped: String = s"\\$c" private val EscapingMap: Map[Char, String] = Map( '\\' -> '\\'.escaped, '"' -> '"'.escaped, '\b' -> 'b'.escaped, '\t' -> 't'.escaped, '\n' -> 'n'.escaped, '\f' -> 'f'.escaped, '\r' -> 'r'.escaped ) // Thank you Stack Overflow: https://stackoverflow.com/a/16652683/1188897 def (value: String) escaped: String = value .foldLeft(new StringBuilder(value.length).append('"')) { (builder, char) => EscapingMap.get(char) match case Some(escapedValue) => builder.append(escapedValue) case None if char < ' ' => builder.append("\\u").append(s"000${char.toInt.toHexString}".takeRight(4)) case None => builder.append(char) } .append('"') .toString given Eq[Json] = (_, _) match case (Json.Nil, Json.Nil) | (Json.Bool(true), Json.Bool(true)) | (Json.Bool(false), Json.Bool(false)) => true case (Json.Number(a), Json.Number(b)) => a == b case (Json.Text(a), Json.Text(b)) => a == b case (Json.Arr(a), Json.Arr(b)) => a.length == b.length && (a zip b).forall(_ === _) case (Json.Obj(a), Json.Obj(b)) => val aKeys = a.keySet val bKeys = b.keySet aKeys == bKeys && aKeys.forall { k => a.get(k) === b.get(k) } case _ => false given Show[Json] = _ match case Json.Nil => "null" case Json.Bool(true) => "true" case Json.Bool(false) => "false" case Json.Number(value) => value.toString case Json.Text(value) => value.escaped case Json.Arr(elements) => elements.map(_.show).mkString("[", ",", "]") case Json.Obj(fields) => fields.map { case (k, v) => s"${k.escaped}: ${v.show}" } .mkString("{", ",", "}") given Convertible[Boolean, Json] = Json.Bool(_) given Convertible[String, Json] = Json.Text(_) given[A](given Convertible[A, Json]): Convertible[Vector[A], Json] = va => Json.Arr(va.map(_.as[Json])) private val byteEncoder = java.util.Base64.getEncoder given Convertible[Array[Byte], Json] = ba => Json.Text(byteEncoder.encodeToString(ba)) private val bdToJson: Convertible[BigDecimal, Json] = Json.Number(_) given Convertible[BigDecimal, Json] = bdToJson given Convertible[BigInt, Json] = bdToJson.comap(BigDecimal(_)) given Convertible[Int, Json] = bdToJson.comap(BigDecimal(_)) given Convertible[Long, Json] = bdToJson.comap(BigDecimal(_)) given Convertible[Double, Json] = bdToJson.comap(BigDecimal(_)) given Convertible[Float, Json] = bdToJson.comap(f => BigDecimal(f.toDouble))
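// Added illustration (hypothetical, not part of the original file): building a small
// document with the helpers above and rendering it through the Show instance.
object JsonShowDemo
  def main(args: Array[String]): Unit =
    val doc = Json.obj(
      "name" -> Json.Text("sql2json"),
      "count" -> Json.Number(BigDecimal(2)),
      "tags" -> Json.arr(Json.Bool(true), Json.nil)
    )
    println(doc.show) // e.g. {"name": "sql2json","count": 2,"tags": [true,null]}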
morgen-peschke/sql2json
src/main/scala/sql2json/cat/Monoid.scala
<filename>src/main/scala/sql2json/cat/Monoid.scala package sql2json package cat trait Monoid[A](given val semigroup: Semigroup[A], E: Empty[A]) def empty: A = E.empty trait EmptyAndSemigroupIsMonoid given[A: Semigroup: Empty]: Monoid[A] = Monoid.instance trait MonoidKCanProvideMonoid extends EmptyAndSemigroupIsMonoid given[M[_], A] (given M: MonoidK[M]): Monoid[M[A]] = M.monoid[A] object Monoid extends MonoidKCanProvideMonoid def instance[A: Semigroup: Empty]: Monoid[A] = new Monoid[A] {} def empty[A](given M: Monoid[A]): A = M.empty
morgen-peschke/sql2json
src/main/scala/sql2json/cat/Functor.scala
package sql2json package cat trait Functor[C[_]] def map [A,B] (fa: C[A], f: A => B): C[B] trait ApplicativeIsFunctor given [C[_]](given A: Applicative[C]): Functor[C] = A.functor object Functor extends ApplicativeIsFunctor given ops[C[_],A]: AnyRef def [B] (fa: C[A]) map (f: A => B)(given F: Functor[C]): C[B] = F.map(fa, f) given Functor[List] def map [A,B] (fa: List[A], f: A => B): List[B] = fa.map(f)
morgen-peschke/sql2json
src/main/scala/sql2json/cli/Arguments.scala
package sql2json package cli import java.nio.file.{Path,Paths} import cat.Show, Show.show import cat.Functor.given import cat.Monad.given import cat.Applicative.given import cat.ApplicativeError, ApplicativeError.given import cat.SemigroupK.given import types.Convertible.given import types.validation.Accumulate, Accumulate.given import types.validation.FailFast, FailFast.given import types.validation.Errors, Errors.given import types.NonEmptyList import config.Config import jdbc.{Database, Sql} import jdbc.SqlResult.OutputType import config.DBConfig final case class Arguments(dbConfig: DBConfig, format: OutputType) object Arguments object ShowHelpText given Show[Arguments] = a => show"Arguments(dbConfig: ${a.dbConfig}, format: ${a.format})" private final val ArrayFmt: String = "array" private final val ObjectFmt: String = "object" final val HelpText = s"""|Usage: sql2json [options] | | Runs a SQL expression and returns it as lines of JSON. | | The query is read from standard input. Because I was not about to write anything close to a SQL | parser, if multiple statements are needed, they must be separated by a line which contains exactly | a single ';'. | | All but the last statement are assumed to be commands, and their result sets are ignored. The final | statement is assumed to be a query, and its result set is processed and returned. If the final | statement is not a query, you are using the wrong tool. | | Configuration of credentials & dependencies is handled via HOCON config files. | | Options | -d --database name Selects a database from the config file | If omitted, and the config only contains a single entry, it'll choose that. | Otherwise, you'll get an error. | | -f --format fmt Selects the output format, can be '$ObjectFmt' or '$ArrayFmt' (the default) | | -h --header Enables emitting a leading header in '$ArrayFmt' format (disabled by default) | | -v --verbose Enables extra information in the header, when enabled (disabled by default) | | --help Print this help message, then exit |""".stripMargin private def processArgs(config: Config, rest: List[String], dbNameOpt: Option[Database] = None, formatAsObject: Boolean = false, omitHeader: Boolean = true, verbose: Boolean = false): FailFast.Validated[Either[ShowHelpText.type,Arguments]] = rest match case "--help" :: _ => Left(ShowHelpText).validFF case ("-h" | "--header") :: remaining => processArgs(config, remaining, dbNameOpt, formatAsObject, false, verbose) case ("-v" | "--verbose") :: remaining => processArgs(config, remaining, dbNameOpt, formatAsObject, omitHeader, true) case arg @ ("-f" | "--format") :: paramAndRemaining => paramAndRemaining match case Nil => s"Missing <fmt> after $arg".invalidFF case ArrayFmt :: remaining => processArgs(config, remaining, dbNameOpt, false, omitHeader, verbose) case ObjectFmt :: remaining => processArgs(config, remaining, dbNameOpt, true, omitHeader, verbose) case junk :: _ => s"Unrecognized <fmt> after $arg (expected '$ObjectFmt' or '$ArrayFmt'): $junk".invalidFF[Either[ShowHelpText.type,Arguments]] case arg @ ("-d" | "--database") :: paramAndRemaining => paramAndRemaining match case Nil => s"Missing <name> after $arg".invalidFF case rawName :: remaining => Database[FailFast.Validated](rawName).toEither match case Right(dbName) => processArgs(config, remaining, Some(dbName), formatAsObject, omitHeader, verbose) case Left(errors) => errors.map(e => s"Invalid <name> after $arg: $e").invalidFF[Either[ShowHelpText.type,Arguments]] case Nil => dbNameOpt
.fold(config.default[FailFast.Validated])(config.forDatabase[FailFast.Validated]) .map { dbConfig => Right(Arguments( dbConfig, if formatAsObject then OutputType.Object else if omitHeader then OutputType.BareArray else OutputType.ArrayWithHeader(verbose) )) } case junk => show"Unrecognized argument, starting at: $junk".invalidFF def parse[C[_]](args: Seq[String])(given ApplicativeError[C, Errors]): C[Arguments] = val results = args.toList match case Nil => "No arguments".as[Errors].raise[FailFast.Validated, Either[ShowHelpText.type,Arguments]] case rest => Config.load[C].as[FailFast.Validated[Config]].flatMap(processArgs(_, rest)) results .mapError[Errors](_ combineK HelpText.pure[NonEmptyList]) .flatMap { case Left(ShowHelpText) => HelpText.as[Errors].raise[FailFast.Validated, Arguments] case Right(args) => args.pure[FailFast.Validated] } .as[C[Arguments]]
morgen-peschke/sql2json
src/test/scala/sql2json/testing/laws/ApplicativeErrorLaws.scala
package sql2json package testing package laws import cat.{ApplicativeError, Eq, Show, Functor, Monad} import cat.ApplicativeError.given import cat.Functor.given import cat.Monad.given import cat.Applicative.{~, given} import testing.Arbitrary import testing.Arbitrary.forAll import testing.Result.given import org.junit.Test import org.junit.Assert.fail import types.Convertible abstract class ApplicativeErrorLaws[F[_], E, A, B](given AE: ApplicativeErrorLaws.Givens[F, E, A, B]) @Test def toAndFromEitherConsistency(): Unit = AE.run { forAll[Either[E,A]]("applicativeError toEither/fromEither consistency (fix this first)") { either => either.liftToError[F].toEither <-> either } } @Test def raiseConsistencyWithFromEither(): Unit = AE.run { forAll[E]("raise consistency with fromEither on a Left") { e => e.raise[F,A] <-> Left(e).liftToError[F] } } @Test def recoverConsistentWithPure(): Unit = AE.run { forAll[E ~ (E => A)]("recover consistency with pure") { case e ~ f => e.raise[F,A].recover(f) <-> f(e).pure[F] } } @Test def mapErrorIdentity(): Unit = AE.run { forAll[F[A]]("mapError identity") { fa => fa.mapError[E](identity) <-> fa } } @Test def mapErrorConsistencyWithRaise(): Unit = AE.run { forAll[E ~ (E => E)]("mapError consistency with raise") { case e ~ f => e.raise[F,A].mapError[E](f) <-> f(e).raise[F,A] } } @Test def foldCompositionForErrors(): Unit = AE.run { forAll[E ~ (E => B) ~ (A => B)]("fold composes for errors") { case e ~ fe ~ fa => e.raise[F,A].fold(fe, fa) <-> fe(e) } } @Test def foldCompositionForSuccesses(): Unit = AE.run { forAll[A ~ (E => B) ~ (A => B)]("fold composes for successes") { case a ~ fe ~ fa => a.pure[F].fold(fe, fa) <-> fa(a) } } @Test def testCatchOnlyReturningValue(): Unit = AE.run { forAll[A]("catchOnly returning value consistent with pure") { value => given Convertible[IllegalArgumentException, E] = iae => fail(s"Should have returned value, instead caught $iae") ??? summon[ApplicativeError[F, E]].catchOnly[IllegalArgumentException](value) <-> value.pure[F] } } @Test def testCatchOnlyThrowsExpectedException(): Unit = AE.run { forAll[String ~ (String => E)]("catchOnly handling exception consistent with raise") { case exceptionMsg ~ msgToErrorFn => given Convertible[IllegalArgumentException, E] = iae => msgToErrorFn(iae.getMessage) summon[ApplicativeError[F, E]].catchOnly[IllegalArgumentException] { throw new IllegalArgumentException(exceptionMsg) } <-> msgToErrorFn(exceptionMsg).raise[F, A] } } @Test def testCatchOnlyThrowsUnexpectedException(): Unit = AE.run { forAll[String]("catchOnly propagates exceptions of unexpected types") { exceptionMsg => given Convertible[ClassCastException, E] = iae => fail(s"Should have propagated exception, but caught $iae instead") ??? try val result = summon[ApplicativeError[F, E]].catchOnly[ClassCastException] { throw new IllegalArgumentException(exceptionMsg) } fail(s"Should have thrown exception, instead returned $result") ???
catch case e: IllegalArgumentException => e.getMessage <-> exceptionMsg } } object ApplicativeErrorLaws class Givens[F[_], E, A, B]( given ApplicativeError[F,E], Eq[Either[E,A]], Show[Either[E,A]], Eq[F[A]], Show[F[A]], Eq[B], Show[B], Eq[String], Show[String], Arbitrary[F[A]], Arbitrary[A], Arbitrary[E], Arbitrary[A => B], Arbitrary[E => A], Arbitrary[String], Arbitrary[String => E], Arbitrary[E => E], Arbitrary[E => B], Arbitrary[Either[E, A]] ) with def run(body: (given ApplicativeError[F,E], Eq[Either[E,A]], Show[Either[E,A]], Eq[F[A]], Show[F[A]], Eq[B], Show[B], Eq[String], Show[String], Arbitrary[F[A]], Arbitrary[A], Arbitrary[E], Arbitrary[A => B], Arbitrary[E => A], Arbitrary[String], Arbitrary[String => E], Arbitrary[E => E], Arbitrary[E => B], Arbitrary[Either[E, A]] ) => Unit): Unit = body.apply given [F[_], E, A, B]( given ApplicativeError[F,E], Eq[Either[E,A]], Show[Either[E,A]], Eq[F[A]], Show[F[A]], Eq[B], Show[B], Eq[String], Show[String], Arbitrary[F[A]], Arbitrary[A], Arbitrary[E], Arbitrary[A => B], Arbitrary[E => A], Arbitrary[String], Arbitrary[String => E], Arbitrary[E => E], Arbitrary[E => B], Arbitrary[Either[E, A]] ): Givens[F, E, A, B]
morgen-peschke/sql2json
src/test/scala/sql2json/testing/laws/ApplicativeLaws.scala
package sql2json package testing.laws import cat.{Applicative, Functor, Show, Eq} import cat.Applicative.{~, given} import cat.Functor.given import testing.Arbitrary import testing.Arbitrary.forAll import testing.Result.given import org.junit.Test abstract class ApplicativeLaws[F[_], A, B, C](given AG: ApplicativeLaws.Givens[F, A, B, C]) @Test def applicativeIdentityLaw(): Unit = AG.run { forAll[F[A]]("applicative identity") { fa => (identity[A] _).pure[F] <*> fa <-> fa } } @Test def applicativeHomomorphismLaw(): Unit = AG.run { forAll[A ~ (A => B)]("applicative homomorphism") { case a ~ f => f.pure[F] <*> a.pure[F] <-> f(a).pure[F] } } @Test def applicativeInterchange(): Unit = AG.run { forAll[A ~ F[A => B]]("applicative interchange") { case a ~ ff => (ff <*> a.pure[F]) <-> (((f: A => B) => f(a)).pure[F] <*> ff) } } @Test def applicativeMapLaw(): Unit = AG.run { forAll[F[A] ~ (A => B)]("ap must be consistent with map") { case fa ~ f => (fa map f) <-> (f.pure[F] <*> fa) } } @Test def apProductConsistentLaw(): Unit = AG.run { forAll[F[A] ~ F[A => B]]("ap must be consistent with product") { case fa ~ f => f <*> fa <-> (f |@| fa).map { case (f, a) => f(a) } } } @Test def apCompositionLaw(): Unit = val compose: (B => C) => (A => B) => (A => C) = _.compose AG.run { forAll[F[A] ~ F[A => B] ~ F[B => C]]("ap should compose") { case fa ~ fab ~ fbc => fbc <*> (fab <*> fa) <-> (fbc.map(compose) <*> fab <*> fa) } } @Test def productRConsistencyLaw(): Unit = AG.run { forAll[F[A] ~ F[B]]("productR should behave like product followed by dropping left side with map") { case fa ~ fb => (fa *> fb) <-> (fa |@| fb).map((_, b) => b) } } @Test def productLConsistencyLaw(): Unit = AG.run { forAll[F[A] ~ F[B]]("productL should behave like product followed by dropping right side with map") { case fa ~ fb => (fa <* fb) <-> (fa |@| fb).map((a, _) => a) } } object ApplicativeLaws class Givens[F[_], A, B, C](given Applicative[F], Show[F[A]], Show[F[B]], Show[F[C]], Eq[F[A]], Eq[F[B]], Eq[F[C]], Arbitrary[A], Arbitrary[F[A]], Arbitrary[F[B]], Arbitrary[A => B], Arbitrary[F[A => B]], Arbitrary[F[B => C]] ) with def run(body: (given Applicative[F], Functor[F], Show[F[A]], Show[F[B]], Show[F[C]], Eq[F[A]], Eq[F[B]], Eq[F[C]], Arbitrary[A], Arbitrary[F[A]], Arbitrary[F[B]], Arbitrary[A => B], Arbitrary[F[A => B]], Arbitrary[F[B => C]] ) => Unit): Unit = body.apply given[F[_], A, B, C](given Applicative[F], Show[F[A]], Show[F[B]], Show[F[C]], Eq[F[A]], Eq[F[B]], Eq[F[C]], Arbitrary[A], Arbitrary[F[A]], Arbitrary[F[B]], Arbitrary[A => B], Arbitrary[F[A => B]], Arbitrary[F[B => C]] ): Givens[F,A,B,C]
morgen-peschke/sql2json
src/test/scala/sql2json/testing/laws/SemigroupLaws.scala
package sql2json package testing package laws import cat.{Show, Eq, Semigroup} import cat.Applicative.{~, given} import cat.Semigroup.given import testing.Arbitrary import testing.Arbitrary.forAll import testing.Result.given import org.junit.Test abstract class SemigroupLaws[A](given SG: SemigroupLaws.Givens[A]) @Test def associativeLaw(): Unit = SG.run { forAll[A ~ A ~ A]("combine is associative") { case a ~ b ~ c => ((a combine b) combine c) <-> (a combine (b combine c)) } } object SemigroupLaws class Givens[A](given Semigroup[A], Arbitrary[A], Eq[A], Show[A] ) with def run(body: (given Semigroup[A], Arbitrary[A], Eq[A], Show[A] ) => Unit): Unit = body.apply given[A](given Semigroup[A], Arbitrary[A], Eq[A], Show[A] ): Givens[A]
morgen-peschke/sql2json
src/test/scala/sql2json/testing/Arbitrary.scala
<filename>src/test/scala/sql2json/testing/Arbitrary.scala package sql2json package testing import cat.{Applicative, Functor, Monad} import cat.Applicative.{~, given} import cat.Functor.given import types.{NonEmptyList, Done} import cat.Monad.given import org.junit.Assert.assertThat import net.java.quickcheck.QuickCheck import net.java.quickcheck.{Generator, Characteristic} import net.java.quickcheck.generator.support.{IntegerGenerator, LongGenerator} import net.java.quickcheck.generator.PrimitiveGenerators import scala.util.Random /** * I don't know if QuickCheck implements its [[Generator]] using mutable * internal state, so it's simpler to wrap it in [[Arbitrary]] so I * can work with a fresh one each time and not have to worry about it. */ trait Arbitrary[A] def gen: Generator[A] object Arbitrary def apply[A](given A: Arbitrary[A]): Arbitrary[A] = A def forAll[A: Arbitrary](testName: String)(test: A => Result): Unit = QuickCheck.forAll( summon[Arbitrary[A]].gen, new Characteristic[A] { def name: String = testName def setUp(): Unit = () def tearDown(): Unit = () def specify(a: A): Unit = assertThat(test(a), Passes) }) def oneOf[A](arbA0: Arbitrary[A], arbA1: Arbitrary[A], arbAN: Arbitrary[A]*): Arbitrary[A] = val arbAs = arbA0 +: arbA1 +: arbAN.toVector new Arbitrary[A] with def gen: Generator[A] = new Generator[A] with private val genAs: Vector[Generator[A]] = arbAs.map(_.gen) private val indexGen = new IntegerGenerator(0, arbAs.length - 1) def next(): A = genAs(indexGen.next()).next() def choose[A](a0: A, a1: A, aN: A*): Arbitrary[A] = val aVector = a0 +: a1 +: aN.toVector new Arbitrary[A] with def gen: Generator[A] = new Generator[A] with private val indexGen = new IntegerGenerator(0, aVector.length - 1) def next(): A = aVector(indexGen.next()) def between(low: Int, high: Int): Arbitrary[Int] = val end = high max low val start = high min low val size = end - start usingRandom(_.nextInt(size) + start) val longs: Arbitrary[Long] = new Arbitrary[Long] with def gen = new Generator[Long] with private val wrapped: Generator[java.lang.Long] = new LongGenerator() def next(): Long = wrapped.next.toLong def usingRandom[A](factory: Random => A): Arbitrary[A] = longs.map(new Random(_)).map(factory) given Functor[Arbitrary] def map [A,B] (fa: Arbitrary[A], f: A => B): Arbitrary[B] = new Arbitrary[B] with def gen: Generator[B] = new Generator[B] with private val wrapped = fa.gen def next(): B = f(wrapped.next()) given Applicative[Arbitrary] def pure[A](a: A): Arbitrary[A] = new Arbitrary[A] with def gen: Generator[A] = new Generator[A] with def next(): A = a def ap[A, B](cf: Arbitrary[A => B], ca: Arbitrary[A]): Arbitrary[B] = new Arbitrary[B] with def gen: Generator[B] = new Generator[B] with private val genF = cf.gen private val genA = ca.gen def next(): B = genF.next()(genA.next()) given Monad[Arbitrary] def flatMap[A,B](ca: Arbitrary[A], fc: A => Arbitrary[B]): Arbitrary[B] = val genMaker = ca.map(fc).map(_.gen) new Arbitrary[B] with def gen: Generator[B] = genMaker.gen.next() given [A,B](given A: Arbitrary[A], B: Arbitrary[B]): Arbitrary[A ~ B] = A |@| B given Arbitrary[Boolean] = choose[Boolean](true, false) given Arbitrary[String] def gen = PrimitiveGenerators.strings(500) given Arbitrary[Int] def gen = new Generator[Int] with private val wrapped: Generator[java.lang.Integer] = new IntegerGenerator() def next(): Int = wrapped.next().toInt given Arbitrary[Long] = longs def makeArbFunction[A,B](given CA: Cogen[A], GB: Gen[B], AB: Arbitrary[B]): Arbitrary[A => B] = longs.map { offset => (a:
A) => GB.fromSeed(new scala.util.Random(offset + CA.toSeed(a)).nextLong) } given [A,B](given CA: Cogen[A], GB: Gen[B], AB: Arbitrary[B]): Arbitrary[A => B] = makeArbFunction given [L,R](given L: Arbitrary[L], R: Arbitrary[R]): Arbitrary[Either[L,R]] = Arbitrary.oneOf[Either[L,R]]( L.map(Left(_)), R.map(Right(_)) ) given[A] (given AA: Arbitrary[A]): Arbitrary[NonEmptyList[A]] = between(0, 20).map { size => val gen = AA.gen val head = gen.next() val tail = List.fill(size - 1)(gen.next()) NonEmptyList(head, tail) } given[A] (given Arbitrary[A]): Arbitrary[List[A]] = summon[Arbitrary[NonEmptyList[A]]].map(_.tail) given Arbitrary[Done] = Done.upcast.pure[Arbitrary]
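// Added illustration (hypothetical, not part of the original file): pulling a few values
// from a derived instance by hand. `gen` wraps a fresh QuickCheck Generator on each call,
// and the pair instance for `~` comes from `|@|` above.
object ArbitraryDemo
  def main(args: Array[String]): Unit =
    val pairs = Arbitrary[Int ~ Boolean].gen
    (1 to 3).foreach(_ => println(pairs.next()))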
morgen-peschke/sql2json
src/test/scala/sql2json/cat/FunctorInstancesTest.scala
<gh_stars>0 package sql2json package cat import testing.laws.FunctorLaws final class FunctorListInstancesTest extends FunctorLaws[List, Int, String, Long]
morgen-peschke/sql2json
src/main/scala/sql2json/cat/Cofunctor.scala
package sql2json package cat trait Cofunctor[C[_]] def comap [A,B] (fa: C[A], f: B => A): C[B] object Cofunctor given ops[C[_],A]: AnyRef def [B] (fa: C[A]) comap (f: B => A)(given F: Cofunctor[C]): C[B] = F.comap(fa, f)
morgen-peschke/sql2json
src/test/scala/sql2json/types/GeneratorTest.scala
<reponame>morgen-peschke/sql2json<gh_stars>0 package sql2json package types import java.io.StringReader import Generator.{Action, Result} import Generator.Action.{halt, given} import Generator.Result.given import cat.{Eq,Show,Functor} import cat.Applicative.{~, given} import cat.ApplicativeError.given import cat.SemigroupK.given import cat.Functor.given import cat.Monad.given import cat.MonadError.given import cat.Eq.given import cat.Show.show import org.junit.Test import org.junit.Assert._ import testing.laws.{ApplicativeLaws, EqLaws, FunctorLaws, MonadLaws, SemigroupLaws} import testing.{Arbitrary, Gen, Cogen} import testing.Arbitrary.forAll import testing.Result.{both, given} import Done.given final class ActionEqLaws extends EqLaws[Action[Int]] final class ActionFunctorLaws extends FunctorLaws[Action, Int, String, Long] given[A](given AA: Arbitrary[A]): Arbitrary[Action[A]] = Arbitrary.oneOf( AA.map(_.continue), AA.map(_.stop), halt.pure[Arbitrary] ) given[A](given GA: Gen[A]): Gen[Action[A]] = Gen.usingRandom { rng => rng.nextInt(10) match case 0 => GA.fromSeed(rng.nextLong).stop case 1 => halt case _ => GA.fromSeed(rng.nextLong).continue } given[A](given CA: Cogen[A], CS: Cogen[String], CE: Cogen[NonEmptyList[String]]): Cogen[Action[A]] = _ match case Action.Continue(a) => 1L + CA.toSeed(a) case Action.Stop(a) => 2L + CA.toSeed(a) case Action.Halt() => 3L case Action.Fail(Result.Failure(gen, errors)) => 4L + CS.toSeed(gen.toString) + CE.toSeed(errors) final class GeneratorToListTest @Test def toListShouldProduceTheExpectedList(): Unit = forAll[List[Long]]("toList (if broken, fix this one first)") { list => Generator.fromList(list).toList <-> list.success } final class GeneratorEmptyTest @Test def foldLeftShouldReturnInitial(): Unit = forAll[Int ~ Action[Int]]("no-op foldLeft") { case initial ~ ignoredResult => Generator.empty[Int].foldLeft[Int](initial, (a,b) => ignoredResult) <-> initial.success } @Test def foreachShouldSkipBody(): Unit = Generator.empty[Int].foreach { a => fail(s"Body received $a instead of being skipped") Done.stop } <-> Done.success @Test def foldShouldReturnEmpty(): Unit = Generator.empty[Long].fold <-> 0L.success @Test def foldKShouldReturnEmpty(): Unit = Generator.empty[Long].foldK[List] <-> List.empty[Long].success final class GeneratorOneTest @Test def foldLeftShouldOnlyRunOnce(): Unit = forAll[Int ~ Int ~ (Int => Action[Int])]("foldLeft") { case base ~ initial ~ f => Generator.one(base).foldLeft(initial, (a,b) => f(a + b)) <-> f(base + initial).asResult(initial) } @Test def foreachShouldOnlyRunOnce(): Unit = forAll[Int]("foreach") { base => var counter = 0 both( "generator results" asClue { Generator.one(base).foreach { value => counter += value Done.continue } <-> Done.success }, "counter value" asClue(counter <-> base) ) } @Test def foldShouldReturnTheValue(): Unit = forAll[Long]("fold") { base => Generator.one(base).fold <-> base.success } @Test def foldKShouldReturnTheWrappedValue(): Unit = forAll[Long]("foldK") { base => Generator.one(base).foldK <-> (base :: Nil).success } final class SimpleGeneratorVariantTests @Test def fromListShouldRunOncePerElement(): Unit = forAll[List[Int]]("Generator.fromList(...).foldLeft") { list => Generator.fromList(list).foldLeft( List.empty[Int], (a,b) => (b :: a).continue ) <-> list.reverse.success } @Test def constShouldProduceTheSameValueEndlessly(): Unit = given Arbitrary[Int] = Arbitrary.between(0, 10) forAll[Int ~ Long]("Generator.const(...).foldLeft") { case count ~ element => Generator .const(element)
.foldLeft( count, (remaining, b) => { assertEquals(element, b) if remaining == 0 then 0.stop else (remaining - 1).continue } ) <-> 0.success } @Test def continuallyShouldRunTheSameEffectEndlessly(): Unit = given Arbitrary[Int] = Arbitrary.between(0, 10) forAll[Int ~ Long]("Generator.continually(...).foldLeft") { case count ~ element => var remainingGlobal = count both( "[elements match] " asClue { Generator .continually { val element = remainingGlobal remainingGlobal = remainingGlobal - 1 element } .foldLeft( count, (remaining, element) => { assertEquals(element.toLong, remaining.toLong) if remaining == 0 then 0.stop else (remaining - 1).continue } ) <-> 0.success }, "[remainingGlobal matches] " asClue { remainingGlobal <-> -1 } ) } @Test def calculateShouldUseThePreviousValueToProduceTheNextValue(): Unit = given Arbitrary[Int] = Arbitrary.between(0, 10) forAll[Long ~ Int]("Generator.calculate(...).foldLeft") { case base ~ length => val last = base + length.toLong Generator .calculate( base, { _ match case `last` => last.stop case i => (i + 1L).continue } ) .foldLeft[List[Long]]( Nil, (a, b) => (b :: a).continue ) .map(_.reverse) <-> (base to last).toList.success } @Test def fromShouldCountUpwardsFromStartByStep(): Unit = given Arbitrary[Long] = Arbitrary.between(-2000, 2000).map(_.toLong) given Arbitrary[Int] = Arbitrary.between(0, 10) forAll[Long ~ Int]("Generator.from(start, step).toList equivalence with Iterator.from") { case start ~ step => Generator.from[Long](start, step.toLong).take(20).toList <-> Iterator.from(start.toInt, step).take(20).map(_.toLong).toList.success } @Test def unfoldShouldExpandUntilExhausted(): Unit = forAll[String]("Generator.unfold(...).foldLeft") { string => given Show[StringReader] = _.toString val generator = for reader <- Generator.ofResource( s"StringReader($string)", () => new StringReader(string), _.close().done ) char <- Generator.unfold( reader, { _.read() match case -1 => halt case i => i.toChar.continue } ) yield char generator.foldLeft( new StringBuilder, (b, c) => b.append(c).continue ).map(_.toString) <-> string.success } @Test def takeShouldBehaveLikeTakeOnList(): Unit = given Arbitrary[(List[Int], Int)] = for list <- summon[Arbitrary[List[Int]]] take <- Arbitrary.between(0, list.length + 10) yield (list, take) forAll[(List[Int], Int)]("Generator#take(...).toList equivalence with List.take") { case (list, length) => Generator.fromList(list).take(length.toLong).toList <-> list.take(length).success } @Test def takeWhileShouldBehaveLikeTakeWhileOnList(): Unit = given Arbitrary[Int] = Arbitrary.between(0, 20) forAll[Int]("Generator#takeWhile(...).toList equivalence with List.takeWhile") { length => Generator.from[Int](0).takeWhile(_ < length).toList <-> (0 to length + 3).takeWhile(_ < length).toList.success } @Test def takeUntilShouldOmitTheSentinel(): Unit = given Arbitrary[Int] = Arbitrary.between(0, 20) forAll[Int]("Generator#takeUntil(...).toList equivalence with List.takeWhile") { length => Generator.from[Int](0).takeUntil(length).toList <-> (0 to length + 3).takeWhile(_ != length).toList.success } @Test def dropUntilShouldOmitLeadingSentinelValues(): Unit = given Arbitrary[Int] = Arbitrary.between(0, 20) forAll[Int ~ NonEmptyList[Long]]("Generator#dropWhile(...).toList equivalence with List.dropWhile") { case padding ~ nel => // The inversion of nel.head is to ensure we don't run into a situation where the next element past the sentinels // we add is also the sentinel value val base = Generator.const(nel.head).take(padding.toLong) combineK
Generator.fromList(-nel.head :: nel.tail) base.dropWhile(nel.head).toList <-> (-nel.head :: nel.tail).success } object GeneratorLawTests given[A,B]: Show[A => B] = _.toString given[A: Show](given ALA: Arbitrary[List[A]]): Arbitrary[Generator[A]] = ALA.map(Generator.fromList) given[A](given Eq[Result[List[A]]]): Eq[Generator[A]] = _.toList === _.toList given[A](given Show[Result[List[A]]]): Show[Generator[A]] = _.toList.show given[A](given CA: Cogen[Result[List[A]]]): Cogen[Generator[A]] = g => CA.toSeed(g.toList) given[A: Show](given GA: Gen[List[A]]): Gen[Generator[A]] = GA.map(Generator.fromList) final class GeneratorFunctorLaws extends FunctorLaws[Generator, Int, String, Long] final class GeneratorApplicativeLaws extends ApplicativeLaws[Generator, Int, String, Long] final class GeneratorMonadLaws extends MonadLaws[Generator, Int, String] final class GeneratorSemigroupLaws extends SemigroupLaws[Generator[Int]]
morgen-peschke/sql2json
src/test/scala/sql2json/testing/Seed.scala
package sql2json package testing /** * Pretty much just ripped from `org.scalacheck.rng.Seed` */ sealed abstract class Seed extends Serializable protected val a: Long protected val b: Long protected val c: Long protected val d: Long /** Generate the next seed in the RNG's sequence. */ def next: Seed = import java.lang.Long.rotateLeft val e = a - rotateLeft(b, 7) val a1 = b ^ rotateLeft(c, 13) val b1 = c + rotateLeft(d, 37) val c1 = d + e val d1 = e + a Seed(a1, b1, c1, d1) /** Reseed the RNG using the given Long value. */ def reseed(n: Long): Seed = val n0 = ((n >>> 32) & 0xffffffff) val n1 = (n & 0xffffffff) var i = 0 var seed: Seed = Seed(a ^ n0, b ^ n1, c, d) while(i < 16) seed = seed.next i += 1 seed /** * Generates a Long value. * * The values will be uniformly distributed. */ def long: (Long, Seed) = (d, next) /** * Generates a Double value. * * The values will be uniformly distributed, and will be contained * in the interval [0.0, 1.0). */ def double: (Double, Seed) = ((d >>> 11) * 1.1102230246251565e-16, next) object Seed private case class apply(a: Long, b: Long, c: Long, d: Long) extends Seed /** Generate a deterministic seed. */ def apply(s: Long): Seed = var i = 0 var seed: Seed = Seed(0xf1ea5eed, s, s, s) while (i < 20) seed = seed.next i += 1 seed /** Generate a random seed. */ def random(): Seed = apply(scala.util.Random.nextLong)
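// Added illustration (hypothetical, not part of the original file): seeds are fully
// deterministic, so a given Long always unfolds into the same sequence of values.
object SeedDemo
  def main(args: Array[String]): Unit =
    val (first, next) = Seed(42L).long
    val (second, _) = next.long
    println(s"$first, $second")         // stable across runs
    println(Seed(42L).long._1 == first) // true: fully determined by the input seed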
morgen-peschke/sql2json
src/main/scala/sql2json/cat/MonoidK.scala
<reponame>morgen-peschke/sql2json package sql2json package cat trait MonoidK[C[_]](given val semigroupK: SemigroupK[C], EK: EmptyK[C]) def emptyK[A]: C[A] = EK.emptyK[A] def monoid[A]: Monoid[C[A]] = Monoid.instance[C[A]](given semigroupK.semigroup, EK.empty) object MonoidK given MonoidK[List] def combineK[A] (a: List[A], b: List[A]): List[A] = a ::: b
morgen-peschke/sql2json
src/test/scala/sql2json/testing/Cogen.scala
<gh_stars>0 package sql2json package testing import types.NonEmptyList /** * Dual of [[Gen]], mostly used to create instances * of `Arbitrary[A => B]` */ trait Cogen[A] def toSeed(a: A): Long object Cogen def apply[A](given C: Cogen[A]): Cogen[A] = C given Cogen[Int] = _.toLong given Cogen[Long] = identity(_) given Cogen[String] = _.foldLeft(0L)(_ + _.hashCode.toLong) given Cogen[Boolean] = if _ then 0L else 1L given [A](given A: Cogen[A]): Cogen[List[A]] = _.map(A.toSeed).foldLeft(0L)(_ + _) given [A](given A: Cogen[A]): Cogen[NonEmptyList[A]] = _.map(A.toSeed).fold
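// Added illustration (hypothetical, not part of the original file): a Cogen collapses a
// value into a Long seed, which is how `Arbitrary[A => B]` varies its output per input.
object CogenDemo
  def main(args: Array[String]): Unit =
    println(Cogen[String].toSeed("abc"))            // 294: summed char hash codes
    println(Cogen[List[Int]].toSeed(List(1, 2, 3))) // 6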
morgen-peschke/sql2json
src/main/scala/sql2json/jdbc/Password.scala
package sql2json package jdbc import cat.Show import cat.Applicative.given import cat.MonadError, MonadError.given import cat.ApplicativeError, ApplicativeError.given import types.Convertible.given import types.validation.Errors, Errors.given import types.validation.FailFast, FailFast.Validated.given import java.util.Properties opaque type Password = String object Password def apply[C[_]](raw: String)(given ApplicativeError[C, Errors]): C[Password] = raw.trim.pure[FailFast.Validated] .ensure("Password cannot be empty".as[Errors])(_.nonEmpty) .as[C[Password]] given Show[Password] = _ => "********" given ops: AnyRef def (props: Properties) password (password: Password): Properties = props.put("password", password) props
morgen-peschke/sql2json
src/test/scala/sql2json/testing/Gen.scala
<filename>src/test/scala/sql2json/testing/Gen.scala<gh_stars>0 package sql2json package testing import cat.Functor import types.NonEmptyList import scala.util.Random /** * While I'm using QuickCheck, I still need this to be able to * create `Arbitrary[A => B]` instances. */ trait Gen[A] def fromSeed(s: Long): A object Gen def apply[A](given G: Gen[A]): Gen[A] = G def usingRandom[A](body: Random => A): Gen[A] = seed => body(new Random(seed)) given Functor[Gen] def map [A,B] (fa: Gen[A], f: A => B): Gen[B] = seed => f(fa.fromSeed(seed)) given Gen[Int] = _.toInt given Gen[Long] = identity(_) given Gen[String] = usingRandom(rng => rng.nextString(rng.nextInt(100))) given Gen[Boolean] = usingRandom(_.nextBoolean) given[A](given GA: Gen[A]): Gen[List[A]] = usingRandom { rng => List.fill(rng.nextInt(20))(rng.nextLong).map(GA.fromSeed) } given[A](given GA: Gen[A]): Gen[NonEmptyList[A]] = usingRandom { rng => val size = rng.nextInt(20) val head = GA.fromSeed(rng.nextLong) val tail = List.fill(size - 1)(rng.nextLong).map(GA.fromSeed) NonEmptyList(head, tail) }
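// Added illustration (hypothetical, not part of the original file): Gen is a pure
// function of the seed, so equal seeds always reproduce the same value.
object GenDemo
  def main(args: Array[String]): Unit =
    println(Gen[String].fromSeed(7L) == Gen[String].fromSeed(7L)) // true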
morgen-peschke/sql2json
src/test/scala/sql2json/cat/ShowInstancesTest.scala
package sql2json package cat import org.junit.Test import org.junit.Assert._ import cat.Show.show final class ShowInterpolatorTest { @Test def interpolatorShouldCompile(): Unit = assertEquals("3", show"${1 + 2}") assertEquals("<<<[1,2,3]>>>", show"<<<${List(1,2,3)}>>>") assertEquals( """<<<Left("Hi there")-[1,2,3]-Right("Bye now")>>>""", show"<<<${Left("Hi there")}-${List(1,2,3)}-${Right("Bye now")}>>>" ) @Test def listShowShouldLookLikePythonSyntax(): Unit = assertEquals("[]", Nil.show) assertEquals("[]", List.empty[Int].show) assertEquals("[1]", List(1).show) assertEquals("[1,2]", List(1,2).show) @Test def eitherShowShouldHandleLeftAndRight(): Unit = assertEquals("Right(3L)", Right(3L).show) assertEquals("Left(7)", Left(7).show) assertEquals("Right(3L)", (Right(3L): Either[String, Long]).show) assertEquals("Left(7)", (Left(7): Either[Int, Long]).show) }
morgen-peschke/sql2json
src/main/scala/sql2json/jdbc/SqlResult.scala
package sql2json package jdbc import cat.Show import java.sql.{ResultSet,ResultSetMetaData,Types} import types.Json import types.Convertible, Convertible.given object SqlResult enum OutputType case BareArray case ArrayWithHeader(verbose: Boolean) case Object object OutputType given Show[OutputType] = _ match case Object => "object" case BareArray => "array" case ArrayWithHeader(true) => "array with fancy header" case ArrayWithHeader(false) => "array with header" opaque type Header = ResultSetMetaData opaque type Row = ResultSet given lifts: AnyRef def (rs: ResultSet) row: Row = rs def (rs: ResultSet) header: Header = rs.getMetaData given (given outputType: OutputType): Convertible[Header, Json] = meta => val columnJson: Int => Json = outputType match case OutputType.ArrayWithHeader(true) => (i: Int) => Json.obj( "label" -> meta.getColumnLabel(i).as[Json], "type" -> meta.getColumnTypeName(i).as[Json] ) case _ => meta.getColumnLabel(_: Int).as[Json] Json.Arr((1 to meta.getColumnCount).map(columnJson).toVector) given (given outputType: OutputType): Convertible[Row, Json] = rs => val meta = rs.getMetaData outputType match case OutputType.BareArray | OutputType.ArrayWithHeader(_) => Json.Arr((1 to meta.getColumnCount).toVector.map(rs.col(_, meta))) case OutputType.Object => Json.Obj((1 to meta.getColumnCount).map { i => meta.getColumnLabel(i) -> rs.col(i, meta) }.toMap) given ops: AnyRef def (rs: Row) col(index: Int, meta: ResultSetMetaData): Json = meta.getColumnType(index) match case Types.ARRAY => val array = rs.getArray(index) if rs.wasNull then Json.nil else try val arrayRs = array.getResultSet given ResultSetMetaData = arrayRs.getMetaData given OutputType = OutputType.BareArray try arrayRs.as[Row].as[Json] finally arrayRs.close() finally array.free() case Types.BIGINT => val r = rs.getLong(index) if (rs.wasNull) Json.nil else r.as[Json] case Types.BINARY | Types.VARBINARY | Types.LONGVARBINARY => val r = rs.getBytes(index) if (rs.wasNull) Json.nil else r.as[Json] case Types.BIT => val r = rs.getBoolean(index) if (rs.wasNull) Json.nil else r.as[Json] case Types.BOOLEAN => val r = rs.getBoolean(index) if (rs.wasNull) Json.nil else r.as[Json] case Types.CHAR | Types.VARCHAR | Types.LONGNVARCHAR => val r = rs.getString(index) if (rs.wasNull) Json.nil else r.as[Json] case Types.DATE => val r = rs.getDate(index) if (rs.wasNull) Json.nil else r.toString.as[Json] case Types.DECIMAL | Types.NUMERIC => val r = rs.getBigDecimal(index) if (rs.wasNull) Json.nil else BigDecimal(r).as[Json] case Types.DOUBLE | Types.FLOAT => val r = rs.getDouble(index) if (rs.wasNull) Json.nil else r.as[Json] case Types.INTEGER | Types.SMALLINT | Types.TINYINT => val r = rs.getInt(index) if (rs.wasNull) Json.nil else r.as[Json] case Types.JAVA_OBJECT | Types.OTHER | Types.STRUCT => val r = rs.getObject(index) if (rs.wasNull) Json.nil else r.toString.as[Json] case Types.NULL => Json.nil case Types.REAL => val r = rs.getFloat(index) if (rs.wasNull) Json.nil else r.as[Json] case Types.ROWID => val r = rs.getRowId(index) if (rs.wasNull) Json.nil else r.toString.as[Json] case Types.SQLXML => val xml = rs.getSQLXML(index) if (rs.wasNull) Json.nil else try xml.toString.as[Json] finally xml.free() case Types.TIME | Types.TIME_WITH_TIMEZONE => val r = rs.getTime(index) if (rs.wasNull) Json.nil else r.toString.as[Json] case Types.TIMESTAMP | Types.TIMESTAMP_WITH_TIMEZONE => val r = rs.getTimestamp(index) if (rs.wasNull) Json.nil else r.toString.as[Json] case Types.BLOB => throw new NotImplementedError(s"BLOB not supported (column 
${meta.getColumnLabel(index)})") case Types.CLOB => throw new NotImplementedError(s"CLOB not supported (column ${meta.getColumnLabel(index)})") case Types.DATALINK => throw new NotImplementedError(s"DATALINK not supported (column ${meta.getColumnLabel(index)})") case Types.DISTINCT => throw new NotImplementedError(s"DISTINCT not supported (column ${meta.getColumnLabel(index)})") case Types.NCHAR => throw new NotImplementedError(s"NCHAR not supported (column ${meta.getColumnLabel(index)})") case Types.NCLOB => throw new NotImplementedError(s"NCLOB not supported (column ${meta.getColumnLabel(index)})") case Types.NVARCHAR => throw new NotImplementedError(s"NVARCHAR not supported (column ${meta.getColumnLabel(index)})") case Types.REF => throw new NotImplementedError(s"REF not supported (column ${meta.getColumnLabel(index)})") case Types.REF_CURSOR => throw new NotImplementedError(s"REF_CURSOR not supported (column ${meta.getColumnLabel(index)})")
morgen-peschke/sql2json
build.sbt
<filename>build.sbt enablePlugins(PackPlugin) lazy val root = project .in(file(".")) .settings( name := "dotty-simple", version := "0.1.0", scalaVersion := "0.21.0-RC1", resolvers += "mvnrepository" at "http://mvnrepository.com/artifact/", libraryDependencies ++= Seq( "com.typesafe" % "config" % "1.4.0", "org.postgresql" % "postgresql" % "42.2.8", "mysql" % "mysql-connector-java" % "8.0.18", "com.novocode" % "junit-interface" % "0.11" % "test", "net.java" % "quickcheck" % "0.6" % "test" ) )
morgen-peschke/sql2json
src/test/scala/sql2json/types/DoneTest.scala
package sql2json package types import testing.laws.{EqLaws, SemigroupLaws, MonoidLaws} final class DoneEqLaws extends EqLaws[Done] final class DoneSemigroupLaws extends SemigroupLaws[Done] final class DoneMonoidLaws extends MonoidLaws[Done]
morgen-peschke/sql2json
src/test/scala/sql2json/cat/SemigroupInstancesTest.scala
<gh_stars>0 package sql2json package cat import testing.laws.SemigroupLaws final class SemigroupInstanceForIntTest extends SemigroupLaws[Int] final class SemigroupInstanceForLongTest extends SemigroupLaws[Long] final class SemigroupInstanceForListTest extends SemigroupLaws[List[Boolean]]
morgen-peschke/sql2json
src/main/scala/sql2json/cat/SemigroupK.scala
package sql2json package cat trait SemigroupK[C[_]] def combineK[A] (a: C[A], b: C[A]): C[A] def semigroup[A]: Semigroup[C[A]] = new Semigroup[C[A]] with def combine (a: C[A], b: C[A]): C[A] = combineK(a,b) trait SemigroupKProviders given[C[_]] (given M: MonoidK[C]): SemigroupK[C] = M.semigroupK object SemigroupK extends SemigroupKProviders given ops[C[_],A]: AnyRef def (a: C[A]) combineK (b: C[A])(given SK: SemigroupK[C]): C[A] = SK.combineK(a,b) given SemigroupK[List] def combineK[A] (a: List[A], b: List[A]): List[A] = a ::: b
morgen-peschke/sql2json
src/main/scala/sql2json/types/NonEmptyList.scala
package sql2json package types import cat.{Applicative, Show, Eq, SemigroupK, Functor, Monoid, Monad} import cat.Show.show import cat.Eq.given import cat.Monoid.given import cat.Semigroup.given import scala.annotation.tailrec /** * Yet another bit I grabbed from [Cats](https://typelevel.org/cats/) */ case class NonEmptyList[A](head: A, tail: List[A]) def toList: List[A] = head :: tail def fold(given M: Monoid[A]): A = toList.foldLeft(M.empty)(_ combine _) def map[B] (f: A => B): NonEmptyList[B] = NonEmptyList(f(head), tail.map(f)) def flatMap[B](fc: A => NonEmptyList[B]): NonEmptyList[B] = val processedHead = fc(head) NonEmptyList( processedHead.head, processedHead.tail ::: tail.flatMap(a => fc(a).toList) ) object NonEmptyList def one[A](head: A): NonEmptyList[A] = NonEmptyList(head, Nil) def of[A](head: A, tail0: A, tailN: A*): NonEmptyList[A] = NonEmptyList(head, tail0 :: tailN.toList) given[A] (given Show[A]): Show[NonEmptyList[A]] = _.toList.show given[A] (given Eq[A]): Eq[NonEmptyList[A]] def equiv(nelA: NonEmptyList[A], nelB: NonEmptyList[A]): Boolean = @tailrec def loop(restA: List[A], restB: List[A]): Boolean = (restA, restB) match case (Nil, Nil) => true case (_ :: _, Nil) | (Nil, _ :: _) => false case (a :: nextA, b :: nextB) => a === b && loop(nextA, nextB) nelA.head === nelB.head && loop(nelA.tail, nelB.tail) given SemigroupK[NonEmptyList] def combineK[A](a: NonEmptyList[A], b: NonEmptyList[A]): NonEmptyList[A] = NonEmptyList(a.head, a.tail ::: b.toList) given Functor[NonEmptyList] def map[A,B] (fa: NonEmptyList[A], f: A => B): NonEmptyList[B] = fa.map(f) given Applicative[NonEmptyList] def pure[A](a: A): NonEmptyList[A] = one(a) def ap[A, B](cf: NonEmptyList[A => B], ca: NonEmptyList[A]): NonEmptyList[B] = cf.flatMap(ca.map(_)) given Monad[NonEmptyList] def flatMap[A,B](ca: NonEmptyList[A], fc: A => NonEmptyList[B]): NonEmptyList[B] = ca.flatMap(fc)
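// Added illustration (hypothetical, not part of the original file): `combineK`
// concatenates without losing the non-empty guarantee, and `map` preserves shape.
object NonEmptyListDemo
  import cat.SemigroupK.given
  def main(args: Array[String]): Unit =
    val nel = NonEmptyList.of(1, 2, 3)
    println(nel.map(_ * 2).toList)                     // List(2, 4, 6)
    println((nel combineK NonEmptyList.one(4)).toList) // List(1, 2, 3, 4)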
morgen-peschke/sql2json
src/main/scala/sql2json/cat/EmptyK.scala
<reponame>morgen-peschke/sql2json package sql2json package cat trait EmptyK[C[_]] def emptyK[A]: C[A] def empty[A]: Empty[C[A]] = Empty.instance[C[A]](emptyK[A]) object EmptyK given EmptyK[List] def emptyK[A]: List[A] = Nil
morgen-peschke/sql2json
src/main/scala/sql2json/config/Config.scala
package sql2json package config import com.typesafe.config.{ConfigFactory, ConfigValue, Config => JConfig} import cat.ApplicativeError, ApplicativeError.given import cat.Show, Show.show import cat.Functor.given import cat.Applicative.{~,given} import cat.Semigroup, Semigroup.given import cat.SemigroupK, SemigroupK.given import cat.Monad.given import types.validation.Errors, Errors.given import types.validation.Accumulate, Accumulate.given import types.validation.FailFast, FailFast.given import types.Generator, Generator.Result.given import types.Convertible.given import types.ConvertibleK.given import types.Done import jdbc.{JdbcUrl, Database, Driver, Username, Password} case class DBConfig( jdbcURL: JdbcUrl, username: Username, password: Password, driver: Driver ) object DBConfig given Show[DBConfig] = d => show"DBConfig(jdbc-url: ${d.jdbcURL}, userName: ${d.username}, password: ${d.password})" def fromConfigValue[C[_]](path: String, cv: ConfigValue)(given ApplicativeError[C, Errors]): C[DBConfig] = val conf = cv.atKey("db") val validatedUrl: C[JdbcUrl] = try JdbcUrl[C](conf.getString("db.jdbc-url")) catch case ex: Exception => s"Bad config at $path.jdbc-url: ${ex.getMessage}".as[Errors].raise[C, JdbcUrl] val validatedUsername: C[Username] = try Username[C](conf.getString("db.username")) catch case ex: Exception => s"Bad config at $path.username: ${ex.getMessage}".as[Errors].raise[C, Username] val validatedPassword: C[Password] = try Password[C](conf.getString("db.password")) catch case ex: Exception => s"Bad config at $path.password: ${ex.getMessage}".as[Errors].raise[C, Password] val validatedDriver: C[Driver] = try Driver[C](conf.getString("db.driver")) catch case ex: Exception => s"Bad config at $path.driver: ${ex.getMessage}".as[Errors].raise[C, Driver] (validatedUrl |@| validatedUsername |@| validatedPassword |@| validatedDriver).map { case url ~ username ~ password ~ driver => DBConfig(url, username, password, driver) } case class Config(databases: Map[Database, DBConfig]) def forDatabase[C[_]](db: Database)(given ApplicativeError[C, Errors]): C[DBConfig] = databases.get(db).fold(show"No config for $db".as[Errors].raise[C, DBConfig])(_.pure[C]) def default[C[_]](given ApplicativeError[C, Errors]): C[DBConfig] = databases.toList match case Nil => "Empty config".as[Errors].raise[C, DBConfig] case (_, single) :: Nil => single.pure[C] case _ => "Unable to fall back to single config".as[Errors].raise[C, DBConfig] object Config given Show[Config] = _.databases .toList .sortBy(_._1) .map { case (db, cfg) => db.show -> cfg.show } .mkString("Config(", ", ", ")") def load[C[_]](given AE: ApplicativeError[C, Errors]): C[Config] = def entryToScala(entry: java.util.Map.Entry[String, ConfigValue]): Accumulate.Validated[Database ~ DBConfig] = Database[Accumulate.Validated](entry.getKey) |@| DBConfig.fromConfigValue[Accumulate.Validated](entry.getKey, entry.getValue) def initFold: Accumulate.Validated[List[(Database, DBConfig)]] = List.empty[(Database, DBConfig)].pure[Accumulate.Validated] def squash(accum: Accumulate.Validated[List[(Database, DBConfig)]], element: Accumulate.Validated[(Database, DBConfig)]): Accumulate.Validated[List[(Database, DBConfig)]] = accum.combine(element.map(_.pure[List])) val conf = ConfigFactory.load() Generator .fromStream(conf.getConfig("databases").root().entrySet().stream) .map[Accumulate.Validated[Database ~ DBConfig]](entryToScala) .toList .asKind[FailFast.Validated] .flatMap(_.foldLeft(initFold)(squash(_, _)).asKind[FailFast.Validated]) .map(_.toMap) 
.map(Config(_)) .asKind[C]
morgen-peschke/sql2json
src/main/scala/sql2json/types/validation/Accumulate.scala
<reponame>morgen-peschke/sql2json package sql2json package types package validation import cat.{Functor, Applicative, ApplicativeError} import cat.Show, Show.show import cat.Eq, Eq.given import cat.Semigroup, Semigroup.given import cat.SemigroupK.given /** * An attempt to see if `Validated` from [Cats](https://typelevel.org/cats/) could be implemented as a * zero-cost wrapper over [[Either]] * * Note: this is _not_ a [[Monad]] as it accumulates, rather than sequences, effects (in this case, errors) */ object Accumulate opaque type Validated[A] = Either[Errors, A] object Validated given[A](given Show[A]): Show[Validated[A]] = _ match case Right(a) => show"Accumulate.Valid($a)" case Left(e) => show"Accumulate.Invalid($e)" given[A](given EE: Eq[Either[Errors, A]]): Eq[Validated[A]] = EE given[A: Semigroup]: Semigroup[Validated[A]] = (_, _) match case (Right(a), Right(b)) => Right(a combine b) case (Right(_), b @ Left(_)) => b case (a @ Left(_), Right(_)) => a case (Left(a), Left(b)) => Left(a combineK b) given Functor[Validated] def map[A,B] (fa: Validated[A], f: A => B): Validated[B] = fa.map(f) given Applicative[Validated] def pure[A](a: A): Validated[A] = Right(a) def ap [A, B] (ff: Validated[A => B], fa: Validated[A]): Validated[B] = (ff, fa) match case (Right(f), Right(a)) => Right(f(a)) case (Right(_), Left(es)) => Left(es) case (Left(es), Right(_)) => Left(es) case (Left(ef), Left(eb)) => Left(ef combineK eb) given ApplicativeError[Validated, Errors] def raise[A](error: Errors): Validated[A] = Left(error) def recover[A](ca: Validated[A], f: Errors => A): Validated[A] = Right(ca.fold(f, identity)) def fold[A, B] (ca: Validated[A], fe: Errors => B, fa: A => B): B = ca.fold(fe, fa) override def toEither[A](ca: Validated[A]): Either[Errors, A] = ca override def fromEither[A](either: Either[Errors, A]): Validated[A] = either given lifts[A]: AnyRef def (a: A) valid: Validated[A] = Right(a) def[A] (reason: String) invalid: Validated[A] = Left(NonEmptyList.one(reason)) def[A] (reasons: Errors) invalid: Validated[A] = Left(reasons) def (aOpt: Option[A]) asValidated (ifMissing: String): Validated[A] = aOpt.fold(ifMissing.invalid)(_.valid) def (aEither: Either[Errors, A]) asValidated: Validated[A] = aEither object Valid def unapply[A](va: Validated[A]): Option[A] = va match case Right(a) => Some(a) case _ => None object Invalid def unapply[A](va: Validated[A]): Option[Errors] = va match case Left(errors) => Some(errors) case _ => None
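// Added illustration (hypothetical, not part of the original file): under `ap`, failures
// from both sides are kept, which is the point of the accumulating flavor.
object AccumulateDemo
  import cat.Applicative.{~, given}
  import Accumulate.{Validated, given}
  def main(args: Array[String]): Unit =
    val combined: Validated[Int ~ Int] =
      "first failure".invalid[Int] |@| "second failure".invalid[Int]
    println(combined.toString) // both messages survive, not just the first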
morgen-peschke/sql2json
src/test/scala/sql2json/cat/MonoidInstancesTest.scala
package sql2json package cat import testing.laws.MonoidLaws final class MonoidInstanceForLongTest extends MonoidLaws[Long] final class MonoidInstanceForListTest extends MonoidLaws[List[Boolean]]
morgen-peschke/sql2json
src/test/scala/sql2json/cat/ApplicativeInstancesTest.scala
package sql2json package cat import testing.laws.ApplicativeLaws final class ApplicativeListInstancesTest extends ApplicativeLaws[List, Int, String, Long]
morgen-peschke/sql2json
src/main/scala/sql2json/jdbc/Driver.scala
package sql2json package jdbc import cat.Show import cat.Applicative.given import cat.Monad.given import cat.MonadError, MonadError.given import cat.ApplicativeError, ApplicativeError.given import types.Convertible.given import types.validation.Errors, Errors.given import types.validation.FailFast, FailFast.Validated.given import java.util.Properties import java.sql.{Connection, Driver => JDriver} opaque type Driver = Class[JDriver] object Driver def apply[C[_]](raw: String)(given AE: ApplicativeError[C,Errors]): C[Driver] = raw.trim.pure[FailFast.Validated] .ensure("Driver class cannot be empty".as[Errors])(_.nonEmpty) .flatMap { trimmed => AE.catchOnly[ClassNotFoundException](Class.forName(trimmed)).as[FailFast.Validated[Class[?]]] } .flatMap { untypedClass => AE.catchOnly[ClassCastException](untypedClass.asInstanceOf[Class[JDriver]]).as[FailFast.Validated[Class[JDriver]]] } .as[C[Driver]] given Show[Driver] = _.getName def (driver: Driver) load: JDriver = driver.newInstance
morgen-peschke/sql2json
src/main/scala/sql2json/types/validation/FailFast.scala
package sql2json package types package validation import cat.{Functor, Monad} import cat.Applicative, Applicative.given import cat.ApplicativeError, ApplicativeError.given import cat.MonadError, MonadError.given import cat.Functor.given import cat.Show, Show.show import cat.Eq, Eq.given import cat.Semigroup, Semigroup.given /** * An attempt to stay zero-cost, and introduce a way to tell at the type level if the * underlying [[Either]] is fail-fast or accumulating. */ object FailFast opaque type Validated[A] = Either[Errors, A] object Validated given[A](given Show[A]): Show[Validated[A]] = _ match case Right(a) => show"FailFast.Valid($a)" case Left(e) => show"FailFast.Invalid($e)" given[A](given EE: Eq[Either[Errors, A]]): Eq[Validated[A]] = EE given[A: Semigroup]: Semigroup[Validated[A]] = (_, _) match case (Right(a), Right(b)) => Right(a combine b) case (Right(_), b @ Left(_)) => b case (a @ Left(_), _) => a given Functor[Validated] def map[A,B] (fa: Validated[A], f: A => B): Validated[B] = fa.map(f) given Applicative[Validated] def pure[A](a: A): Validated[A] = Right(a) def ap[A, B] (ff: Validated[A => B], fa: Validated[A]): Validated[B] = (ff, fa) match case (Right(f), Right(a)) => Right(f(a)) case (Right(_), Left(es)) => Left(es) case (Left(es), _ ) => Left(es) given ApplicativeError[Validated, Errors] def raise[A](error: Errors): Validated[A] = Left(error) def recover[A](ca: Validated[A], f: Errors => A): Validated[A] = Right(ca.fold(f, identity)) def fold[A, B] (ca: Validated[A], fe: Errors => B, fa: A => B): B = ca.fold(fe, fa) override def toEither[A](ca: Validated[A]): Either[Errors, A] = ca override def fromEither[A](either: Either[Errors, A]): Validated[A] = either given Monad[Validated] def flatMap[A,B] (ca: Validated[A], fc: A => Validated[B]): Validated[B] = ca match case Right(v) => fc(v) case Left(v) => Left(v) given MonadError[Validated, Errors] = MonadError.derived[Validated, Errors] given lifts[A]: AnyRef def (a: A) validFF: Validated[A] = Right(a) def[A] (reason: String) invalidFF: Validated[A] = Left(NonEmptyList.one(reason)) def[A] (reasons: Errors) invalidFF: Validated[A] = Left(reasons) def (aOpt: Option[A]) asValidatedFF (ifMissing: String): Validated[A] = aOpt.fold(ifMissing.invalidFF)(_.validFF) def (aEither: Either[Errors, A]) asValidatedFF: Validated[A] = aEither
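// Added illustration (hypothetical, not part of the original file): the fail-fast flavor
// short-circuits, so only the first failure is reported when both sides are invalid.
object FailFastDemo
  import cat.Applicative.{~, given}
  import FailFast.{Validated, given}
  def main(args: Array[String]): Unit =
    val combined: Validated[Int ~ Int] =
      "first failure".invalidFF[Int] |@| "second failure".invalidFF[Int]
    println(combined.toString) // only "first failure" survives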
morgen-peschke/sql2json
src/test/scala/sql2json/testing/laws/CofunctorLaws.scala
<filename>src/test/scala/sql2json/testing/laws/CofunctorLaws.scala package sql2json package testing package laws import cat.Cofunctor import cat.Cofunctor.given import cat.Eq import cat.Show import cat.Applicative.~ import testing.Arbitrary import testing.Arbitrary.forAll import testing.Result.given import org.junit.Test abstract class CofunctorLaws[F[_], A, B, C](given FG: CofunctorLaws.Givens[F, A, B, C]) @Test def identityLaw(): Unit = FG.run { forAll[F[A]]("identity over comap") { fa => fa <-> fa.comap(identity(_: A)) } } @Test def compositionLaw(): Unit = FG.run { forAll[F[A] ~ (B => A) ~ (C => B)]("composition over comap") { case fa ~ b2a ~ c2b => fa.comap(b2a).comap(c2b) <-> fa.comap(c2b andThen b2a) } } object CofunctorLaws class Givens[F[_], A, B, C](given Cofunctor[F], Show[F[A]], Show[F[C]], Eq[F[A]], Eq[F[C]], Eq[C], Arbitrary[F[A]], Arbitrary[B => A], Arbitrary[C => B] ) with def run(body: (given Cofunctor[F], Show[F[A]], Show[F[C]], Eq[F[A]], Eq[F[C]], Eq[C], Arbitrary[F[A]], Arbitrary[B => A], Arbitrary[C => B]) => Unit): Unit = body.apply given[F[_], A, B, C]( given Cofunctor[F], Show[F[A]], Show[F[C]], Eq[F[A]], Eq[F[C]], Eq[C], Arbitrary[F[A]], Arbitrary[B => A], Arbitrary[C => B] ): Givens[F, A, B, C]
morgen-peschke/sql2json
src/main/scala/sql2json/types/Done.scala
<filename>src/main/scala/sql2json/types/Done.scala package sql2json package types import cat.{Eq, Empty, Semigroup, Monoid, Show} /** * Dead simple placeholder to avoid auto-conversion issues with Unit in monadic code. */ sealed abstract class Done object Done extends Done def upcast: Done = this trait DoneOps def[A] (a: A) done: Done = Done given DoneOps given Show[Done] = _ => "Done" given Eq[Done] = _ == _ given Empty[Done] = Empty.instance(Done.upcast) given Semigroup[Done] = (a, _) => a given Monoid[Done] = Monoid.instance[Done]
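// Added illustration (hypothetical, not part of the original file): any value can be
// swallowed into the Done singleton, keeping cleanup lambdas explicit about discarding.
object DoneDemo
  import Done.given
  import cat.Show.show
  def main(args: Array[String]): Unit =
    println("side effect ran".done.show) // prints Done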
morgen-peschke/sql2json
src/test/scala/sql2json/types/validation/ValidatedTest.scala
<reponame>morgen-peschke/sql2json package sql2json package types package validation import Accumulate.given import FailFast.given import cat.{Eq,Show,Functor, ApplicativeError} import cat.Applicative.{~, given} import cat.ApplicativeError.given import cat.Functor.given import cat.Monad.given import cat.MonadError.given import org.junit.Test import org.junit.Assert._ import testing.laws.{ApplicativeLaws, ApplicativeErrorLaws, EqLaws, FunctorLaws, MonadLaws, MonadErrorLaws, SemigroupLaws} import testing.{Arbitrary, Gen, Cogen} import testing.Arbitrary.forAll import testing.Result.given import types.ConvertibleK.given import ValidatedTest.given final class AccumulateEqLaws extends EqLaws[Accumulate.Validated[Boolean]] final class FailFastEqLaws extends EqLaws[FailFast.Validated[Boolean]] final class AccumulateFunctorLaws extends FunctorLaws[Accumulate.Validated, Int, String, Long] final class FailFastFunctorLaws extends FunctorLaws[FailFast.Validated, Int, String, Long] final class AccumulateApplicativeLaws extends ApplicativeLaws[Accumulate.Validated, Int, String, Long] final class FailFastApplicativeLaws extends ApplicativeLaws[FailFast.Validated, Int, String, Long] final class AccumulateApplicativeErrorLaws extends ApplicativeErrorLaws[Accumulate.Validated, Errors, Int, String] final class FailFastApplicativeErrorLaws extends ApplicativeErrorLaws[FailFast.Validated, Errors, Int, String] final class FailFastMonadLaws extends MonadLaws[FailFast.Validated, Int, String] final class FailFastMonadErrorLaws extends MonadErrorLaws[FailFast.Validated, Errors, Int, String] final class AccumulateSemigroupLaws extends SemigroupLaws[Accumulate.Validated[Int]] final class FailFastSemigroupLaws extends SemigroupLaws[FailFast.Validated[Int]] final class AccumulateTest import Accumulate.Validated @Test def validConsistentWithPure(): Unit = forAll[Int]("valid consistency with pure") { value => value.valid <-> value.pure[Validated] } @Test def invalidConsistentWithRaise(): Unit = forAll[String]("invalid consistency with raise") { error => error.invalid[Int] <-> NonEmptyList.one(error).raise[Validated, Int] } @Test def testAsValidatedFromNone(): Unit = assertEquals( Option.empty[Int].asValidated("Missing").toEither, Left(NonEmptyList.one("Missing")) ) @Test def testAsValidatedFromSome(): Unit = assertEquals( Some("hi there").asValidated("Missing").toEither, Right("hi there") ) object ValidatedTest given arbAccum[A](given Arbitrary[A]): Arbitrary[Accumulate.Validated[A]] = Arbitrary.oneOf( Arbitrary[A].map(_.valid), Arbitrary[String].map(_.invalid[A]) ) given arbFFast[A](given A: Arbitrary[Accumulate.Validated[A]]): Arbitrary[FailFast.Validated[A]] = A.map(_.asKind[FailFast.Validated]) given [A](given GA: Gen[A], GS: Gen[String]): Gen[FailFast.Validated[A]] = Gen.usingRandom { rng => if (rng.nextBoolean) rng.nextString(rng.nextInt(20)).invalid[A].asKind[FailFast.Validated] else GA.fromSeed(rng.nextLong).valid.asKind[FailFast.Validated] } given [A](given CA: Cogen[A], CE: Cogen[NonEmptyList[String]]): Cogen[FailFast.Validated[A]] = _.toEither match case Left(nel) => 2L + CE.toSeed(nel) case Right(a) => 1L + CA.toSeed(a)
morgen-peschke/sql2json
src/main/scala/sql2json/cat/Monad.scala
package sql2json
package cat

trait Monad[C[_]](given val applicative: Applicative[C])
  def flatMap[A,B](ca: C[A], fc: A => C[B]): C[B]

trait MonadErrorProvidesMonad
  given[F[_]](given ME: MonadError[F, ?]): Monad[F] = ME.monad

object Monad extends MonadErrorProvidesMonad
  given ops[C[_],A]: AnyRef
    def[B] (ca: C[A]) flatMap (fc: A => C[B])(given M: Monad[C]): C[B] = M.flatMap(ca, fc)
    def[B] (ca: C[A]) >=> (fc: A => C[B])(given M: Monad[C]): C[B] = M.flatMap(ca, fc)

  given Monad[List]
    def flatMap[A,B](ca: List[A], fc: A => List[B]): List[B] = ca.flatMap(fc)
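For orientation, a hypothetical sketch (not from the repository) of the `ops` syntax with the `List` instance; note that `>=>` here is defined as a plain alias for `flatMap`, not Kleisli composition.

import sql2json.cat.Monad.given

// Each step fans out: the result is List(1, -1, 2, -2)
val fanned: List[Int] = List(1, 2) >=> (x => List(x, -x))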
morgen-peschke/sql2json
src/main/scala/sql2json/types/Generator.scala
package sql2json
package types

import cat.{Monad, Monoid, MonoidK}
import cat.Applicative, Applicative.given
import cat.ApplicativeError, ApplicativeError.given
import cat.Functor, Functor.given
import cat.Semigroup, Semigroup.given
import cat.SemigroupK, SemigroupK.given
import cat.Show, Show.show
import cat.Eq, Eq.given
import Done.given
import Generator.{Action, given}
import Generator.Action.{halt, given}
import Generator.Result.given
import validation.Errors, Errors.given

import scala.reflect.ClassTag
import scala.annotation.tailrec

/**
 * The idea for this was shamelessly ripped off of [Li Haoyi's `geny`](https://github.com/lihaoyi/geny).
 *
 * The implementation is my own, so don't judge his code by this.
 */
trait Generator[A]
  def name: String

  def foldLeft[B](initial: B, f: (B, A) => Action[B]): Generator.Result[B]

  def foreach(f: A => Action[Done]): Generator.Result[Done] =
    foldLeft(Done.upcast, (_, v) => f(v).map(_ => Done))

  def fold(given M: Monoid[A]): Generator.Result[A] =
    foldLeft(
      M.empty,
      (accum, v) => Action.Continue(accum combine v)
    )

  def foldK[C[_]: Applicative](given MK: MonoidK[C]): Generator.Result[C[A]] =
    foldLeft[C[A]](
      MK.emptyK,
      (accum, v) => Action.Continue(accum combineK v.pure)
    )

  def map[B](f: A => B): Generator[B] = new Generator.Map(f, this)

  def flatMap[B](f: A => Generator[B]): Generator[B] = new Generator.FlatMap(f, this)

  def take(count: Long): Generator[A] = new Generator.Take(count, this)

  def takeWhile(p: A => Boolean): Generator[A] = new Generator.TakeWhile(p, this)

  def takeUntil(sentinel: A)(given Show[A], Eq[A]): Generator[A] = new Generator.TakeUntil(sentinel, this)

  def dropWhile(sentinel: A)(given Show[A], Eq[A]): Generator[A] = new Generator.DropWhile(sentinel, this)

  /**
   * Materialize to a list - do not call on infinite generators
   */
  def toList: Generator.Result[List[A]] =
    foldLeft(
      new scala.collection.mutable.ListBuffer[A],
      (buffer, element) => buffer.append(element).continue
    ).map(_.toList)

object Generator
  given Show[Generator[?]] = _.name

  sealed abstract class Result[A]
    def asAction: Action[A] = this match
      case Result.Success(a) => Action.Continue(a)
      case f @ Result.Failure(_, _) => Action.Fail(f)

  object Result
    case class Success[A](value: A) extends Result[A]
    case class Failure[A](generator: Generator[?], errors: Errors) extends Result[A]
      def as[B]: Failure[B] = this.asInstanceOf[Failure[B]]

    trait ResultOps[A]
      def (a: A) success: Result[A] = Success(a)
      def (a: Generator[A]) failure (reason: String): Result[A] = Failure(a, reason.pure[NonEmptyList])

    given[A]: ResultOps[A]

    given[A: Eq]: Eq[Result[A]] = (_, _) match
      case (Success(a), Success(b)) => a === b
      case (Failure(gA, errorsA), Failure(gB, errorsB)) => gA == gB && errorsA == errorsB
      case _ => false

    given[A: Show]: Show[Result[A]] = _ match
      case Success(a) => show"Success($a)"
      case Failure(gen, errors) => show"Failure($gen, $errors)"

    given Functor[Result]
      def map [A,B] (fa: Result[A], f: A => B): Result[B] = fa match
        case Success(a) => Success(f(a))
        case failure @ Failure(_, _) => failure.as[B]

    given[C[_], A](given ApplicativeError[C, Errors]): types.Convertible[Result[A], C[A]] = _ match
      case Result.Success(a) => a.pure[C]
      case Result.Failure(generator, errors) => errors.map(e => s"$generator: $e").raise[C, A]

    given[C[_]](given ApplicativeError[C, Errors]): types.ConvertibleK[Result, C]
      def castK[A](ra: Result[A]): C[A] = ra match
        case Result.Success(a) => a.pure[C]
        case Result.Failure(generator, errors) => errors.map(e => s"$generator: $e").raise[C, A]

  sealed abstract class Action[A]
    def asResult(fallback: A): Result[A] = this match
      case Action.Continue(a) => Result.Success(a)
      case Action.Stop(a) => Result.Success(a)
      case Action.Halt() => Result.Success(fallback)
      case Action.Fail(failure) => failure

  object Action
    // Keep doing what you're doing
    case class Continue[A](result: A) extends Action[A]
    // Stop the generator (one last result)
    case class Stop[A](result: A) extends Action[A]
    // Stop the generator (no final result)
    case class Halt[A]() extends Action[A]
    // Fail the whole chain. Should be propagated out
    case class Fail[A](result: Result.Failure[A]) extends Action[A]

    given Functor[Action]
      def map [A,B] (fa: Action[A], f: A => B): Action[B] = fa match
        case Continue(a) => Continue(f(a))
        case Stop(a) => Stop(f(a))
        case Halt() => Halt()
        case Fail(failure) => Fail(failure.as[B])

    trait ActionOps[A]
      def (a: A) continue: Continue[A] = Continue(a)
      def (a: A) stop: Stop[A] = Stop(a)

    given[A]: ActionOps[A]

    def halt[A]: Action[A] = Halt()

    given[A: Show]: Show[Action[A]] = _ match
      case Continue(r) => show"Continue($r)"
      case Stop(r) => show"Stop($r)"
      case Halt() => "Halt()"
      case Fail(failure) => show"Fail($failure)"

    given[A: Eq]: Eq[Action[A]] = (_, _) match
      case (Continue(a), Continue(b)) => a === b
      case (Stop(a), Stop(b)) => a === b
      case (Halt(), Halt()) => true
      case (Fail(a), Fail(b)) => a == b
      case _ => false

  def empty[A]: Generator[A] = new Empty[A]
  def one[A: Show](value: A): Generator[A] = new One(value)
  def fromList[A: Show](values: List[A]): Generator[A] = new FromList(values)
  def fromStream[A](stream: () => java.util.stream.Stream[A]): Generator[A] = new FromStream(stream)
  def const[A: Show](value: A): Generator[A] = new Const(value)
  def continually[A](value: => A): Generator[A] = new Continually(value)
  def calculate[A: Show](seed: A, step: A => Action[A]): Generator[A] = new Calculate(seed, step)
  def from[A: Show](given N: scala.math.Numeric[A])(start: A, step: A = N.one): Generator[A] = new From(start, step)
  def ofResource[A](name: String, aquire: () => A, cleanup: A => Done): Generator[A] = new OfResource(name, aquire, cleanup)
  def unfold[A: Show,B](seed: A, expand: A => Action[B]): Generator[B] = new Unfold(seed, expand)

  given Functor[Generator]
    def map [A,B] (fa: Generator[A], f: A => B): Generator[B] = fa.map(f)

  given Applicative[Generator]
    def pure[A](a: A): Generator[A] = Generator.one(a)(given _.toString)
    def ap[A, B](cf: Generator[A => B], ca: Generator[A]): Generator[B] = cf.flatMap(ca.map(_))

  given Monad[Generator]
    def flatMap[A,B](ca: Generator[A], fc: A => Generator[B]): Generator[B] = ca.flatMap(fc)

  given SemigroupK[Generator]
    def combineK[A] (a: Generator[A], b: Generator[A]): Generator[A] = new Concat(a, b)

  class Empty[A] extends Generator[A]
    def name: String = "Generator.empty"
    def foldLeft[B](initial: B, f: (B, A) => Action[B]): Result[B] = initial.success
    override def foreach(f: A => Action[Done]): Result[Done] = Done.success
    override def fold(given M: Monoid[A]): Result[A] = M.empty.success
    override def foldK[C[_]: Applicative](given MK: MonoidK[C]): Result[C[A]] = MK.emptyK[A].success

  class One[A: Show](value: A) extends Generator[A]
    def name: String = show"Generator.one($value)"
    def foldLeft[B](initial: B, f: (B, A) => Action[B]): Result[B] = f(initial, value).asResult(initial)
    override def foreach(f: A => Action[Done]): Result[Done] = f(value).asResult(Done)
    override def fold(given Monoid[A]): Result[A] = value.success
    override def foldK[C[_]: Applicative: MonoidK]: Result[C[A]] = value.pure.success

  class FromList[A: Show](values: List[A]) extends Generator[A]
    def name: String = show"Generator.ofList($values)"
    def foldLeft[B](initial: B, f: (B, A) => Action[B]): Result[B] =
      @tailrec
      def loop(accum: B, remaining: List[A]): Result[B] =
        remaining match
          case Nil => accum.success
          case value :: rest =>
            f(accum, value) match
              case Action.Continue(result) => loop(result, rest)
              case action => action.asResult(accum)
      loop(initial, values)

  class FromStream[A](stream: () => java.util.stream.Stream[A]) extends Generator[A]
    def name: String = "Generator.fromStream(???)"

    private class CollectorState[B](var current: Action[B])

    def foldLeft[B](initial: B, f: (B, A) => Action[B]): Result[B] =
      stream()
        .collect(java.util.stream.Collector.of(
          () => CollectorState(initial.continue),
          (state, element) => state.current = state.current match {
            case Action.Continue(accum) => f(accum, element)
            case action => action
          },
          (_, _) => throw new IllegalStateException("Merge not supported"),
          _.current.asResult(initial)
        ))

  class Const[A: Show](value: A) extends Generator[A]
    def name: String = show"Generator.const($value)"
    def foldLeft[B](initial: B, f: (B, A) => Action[B]): Result[B] =
      @tailrec
      def loop(accum: B): Result[B] =
        f(accum, value) match
          case Action.Continue(result) => loop(result)
          case action => action.asResult(accum)
      loop(initial)

  class Continually[A](value: => A) extends Generator[A]
    def name: String = "Generator.continually(???)"
    def foldLeft[B](initial: B, f: (B, A) => Action[B]): Result[B] =
      @tailrec
      def loop(accum: B): Result[B] =
        f(accum, value) match
          case Action.Continue(result) => loop(result)
          case action => action.asResult(accum)
      loop(initial)

  class Calculate[A: Show](seed: A, step: A => Action[A]) extends Generator[A]
    def name: String = show"Generator.calculate(seed = $seed)"
    def foldLeft[B](initial: B, f: (B, A) => Action[B]): Result[B] =
      @tailrec
      def loop(accum: B, prev: A): Result[B] =
        step(prev) match
          case Action.Continue(next) =>
            f(accum, next) match
              case Action.Continue(result) => loop(result, next)
              case action => action.asResult(accum)
          case action => action.map(_ => accum).asResult(accum)
      f(initial, seed) match
        case Action.Continue(result) => loop(result, seed)
        case action => action.asResult(initial)

  class From[A: Show](start: A, step: A)(given N: scala.math.Numeric[A]) extends Generator[A]
    def name: String = s"Generator.from(start=$start, step=$step)"
    def foldLeft[B](initial: B, f: (B, A) => Action[B]): Result[B] =
      @tailrec
      def loop(accum: B, prev: A): Result[B] =
        val next = N.plus(prev, step)
        f(accum, next) match
          case Action.Continue(result) => loop(result, next)
          case action => action.asResult(accum)
      f(initial, start) match
        case Action.Continue(result) => loop(result, start)
        case action => action.asResult(initial)

  class OfResource[A](named: String, aquire: () => A, cleanup: A => Done) extends Generator[A]
    def name: String = s"Generator.ofResource($named)"
    def foldLeft[B](initial: B, f: (B, A) => Action[B]): Result[B] =
      val a = aquire()
      try f(initial, a).asResult(initial)
      finally cleanup(a)

  class Unfold[A: Show,B](seed: A, expand: A => Action[B]) extends Generator[B]
    def name: String = show"Generator.unfold(seed = $seed)"
    def foldLeft[C](initial: C, f: (C, B) => Action[C]): Result[C] =
      @tailrec
      def loop(accum: C): Result[C] =
        expand(seed) match
          case Action.Continue(next) =>
            f(accum, next) match
              case Action.Continue(result) => loop(result)
              case action => action.asResult(accum)
          case Action.Stop(next) => f(accum, next).asResult(accum)
          case action => action.map(_ => accum).asResult(accum)
      loop(initial)

  class Map[A,B](fab: A => B, base: Generator[A]) extends Generator[B]
    def name: String = s"${base.show}.map($fab)"
    def foldLeft[C](initial: C, fcb: (C, B) => Action[C]): Result[C] =
      base.foldLeft(initial, (accum, value) => fcb(accum, fab(value)))

  class FlatMap[A,B](fab: A => Generator[B], base: Generator[A]) extends Generator[B]
    def name: String = s"${base.show}.flatMap($fab)"
    def foldLeft[C](initial: C, fcb: (C, B) => Action[C]): Result[C] =
      base.foldLeft(
        initial,
        (outerAccum, outerValue) =>
          fab(outerValue).foldLeft(
            outerAccum,
            (innerAccum, innerValue) => fcb(innerAccum, innerValue)
          ).asAction
      )

  class Take[A](amount: Long, base: Generator[A]) extends Generator[A]
    def name: String = show"$base.take($amount)"
    def foldLeft[B](initial: B, f: (B, A) => Action[B]): Result[B] =
      base.foldLeft(
        (initial, amount),
        (accumAndCount, value) => {
          accumAndCount match
            case result @ (_, 0L) => result.stop
            case (accum, remaining) =>
              f(accum, value) match
                case Action.Continue(result) => (result, remaining - 1L).continue
                case Action.Stop(result) => (result, 0L).stop
                case Action.Halt() => (accum, 0L).stop
                case Action.Fail(failure) => Action.Fail(failure.as[(B, Long)])
        }).map(_._1)

  class TakeWhile[A](predicate: A => Boolean, base: Generator[A]) extends Generator[A]
    def name: String = s"${base.show}.while($predicate)"
    def foldLeft[B](initial: B, f: (B, A) => Action[B]): Result[B] =
      base.foldLeft(
        initial,
        (accum, value) => {
          if (predicate(value)) then f(accum, value)
          else accum.stop
        })

  class TakeUntil[A: Show: Eq](sentinel: A, base: Generator[A]) extends Generator[A]
    def name: String = show"$base.until($sentinel)"
    def foldLeft[B](initial: B, f: (B, A) => Action[B]): Result[B] =
      base.foldLeft(
        initial,
        (accum, value) => {
          if value === sentinel then halt
          else f(accum, value)
        })

  class Concat[A](first: Generator[A], second: Generator[A]) extends Generator[A]
    def name: String = show"$first.concat($second)"
    def foldLeft[B](initial: B, f: (B, A) => Action[B]): Result[B] =
      first.foldLeft(initial, f) match
        case f @ Result.Failure(_, _) => f
        case Result.Success(result) => second.foldLeft(result, f)

  class DropWhile[A: Show: Eq](sentinel: A, base: Generator[A]) extends Generator[A]
    def name: String = show"$base.dropWhile($sentinel)"
    def foldLeft[B](initial: B, f: (B, A) => Action[B]): Result[B] =
      base.foldLeft[(B, Boolean)](
        (initial, false),
        (_, _) match {
          case ((accum, false), element) if element === sentinel => (accum, false).continue
          case ((accum, _), element) => f(accum, element).map(_ -> true)
        }
      ).map(_._1)
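A hedged usage sketch (values invented for illustration): a `From` generator is unbounded, so it is bounded with `take` before materializing, and `toList` returns a `Result` because any stage of the fold may fail.

import sql2json.types.Generator
import sql2json.cat.Show

given Show[Int] = _.toString  // assumes no Show[Int] instance is already in scope

// from(1) yields 1, 2, 3, ...; take(5) stops the fold after five elements
val firstFive = Generator.from(1).take(5).toList
// firstFive == Generator.Result.Success(List(1, 2, 3, 4, 5))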
morgen-peschke/sql2json
src/test/scala/sql2json/testing/Result.scala
<reponame>morgen-peschke/sql2json<filename>src/test/scala/sql2json/testing/Result.scala<gh_stars>0
package sql2json
package testing

import org.hamcrest.Matcher
import org.hamcrest.BaseMatcher
import org.hamcrest.Description
import scala.reflect.ClassTag

import cat.Eq
import cat.Eq.given
import cat.Show
import cat.Show.show

/**
 * An attempt to abstract away some of the JUnit boilerplate.
 */
enum Result
  case Passed(description: String)
  case Failed(description: String)

object Result
  /**
   * Helper to combine two results
   */
  def both(a: Result, b: Result): Result = (a, b) match
    case (Passed(aDesc), Passed(bDesc)) => Passed(s"($aDesc) and ($bDesc)")
    case (Passed(aDesc), Failed(bDesc)) => Failed(s"($aDesc) but ($bDesc)")
    case (Failed(aDesc), Passed(bDesc)) => Failed(s"($bDesc) but ($aDesc)")
    case (Failed(aDesc), Failed(bDesc)) => Failed(s"($aDesc) and ($bDesc)")

  trait ResultOps
    /**
     * Compares two values, saving off a representation of this comparison so the [[Matcher]] implementation
     * doesn't have to duplicate stuff like type checks and calculations for both comparison and description.
     */
    def[A: Show: Eq] (a: A) <-> (b: A): Result =
      if a === b
      then Passed(s"${a.show} === ${b.show}")
      else Failed(s"${a.show} =!= ${b.show}")

    /**
     * Add a prefix to the test description.
     *
     * Note: does not add a delimiter between the output and description
     */
    def (result: Result) withClue (prefix: String): Result = result match
      case Passed(desc) => Passed(s"$prefix$desc")
      case Failed(desc) => Failed(s"$prefix$desc")

    /**
     * Alternate form of `withClue`, which is sometimes easier to use
     */
    def (prefix: String) asClue (result: Result): Result = result withClue prefix

  given syntax: ResultOps

/**
 * Part of the JUnit [[Matcher]] API that I've always found awkward is the need to
 * either duplicate the calculation of results, or have shared state between
 * [[Matcher#matches]] and [[Matcher#describeMismatch]].
 *
 * One workaround is to precompute the results and simply assert that they passed,
 * using the description they provide.
 */
object Passes extends BaseMatcher[Result]
  override def matches(obj: Any): Boolean =
    obj match
      case Result.Passed(_) => true
      case _ => false

  override def describeMismatch(obj: Any, descr: Description): Unit =
    obj match
      case Result.Passed("") => descr.appendText("passed")
      case Result.Passed(description) => descr.appendText("passed: ").appendText(description)
      case Result.Failed("") => descr.appendText("failed")
      case Result.Failed(msg) => descr.appendText("failed: ").appendText(msg)
      case _ => descr.appendText("[not a Test: ").appendValue(obj).appendText("]")

  override def describeTo(description: Description): Unit =
    description.appendText("test should pass")
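A hypothetical JUnit usage sketch (assuming the project's `Eq[Int]` and `Show[Int]` instances are in scope): `<->` produces a self-describing `Result`, and `Passes` just asserts that it passed.

import org.junit.Test
import org.junit.Assert.assertThat
import sql2json.testing.{Passes, Result}
import sql2json.testing.Result.given

final class CommutativityTest
  @Test def additionCommutes(): Unit =
    // the failure message is carried by the Result itself, not the matcher
    assertThat("addition should commute: " asClue (1 + 2 <-> 2 + 1), Passes)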
morgen-peschke/sql2json
src/test/scala/sql2json/cat/MonadInstancesTest.scala
package sql2json
package cat

import testing.laws.MonadLaws

final class MonadListInstanceTest extends MonadLaws[List, Int, String]
morgen-peschke/sql2json
src/main/scala/sql2json/types/validation/Errors.scala
<reponame>morgen-peschke/sql2json
package sql2json
package types
package validation

/**
 * We'll use [[NonEmptyList]] to aggregate errors. It's pulled
 * out into its own file for visibility, as it'll be used by
 * both [[Validated]] and [[FailFastValidated]].
 */
type Errors = NonEmptyList[String]

object Errors
  given Convertible[String, Errors] = NonEmptyList.one(_)

  given[T <: Throwable]: Convertible[T, Errors] = t =>
    NonEmptyList(
      t.getMessage,
      (Option(t.getCause).toList ::: t.getSuppressed.toList).map(_.getMessage)
    )
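A small, hypothetical sketch of the `Throwable` conversion above. It assumes `Convertible` exposes a `cast` method, by analogy with `ConvertibleK.castK`; the exception values are invented.

import sql2json.types.Convertible
import sql2json.types.validation.Errors, Errors.given

val boom = new RuntimeException("outer", new IllegalStateException("inner"))
// the cause's message (and any suppressed messages) land in the tail of the NonEmptyList
val errors: Errors = summon[Convertible[RuntimeException, Errors]].cast(boom)
// errors == NonEmptyList("outer", List("inner"))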
vamsiampolu/aws-scala-start
src/main/scala/com/lendi/hellolambda/Main.scala
package com.lendi.hellolambda

import com.amazonaws.services.lambda.runtime.Context
import scala.concurrent.Future
import io.circe.generic.auto._
import io.github.yeghishe.lambda._

// handler io.github.yeghishe.MySimpleHandler::handler
// input "foo"
object MySimpleHandler extends App {
  def handler(name: String, context: Context): String = s"Hello $name"
}

case class Name(name: String)
case class Greeting(message: String)

// handler io.github.yeghishe.MyHandler
// input {"name": "Yeghishe"}
class MyHandler extends Handler[Name, Greeting] {
  def handler(name: Name, context: Context): Greeting = {
    logger.info(s"Name is $name")
    Greeting(s"Hello ${name.name}")
  }
}
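For a quick local smoke test (not part of the repository), the handler can be exercised directly; `MyHandler` never touches the `Context`, so passing `null` is tolerable in a sketch like this.

object LocalInvoke extends App {
  // invokes the handler outside of Lambda, bypassing JSON (de)serialization
  val greeting: Greeting = new MyHandler().handler(Name("World"), null)
  println(greeting.message) // prints: Hello World
}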
vamsiampolu/aws-scala-start
build.sbt
<reponame>vamsiampolu/aws-scala-start
name := "helloLambda"

organization := "com.lendi"

version := "0.0.1"

scalaVersion := "2.12.3"

scalacOptions := Seq("-unchecked", "-feature", "-deprecation", "-encoding", "utf8")

resolvers += Resolver.jcenterRepo

libraryDependencies ++= {
  val ficusV = "1.4.2"
  val scalaMockV = "3.6.0"
  val lambdaUtilsV = "0.0.3"
  val lambdaEventsV = "2.0.1"
  Seq(
    "io.github.yeghishe" %% "scala-aws-lambda-utils" % lambdaUtilsV,
    "com.iheart" %% "ficus" % ficusV,
    "com.amazonaws" % "aws-lambda-java-events" % lambdaEventsV,
    "org.scalamock" %% "scalamock-scalatest-support" % scalaMockV % "it,test"
  )
}

lazy val root = project.in(file(".")).configs(IntegrationTest)

Defaults.itSettings

coverageEnabled := true

initialCommands := """
  import io.github.yeghishe._
  import io.github.yeghishe.lambda._
  import scala.concurrent._
  import scala.concurrent.duration._
""".stripMargin

assemblyJarName in assembly := s"${name.value}.jar"

assemblyMergeStrategy in assembly := {
  case PathList("META-INF", xs @ _ *) => MergeStrategy.discard
  case _ => MergeStrategy.first
}

enablePlugins(S3Plugin)

mappings in s3Upload := Seq((file(s"target/scala-2.12/${name.value}.jar"), s"${name.value}.jar"))

s3Host in s3Upload := "lambdatest.s3.amazonaws.com"

s3Progress in s3Upload := true

//s3Upload <<= s3Upload dependsOn assembly
//s3Upload := {
//  assembly.value
//}
vamsiampolu/aws-scala-start
src/test/scala/com/lendi/hellolambda/HelloLambdaSpec.scala
package com.lendi.hellolambda

import org.scalatest._

class HelloLambdaSpec extends FlatSpec with Matchers {
  behavior of "MyHandler::handler"

  it should "return a Greeting" in {
    val input = Name("Test")
    val expected = "Hello Test"
    // the handler ignores its Context argument, so null is safe in a unit test
    val greeting = new MyHandler().handler(input, null)
    greeting.message shouldBe expected
  }
}
vamsiampolu/aws-scala-start
src/main/scala/com/lendi/hellolambda/Config.scala
<reponame>vamsiampolu/aws-scala-start
package com.lendi.hellolambda

import com.typesafe.config.ConfigFactory
import net.ceedubs.ficus.Ficus
import net.ceedubs.ficus.readers.ArbitraryTypeReader
import net.ceedubs.ficus.readers.namemappers.implicits

trait Config {
  import Ficus._
  import ArbitraryTypeReader._
  import implicits.hyphenCase

  private val config = ConfigFactory.load()
}

object Config extends Config
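As written, `config` is private, so extenders cannot read from it; a hedged sketch of the intended Ficus usage, with an invented `greeting-message` key and loading directly rather than through the trait:

import com.typesafe.config.ConfigFactory
import net.ceedubs.ficus.Ficus._

val conf = ConfigFactory.load()
// assumes application.conf defines: greeting-message = "hello"
val greeting: String = conf.as[String]("greeting-message")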