Dataset schema — each record below lists its fields in this order:

  code        string (5 to 1M characters)   the source file contents
  repo_name   string (5 to 109 characters)
  path        string (6 to 208 characters)
  language    string (1 distinct value)
  license     string (15 distinct values)
  size        int64  (5 to 1M)
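Each record below pairs one Scala source file with its repository metadata. A minimal sketch of loading such a dump for analysis with Spark, assuming it has been exported as JSON Lines; the file name "code-dataset.jsonl", the export format, and the analysis step are assumptions made for illustration, not part of the dataset description:

import org.apache.spark.sql.SparkSession

// One case class field per column in the schema above.
case class CodeRecord(code: String, repo_name: String, path: String,
                      language: String, license: String, size: Long)

object LoadCodeDataset {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder().appName("load-code-dataset").getOrCreate()
    import spark.implicits._

    // Assumed export: one JSON object per line with the six columns above.
    val records = spark.read.json("code-dataset.jsonl").as[CodeRecord]

    // Example analysis: how many files carry each license.
    records.groupBy("license").count().show()

    spark.stop()
  }
}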
/*
 * Copyright 2015 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package phase

import akka.actor._

import scala.util.control.NonFatal

private[phase] class ProgressActor[T](service: BackgroundService[T]) extends Actor {
  private val serviceActor = context.actorOf(StartingServiceActor.props(service), "background_service")

  private var state = ProgressState()

  def receive = {
    case start: StartService[_] =>
      updateStateHandlingErrors(state.start())
      serviceActor ! start
    case Init(totalPhases) =>
      updateStateHandlingErrors(state.init(totalPhases))
    case BeginPhase(details) =>
      updateStateHandlingErrors(state.beginPhase(details))
    case EndPhase(details) =>
      updateStateHandlingErrors(state.endPhase(details))
    case Finish(result) =>
      updateStateHandlingErrors(state.finish(result))
    case Reset =>
      state = ProgressState()
    case GetStatus =>
      sender ! state.status
  }

  private def updateStateHandlingErrors(changeState: => ProgressState) {
    state = try {
      changeState
    } catch {
      case NonFatal(ex) => state.fail(ex)
    }
  }

  override def supervisorStrategy = {
    OneForOneStrategy() {
      case NonFatal(ex) =>
        state = state.fail(ex)
        SupervisorStrategy.defaultDecider(ex)
    }
  }
}

object ProgressActor {
  def props[T](service: BackgroundService[T]): Props = Props(new ProgressActor(service))
}

private[phase] class StartingServiceActor[T](service: BackgroundService[T]) extends Actor {
  def receive = {
    case StartService(request) => service.start(request.asInstanceOf[T], sender())
  }
}

private[phase] object StartingServiceActor {
  def props[T](service: BackgroundService[T]): Props = Props(new StartingServiceActor(service))
}

private[phase] case class StartService[T](request: T)
private[phase] case class Init(totalPhases: Seq[PhaseDetails])
private[phase] case class BeginPhase(details: PhaseDetails)
private[phase] case class EndPhase(details: PhaseDetails)
private[phase] case class Finish(result: Any)
case object Reset
case object GetStatus

trait BackgroundService[T] {
  def start(request: T, progressActor: ActorRef): Unit
}
arkadius/scala-phases-chain
src/main/scala/phase/ProgressActor.scala
Scala
apache-2.0
2,726
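The case classes at the end of the file above define ProgressActor's message protocol. As a brief illustration of how a caller might drive it — sketched as if it lived inside the same `phase` package; the `myService` value (some BackgroundService[String] implementation) and the request string are assumptions, not part of the original source:

// Hypothetical driver; `myService: BackgroundService[String]` is assumed to exist.
import akka.actor.ActorSystem
import akka.pattern.ask
import akka.util.Timeout
import scala.concurrent.duration._

val system = ActorSystem("phases")
val progress = system.actorOf(ProgressActor.props(myService), "progress")

progress ! StartService("some request")   // recorded in ProgressState, then forwarded to the service

implicit val timeout: Timeout = Timeout(5.seconds)
val status = progress ? GetStatus         // the actor replies with state.status
progress ! Reset                          // discards the accumulated state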
package mesosphere.marathon.tasks import mesosphere.marathon.core.base.ConstantClock import mesosphere.marathon.core.launcher.impl.TaskOpFactoryImpl import mesosphere.marathon.core.launcher.{ TaskOp, TaskOpFactory } import mesosphere.marathon.core.task.{ Task, TaskStateOp } import mesosphere.marathon.core.task.Task.LocalVolumeId import mesosphere.marathon.core.task.state.MarathonTaskStatus import mesosphere.marathon.core.task.tracker.TaskTracker import mesosphere.marathon.state.{ AppDefinition, PathId } import mesosphere.marathon.test.{ MarathonSpec, MarathonTestHelper, Mockito } import mesosphere.marathon.MarathonConf import mesosphere.mesos.protos.Implicits.slaveIDToProto import mesosphere.mesos.protos.SlaveID import org.scalatest.{ GivenWhenThen, Matchers } import scala.collection.immutable.Seq class TaskOpFactoryImplTest extends MarathonSpec with GivenWhenThen with Mockito with Matchers { test("Copy SlaveID from Offer to Task") { val f = new Fixture val offer = MarathonTestHelper.makeBasicOffer() .setHostname("some_host") .setSlaveId(SlaveID("some slave ID")) .build() val app: AppDefinition = AppDefinition(portDefinitions = List()) val runningTasks: Set[Task] = Set( MarathonTestHelper.mininimalTask("some task ID") ) val request = TaskOpFactory.Request(app, offer, runningTasks, additionalLaunches = 1) val inferredTaskOp = f.taskOpFactory.buildTaskOp(request) val expectedTask = Task.LaunchedEphemeral( taskId = inferredTaskOp.fold(Task.Id("failure"))(_.taskId), agentInfo = Task.AgentInfo( host = "some_host", agentId = Some(offer.getSlaveId.getValue), attributes = List.empty ), runSpecVersion = app.version, status = Task.Status( stagedAt = f.clock.now(), taskStatus = MarathonTaskStatus.Created ), hostPorts = Seq.empty ) assert(inferredTaskOp.isDefined, "task op is not empty") assert(inferredTaskOp.get.stateOp == TaskStateOp.LaunchEphemeral(expectedTask)) } test("Normal app -> None (insufficient offer)") { Given("A normal app, an insufficient offer and no tasks") val f = new Fixture val app = f.normalApp val offer = f.insufficientOffer val runningTasks = Nil When("We infer the taskOp") val request = TaskOpFactory.Request(app, offer, runningTasks, additionalLaunches = 1) val taskOp = f.taskOpFactory.buildTaskOp(request) Then("None is returned because there are already 2 launched tasks") taskOp shouldBe empty } test("Normal app -> Launch") { Given("A normal app, a normal offer and no tasks") val f = new Fixture val app = f.normalApp val offer = f.offer val runningTasks = Nil When("We infer the taskOp") val request = TaskOpFactory.Request(app, offer, runningTasks, additionalLaunches = 1) val taskOp = f.taskOpFactory.buildTaskOp(request) Then("A Launch is inferred") taskOp.value shouldBe a[TaskOp.Launch] } test("Resident app -> None (insufficient offer)") { Given("A resident app, an insufficient offer and no tasks") val f = new Fixture val app = f.residentApp val offer = f.insufficientOffer val runningTasks = Nil When("We infer the taskOp") val request = TaskOpFactory.Request(app, offer, runningTasks, additionalLaunches = 1) val taskOp = f.taskOpFactory.buildTaskOp(request) Then("None is returned") taskOp shouldBe empty } test("Resident app -> ReserveAndCreateVolumes fails because of insufficient disk resources") { Given("A resident app, an insufficient offer and no tasks") val f = new Fixture val app = f.residentApp val offer = f.offer val runningTasks = Nil When("We infer the taskOp") val request = TaskOpFactory.Request(app, offer, runningTasks, additionalLaunches = 1) val taskOp = 
f.taskOpFactory.buildTaskOp(request) Then("A no is returned because there is not enough disk space") taskOp shouldBe None } test("Resident app -> ReserveAndCreateVolumes succeeds") { Given("A resident app, a normal offer and no tasks") val f = new Fixture val app = f.residentApp val offer = f.offerWithSpaceForLocalVolume val runningTasks = Nil When("We infer the taskOp") val request = TaskOpFactory.Request(app, offer, runningTasks, additionalLaunches = 1) val taskOp = f.taskOpFactory.buildTaskOp(request) Then("A ReserveAndCreateVolumes is returned") taskOp.value shouldBe a[TaskOp.ReserveAndCreateVolumes] } test("Resident app -> Launch succeeds") { Given("A resident app, an offer with persistent volumes and a matching task") val f = new Fixture val app = f.residentApp.copy(instances = 2) val localVolumeIdLaunched = LocalVolumeId(app.id, "persistent-volume-launched", "uuidLaunched") val localVolumeIdUnwanted = LocalVolumeId(app.id, "persistent-volume-unwanted", "uuidUnwanted") val localVolumeIdMatch = LocalVolumeId(app.id, "persistent-volume", "uuidMatch") val reservedTask = f.residentReservedTask(app.id, localVolumeIdMatch) val offer = f.offerWithVolumes( reservedTask.taskId.idString, localVolumeIdLaunched, localVolumeIdUnwanted, localVolumeIdMatch ) val runningTasks = Seq( f.residentLaunchedTask(app.id, localVolumeIdLaunched), reservedTask) When("We infer the taskOp") val request = TaskOpFactory.Request(app, offer, runningTasks, additionalLaunches = 1) val taskOp = f.taskOpFactory.buildTaskOp(request) Then("A Launch is returned") taskOp.value shouldBe a[TaskOp.Launch] And("the taskInfo contains the correct persistent volume") import scala.collection.JavaConverters._ val taskInfoResources = taskOp.get.offerOperations.head.getLaunch.getTaskInfos(0).getResourcesList.asScala val found = taskInfoResources.find { resource => resource.hasDisk && resource.getDisk.hasPersistence && resource.getDisk.getPersistence.getId == localVolumeIdMatch.idString } found should not be empty } test("Resident app -> None (enough launched tasks)") { Given("A resident app, a matching offer with persistent volumes but already enough launched tasks") val f = new Fixture val app = f.residentApp val usedVolumeId = LocalVolumeId(app.id, "unwanted-persistent-volume", "uuid1") val offeredVolumeId = LocalVolumeId(app.id, "unwanted-persistent-volume", "uuid2") val runningTasks = Seq(f.residentLaunchedTask(app.id, usedVolumeId)) val offer = f.offerWithVolumes(runningTasks.head.taskId.idString, offeredVolumeId) When("We infer the taskOp") val request = TaskOpFactory.Request(app, offer, runningTasks, additionalLaunches = 1) val taskOp = f.taskOpFactory.buildTaskOp(request) Then("A None is returned because there is already a launched Task") taskOp shouldBe empty } class Fixture { import mesosphere.marathon.test.{ MarathonTestHelper => MTH } val taskTracker = mock[TaskTracker] val config: MarathonConf = MTH.defaultConfig(mesosRole = Some("test")) val clock = ConstantClock() val taskOpFactory: TaskOpFactory = new TaskOpFactoryImpl(config, clock) def normalApp = MTH.makeBasicApp() def residentApp = MTH.appWithPersistentVolume() def normalLaunchedTask(appId: PathId) = MTH.mininimalTask(appId.toString) def residentReservedTask(appId: PathId, volumeIds: LocalVolumeId*) = MTH.residentReservedTask(appId, volumeIds: _*) def residentLaunchedTask(appId: PathId, volumeIds: LocalVolumeId*) = MTH.residentLaunchedTask(appId, volumeIds: _*) def offer = MTH.makeBasicOffer().build() def offerWithSpaceForLocalVolume = MTH.makeBasicOffer(disk = 
1025).build() def insufficientOffer = MTH.makeBasicOffer(cpus = 0.01, mem = 1, disk = 0.01, beginPort = 31000, endPort = 31001).build() def offerWithVolumes(taskId: String, localVolumeIds: LocalVolumeId*) = MTH.offerWithVolumes(taskId, localVolumeIds: _*) } }
timcharper/marathon
src/test/scala/mesosphere/marathon/tasks/TaskOpFactoryImplTest.scala
Scala
apache-2.0
8,000
package com.mycompany.scalcium.wordnet import java.io.File import org.junit.Assert import org.junit.Test import net.didion.jwnl.data.POS class WordnetTest { val wn = new Wordnet(new File("src/main/resources/wnconfig.xml")) /** * >>> from nltk.corpus import wordnet as wn * >>> wn.synsets('motorcar') * [Synset('car.n.01')] */ @Test def testSynsets(): Unit = { Console.println(">>> wn.synsets('motorcar')") val motorcar = wn.synsets("motorcar") Console.println(motorcar.map(m => wn.format(m))) Assert.assertNotNull(motorcar) } /** * >>> wn.synset('car.n.01').lemma_names * ['car', 'auto', 'automobile', 'machine', 'motorcar'] */ @Test def testSynsetLemmaNames(): Unit = { Console.println(">>> wn.synset('car.n.01').lemma_names") val lms = wn.lemmaNames(wn.synset("car", POS.NOUN, 1)) Console.println(lms) Assert.assertEquals(5, lms.size) } /** * >>> wn.synset('car.n.01').definition * 'a motor vehicle with four wheels; usually propelled by \\ * an internal combustion engine' */ @Test def testSynsetDefinition(): Unit = { Console.println(">>> wn.synset('car.n.01').definition") val sd = wn.definition(wn.synset("car", POS.NOUN, 1)) Console.println(sd) Assert.assertTrue(sd.contains(";")) } /** * >>> wn.synset('car.n.01').examples * ['he needs a car to get to work'] */ @Test def testSynsetExamples(): Unit = { Console.println(">>> wn.synset('car.n.01').examples") val se = wn.examples(wn.synset("car", POS.NOUN, 1)) Console.println(se) Assert.assertEquals(1, se.size) } /** * >>> wn.synset('car.n.01').lemmas * [Lemma('car.n.01.car'), Lemma('car.n.01.auto'), \\ * Lemma('car.n.01.automobile'),\\ * Lemma('car.n.01.machine'), Lemma('car.n.01.motorcar')] * >>> wn.synset('car.n.01').lemmas[1] * Lemma('car.n.01.auto') */ @Test def testSynsetLemmas(): Unit = { Console.println(">>> wn.synset('car.n.01').lemmas") val sl = wn.lemmas(wn.synset("car", POS.NOUN, 1)) Console.println(sl.map(l => wn.format(l))) Assert.assertEquals(5, sl.size) Assert.assertTrue(sl(1).getLemma().equals("auto")) } /** * >>> wn.lemma('car.n.01.automobile').name * 'automobile' */ @Test def testSynsetLemma(): Unit = { Console.println(">>> wn.lemma('car.n.01.automobile').name") val sl = wn.lemma(wn.synset("car", POS.NOUN, 1), 2) sl match { case Some(x) => { Console.println(x.getLemma()) Assert.assertTrue("automobile".equals(x.getLemma())) } case None => Assert.fail() } } /** * >>> for synset in wn.synsets('car'): * ... print synset.lemma_names * ... * ['car', 'auto', 'automobile', 'machine', 'motorcar'] * ['car', 'railcar', 'railway_car', 'railroad_car'] * ['car', 'gondola'] * ['car', 'elevator_car'] * ['cable_car', 'car'] */ @Test def testSynsetsAndLemmaNames(): Unit = { Console.println(">>> for synset in wn.synsets('car'):") Console.println("... print synset.lemma_names") Console.println("...") val lns = wn.synsets("car") .map(ss => wn.lemmaNames(Some(ss))) lns.foreach(ln => Console.println("[" + ln.mkString(", ") + "]")) Assert.assertEquals(5, lns.size) Assert.assertEquals(5, lns(0).size) } /** * >>> wn.lemmas('car') * [Lemma('car.n.01.car'), Lemma('car.n.02.car'), \\ * Lemma('car.n.03.car'), Lemma('car.n.04.car'), \\ * Lemma('cable_car.n.01.car')] * :NOTE: in NLTK, the third field in Lemma indicates * the (unique) sequence number of the synset from which * the lemma is derived. For example, Lemma('car.n.01.car') * comes from the first synset with word(0) == "car". * JWNL does not capture the information, the index * here means the sequence number of the lemma inside * the synset. 
*/ @Test def testLemmas(): Unit = { Console.println(">>> wn.lemmas('car')") val ls = wn.lemmas("car") Console.println(ls.map(l => wn.format(l))) } /** * >>> motorcar = wn.synset('car.n.01') * >>> types_of_motorcar = motorcar.hyponyms() * >>> types_of_motorcar[26] * Synset('ambulance.n.01') * :NOTE: NLTK's wordnet returns hyponyms in a different * order than JWNL but both return the same number of * synsets. Test is modified accordingly. */ @Test def testHyponyms(): Unit = { Console.println(">>> motorcar = wn.synset('car.n.01')") Console.println(">>> types_of_motorcar = motorcar.hyponyms()") val motorcar = wn.synset("car", POS.NOUN, 1) val typesOfMotorcar = wn.hyponyms(motorcar) Console.println(">>> types_of_motorcar") Console.println(typesOfMotorcar.map(ss => wn.format(ss))) Assert.assertEquals(31, typesOfMotorcar.size) Console.println(">>> types_of_motorcar[0]") val ambulance = typesOfMotorcar(0) Console.println(wn.format(ambulance)) Assert.assertEquals("ambulance.n.01", wn.format(ambulance)) Console.println(">>> sorted([lemma.name for synset\\n" + "... in types_of_motorcar for lemma in synset.lemmas])") val sortedMotorcarNames = typesOfMotorcar .map(ss => wn.lemmaNames(Some(ss))(0)) .sortWith((a,b) => a < b) Console.println(sortedMotorcarNames) Assert.assertEquals("Model_T", sortedMotorcarNames(0)) } /** * >>> motorcar.hypernyms() * [Synset('motor_vehicle.n.01')] */ @Test def testHypernyms(): Unit = { Console.println(">> motorcar.hypernyms") val motorcar = wn.synset("car", POS.NOUN, 1) val parents = wn.hypernyms(motorcar) Console.println(parents.map(p => wn.format(p))) Assert.assertEquals(1, parents.size) Assert.assertEquals("motor_vehicle.n.01", wn.format(parents(0))) } /** * >>> paths = motorcar.hypernym_paths() * >>> len(paths) * 2 * >>> [synset.name for synset in paths[0]] * ['entity.n.01', 'physical_entity.n.01', 'object.n.01', * 'whole.n.02', 'artifact.n.01', 'instrumentality.n.03', * 'container.n.01', 'wheeled_vehicle.n.01', * 'self-propelled_vehicle.n.01', 'motor_vehicle.n.01', * 'car.n.01'] * >>> [synset.name for synset in paths[1]] * ['entity.n.01', 'physical_entity.n.01', 'object.n.01', * 'whole.n.02', 'artifact.n.01', 'instrumentality.n.03', * 'conveyance.n.03', 'vehicle.n.01', * 'wheeled_vehicle.n.01', 'self-propelled_vehicle.n.01', * 'motor_vehicle.n.01', 'car.n.01'] * >>> motorcar.root_hypernyms() * [Synset('entity.n.01')] */ @Test def testHypernymPaths(): Unit = { Console.println(">>> paths = motorcar.hypernym_paths()") Console.println(">>> len(paths)") val motorcar = wn.synset("car", POS.NOUN, 1) val paths = wn.hypernymPaths(motorcar) Console.println(paths.size) Assert.assertEquals(2, paths.size) Console.println(">>> [synset.name for synset in paths[0]]") val paths0 = paths(0).map(ss => wn.format(ss)) Console.println(paths0) Console.println(">>> [synset.name for synset in paths[1]]") val paths1 = paths(1).map(ss => wn.format(ss)) Console.println(paths1) Console.println(">>> motorcar.root_hypernyms()") val rhns = wn.rootHypernyms(motorcar) .map(rhn => wn.format(rhn)) Console.println(rhns) } /** * >>> wn.synset('tree.n.01').part_meronyms() * [Synset('burl.n.02'), Synset('crown.n.07'), * Synset('stump.n.01'), Synset('trunk.n.01'), * Synset('limb.n.02')] * >>> wn.synset('tree.n.01').substance_meronyms() * [Synset('heartwood.n.01'), Synset('sapwood.n.01')] * >>> wn.synset('tree.n.01').member_holonyms() * [Synset('forest.n.01')] */ @Test def testMiscRelationMethods(): Unit = { Console.println(">>> wn.synset('tree.n.01').part_meronyms()") val tree = wn.synset("tree", POS.NOUN, 
1) val pn = wn.partMeronyms(tree) Console.println(pn.map(ss => wn.format(ss))) Assert.assertEquals(5, pn.size) Console.println(">>> wn.synset('tree.n.01').substance_meronyms()") val sn = wn.substanceMeronyms(tree) Assert.assertEquals(2, sn.size) Console.println(sn.map(ss => wn.format(ss))) Console.println(">>> wn.synset('tree.n.01').member_holonyms()") val mn = wn.memberHolonyms(tree) Assert.assertEquals(1, mn.size) Console.println(mn.map(ss => wn.format(ss))) } /** * >>> for synset in wn.synsets('mint', wn.NOUN): * ... print synset.name + ':', synset.definition * ... * batch.n.02: (often followed by `of') a large number or amount or extent * mint.n.02: any north temperate plant of the genus Mentha with aromatic leaves and * small mauve flowers * mint.n.03: any member of the mint family of plants * mint.n.04: the leaves of a mint plant used fresh or candied * mint.n.05: a candy that is flavored with a mint oil * mint.n.06: a plant where money is coined by authority of the government * >>> wn.synset('mint.n.04').part_holonyms() * [Synset('mint.n.02')] * >>> [x.definition for x * ... in wn.synset('mint.n.04').part_holonyms()] * ['any north temperate plant of the genus Mentha with * aromatic leaves and small mauve flowers'] * >>> wn.synset('mint.n.04').substance_holonyms() * [Synset('mint.n.05')] * >>> [x.definition for x * ... in wn.synset('mint.n.04').substance_holonyms()] * ['a candy that is flavored with a mint oil'] */ @Test def testListSynsetNameDefinition(): Unit = { val mintss = wn.synsets("mint", POS.NOUN) Assert.assertEquals(6, mintss.size) Console.println(">>> for synset in wn.synsets('mint', wn.NOUN):") Console.println("... print synset.name + ':', synset.definition") Console.println("...") mintss.foreach(ss => Console.println(wn.format(ss) + ": " + wn.definition(Some(ss)))) Console.println(">>> wn.synset('mint.n.04').part_holonyms()") val mint = wn.synset("mint", POS.NOUN, 4) val ph = wn.partHolonyms(mint) Console.println(ph.map(ss => wn.format(ss))) Console.println(">>> [x.definition for x") Console.println("... in wn.synset('mint.n.04').part_holonyms()]") Console.println(ph.map(ss => wn.definition(Some(ss)))) Console.println(">>> wn.synset('mint.n.04').substance_holonyms()") val sh = wn.substanceHolonyms(mint) Console.println(sh.map(ss => wn.format(ss))) Console.println(">>> [x.definition for x") Console.println("... 
in wn.synset('mint.n.04').substance_holonyms()]") Console.println(sh.map(ss => wn.definition(Some(ss)))) } /** * >>> wn.synset('walk.v.01').entailments() * [Synset('step.v.01')] * >>> wn.synset('eat.v.01').entailments() * [Synset('swallow.v.01'), Synset('chew.v.01')] * >>> wn.synset('tease.v.03').entailments() * [Synset('arouse.v.07'), Synset('disappoint.v.01')] */ @Test def testVerbRelationships(): Unit = { Console.println(">>> wn.synset('walk.v.01').entailments()") val walk = wn.synset("walk", POS.VERB, 1) val walkEnt = wn.entailments(walk) Console.println(walkEnt.map(ss => wn.format(ss))) Assert.assertEquals(1, walkEnt.size) Console.println(">>> wn.synset('eat.v.01').entailments()") val eat = wn.synset("eat", POS.VERB, 1) val eatEnt = wn.entailments(eat) Console.println(eatEnt.map(ss => wn.format(ss))) Assert.assertEquals(2, eatEnt.size) Console.println(">>> wn.synset('tease.v.03').entailments()") val tease = wn.synset("tease", POS.VERB, 3) val teaseEnt = wn.entailments(tease) Console.println(teaseEnt.map(ss => wn.format(ss))) Assert.assertEquals(2, teaseEnt.size) } /** * >>> wn.lemma('supply.n.02.supply').antonyms() * [Lemma('demand.n.02.demand')] * >>> wn.lemma('rush.v.01.rush').antonyms() * [Lemma('linger.v.04.linger')] * >>> wn.lemma('horizontal.a.01.horizontal').antonyms() * [Lemma('vertical.a.01.vertical'), * Lemma('inclined.a.02.inclined')] * >>> wn.lemma('staccato.r.01.staccato').antonyms() * [Lemma('legato.r.01.legato')] */ @Test def testLemmaAntonyms(): Unit = { Console.println(">>> wn.lemma('supply.n.02.supply').antonyms()") val supply = wn.lemma(wn.synset("supply", POS.NOUN, 2), "supply") val supplyAntonyms = wn.antonyms(supply) Console.println(supplyAntonyms.map(w => wn.format(w))) Assert.assertEquals(1, supplyAntonyms.size) Console.println(">>> wn.lemma('rush.v.01.rush').antonyms()") val rush = wn.lemma(wn.synset("rush", POS.VERB, 1), "rush") val rushAntonyms = wn.antonyms(rush) Console.println(rushAntonyms.map(w => wn.format(w))) Assert.assertEquals(1, rushAntonyms.size) Console.println(">>> wn.lemma('horizontal.a.01.horizontal').antonyms()") val horizontal = wn.lemma(wn.synset("horizontal", POS.ADJECTIVE, 1), "horizontal") val horizontalAntonyms = wn.antonyms(horizontal) Console.println(horizontalAntonyms.map(w => wn.format(w))) Assert.assertEquals(2, horizontalAntonyms.size) Console.println(">>> wn.lemma('staccato.r.01.staccato').antonyms()") val staccato = wn.lemma(wn.synset("staccato", POS.ADVERB, 1), "staccato") val staccatoAntonyms = wn.antonyms(staccato) Console.println(staccatoAntonyms.map(w => wn.format(w))) Assert.assertEquals(1, staccatoAntonyms.size) } /** * >>> right = wn.synset('right_whale.n.01') * >>> orca = wn.synset('orca.n.01') * >>> minke = wn.synset('minke_whale.n.01') * >>> tortoise = wn.synset('tortoise.n.01') * >>> novel = wn.synset('novel.n.01') * >>> right.lowest_common_hypernyms(minke) * [Synset('baleen_whale.n.01')] * >>> right.lowest_common_hypernyms(orca) * [Synset('whale.n.02')] * >>> right.lowest_common_hypernyms(tortoise) * [Synset('vertebrate.n.01')] * >>> right.lowest_common_hypernyms(novel) * [Synset('entity.n.01')] */ @Test def testSynsetLowestCommonHypernyms(): Unit = { Console.println(">>> right = wn.synset('right_whale.n.01')") Console.println(">>> orca = wn.synset('orca.n.01')") Console.println(">>> minke = wn.synset('minke_whale.n.01')") Console.println(">>> tortoise = wn.synset('tortoise.n.01')") Console.println(">>> novel = wn.synset('novel.n.01')") val right = wn.synset("right_whale", POS.NOUN, 1) val orca = wn.synset("orca", 
POS.NOUN, 1) val minke = wn.synset("minke_whale", POS.NOUN, 1) val tortoise = wn.synset("tortoise", POS.NOUN, 1) val novel = wn.synset("novel", POS.NOUN, 1) Console.println(">>> right.lowest_common_hypernyms(minke)") val rightMinkeLCH = wn.lowestCommonHypernym(right, minke) Console.println(rightMinkeLCH.map(ss => wn.format(ss))) Console.println(">>> right.lowest_common_hypernyms(orca)") val rightOrcaLCH = wn.lowestCommonHypernym(right, orca) Console.println(rightOrcaLCH.map(ss => wn.format(ss))) Console.println(">>> right.lowest_common_hypernyms(tortoise)") val rightTortoiseLCH = wn.lowestCommonHypernym(right, tortoise) Console.println(rightTortoiseLCH.map(ss => wn.format(ss))) Console.println(">>> right.lowest_common_hypernyms(novel)") val rightNovelLCH = wn.lowestCommonHypernym(right, novel) Console.println(rightNovelLCH.map(ss => wn.format(ss))) } /** * >>> wn.synset('baleen_whale.n.01').min_depth() * 14 * >>> wn.synset('whale.n.02').min_depth() * 13 * >>> wn.synset('vertebrate.n.01').min_depth() * 8 * >>> wn.synset('entity.n.01').min_depth() * 0 */ @Test def testSynsetMinDepth(): Unit = { Console.println(">>> wn.synset('baleen_whale.n.01').min_depth()") val baleenWhaleMinDepth = wn.minDepth(wn.synset("baleen_whale", POS.NOUN, 1)) Console.println(baleenWhaleMinDepth) Assert.assertEquals(14, baleenWhaleMinDepth) Console.println(">>> wn.synset('whale.n.02').min_depth()") val whaleMinDepth = wn.minDepth(wn.synset("whale", POS.NOUN, 2)) Console.println(whaleMinDepth) Assert.assertEquals(13, whaleMinDepth) Console.println(">>> wn.synset('vertebrate.n.01').min_depth()") val vertebrateMinDepth = wn.minDepth(wn.synset("vertebrate", POS.NOUN, 1)) Console.println(vertebrateMinDepth) Assert.assertEquals(8, vertebrateMinDepth) Console.println(">>> wn.synset('entity.n.01').min_depth()") val entityMinDepth = wn.minDepth(wn.synset("entity", POS.NOUN, 1)) Console.println(entityMinDepth) Assert.assertEquals(0, entityMinDepth) } /** * >>> right.path_similarity(minke) * 0.25 * >>> right.path_similarity(orca) * 0.16666666666666666 * >>> right.path_similarity(tortoise) * 0.076923076923076927 * >>> right.path_similarity(novel) * 0.043478260869565216 */ @Test def testPathSimilarity(): Unit = { val right = wn.synset("right_whale", POS.NOUN, 1) val orca = wn.synset("orca", POS.NOUN, 1) val minke = wn.synset("minke_whale", POS.NOUN, 1) val tortoise = wn.synset("tortoise", POS.NOUN, 1) val novel = wn.synset("novel", POS.NOUN, 1) Console.println(">>> right.path_similarity(minke)") val rightMinkePathSimilarity = wn.pathSimilarity(right, minke) Console.println(rightMinkePathSimilarity) Assert.assertEquals(0.25D, rightMinkePathSimilarity, 0.01D) Console.println(">>> right.path_similarity(orca)") val rightOrcaPathSimilarity = wn.pathSimilarity(right, orca) Console.println(rightOrcaPathSimilarity) Assert.assertEquals(0.1667D, rightOrcaPathSimilarity, 0.01D) Console.println(">>> right.path_similarity(tortoise)") val rightTortoisePathSimilarity = wn.pathSimilarity(right, tortoise) Console.println(rightTortoisePathSimilarity) Assert.assertEquals(0.0769D, rightTortoisePathSimilarity, 0.01D) Console.println(">>> right.path_similarity(novel)") val rightNovelPathSimilarity = wn.pathSimilarity(right, novel) Console.println(rightNovelPathSimilarity) Assert.assertEquals(0.043D, rightNovelPathSimilarity, 0.01D) } /** * >>> dog = wn.synset('dog.n.01') * >>> cat = wn.synset('cat.n.01') * >>> hit = wn.synset('hit.v.01') * >>> slap = wn.synset('slap.v.01') * >>> dog.path_similarity(cat) * 0.2... 
* >>> hit.path_similarity(slap) * 0.142... * >>> dog.lch_similarity(cat) * 2.028... * >>> hit.lch_similarity(slap) * 1.312... * >>> dog.wup_similarity(cat) * 0.857... * >>> hit.wup_similarity(slap) * 0.25 * >>> dog.res_similarity(cat, semcor_ic) * 7.911... * >>> dog.jcn_similarity(cat, semcor_ic) * 0.449... * >>> dog.lin_similarity(cat, semcor_ic) * 0.886... */ @Test def testOtherSimilarities(): Unit = { Console.println(">>> dog = wn.synset('dog.n.01')") Console.println(">>> cat = wn.synset('cat.n.01')") Console.println(">>> hit = wn.synset('hit.v.01')") Console.println(">>> slap = wn.synset('slap.v.01')") Console.println(">>> car = wn.synset('car.n.01')") Console.println(">>> bus = wn.synset('bus.n.01')") val dog = wn.synset("dog", POS.NOUN, 1) val cat = wn.synset("cat", POS.NOUN, 1) val hit = wn.synset("hit", POS.VERB, 1) val slap = wn.synset("slap", POS.VERB, 1) //val car = wn.synset("car", POS.NOUN, 1) //val bus = wn.synset("bus", POS.NOUN, 1) val car = wn.synset("weak", POS.ADJECTIVE, 1) val bus = wn.synset("physical", POS.ADJECTIVE, 1) Console.println(">>> dog.path_similarity(cat)") val dogCatPathSimilarity = wn.pathSimilarity(dog, cat) Console.println(dogCatPathSimilarity) Console.println(">>> hit.path_similarity(slap)") val hitSlapPathSimilarity = wn.pathSimilarity(hit, slap) Console.println(hitSlapPathSimilarity) Assert.assertEquals(0.2D, dogCatPathSimilarity, 0.01D) Assert.assertEquals(0.1428D, hitSlapPathSimilarity, 0.01D) Console.println(">>> dog.lch_similarity(cat)") val dogCatLchSimilarity = wn.lchSimilarity(dog, cat) Console.println(dogCatLchSimilarity) Console.println(">>> hit.lch_similarity(slap)") val hitSlapLchSimilarity = wn.lchSimilarity(hit, slap) Console.println(hitSlapLchSimilarity) Assert.assertEquals(2.079D, dogCatLchSimilarity, 0.01D) Assert.assertEquals(1.386D, hitSlapLchSimilarity, 0.01D) Console.println(">>> dog.wup_similarity(cat)") val dogCatWupSimilarity = wn.wupSimilarity(dog, cat) Console.println(dogCatWupSimilarity) Console.println(">>> hit.wup_similarity(slap)") val hitSlapWupSimilarity = wn.wupSimilarity(hit, slap) Console.println(hitSlapWupSimilarity) Assert.assertEquals(0.866D, dogCatWupSimilarity, 0.01D) Assert.assertEquals(0.25D, hitSlapWupSimilarity, 0.01D) Console.println(">>> dog.res_similarity(cat)") val dogCatResSimilarity = wn.resSimilarity(dog, cat) Console.println(dogCatResSimilarity) Console.println(">>> hit.res_similarity(slap)") Assert.assertEquals(7.254D, dogCatResSimilarity, 0.01D) Console.println(">>> dog.jcn_similarity(cat)") val dogCatJcnSimilarity = wn.jcnSimilarity(dog, cat) Console.println(dogCatJcnSimilarity) Assert.assertEquals(0.537D, dogCatJcnSimilarity, 0.01D) Console.println(">>> dog.lin_similarity(cat)") val dogCatLinSimilarity = wn.linSimilarity(dog, cat) Console.println(dogCatLinSimilarity) Assert.assertEquals(0.886D, dogCatLinSimilarity, 0.01D) Console.println(">>> car.lesk_similarity(bus)") val carBusLeskSimilarity = wn.leskSimilarity(car, bus) Console.println(carBusLeskSimilarity) Assert.assertEquals(6.0D, carBusLeskSimilarity, 0.01D) } /** * >>> for synset in list(wn.all_synsets('n'))[:10]: * ... print(synset) * ... * Synset('entity.n.01') * Synset('physical_entity.n.01') * Synset('abstraction.n.06') * Synset('thing.n.12') * Synset('object.n.01') * Synset('whole.n.02') * Synset('congener.n.03') * Synset('living_thing.n.01') * Synset('organism.n.01') * Synset('benthos.n.02') * :NOTE: order of synsets returned is different in * JWNL than with NLTK. 
*/ @Test def testAllSynsets(): Unit = { Console.println(">>> for synset in list(wn.all_synsets('n'))[:10]:") Console.println("... print(synset)") Console.println("...") val fss = wn.allSynsets(POS.NOUN) .take(10) .toList fss.foreach(ss => Console.println(wn.format(ss))) Assert.assertEquals(10, fss.size) } /** * >>> print(wn.morphy('denied', wn.VERB)) * deny * >>> print(wn.morphy('dogs')) * dog * >>> print(wn.morphy('churches')) * church * >>> print(wn.morphy('aardwolves')) * aardwolf * >>> print(wn.morphy('abaci')) * abacus * >>> print(wn.morphy('book', wn.NOUN)) * book * >>> wn.morphy('hardrock', wn.ADV) * >>> wn.morphy('book', wn.ADJ) * >>> wn.morphy('his', wn.NOUN) */ @Test def testMorphy(): Unit = { Console.println(">>> print(wn.morphy('denied', wn.VERB))") val denied = wn.morphy("denied", POS.VERB) Console.println(denied) Assert.assertEquals("deny", denied) Console.println(">>> print(wn.morphy('dogs'))") val dogs = wn.morphy("dogs") Console.println(dogs) Assert.assertEquals("dog", dogs) Console.println(">>> print(wn.morphy('churches'))") val churches = wn.morphy("churches") Console.println(churches) Assert.assertEquals("church", churches) Console.println(">>> print(wn.morphy('aardwolves'))") val aardwolves = wn.morphy("aardwolves") Console.println(aardwolves) Assert.assertEquals("aardwolf", aardwolves) Console.println(">>> print(wn.morphy('abaci'))") val abaci = wn.morphy("abaci") Console.println(abaci) Assert.assertEquals("abacus", abaci) Console.println(">>> print(wn.morphy('book', wn.NOUN))") val book = wn.morphy("book", POS.NOUN) Console.println(book) Assert.assertEquals("book", book) Console.println(">>> wn.morphy('hardrock', wn.ADV)") val hardrock = wn.morphy("hardrock", POS.ADVERB) Console.println(hardrock) Assert.assertTrue(hardrock.isEmpty) Console.println(">>> wn.morphy('book', wn.ADJ)") val bookAdj = wn.morphy("book", POS.ADJECTIVE) Console.println(bookAdj) Assert.assertTrue(bookAdj.isEmpty) Console.println(">>> wn.morphy('his', wn.NOUN)") val his = wn.morphy("his", POS.NOUN) Console.println(his) Assert.assertTrue(his.isEmpty) } }
sujitpal/scalcium
src/test/scala/com/mycompany/scalcium/wordnet/WordnetTest.scala
Scala
apache-2.0
24,277
package vggames.scala.specs.string

import vggames.scala.specs.GameSpecification
import vggames.scala.code.RestrictedFunction1
import vggames.scala.specs.TestRun

class ReplaceString extends GameSpecification[RestrictedFunction1[String, String]] {
  def runSignature = "(a:String):String"
  def extendsType = "RestrictedFunction1[String, String]"
  def challenge = """Substitua as ocorrências de <code>"aba"</code> em <code>a</code> por <code>"ebe"</code> """

  def run(code: Code, submittedCode: String)(implicit cases: TestRun) =
    "O seu código" should {
      """ mudar "abaixa" para "ebeixa" """ in {
        code("abaixa") must_== "ebeixa"
      }
      """ não mudar "elefante" """ in {
        code("elefante") must_== "elefante"
      }
    }
}
vidageek/games
games/scala/src/main/scala/vggames/scala/specs/string/ReplaceString.scala
Scala
gpl-3.0
767
package edu.rit.csh.linter

import _root_.fastparse.core.Parsed.{Failure, Success}
import _root_.fastparse.core.{ParseError, Parser}
import edu.rit.csh.scalaLint.parser.{ScalaParser, ScalaLexer}
import org.antlr.v4.runtime.ANTLRInputStream
import org.antlr.v4.runtime.CommonTokenStream
import org.scalatest.FunSuite
import edu.rit.csh.linter.language.Literals._

object TestUtils extends FunSuite {

  def parse[T](str: String, parser: Parser[T], answer: T): Unit = {
    parser.parse(str) match {
      case Success(value, index) =>
        assert(value === answer)
        assert(index === str.length)
      case f: Failure =>
        fail(s"failed to parse `$str`, ${f.msg}")
    }
  }

  def parseError[T](str: String, parser: Parser[T]): Unit = {
    val result = parser.parse(str)
    result match {
      case Success(value, index) => assert(index !== str.length)
      case f: Failure => intercept[ParseError] { result.get }
    }
  }

  def parse2Error[T](str: String, f: ScalaParser => T): Unit = {
    val in = new ANTLRInputStream(str)
    val lexer = new ScalaLexer(in)
    val tokens = new CommonTokenStream(lexer)
    val parser = new ScalaParser(tokens)
    assert(f(parser) === null)
  }

  def parse2[T](str: String, expected: T, f: ScalaParser => T): Unit = {
    val in = new ANTLRInputStream(str)
    val lexer = new ScalaLexer(in)
    val tokens = new CommonTokenStream(lexer)
    val parser = new ScalaParser(tokens)
    assert(f(parser) === expected)
  }
}
JDrit/ScalaLint
src/test/scala/edu/rit/csh/linter/TestUtils.scala
Scala
apache-2.0
1,475
package org.scalaide.ui.internal.editor.decorators.semantichighlighting

import org.eclipse.core.runtime.jobs.Job
import org.eclipse.jface.text.IRegion
import org.eclipse.jface.text.source.ISourceViewer
import org.scalaide.core.internal.decorators.semantichighlighting.PositionsTracker

/** This interface expose the minimal amount of functionality needed by the semantic highlighting
 *  component to apply the presentation styles in a text editor.
 *
 *  @note This trait is needed for running tests in a headless environment.
 */
private[scalaide] trait TextPresentationHighlighter {
  def sourceViewer: ISourceViewer

  def initialize(semanticHighlightingJob: Job, positionsTracker: PositionsTracker): Unit
  def dispose(): Unit

  /** Triggers an update of the editor's `TextPresentation` based on the passed `damage` region. */
  def updateTextPresentation(damage: IRegion): Unit
}
andrey-ilinykh/scala-ide
org.scala-ide.sdt.core/src/org/scalaide/ui/internal/editor/decorators/semantichighlighting/TextPresentationHighlighter.scala
Scala
bsd-3-clause
887
/**
 * Copyright 2015 Yahoo Inc. Licensed under the Apache License, Version 2.0
 * See accompanying LICENSE file.
 */
package kafka.manager

import kafka.manager.ActorModel.BrokerInfo

/**
 * @author hiral
 */
case class BrokerIdentity(id: String, host: String, port: Int)

object BrokerIdentity {
  import play.api.libs.json._

  implicit def from(info: BrokerInfo): BrokerIdentity = {
    val config = Json.parse(info.config)
    BrokerIdentity(info.id, (config \ "host").as[String], (config \ "port").as[Int])
  }
}
wking1986/kafka-manager
app/kafka/manager/BrokerIdentity.scala
Scala
apache-2.0
520
package com.oct.sclaav

import org.scalatest.{BeforeAndAfter, FunSuite, Matchers}
import org.slf4j.LoggerFactory

class MainTest extends FunSuite with BeforeAndAfter with Matchers with TestHelpers {

  val log = LoggerFactory.getLogger(getClass)

}
ogeagla/sclaav
src/test/scala/com/oct/sclaav/MainTest.scala
Scala
apache-2.0
249
package test import org.specs2.mutable.Specification class AvroRecordUserDefinedTypesTest extends Specification { "A case class with another record as a field" should { "serialize and deserialize correctly" in { val record1 = AvroRecordTest58(AvroRecordTest00(1)) val record2 = AvroRecordTest58(AvroRecordTest00(2)) val records = List(record1, record2) TestUtil.verifyWriteAndRead(records) } } "A case class with an `Float` field" should { "serialize and deserialize correctly" in { val record1 = AvroRecordTest59(AvroRecordTest58(AvroRecordTest00(1))) val record2 = AvroRecordTest59(AvroRecordTest58(AvroRecordTest00(2))) val records = List(record1, record2) TestUtil.verifyWriteAndRead(records) } } "A case class with an `Long` field" should { "serialize and deserialize correctly" in { val record1 = AvroRecordTest60(AvroRecordTest00(1), AvroRecordTest58(AvroRecordTest00(2))) val record2 = AvroRecordTest60(AvroRecordTest00(3), AvroRecordTest58(AvroRecordTest00(4))) val records = List(record1, record2) TestUtil.verifyWriteAndRead(records) } } "A case class with a field that is list of a user-defined type" should { "serialize and deserialize correctly" in { val record1 = AvroRecordTest61(List(AvroRecordTest00(1), AvroRecordTest00(2))) val record2 = AvroRecordTest61(List(AvroRecordTest00(3), AvroRecordTest00(4))) val records = List(record1, record2) TestUtil.verifyWriteAndRead(records) } } "A case class with a field that is list of a nested user-defined type" should { "serialize and deserialize correctly" in { val record1 = AvroRecordTest62(List(AvroRecordTest58(AvroRecordTest00(1)), AvroRecordTest58(AvroRecordTest00(2)))) val record2 = AvroRecordTest62(List(AvroRecordTest58(AvroRecordTest00(3)), AvroRecordTest58(AvroRecordTest00(4)))) val records = List(record1, record2) TestUtil.verifyWriteAndRead(records) } } /* //TODO make readable file for this class - not very urgent since this field type is tested in other contexts also "A case class with a field that is list of a nested user-defined type in the second position" should { "serialize and deserialize correctly" in { val record1 = AvroRecordTest63(List(AvroRecordTest00(1), AvroRecordTest00(2)), List(AvroRecordTest60(AvroRecordTest00(3), AvroRecordTest58(AvroRecordTest00(2))))) val record2 = AvroRecordTest63(List(AvroRecordTest00(3), AvroRecordTest00(2)), List(AvroRecordTest60(AvroRecordTest00(3), AvroRecordTest58(AvroRecordTest00(2))))) val records = List(record1, record2) TestUtil.verifyWriteAndRead(records) } } */ "A case class with a field that is list of a nested user-defined type in the second position" should { "serialize and deserialize correctly" in { val record1 = AvroRecordTest64(Some(AvroRecordTest00(1))) val record2 = AvroRecordTest64(Some(AvroRecordTest00(2))) val records = List(record1, record2) TestUtil.verifyWriteAndRead(records) } } "A case class with a field that is list of a nested user-defined type in the second position" should { "serialize and deserialize correctly" in { val record1 = AvroRecordTest65(None) val record2 = AvroRecordTest65(None) val records = List(record1, record2) TestUtil.verifyWriteAndRead(records) } } "A case class with a field that is list of a nested user-defined type in the second position" should { "serialize and deserialize correctly" in { val record1 = AvroRecordTest66(Some(AvroRecordTest58(AvroRecordTest00(1)))) val record2 = AvroRecordTest66(Some(AvroRecordTest58(AvroRecordTest00(2)))) val records = List(record1, record2) TestUtil.verifyWriteAndRead(records) } } "A case class with a field that is 
list of a nested user-defined type in the second position" should { "serialize and deserialize correctly" in { val record1 = AvroRecordTest67(Some(AvroRecordTest00(1)), Some(AvroRecordTest60(AvroRecordTest00(4), AvroRecordTest58(AvroRecordTest00(1))))) val record2 = AvroRecordTest67(Some(AvroRecordTest00(7)), Some(AvroRecordTest60(AvroRecordTest00(8), AvroRecordTest58(AvroRecordTest00(7))))) val records = List(record1, record2) TestUtil.verifyWriteAndRead(records) } } "A case class with a field that is list of a nested user-defined type in the second position" should { "serialize and deserialize correctly" in { val record1 = AvroRecordTest68(Some(List(Some(AvroRecordTest00(1)), None)), List(None, Some(List(AvroRecordTest01(1F), AvroRecordTest01(2F))))) val record2 = AvroRecordTest68(Some(List(Some(AvroRecordTest00(3)), None)), List(None, Some(List(AvroRecordTest01(3F), AvroRecordTest01(4F))))) val records = List(record1, record2) TestUtil.verifyWriteAndRead(records) } } "A case class with two differeing Map fields that contain user-defined types" should { "serialize and deserialize correctly" in { val record1 = AvroRecordTestMap12( Map("socialist"->Map("capitalist"->AvroRecordTest00(1))), Map("private"->AvroRecordTest58(AvroRecordTest00(1))) ) val record2 = AvroRecordTestMap12( Map("mixed"->Map("communist"->AvroRecordTest00(2))), Map("public"->AvroRecordTest58(AvroRecordTest00(2))) ) val records = List(record1, record2) TestUtil.verifyWriteAndRead(records) } } }
julianpeeters/avro-scala-macro-annotations
tests/src/test/scala/AvroRecordTests/datatypetests/AvroRecordUserDefinedTypesTest.scala
Scala
apache-2.0
5,531
object Test {
  def main(args: Array[String]): Unit = {
    try {
      println("0")
      val f = new Foo
      println("1")
      println(f.foo)
    } catch {
      case e: NotImplementedError => println("???")
    }
    assert(!classOf[Foo].getDeclaredFields.exists(_.getName.startsWith("foo")), "field foo not erased")
  }
}

class Foo {
  val foo: Nothing = {
    println("foo")
    ???
  }
}
som-snytt/dotty
tests/run/nothing-val.scala
Scala
apache-2.0
402
package provingground

// import scala.swing._
import java.awt.Dimension

import akka.actor._

import provingground.TextToInt._

object AckGUI /*extends SimpleSwingApplication*/ {
  /*
  val s = new Dimension(1500, 1500)

  object Ack{
    val posInts: Stream[Int] = Stream.from(0)

    def ack(m: BigInt, n: BigInt): BigInt = {
      if (m==0) n+1
      else if (n==0) ack(m-1, 1)
      else ack(m-1, ack(m, n-1))
    }

    def ackStreamM(m: Int): Stream[BigInt] = for (n <- posInts) yield ack(m,n)

    val ackStream = for (m<- posInts) yield ackStreamM(m)

    def apply(m: Int, n:Int) = ackStream(m)(n)
  }

  def ack(n: Int) = n * n

  val toCompute = new TextArea(10, 10){
    charWrap = true
  }

  val computeButton = new Button{
    text = "Compute"
    verticalAlignment = Alignment.Top
  }

  val computeFrame = new FlowPanel{
    contents += computeButton
    contents += toCompute
    border = Swing.EmptyBorder(20, 20, 20, 20)
  }

  val computeResult = new TextArea(10, 40){
    charWrap = true
  }

  val leftPanel = new BoxPanel(Orientation.Vertical){
    contents += new Label("Enter number to compute Ackerman function")
    contents += computeFrame
    contents += new Label("Value")
    contents += computeResult
    border = Swing.EmptyBorder(20, 20, 20, 20)
  }

  def top = new MainFrame{
    title = "Computing the Ackermann function A(m, n) for n=2"
    contents = new BoxPanel(Orientation.Horizontal){
      contents += leftPanel
      minimumSize = s
    }
  }

  listenTo(computeButton)

  reactions +={
    case swing.event.ButtonClicked(`computeButton`) =>
      computeResult.text = toCompute.text match {
        case Int(m) if m>=0 => Ack.ack(m, 2).toString
        case _ => "Ackerman function is defined only for integers"
      }
  }*/
}
siddhartha-gadgil/ProvingGround
digressions/src/main/scala/provingground/codeexperiments/AckGUI.scala
Scala
mit
1,720
/*
 * Copyright (C) 2011 Romain Reuillon
 *
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program. If not, see <http://www.gnu.org/licenses/>.
 */

package org.openmole.plugin.hook.display

import org.openmole.core.workflow.data._
import org.openmole.core.workflow.tools._
import org.openmole.core.workflow.mole._
import org.openmole.core.workflow.mole._
import org.openmole.core.workflow.tools.ExpandedString

object DisplayHook {

  def apply(toDisplay: ExpandedString) =
    new HookBuilder {
      def toHook = new DisplayHook(toDisplay) with Built
    }

}

abstract class DisplayHook(toDisplay: ExpandedString) extends Hook {

  override def process(context: Context, executionContext: ExecutionContext) = {
    executionContext.out.println(toDisplay.from(context))
    context
  }

}
ISCPIF/PSEExperiments
openmole-src/openmole/plugins/org.openmole.plugin.hook.display/src/main/scala/org/openmole/plugin/hook/display/DisplayHook.scala
Scala
agpl-3.0
1,338
package app.restlike.common

case class CliCommand(value: String)
alltonp/reprobate
src/main/scala/app/restlike/common/CliCommand.scala
Scala
apache-2.0
66
package uk.co.morleydev.zander.client.test.unit

import org.scalatest.Tag

object UnitTag extends Tag("uk.co.morleydev.zander.client.test.unit.UnitTag")
MorleyDev/zander.client
src/test/scala/uk/co/morleydev/zander/client/test/unit/UnitTag.scala
Scala
mit
153
/*
 * Copyright 2015 LG CNS.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */
package scouter.server.db.xlog;

import java.io.File;
import java.io.IOException;
import java.io.RandomAccessFile;
import java.util.Hashtable;
import java.util.Properties;

import scouter.server.Configure;
import scouter.server.db.io.zip.GZipStore;
import scouter.util.FileUtil;
import scouter.util.IClose;

object XLogProfileDataReader {
  val table = new Hashtable[String, XLogProfileDataReader]();

  def open(date: String, file: String): XLogProfileDataReader = {
    table.synchronized {
      var reader = table.get(file);
      if (reader != null) {
        reader.refrence += 1;
      } else {
        reader = new XLogProfileDataReader(date, file);
        table.put(file, reader);
      }
      return reader;
    }
  }
}

class XLogProfileDataReader(date: String, file: String) extends IClose {

  var refrence = 0;
  val conf = Configure.getInstance();

  private var profileFile: RandomAccessFile = null
  private var gzip = conf.gzip_profile

  val confFile = new File(file + ".profile.conf");
  if (confFile.exists()) {
    val properties = FileUtil.readProperties(confFile);
    this.gzip = "true".equalsIgnoreCase(properties.getProperty("gzip_profile", "" + conf.gzip_profile).trim());
  }

  val profile = new File(file + ".profile");
  if (profile.canRead() == true) {
    this.profileFile = new RandomAccessFile(profile, "r");
  }

  def read(pos: Long): Array[Byte] = {
    if (this.gzip) {
      return GZipStore.getInstance().read(date, pos);
    }
    if (profileFile == null)
      return null;
    try {
      this.synchronized {
        profileFile.seek(pos);
        val len = profileFile.readInt();
        val buffer = new Array[Byte](len);
        profileFile.read(buffer);
        return buffer;
      }
    } catch {
      case e: IOException => throw new RuntimeException(e);
    }
  }

  override def close() {
    XLogProfileDataReader.table.synchronized {
      if (this.refrence == 0) {
        XLogProfileDataReader.table.remove(this.file);
        try {
          if (profileFile != null)
            profileFile.close();
          profileFile = null;
        } catch {
          case e: Throwable => e.printStackTrace();
        }
      } else {
        this.refrence -= 1
      }
    }
  }
}
jahnaviancha/scouter
scouter.server/src/scouter/server/db/xlog/XLogProfileDataReader.scala
Scala
apache-2.0
3,281
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.spark.sql.execution.command

import scala.language.implicitConversions

import org.apache.spark.sql._
import org.apache.spark.sql.catalyst.TableIdentifier

import org.apache.carbondata.common.logging.LogServiceFactory

object Checker {
  def validateTableExists(
      dbName: Option[String],
      tableName: String,
      session: SparkSession): Unit = {
    val identifier = TableIdentifier(tableName, dbName)
    if (!CarbonEnv.getInstance(session).carbonMetastore.tableExists(identifier)(session)) {
      val err = s"table $dbName.$tableName not found"
      LogServiceFactory.getLogService(this.getClass.getName).error(err)
      throw new IllegalArgumentException(err)
    }
  }
}

/**
 * Interface for command that modifies schema
 */
trait SchemaProcessCommand {
  def processSchema(sparkSession: SparkSession): Seq[Row]
}

/**
 * Interface for command that need to process data in file system
 */
trait DataProcessCommand {
  def processData(sparkSession: SparkSession): Seq[Row]
}
HuaweiBigData/carbondata
integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/package.scala
Scala
apache-2.0
1,814
package com.sksamuel.scapegoat.inspections.unnecessary import com.sksamuel.scapegoat.InspectionTest import com.sksamuel.scapegoat.inspections.unneccesary.UnnecessaryConversion /** @author Stephen Samuel */ class UnnecessaryConversionTest extends InspectionTest { override val inspections = Seq(new UnnecessaryConversion) "Unnecessary conversion" - { "should report warning" - { "when invoking toString on a String" in { val code = """object Test { val i = "sam" val j = i.toString }""".stripMargin compileCodeSnippet(code) compiler.scapegoat.feedback.warnings.size shouldBe 1 } "when invoking toInt on an int" in { val code = """object Test { | val i = 4 | val j = i.toInt |}""".stripMargin compileCodeSnippet(code) compiler.scapegoat.feedback.warnings.size shouldBe 1 } "when invoking toInt on an integer literal" in { val code = """object Example extends App { | val a = 3.toInt // NullPointerException here (v1.3.6). | val b = (10 / 5).toInt // NullPointerException here (v1.3.6). |}""".stripMargin compileCodeSnippet(code) compiler.scapegoat.feedback.warnings.size shouldBe 2 } "when invoking toLong on a long" in { val code = """object Test { | val i: Long = 436 | val j = i.toLong |}""".stripMargin compileCodeSnippet(code) compiler.scapegoat.feedback.warnings.size shouldBe 1 } "when invoking toLong on a Long literal" in { val code = """object Example extends App { | val a = 123456789012L | val b = a.toLong |}""".stripMargin compileCodeSnippet(code) compiler.scapegoat.feedback.warnings.size shouldBe 1 } "when invoking toList on a list" in { val code = """object Test { | val list = List(1,2,3) | val something = list.toList |}""".stripMargin compileCodeSnippet(code) compiler.scapegoat.feedback.warnings.size shouldBe 1 } "when invoking toSet on a set" in { val code = """object Test { | val set = Set(4,3,6) | val something = set.toSet |}""".stripMargin compileCodeSnippet(code) compiler.scapegoat.feedback.warnings.size shouldBe 1 } "when invoking toSeq on a seq" in { val code = """object Test { | val seq = Seq(4,3,6) | val something = seq.toSeq |}""".stripMargin compileCodeSnippet(code) compiler.scapegoat.feedback.warnings.size shouldBe 1 } } "should not report warning" - { "when invoking toString on a BigDecimal" in { val code = """object Test { val s = BigDecimal(5) val t = s.toString }""".stripMargin compileCodeSnippet(code) compiler.scapegoat.feedback.warnings.size shouldBe 0 } "when invoking toInt on a String" in { val code = """object Test { | val s = "5" | val t = s.toInt |}""".stripMargin compileCodeSnippet(code) compiler.scapegoat.feedback.warnings.size shouldBe 0 } "when invoking toInt on an Integer" in { val code = """object Test { | def test(i: java.lang.Integer) = { | val t = i.toInt | } |}""".stripMargin compileCodeSnippet(code) compiler.scapegoat.feedback.warnings.size shouldBe 0 } "when invoking toLong on a int" in { val code = """object Test { | val i: Int = 436 | val j = i.toLong |}""".stripMargin compileCodeSnippet(code) compiler.scapegoat.feedback.warnings.size shouldBe 0 } "when invoking toSeq on a set" in { val code = """object Test { | val set = Set(4,3,6) | val something = set.toSeq |}""".stripMargin compileCodeSnippet(code) compiler.scapegoat.feedback.warnings.size shouldBe 0 } "when invoking toSet on a list or seq" in { val code = """object Test { | val list = List(4,3,6) | val something = list.toSet | val seq = Seq(1,3,6) | val thing = seq.toSet |}""".stripMargin compileCodeSnippet(code) compiler.scapegoat.feedback.warnings.size shouldBe 0 } } } }
sksamuel/scalac-scapegoat-plugin
src/test/scala/com/sksamuel/scapegoat/inspections/unnecessary/UnnecessaryConversionTest.scala
Scala
apache-2.0
4,819
package org.hammerlab.guacamole.strings

trait TruncatedToString {
  override def toString: String = truncatedString(Int.MaxValue)

  /** String representation, truncated to maxLength characters. */
  def truncatedString(maxLength: Int = 500): String =
    TruncatedToString(stringPieces, maxLength)

  /**
   * Iterator over string representations of data comprising this object.
   */
  def stringPieces: Iterator[String]
}

object TruncatedToString {
  /**
   * Like Scala's List.mkString method, but supports truncation.
   *
   * Return the concatenation of an iterator over strings, separated by separator, truncated to at most maxLength
   * characters. If truncation occurs, the string is terminated with ellipses.
   */
  def apply(pieces: Iterator[String],
            maxLength: Int,
            separator: String = ",",
            ellipses: String = " [...]"): String = {
    val builder = StringBuilder.newBuilder
    var remaining: Int = maxLength
    while (pieces.hasNext && remaining > ellipses.length) {
      val string = pieces.next()
      builder.append(string)
      if (pieces.hasNext) builder.append(separator)
      remaining -= string.length + separator.length
    }
    if (pieces.hasNext) builder.append(ellipses)
    builder.result
  }
}
hammerlab/guacamole
src/main/scala/org/hammerlab/guacamole/strings/TruncatedToString.scala
Scala
apache-2.0
1,269
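Since TruncatedToString.apply in the record above is essentially mkString with a length budget, a short usage sketch may help; the sample strings below are invented purely for illustration:

// With a generous budget the helper behaves like mkString(",").
TruncatedToString(Iterator("chr1:100", "chr1:250", "chr2:7", "chr3:42"), maxLength = 100)
// "chr1:100,chr1:250,chr2:7,chr3:42"

// With a tight budget it stops once the remaining space cannot fit more than the ellipses marker.
TruncatedToString(Iterator("chr1:100", "chr1:250", "chr2:7"), maxLength = 15)
// "chr1:100, [...]"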
// Databricks notebook source exported at Sun, 28 Aug 2016 15:52:30 UTC // MAGIC %md // MAGIC // MAGIC # [Big Data Analysis for Humanities and Social Sciences](https://www.eventbrite.co.uk/e/big-data-analysis-for-the-humanities-and-social-sciences-tickets-26708754604) // MAGIC // MAGIC ### August 26, 2016, King's Digital Lab, King's College London // MAGIC #### prepared by [Raazesh Sainudiin](https://nz.linkedin.com/in/raazesh-sainudiin-45955845) // MAGIC // MAGIC *supported by* [![](https://raw.githubusercontent.com/raazesh-sainudiin/scalable-data-science/master/images/databricks_logoTM_200px.png)](https://databricks.com/) // COMMAND ---------- // MAGIC %md // MAGIC The recorded Uji ![Image of Uji, Dogen's Time-Being](https://raw.githubusercontent.com/raazesh-sainudiin/scalable-data-science/master/images/UjiTimeBeingDogen.png "uji") of an older version of this notebook: // MAGIC // MAGIC [![sds/uji/week2/03_wordcount/006_wordcount](http://img.youtube.com/vi/zgkvusQdNLY/0.jpg)](https://www.youtube.com/v/zgkvusQdNLY?rel=0&autoplay=1&modestbranding=1&start=4613) // COMMAND ---------- // MAGIC %md // MAGIC # Word Count on US State of the Union (SoU) Addresses // MAGIC // MAGIC * Word Count in big data is the equivalent of 'Hello World' in programming // MAGIC * We count the number of occurences of each word in the first and last (2016) SoU addresses. // COMMAND ---------- // MAGIC %md // MAGIC // MAGIC An interesting analysis of the textual content of the *State of the Union (SoU)* addresses by all US presidents was done in: // MAGIC * [Alix Rule, Jean-Philippe Cointet, and Peter S. Bearman, Lexical shifts, substantive changes, and continuity in State of the Union discourse, 1790–2014, PNAS 2015 112 (35) 10837-10844; doi:10.1073/pnas.1512221112](http://www.pnas.org/content/112/35/10837.full). // MAGIC // MAGIC // MAGIC ![](http://www.pnas.org/content/112/35/10837/F5.large.jpg) // MAGIC // MAGIC [Fig. 5](http://www.pnas.org/content/112/35/10837.full). A river network captures the flow across history of US political discourse, as perceived by contemporaries. Time moves along the x axis. Clusters on semantic networks of 300 most frequent terms for each of 10 historical periods are displayed as vertical bars. Relations between clusters of adjacent periods are indexed by gray flows, whose density reflects their degree of connection. Streams that connect at any point in history may be considered to be part of the same system, indicated with a single color. // MAGIC // MAGIC ## Let us investigate this dataset ourselves! // MAGIC 1. We first get the source text data by scraping and parsig from [http://stateoftheunion.onetwothree.net/texts/index.html](http://stateoftheunion.onetwothree.net/texts/index.html) as explained in // MAGIC [scraping and parsing SoU addresses](/#workspace/scalable-data-science/xtraResources/sdsDatasets/scraperUSStateofUnionAddresses). // MAGIC * This data is already made available in DBFS, our distributed file system. // MAGIC * We only do the simplest word count with this data in this notebook and will do more sophisticated analyses in the sequel (including topic modeling, etc). 
// COMMAND ---------- // MAGIC %md // MAGIC ## Key Data Management Concepts // MAGIC // MAGIC ### The Structure Spectrum // MAGIC // MAGIC **(watch now 1:10)**: // MAGIC // MAGIC [![Structure Spectrum by Anthony Joseph in BerkeleyX/CS100.1x](http://img.youtube.com/vi/pMSGGZVSwqo/0.jpg)](https://www.youtube.com/v/pMSGGZVSwqo?rel=0&autoplay=1&modestbranding=1&start=1&end=70) // MAGIC // MAGIC Here we will be working with **unstructured** or **schema-never** data (plain text files). // MAGIC *** // MAGIC // MAGIC ### Files // MAGIC // MAGIC **(watch later 1:43)**: // MAGIC // MAGIC [![Files by Anthony Joseph in BerkeleyX/CS100.1x](http://img.youtube.com/vi/NJyBQ-cQ3Ac/0.jpg)](https://www.youtube.com/v/NJyBQ-cQ3Ac?rel=0&autoplay=1&modestbranding=1&start=1) // COMMAND ---------- // MAGIC %md // MAGIC ### DBFS and dbutils - where is this dataset in our distributed file system? // MAGIC * Since we are on the Databricks cloud, it comes with a file system called DBFS // MAGIC * DBFS is similar to HDFS, the Hadoop distributed file system // MAGIC * `dbutils` allows us to interact with DBFS. // MAGIC * The 'display' command displays the list of files in a given directory in the file system. // COMMAND ---------- // MAGIC %md // MAGIC # Let us get our hands dirty with SOUs // COMMAND ---------- // MAGIC %md // MAGIC ### Download and load data into distributed file system // COMMAND ---------- // MAGIC %sh // MAGIC pwd // MAGIC du -sh /databricks/driver # size available in this dir // MAGIC du -sh /tmp # size available in this dir // COMMAND ---------- display(dbutils.fs.ls("dbfs:/datasets")) // COMMAND ---------- dbutils.fs.mkdirs("dbfs:/datasets/sou") // COMMAND ---------- // MAGIC %md // MAGIC Let us just copy the first SoU address by George Washington // COMMAND ---------- import java.net.URL import java.io.File import org.apache.commons.io.FileUtils val localFile = new File("/tmp/17900108.txt") FileUtils.copyURLToFile(new URL("https://dl.dropboxusercontent.com/u/3531607/datasets/StateOfUnionAddressesUSPresidentsUntil2016/sou/17900108.txt"), localFile) dbutils.fs.mv("file:/tmp/17900108.txt", "dbfs:/datasets/sou/") display(dbutils.fs.ls("dbfs:/datasets/sou")) // COMMAND ---------- // MAGIC %md // MAGIC Let us display the *head* or the first few lines of the file `dbfs:/datasets/sou/17900108.txt` to see what it contains using the `dbutils.fs.head` method. // MAGIC `head(file: String, maxBytes: int = 65536): String` -> Returns up to the first 'maxBytes' bytes of the given file as a String encoded in UTF-8 // MAGIC as follows: // COMMAND ---------- dbutils.fs.head("dbfs:/datasets/sou/17900108.txt",673) // Cntrl+Enter to get the first 673 bytes of the file (which corresponds to the first five lines) // COMMAND ---------- // MAGIC %md // MAGIC ##### You Try! // MAGIC Modify `xxxx` in the cell below to read the first 1000 bytes from the file. // COMMAND ---------- dbutils.fs.head("dbfs:/datasets/sou/17900108.txt", xxxx) // Cntrl+Enter to get the first 1000 bytes of the file // COMMAND ---------- // MAGIC %md // MAGIC ### Read the file into Spark Context as an RDD of Strings // MAGIC * The `textFile` method on the available `SparkContext` `sc` can read the text file `dbfs:/datasets/sou/17900108.txt` into Spark and create an RDD of Strings // MAGIC * but this is done lazily until an action is taken on the RDD `sou17900108`!
// COMMAND ---------- val sou17900108 = sc.textFile("dbfs:/datasets/sou/17900108.txt") // Cntrl+Enter to read in the textfile as RDD[String] // COMMAND ---------- // MAGIC %md // MAGIC ### Perform some actions on the RDD // MAGIC * Each String in the RDD `sou17900108` represents one line of data from the file. We can perform the following actions on the RDD: // MAGIC * count the number of elements in the RDD `sou17900108` (i.e., the number of lines in the text file `dbfs:/datasets/sou/17900108.txt`) using `sou17900108.count()` // MAGIC * display the contents of the RDD using `take` or `collect`. // COMMAND ---------- sou17900108.count() // <Shift+Enter> to count the number of elements in the RDD // COMMAND ---------- sou17900108.take(5) // <Shift+Enter> to display the first 5 elements of RDD // COMMAND ---------- sou17900108.take(5).foreach(println) // <Shift+Enter> to display the first 5 elements of RDD line by line // COMMAND ---------- sou17900108.collect // <Cntrl+Enter> to display all the elements of RDD // COMMAND ---------- // MAGIC %md // MAGIC ### Cache the RDD in (distributed) memory to avoid recreating it for each action // MAGIC * Above, every time we took an action on the same RDD, the RDD was reconstructed from the textfile. // MAGIC * Spark's advantage compared to Hadoop MapReduce is the ability to cache or store the RDD in distributed memory across the nodes. // MAGIC * Let's use `.cache()` after creating an RDD so that it is in memory after the first action (thus avoiding reconstruction for subsequent actions). // MAGIC * count the number of elements in the RDD `sou17900108` (i.e., the number of lines in the text file `dbfs:/datasets/sou/17900108.txt`) using `sou17900108.count()` // MAGIC * display the contents of the RDD using `take` or `collect`.
// COMMAND ---------- // Shift+Enter to read in the textfile as RDD[String] and cache it in distributed memory val sou17900108 = sc.textFile("dbfs:/datasets/sou/17900108.txt") sou17900108.cache() // cache the RDD in memory // COMMAND ---------- sou17900108.count() // Shift+Enter during this count action the RDD is constructed from textfile and cached // COMMAND ---------- sou17900108.count() // Shift+Enter during this count action the cached RDD is used (notice less time taken by the same command) // COMMAND ---------- sou17900108.take(5) // <Cntrl+Enter> to display the first 5 elements of the cached RDD // COMMAND ---------- // MAGIC %md // MAGIC #### Lifecycle of a Spark Program // MAGIC // MAGIC **(watch now 0:23)**: // MAGIC // MAGIC [![Spark Program Lifecycle by Anthony Joseph in BerkeleyX/CS100.1x](http://img.youtube.com/vi/HWZUqNYAJj4/0.jpg)](https://www.youtube.com/v/HWZUqNYAJj4?rel=0&autoplay=1&modestbranding=1&start=1) // MAGIC // MAGIC ##### Summary // MAGIC * create RDDs from: // MAGIC * some external data source (such as a distributed file system) // MAGIC * parallelized collection in your driver program // MAGIC * lazily transform these RDDs into new RDDs // MAGIC * cache some of those RDDs for future reuse // MAGIC * you perform actions to execute parallel computation to produce results // COMMAND ---------- // MAGIC %md // MAGIC ### Transform lines to words // MAGIC * We need to loop through each line and split the line into words // MAGIC * For now, let us split using whitespace // MAGIC * More sophisticated regular expressions can be used to split the line (as we will see soon) // COMMAND ---------- sou17900108 .flatMap(line => line.split(" ")) .take(100) // COMMAND ---------- // MAGIC %md // MAGIC ### Naive word count // MAGIC At first glance, to do a word count of George Washington's SoU address, we are tempted to do the following: // MAGIC * just break each line by the whitespace character " " and find the words using a `flatMap` // MAGIC * then do the `map` with the closure `word => (word, 1)` to initialize each `word` with an integer count of `1` // MAGIC * i.e., transform each word to a *(key, value)* pair or `Tuple` such as `(word, 1)` // MAGIC * then count all *value*s with the same *key* (`word` is the Key in our case) by doing a // MAGIC * `reduceByKey(_+_)` // MAGIC * and finally `collect()` to display the results. // COMMAND ---------- sou17900108 .flatMap( line => line.split(" ") ) .map( word => (word, 1) ) .reduceByKey(_+_) .collect() // COMMAND ---------- // MAGIC %md // MAGIC Unfortunately, as you can see from the `collect` above: // MAGIC * the words have punctuation at the end, which means that the same words are being counted as different words, e.g., 'importance' // MAGIC * empty words are being counted // MAGIC // MAGIC So we need a bit of `regex`'ing or regular-expression matching (all readily available from Scala via Java String types). // MAGIC // MAGIC We will cover the three things we want to do with a simple example from Middle Earth! // MAGIC * replace all multiple whitespace characters with one white space character " " // MAGIC * replace all punctuation characters we specify within `[` and `]` such as `[,?.!:;]` by the empty string "" (i.e., remove these punctuation characters) // MAGIC * convert everything to lower-case. // COMMAND ---------- val example = "Master, Master! It's me, Sméagol... mhrhm*%* But they took away our precious, they wronged us. Gollum will protect us..., Master, it's me Sméagol."
// COMMAND ---------- example.replaceAll("\\\\s+", " ") //replace multiple whitespace characters (including space, tab, new line, etc.) with one whitespace " " .replaceAll("""([,?.!:;])""", "") // replace the following punctuation characters: , ? . ! : ; . with the empty string "" .toLowerCase() // converting to lower-case // COMMAND ---------- // MAGIC %md // MAGIC ### More sophisticated word count // MAGIC We are now ready to do a word count of George Washington's SoU on January 8th 1790 as follows: // COMMAND ---------- val wordCount_sou17900108 = sou17900108 .flatMap(line => line.replaceAll("\\\\s+", " ") //replace multiple whitespace characters (including space, tab, new line, etc.) with one whitespace " " .replaceAll("""([,?.!:;])""", "") // replace the following punctuation characters: , ? . ! : ; . with the empty string "" .toLowerCase() // converting to lower-case .split(" ")) .map(x => (x, 1)) .reduceByKey(_+_) wordCount_sou17900108.collect() // COMMAND ---------- val top10 = wordCount_sou17900108.sortBy(_._2, false).collect() // COMMAND ---------- // MAGIC %md // MAGIC ### Doing it all together for George Washington // COMMAND ---------- sc.textFile("dbfs:/datasets/sou/17900108.txt") // George Washington's first SoU //sc.textFile("dbfs:/datasets/sou/20160112.txt") // Barack Obama's last SoU .flatMap(line => line.replaceAll("\\\\s+", " ") //replace multiple whitespace characters (including space, tab, new line, etc.) with one whitespace " " .replaceAll("""([,?.!:;])""", "") // replace the following punctuation characters: , ? . ! : ; . with the empty string "" .toLowerCase() // converting to lower-case .split(" ")) .map(x => (x,1)) .reduceByKey(_+_) .sortBy(_._2, false) .collect() // COMMAND ---------- // MAGIC %md // MAGIC ### A hack to get all the SoUs into Databricks CE // MAGIC This is not necessary if you have S3 credentials and just mount your S3 file system here. // COMMAND ---------- dbutils.fs.rm("dbfs:/datasets/sou/17900108.txt") // let's remove the file we added and add all the files in a loop (`fileNames` below is assumed to be defined in an earlier cell, not shown here, as the sequence of SoU file names to download) // COMMAND ---------- import java.net.URL import java.io.File import org.apache.commons.io.FileUtils for (name <- fileNames) { println(name) val localFileName = "/tmp/"+name val localFile = new File(localFileName) val url="https://dl.dropboxusercontent.com/u/3531607/datasets/StateOfUnionAddressesUSPresidentsUntil2016/sou/"+name FileUtils.copyURLToFile(new URL(url), localFile) val fLocalFileName="file:"+localFileName dbutils.fs.mv(fLocalFileName, "dbfs:/datasets/sou/") } display(dbutils.fs.ls("dbfs:/datasets/sou")) // this can take a couple of minutes // COMMAND ---------- // MAGIC %md // MAGIC ### Doing it all together for Barack Obama // COMMAND ---------- sc.textFile("dbfs:/datasets/sou/20160112.txt") // Barack Obama's last SoU .flatMap(line => line.replaceAll("\\\\s+", " ") //replace multiple whitespace characters (including space, tab, new line, etc.) with one whitespace " " .replaceAll("""([,?.!:;])""", "") // replace the following punctuation characters: , ? . ! : ; . with the empty string "" .toLowerCase() // converting to lower-case .split(" ")) .map(x => (x,1)) .reduceByKey(_+_) .sortBy(_._2, false) .collect() // COMMAND ---------- // MAGIC %md // MAGIC ### Reading all SoUs at once using `wholeTextFiles` // MAGIC // MAGIC Let us next read all text files (ending with `.txt`) in the directory `dbfs:/datasets/sou/` at once!
// MAGIC // MAGIC `SparkContext.wholeTextFiles` lets you read a directory containing multiple small text files, and returns each of them as `(filename, content)` pairs of strings. // MAGIC // MAGIC This is in contrast with `textFile`, which would return one record per line in each file. // COMMAND ---------- val souAll = sc.wholeTextFiles("dbfs:/datasets/sou/*.txt") // Shift+Enter to read all text files in dbfs:/datasets/sou/ souAll.cache() // let's cache this RDD for efficient reuse // COMMAND ---------- souAll.count() // Shift+Enter to count the number of entries in RDD[(String,String)] // COMMAND ---------- souAll.count() // Cntrl+Enter to count the number of entries in cached RDD[(String,String)] again (much faster!) // COMMAND ---------- // MAGIC %md // MAGIC Let's examine the first two elements of the RDD `souAll`. // COMMAND ---------- souAll.take(2) // Cntrl+Enter to see the first two elements of souAll // COMMAND ---------- // MAGIC %md // MAGIC Clearly, each element is a pair of Strings, where the first String gives the filename and the second String gives the contents in the file. // MAGIC // MAGIC This can be very helpful for simply looping through the files and taking an action, such as counting the number of words per address, as follows: // COMMAND ---------- // this just collects the file names, i.e., the first element of each tuple, given by "._1" souAll.map( fileContentsPair => fileContentsPair._1).collect() // COMMAND ---------- // MAGIC %md // MAGIC Let us find the number of words in each of the SoU addresses next (we need to work with Strings inside the closure!). // COMMAND ---------- val wcs = souAll.map( fileContentsPair => { val wc = fileContentsPair._2 .replaceAll("\\\\s+", " ") //replace multiple whitespace characters (including space, tab, new line, etc.) with one whitespace " " .replaceAll("""([,?.!:;])""", "") // replace the following punctuation characters: , ? . ! : ; . with the empty string "" .toLowerCase() // converting to lower-case .split(" ") // split each word separated by white space .size // find the length of array wc } ) // COMMAND ---------- wcs.collect() // COMMAND ---------- // MAGIC %md // MAGIC ## HOMEWORK // MAGIC * HOMEWORK WordCount 1: `sortBy` // MAGIC * HOMEWORK WordCount 2: `dbutils.fs` // COMMAND ---------- // MAGIC %md // MAGIC ##### HOMEWORK WordCount 1. `sortBy` // MAGIC // MAGIC Let's understand `sortBy` a bit more carefully. // COMMAND ---------- val example = "Master, Master! It's me, Sméagol... mhrhm*%* But they took away our precious, they wronged us. Gollum will protect us..., Master, it's me Sméagol." // COMMAND ---------- val words = example.replaceAll("\\\\s+", " ") //replace multiple whitespace characters (including space, tab, new line, etc.) with one whitespace " " .replaceAll("""([,?.!:;])""", "") // replace the following punctuation characters: , ? . ! : ;
with the empty string "" .toLowerCase() // converting to lower-case .split(" ") // COMMAND ---------- val rddWords = sc.parallelize(words) // COMMAND ---------- rddWords.take(10) // COMMAND ---------- val wordCounts = rddWords .map(x => (x,1)) .reduceByKey(_+_) // COMMAND ---------- val top10 = wordCounts.sortBy(_._2, false).take(10) // COMMAND ---------- // MAGIC %md // MAGIC Make your code easy to read for other developers ;) // MAGIC Use `case` patterns with well-defined variable names that everyone can understand // COMMAND ---------- val top10 = wordCounts.sortBy({ case (word, count) => count }, false).take(10) // COMMAND ---------- // MAGIC %md // MAGIC If you just want a total count of all words in the file: // COMMAND ---------- rddWords.count // COMMAND ---------- // MAGIC %md // MAGIC ##### HOMEWORK WordCount 2: `dbutils.fs` // MAGIC // MAGIC Have a brief look at what other commands dbutils.fs supports. We will introduce them as needed. // COMMAND ---------- dbutils.fs.help // some of these were used to ETL this data into dbfs:/datasets/sou // COMMAND ---------- // MAGIC %md // MAGIC # To grab all the data // MAGIC #### This is not possible in CE due to file size limits in /tmp unless you use your AWS S3 keys - hence our hack over a loop grabbing one file at a time :) // COMMAND ---------- // MAGIC %sh // MAGIC wget https://dl.dropboxusercontent.com/u/3531607/datasets/StateOfUnionAddressesUSPresidentsUntil2016/sou.tar.gz // COMMAND ---------- // MAGIC %sh // MAGIC # uncompress the sou.tar.gz // MAGIC tar zxvf sou.tar.gz // COMMAND ---------- dbutils.fs.mkdirs("dbfs:/datasets/sou") // make a directory in dbfs:/datasets called sou dbutils.fs.cp("file:/databricks/driver/sou", "dbfs:/datasets/sou/",recurse=true) display(dbutils.fs.ls("dbfs:/datasets/sou")) // Cntrl+Enter to display the files in dbfs:/datasets/sou // COMMAND ---------- // MAGIC %md // MAGIC // MAGIC # [Big Data Analysis for Humanities and Social Sciences](https://www.eventbrite.co.uk/e/big-data-analysis-for-the-humanities-and-social-sciences-tickets-26708754604) // MAGIC // MAGIC ### August 26, 2016, King's Digital Lab, King's College London // MAGIC #### prepared by [Raazesh Sainudiin](https://nz.linkedin.com/in/raazesh-sainudiin-45955845) // MAGIC // MAGIC *supported by* [![](https://raw.githubusercontent.com/raazesh-sainudiin/scalable-data-science/master/images/databricks_logoTM_200px.png)](https://databricks.com/)
lamastex/scalable-data-science
db/20160826_KDL_Intro2BDA_HumSocSci/03_WordCount/005_WordCount.scala
Scala
unlicense
21,202
package org.knora.webapi.store.triplestore import akka.actor.Props import akka.testkit.ImplicitSender import com.typesafe.config.ConfigFactory import org.knora.webapi.messages.v1.store.triplestoremessages.{RdfDataObject, ResetTriplestoreContent, ResetTriplestoreContentACK, SparqlUpdateRequest} import org.knora.webapi.{CoreSpec, LiveActorMaker, TriplestoreResponseException} import org.knora.webapi.store._ import scala.concurrent.duration._ /** * Tests the GraphDB triplestore consistency checking rules in webapi/scripts/KnoraRules.pie. */ class GraphDBConsistencyCheckingSpec extends CoreSpec(GraphDBConsistencyCheckingSpec.config) with ImplicitSender { import GraphDBConsistencyCheckingSpec._ val storeManager = system.actorOf(Props(new StoreManager with LiveActorMaker), STORE_MANAGER_ACTOR_NAME) private val timeout = 30.seconds val rdfDataObjects = List( RdfDataObject(path = "_test_data/store.triplestore.GraphDBConsistencyCheckingSpec/incunabula-data.ttl", name = "http://www.knora.org/data/incunabula"), RdfDataObject(path = "_test_data/all_data/anything-data.ttl", name = "http://www.knora.org/data/anything") ) if (settings.triplestoreType.startsWith("graphdb")) { "Load test data" in { storeManager ! ResetTriplestoreContent(rdfDataObjects) expectMsg(300.seconds, ResetTriplestoreContentACK()) } "not create a new resource with a missing property that has owl:cardinality 1" in { storeManager ! SparqlUpdateRequest(missingPartOf) expectMsgPF(timeout) { case akka.actor.Status.Failure(TriplestoreResponseException(msg: String, _)) => (msg.contains(s"$CONSISTENCY_CHECK_ERROR cardinality_1_not_less_any_object") && msg.trim.endsWith("http://data.knora.org/missingPartOf http://www.knora.org/ontology/incunabula#partOf *")) should ===(true) } } "not create a new resource with a missing inherited property that has owl:minCardinality 1" in { storeManager ! SparqlUpdateRequest(missingFileValue) expectMsgPF(timeout) { case akka.actor.Status.Failure(TriplestoreResponseException(msg: String, _)) => (msg.contains(s"$CONSISTENCY_CHECK_ERROR min_cardinality_1_any_object") && msg.trim.endsWith("http://data.knora.org/missingFileValue http://www.knora.org/ontology/knora-base#hasStillImageFileValue *")) should ===(true) } } "not create a new resource with two values for a property that has owl:maxCardinality 1" in { storeManager ! SparqlUpdateRequest(tooManyPublocs) expectMsgPF(timeout) { case akka.actor.Status.Failure(TriplestoreResponseException(msg: String, _)) => msg.contains(s"$CONSISTENCY_CHECK_ERROR max_cardinality_1_with_deletion_flag") should ===(true) } } "not create a new resource with more than one lastModificationDate" in { storeManager ! SparqlUpdateRequest(tooManyLastModificationDates) expectMsgPF(timeout) { case akka.actor.Status.Failure(TriplestoreResponseException(msg: String, _)) => msg.contains(s"$CONSISTENCY_CHECK_ERROR max_cardinality_1_without_deletion_flag") should ===(true) } } "not create a new resource with a property that cannot have a resource as a subject" in { storeManager ! SparqlUpdateRequest(wrongSubjectClass) expectMsgPF(timeout) { case akka.actor.Status.Failure(TriplestoreResponseException(msg: String, _)) => msg.contains(s"$CONSISTENCY_CHECK_ERROR subject_class_constraint") should ===(true) } } "not create a new resource with properties whose objects have the wrong types" in { storeManager ! 
SparqlUpdateRequest(wrongObjectClass) expectMsgPF(timeout) { case akka.actor.Status.Failure(TriplestoreResponseException(msg: String, _)) => msg.contains(s"$CONSISTENCY_CHECK_ERROR object_class_constraint") should ===(true) } } "not create a new resource with a link to a resource of the wrong class" in { storeManager ! SparqlUpdateRequest(wrongLinkTargetClass) expectMsgPF(timeout) { case akka.actor.Status.Failure(TriplestoreResponseException(msg: String, _)) => msg.contains(s"$CONSISTENCY_CHECK_ERROR object_class_constraint") should ===(true) } } "not create a new resource with a property for which there is no cardinality" in { storeManager ! SparqlUpdateRequest(resourcePropWithNoCardinality) expectMsgPF(timeout) { case akka.actor.Status.Failure(TriplestoreResponseException(msg: String, _)) => msg.contains(s"$CONSISTENCY_CHECK_ERROR resource_prop_cardinality_any") should ===(true) } } "not create a new resource containing a value with a property for which there is no cardinality" in { storeManager ! SparqlUpdateRequest(valuePropWithNoCardinality) expectMsgPF(timeout) { case akka.actor.Status.Failure(TriplestoreResponseException(msg: String, _)) => msg.contains(s"$CONSISTENCY_CHECK_ERROR value_prop_cardinality_any") should ===(true) } } "not create a new resource with two labels" in { storeManager ! SparqlUpdateRequest(twoLabels) expectMsgPF(timeout) { case akka.actor.Status.Failure(TriplestoreResponseException(msg: String, _)) => msg.contains(s"$CONSISTENCY_CHECK_ERROR cardinality_1_not_greater_rdfs_label") should ===(true) } } "not create a LinkValue without permissions" in { storeManager ! SparqlUpdateRequest(linkValueWithoutPermissions) expectMsgPF(timeout) { case akka.actor.Status.Failure(TriplestoreResponseException(msg: String, _)) => msg.contains(s"$CONSISTENCY_CHECK_ERROR cardinality_1_not_less_any_object") should ===(true) } } } else { s"Not running GraphDBConsistencyCheckingSpec with triplestore type ${settings.triplestoreType}" in {} } } object GraphDBConsistencyCheckingSpec { // A string that's found in all consistency check error messages from GraphDB. private val CONSISTENCY_CHECK_ERROR = "Consistency check" private val config = ConfigFactory.parseString( """ # akka.loglevel = "DEBUG" # akka.stdout-loglevel = "DEBUG" """.stripMargin) // Tries to create a new incunabula:page with a missing incunabula:partOf link. private val missingPartOf = """ |PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> |PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#> |PREFIX owl: <http://www.w3.org/2002/07/owl#> |PREFIX knora-base: <http://www.knora.org/ontology/knora-base#> | |INSERT { | GRAPH ?dataNamedGraph { | ?resource0 rdf:type ?resourceClass0 ; | knora-base:isDeleted "false"^^xsd:boolean ; | knora-base:attachedToUser ?creatorIri ; | knora-base:attachedToProject ?projectIri ; | rdfs:label ?label0 ; | knora-base:hasPermissions "V knora-base:UnknownUser|M knora-base:ProjectMember" ; | knora-base:creationDate ?currentTime . | | | | # Value 1 | # Property: http://www.knora.org/ontology/incunabula#pagenum | | | ?newValue0_1 rdf:type ?valueType0_1 ; | knora-base:isDeleted "false"^^xsd:boolean . | | | | ?newValue0_1 knora-base:valueHasString "recto" . | | | | ?newValue0_1 <http://www.knora.org/ontology/knora-base#attachedToUser> ?creatorIri ; | knora-base:hasPermissions "V knora-base:UnknownUser|M knora-base:ProjectMember" . | | | ?newValue0_1 knora-base:valueHasOrder ?nextOrder0_1 ; | knora-base:valueCreationDate ?currentTime . | | | | ?resource0 ?property0_1 ?newValue0_1 . 
| | | | | # Value 2 | # Property: http://www.knora.org/ontology/knora-base#hasStillImageFileValue | | | ?newValue0_2 rdf:type ?valueType0_2 ; | knora-base:isDeleted "false"^^xsd:boolean . | | | ?newValue0_2 knora-base:originalFilename "test.jpg" ; | knora-base:originalMimeType "image/jpeg" ; | knora-base:internalFilename "full.jp2" ; | knora-base:internalMimeType "image/jp2" ; | knora-base:dimX 800 ; | knora-base:dimY 800 ; | knora-base:qualityLevel 100 ; | knora-base:valueHasQname "full" . | | | | ?newValue0_2 knora-base:valueHasString "test.jpg" . | | | ?newValue0_2 <http://www.knora.org/ontology/knora-base#attachedToUser> ?creatorIri ; | knora-base:hasPermissions "V knora-base:UnknownUser|M knora-base:ProjectMember" . | | | ?newValue0_2 knora-base:valueHasOrder ?nextOrder0_2 ; | knora-base:valueCreationDate ?currentTime . | | | | | ?resource0 ?property0_2 ?newValue0_2 . | | | | | # Value 3 | # Property: http://www.knora.org/ontology/knora-base#hasStillImageFileValue | | | ?newValue0_3 rdf:type ?valueType0_3 ; | knora-base:isDeleted "false"^^xsd:boolean . | | | ?newValue0_3 knora-base:originalFilename "test.jpg" ; | knora-base:originalMimeType "image/jpeg" ; | knora-base:internalFilename "thumb.jpg" ; | knora-base:internalMimeType "image/jpeg" ; | knora-base:dimX 80 ; | knora-base:dimY 80 ; | knora-base:qualityLevel 10 ; | knora-base:valueHasQname "thumbnail" . | | | ?newValue0_3 knora-base:isPreview true . | | | ?newValue0_3 knora-base:valueHasString "test.jpg" . | | | | ?newValue0_3 <http://www.knora.org/ontology/knora-base#attachedToUser> ?creatorIri ; | knora-base:hasPermissions "V knora-base:UnknownUser|M knora-base:ProjectMember" . | | | ?newValue0_3 knora-base:valueHasOrder ?nextOrder0_3 ; | knora-base:valueCreationDate ?currentTime . | | | | ?resource0 ?property0_3 ?newValue0_3 . | | | | | # Value 4 | # Property: http://www.knora.org/ontology/incunabula#hasRightSideband | | | | ?resource0 ?linkProperty0_4 ?linkTarget0_4 . | | | | ?newLinkValue0_4 rdf:type knora-base:LinkValue ; | knora-base:isDeleted "false"^^xsd:boolean ; | rdf:subject ?resource0 ; | rdf:predicate ?linkProperty0_4 ; | rdf:object ?linkTarget0_4 ; | knora-base:valueHasRefCount 1 ; | | knora-base:valueHasOrder ?nextOrder0_4 ; | knora-base:valueCreationDate ?currentTime . | | | ?newLinkValue0_4 <http://www.knora.org/ontology/knora-base#attachedToUser> ?creatorIri ; | knora-base:hasPermissions "V knora-base:UnknownUser|M knora-base:ProjectMember" . | | | ?resource0 ?linkValueProperty0_4 ?newLinkValue0_4 . | | | | | # Value 5 | # Property: http://www.knora.org/ontology/incunabula#origname | | | ?newValue0_5 rdf:type ?valueType0_5 ; | knora-base:isDeleted "false"^^xsd:boolean . | | | | ?newValue0_5 knora-base:valueHasString "Blatt" . | | | | | ?newValue0_5 <http://www.knora.org/ontology/knora-base#attachedToUser> ?creatorIri ; | knora-base:hasPermissions "V knora-base:UnknownUser|M knora-base:ProjectMember" . | | | ?newValue0_5 knora-base:valueHasOrder ?nextOrder0_5 ; | knora-base:valueCreationDate ?currentTime . | | | | ?resource0 ?property0_5 ?newValue0_5 . | | | | | # Value 6 | # Property: http://www.knora.org/ontology/incunabula#seqnum | | | ?newValue0_6 rdf:type ?valueType0_6 ; | knora-base:isDeleted "false"^^xsd:boolean . | | | | ?newValue0_6 knora-base:valueHasInteger 1 ; | knora-base:valueHasString "1" . | | | ?newValue0_6 <http://www.knora.org/ontology/knora-base#attachedToUser> ?creatorIri ; | knora-base:hasPermissions "V knora-base:UnknownUser|M knora-base:ProjectMember" . 
| | | ?newValue0_6 knora-base:valueHasOrder ?nextOrder0_6 ; | knora-base:valueCreationDate ?currentTime . | | | | ?resource0 ?property0_6 ?newValue0_6 . | | } |} | | | USING <http://www.ontotext.com/explicit> | |WHERE { | BIND(IRI("http://www.knora.org/data/incunabula") AS ?dataNamedGraph) | BIND(IRI("http://data.knora.org/missingPartOf") AS ?resource0) | BIND(IRI("http://www.knora.org/ontology/incunabula#page") AS ?resourceClass0) | BIND(IRI("http://data.knora.org/users/b83acc5f05") AS ?creatorIri) | BIND(IRI("http://data.knora.org/projects/77275339") AS ?projectIri) | BIND(str("Test-Page") AS ?label0) | BIND(NOW() AS ?currentTime) | | | | # Value 1 | # Property: http://www.knora.org/ontology/incunabula#pagenum | | BIND(IRI("http://www.knora.org/ontology/incunabula#pagenum") AS ?property0_1) | BIND(IRI("http://data.knora.org/missingPartOf/values/nQ3tRObaQWe74WQv2_OdCg") AS ?newValue0_1) | BIND(IRI("http://www.knora.org/ontology/knora-base#TextValue") AS ?valueType0_1) | | | | ?property0_1 knora-base:objectClassConstraint ?propertyRange0_1 . | ?valueType0_1 rdfs:subClassOf* ?propertyRange0_1 . | | | | ?resourceClass0 rdfs:subClassOf* ?restriction0_1 . | ?restriction0_1 a owl:Restriction . | ?restriction0_1 owl:onProperty ?property0_1 . | | | | | BIND(0 AS ?nextOrder0_1) | | | | | | | # Value 2 | # Property: http://www.knora.org/ontology/knora-base#hasStillImageFileValue | | BIND(IRI("http://www.knora.org/ontology/knora-base#hasStillImageFileValue") AS ?property0_2) | BIND(IRI("http://data.knora.org/missingPartOf/values/GVE754RbT1CykpMnwR3Csw") AS ?newValue0_2) | BIND(IRI("http://www.knora.org/ontology/knora-base#StillImageFileValue") AS ?valueType0_2) | | | | ?property0_2 knora-base:objectClassConstraint ?propertyRange0_2 . | ?valueType0_2 rdfs:subClassOf* ?propertyRange0_2 . | | | | ?resourceClass0 rdfs:subClassOf* ?restriction0_2 . | ?restriction0_2 a owl:Restriction . | ?restriction0_2 owl:onProperty ?property0_2 . | | | | | BIND(0 AS ?nextOrder0_2) | | | | | | | # Value 3 | # Property: http://www.knora.org/ontology/knora-base#hasStillImageFileValue | | BIND(IRI("http://www.knora.org/ontology/knora-base#hasStillImageFileValue") AS ?property0_3) | BIND(IRI("http://data.knora.org/missingPartOf/values/LOT71U6hSQu7shi76oRxWQ") AS ?newValue0_3) | BIND(IRI("http://www.knora.org/ontology/knora-base#StillImageFileValue") AS ?valueType0_3) | | | | ?property0_3 knora-base:objectClassConstraint ?propertyRange0_3 . | ?valueType0_3 rdfs:subClassOf* ?propertyRange0_3 . | | | | ?resourceClass0 rdfs:subClassOf* ?restriction0_3 . | ?restriction0_3 a owl:Restriction . | ?restriction0_3 owl:onProperty ?property0_3 . | | | | | BIND(1 AS ?nextOrder0_3) | | | | | | | # Value 4 | # Property: http://www.knora.org/ontology/incunabula#hasRightSideband | | BIND(IRI("http://www.knora.org/ontology/incunabula#hasRightSideband") AS ?linkProperty0_4) | BIND(IRI("http://www.knora.org/ontology/incunabula#hasRightSidebandValue") AS ?linkValueProperty0_4) | BIND(IRI("http://data.knora.org/missingPartOf/values/i5tE5i-RRLOH631soexPFw") AS ?newLinkValue0_4) | BIND(IRI("http://data.knora.org/482a33d65c36") AS ?linkTarget0_4) | | | | ?linkTarget0_4 rdf:type ?linkTargetClass0_4 . | ?linkTargetClass0_4 rdfs:subClassOf+ knora-base:Resource . | | | | ?linkProperty0_4 knora-base:objectClassConstraint ?expectedTargetClass0_4 . | ?linkTargetClass0_4 rdfs:subClassOf* ?expectedTargetClass0_4 . | | | | MINUS { | ?linkTarget4 knora-base:isDeleted true . | } | | | | ?resourceClass0 rdfs:subClassOf* ?restriction0_4 . 
| ?restriction0_4 a owl:Restriction . | ?restriction0_4 owl:onProperty ?linkProperty0_4 . | | | | | BIND(0 AS ?nextOrder0_4) | | | | | | | # Value 5 | # Property: http://www.knora.org/ontology/incunabula#origname | | BIND(IRI("http://www.knora.org/ontology/incunabula#origname") AS ?property0_5) | BIND(IRI("http://data.knora.org/missingPartOf/values/MLWWT-F8SlKsZmRo4JMLHw") AS ?newValue0_5) | BIND(IRI("http://www.knora.org/ontology/knora-base#TextValue") AS ?valueType0_5) | | | | ?property0_5 knora-base:objectClassConstraint ?propertyRange0_5 . | ?valueType0_5 rdfs:subClassOf* ?propertyRange0_5 . | | | | ?resourceClass0 rdfs:subClassOf* ?restriction0_5 . | ?restriction0_5 a owl:Restriction . | ?restriction0_5 owl:onProperty ?property0_5 . | | | | | BIND(0 AS ?nextOrder0_5) | | | | | | | # Value 6 | # Property: http://www.knora.org/ontology/incunabula#seqnum | | BIND(IRI("http://www.knora.org/ontology/incunabula#seqnum") AS ?property0_6) | BIND(IRI("http://data.knora.org/missingPartOf/values/uWQtW_X3RxKjFyGrsQwbpQ") AS ?newValue0_6) | BIND(IRI("http://www.knora.org/ontology/knora-base#IntValue") AS ?valueType0_6) | | | | ?property0_6 knora-base:objectClassConstraint ?propertyRange0_6 . | ?valueType0_6 rdfs:subClassOf* ?propertyRange0_6 . | | | | ?resourceClass0 rdfs:subClassOf* ?restriction0_6 . | ?restriction0_6 a owl:Restriction . | ?restriction0_6 owl:onProperty ?property0_6 . | | | | | BIND(0 AS ?nextOrder0_6) | | | | | # Value 7 | # Property: http://www.knora.org/ontology/incunabula#pagenum | | BIND(IRI("http://www.knora.org/ontology/incunabula#pagenum") AS ?property0_7) | BIND(IRI("http://data.knora.org/missingPartOf/values/nQ3tRObaQWe74WQv2_OdCg") AS ?newValue0_7) | BIND(IRI("http://www.knora.org/ontology/knora-base#TextValue") AS ?valueType0_7) | | | | ?property0_7 knora-base:objectClassConstraint ?propertyRange0_7 . | ?valueType0_7 rdfs:subClassOf* ?propertyRange0_7 . | | | | | | | BIND(0 AS ?nextOrder0_1) | |} """.stripMargin // Tries to create an incunabula:page with a missing file value (the cardinality is inherited). private val missingFileValue = """ |PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> |PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#> |PREFIX owl: <http://www.w3.org/2002/07/owl#> |PREFIX knora-base: <http://www.knora.org/ontology/knora-base#> | |INSERT { | GRAPH ?dataNamedGraph { | ?resource rdf:type ?resourceClass ; | knora-base:isDeleted "false"^^xsd:boolean ; | knora-base:attachedToUser ?creatorIri ; | knora-base:attachedToProject ?projectIri ; | rdfs:label ?label ; | knora-base:hasPermissions "V knora-base:UnknownUser|M knora-base:ProjectMember" ; | knora-base:creationDate ?currentTime . | | | | # Value 0 | # Property: http://www.knora.org/ontology/incunabula#partOf | | | | ?resource ?linkProperty0 ?linkTarget0 . | | | | ?newLinkValue0 rdf:type knora-base:LinkValue ; | knora-base:isDeleted "false"^^xsd:boolean ; | rdf:subject ?resource ; | rdf:predicate ?linkProperty0 ; | rdf:object ?linkTarget0 ; | knora-base:valueHasRefCount 1 ; | | knora-base:valueHasOrder ?nextOrder0 ; | knora-base:valueCreationDate ?currentTime . | | | ?newLinkValue0 <http://www.knora.org/ontology/knora-base#attachedToUser> ?creatorIri ; | knora-base:hasPermissions "V knora-base:UnknownUser|M knora-base:ProjectMember" . | | | | ?resource ?linkValueProperty0 ?newLinkValue0 . | | | | | # Value 1 | # Property: http://www.knora.org/ontology/incunabula#pagenum | | | ?newValue1 rdf:type ?valueType1 ; | knora-base:isDeleted "false"^^xsd:boolean . 
| | | | ?newValue1 knora-base:valueHasString "recto" . | | | | ?newValue1 <http://www.knora.org/ontology/knora-base#attachedToUser> ?creatorIri ; | knora-base:hasPermissions "V knora-base:UnknownUser|M knora-base:ProjectMember" . | | | ?newValue1 knora-base:valueHasOrder ?nextOrder1 ; | knora-base:valueCreationDate ?currentTime . | | | | | ?resource ?property1 ?newValue1 . | | | | | # Value 4 | # Property: http://www.knora.org/ontology/incunabula#hasRightSideband | | | | ?resource ?linkProperty4 ?linkTarget4 . | | | | ?newLinkValue4 rdf:type knora-base:LinkValue ; | knora-base:isDeleted "false"^^xsd:boolean ; | rdf:subject ?resource ; | rdf:predicate ?linkProperty4 ; | rdf:object ?linkTarget4 ; | knora-base:valueHasRefCount 1 ; | | knora-base:valueHasOrder ?nextOrder4 ; | knora-base:valueCreationDate ?currentTime . | | | ?newLinkValue4 <http://www.knora.org/ontology/knora-base#attachedToUser> ?creatorIri ; | knora-base:hasPermissions "V knora-base:UnknownUser|M knora-base:ProjectMember" . | | | | ?resource ?linkValueProperty4 ?newLinkValue4 . | | | | | # Value 5 | # Property: http://www.knora.org/ontology/incunabula#origname | | | ?newValue5 rdf:type ?valueType5 ; | knora-base:isDeleted "false"^^xsd:boolean . | | | | ?newValue5 knora-base:valueHasString "Blatt" . | | | | ?newValue5 <http://www.knora.org/ontology/knora-base#attachedToUser> ?creatorIri ; | knora-base:hasPermissions "V knora-base:UnknownUser|M knora-base:ProjectMember" . | | | ?newValue5 knora-base:valueHasOrder ?nextOrder5 ; | knora-base:valueCreationDate ?currentTime . | | | | ?resource ?property5 ?newValue5 . | | | | | # Value 6 | # Property: http://www.knora.org/ontology/incunabula#seqnum | | | ?newValue6 rdf:type ?valueType6 ; | knora-base:isDeleted "false"^^xsd:boolean . | | | | ?newValue6 knora-base:valueHasInteger 1 ; | knora-base:valueHasString "1" . | | | | ?newValue6 <http://www.knora.org/ontology/knora-base#attachedToUser> ?creatorIri ; | knora-base:hasPermissions "V knora-base:UnknownUser|M knora-base:ProjectMember" . | | | ?newValue6 knora-base:valueHasOrder ?nextOrder6 ; | knora-base:valueCreationDate ?currentTime . | | | | ?resource ?property6 ?newValue6 . | | } |} | | | USING <http://www.ontotext.com/explicit> | |WHERE { | BIND(IRI("http://www.knora.org/data/incunabula") AS ?dataNamedGraph) | BIND(IRI("http://data.knora.org/missingFileValue") AS ?resource) | BIND(IRI("http://www.knora.org/ontology/incunabula#page") AS ?resourceClass) | BIND(IRI("http://data.knora.org/users/b83acc5f05") AS ?creatorIri) | BIND(IRI("http://data.knora.org/projects/77275339") AS ?projectIri) | BIND(str("Test-Page") AS ?label) | BIND(NOW() AS ?currentTime) | | | | # Value 0 | # Property: http://www.knora.org/ontology/incunabula#partOf | | BIND(IRI("http://www.knora.org/ontology/incunabula#partOf") AS ?linkProperty0) | BIND(IRI("http://www.knora.org/ontology/incunabula#partOfValue") AS ?linkValueProperty0) | BIND(IRI("http://data.knora.org/missingFileValue/values/RFzfHLk1R-mU66NAFrVTYQ") AS ?newLinkValue0) | BIND(IRI("http://data.knora.org/c5058f3a") AS ?linkTarget0) | | | | ?linkTarget0 rdf:type ?linkTargetClass0 . | ?linkTargetClass0 rdfs:subClassOf+ knora-base:Resource . | | | | ?linkProperty0 knora-base:objectClassConstraint ?expectedTargetClass0 . | ?linkTargetClass0 rdfs:subClassOf* ?expectedTargetClass0 . | | | | MINUS { | ?linkTarget0 knora-base:isDeleted true . | } | | | | ?resourceClass rdfs:subClassOf* ?restriction0 . | ?restriction0 a owl:Restriction . | ?restriction0 owl:onProperty ?linkProperty0 . 
| | | BIND(0 AS ?nextOrder0) | | | | | | | # Value 1 | # Property: http://www.knora.org/ontology/incunabula#pagenum | | BIND(IRI("http://www.knora.org/ontology/incunabula#pagenum") AS ?property1) | BIND(IRI("http://data.knora.org/missingFileValue/values/nQ3tRObaQWe74WQv2_OdCg") AS ?newValue1) | BIND(IRI("http://www.knora.org/ontology/knora-base#TextValue") AS ?valueType1) | | | | ?property1 knora-base:objectClassConstraint ?propertyRange1 . | ?valueType1 rdfs:subClassOf* ?propertyRange1 . | | | | ?resourceClass rdfs:subClassOf* ?restriction1 . | ?restriction1 a owl:Restriction . | ?restriction1 owl:onProperty ?property1 . | | | BIND(0 AS ?nextOrder1) | | | | | | | # Value 4 | # Property: http://www.knora.org/ontology/incunabula#hasRightSideband | | BIND(IRI("http://www.knora.org/ontology/incunabula#hasRightSideband") AS ?linkProperty4) | BIND(IRI("http://www.knora.org/ontology/incunabula#hasRightSidebandValue") AS ?linkValueProperty4) | BIND(IRI("http://data.knora.org/missingFileValue/values/i5tE5i-RRLOH631soexPFw") AS ?newLinkValue4) | BIND(IRI("http://data.knora.org/482a33d65c36") AS ?linkTarget4) | | | | ?linkTarget4 rdf:type ?linkTargetClass4 . | ?linkTargetClass4 rdfs:subClassOf+ knora-base:Resource . | | | | ?linkProperty4 knora-base:objectClassConstraint ?expectedTargetClass4 . | ?linkTargetClass4 rdfs:subClassOf* ?expectedTargetClass4 . | | | | MINUS { | ?linkTarget4 knora-base:isDeleted true . | } | | | | ?resourceClass rdfs:subClassOf* ?restriction4 . | ?restriction4 a owl:Restriction . | ?restriction4 owl:onProperty ?linkProperty4 . | | | | BIND(0 AS ?nextOrder4) | | | | | | | # Value 5 | # Property: http://www.knora.org/ontology/incunabula#origname | | BIND(IRI("http://www.knora.org/ontology/incunabula#origname") AS ?property5) | BIND(IRI("http://data.knora.org/missingFileValue/values/MLWWT-F8SlKsZmRo4JMLHw") AS ?newValue5) | BIND(IRI("http://www.knora.org/ontology/knora-base#TextValue") AS ?valueType5) | | | | ?property5 knora-base:objectClassConstraint ?propertyRange5 . | ?valueType5 rdfs:subClassOf* ?propertyRange5 . | | | | ?resourceClass rdfs:subClassOf* ?restriction5 . | ?restriction5 a owl:Restriction . | ?restriction5 owl:onProperty ?property5 . | | | BIND(0 AS ?nextOrder5) | | | | | | | # Value 6 | # Property: http://www.knora.org/ontology/incunabula#seqnum | | BIND(IRI("http://www.knora.org/ontology/incunabula#seqnum") AS ?property6) | BIND(IRI("http://data.knora.org/missingFileValue/values/uWQtW_X3RxKjFyGrsQwbpQ") AS ?newValue6) | BIND(IRI("http://www.knora.org/ontology/knora-base#IntValue") AS ?valueType6) | | | | ?property6 knora-base:objectClassConstraint ?propertyRange6 . | ?valueType6 rdfs:subClassOf* ?propertyRange6 . | | | | ?resourceClass rdfs:subClassOf* ?restriction6 . | ?restriction6 a owl:Restriction . | ?restriction6 owl:onProperty ?property6 . | | | | BIND(0 AS ?nextOrder6) | | | |} """.stripMargin // Tries to create an incunabula:book with two incunabula:publoc values (at most one is allowed). 
private val tooManyPublocs = """ |PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> |PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#> |PREFIX owl: <http://www.w3.org/2002/07/owl#> |PREFIX knora-base: <http://www.knora.org/ontology/knora-base#> | |INSERT { | GRAPH ?dataNamedGraph { | ?resource rdf:type ?resourceClass ; | knora-base:isDeleted "false"^^xsd:boolean ; | knora-base:attachedToUser ?creatorIri ; | knora-base:attachedToProject ?projectIri ; | rdfs:label ?label ; | knora-base:hasPermissions "V knora-base:UnknownUser|M knora-base:ProjectMember" ; | knora-base:creationDate ?currentTime . | | | | # Value 0 | # Property: http://www.knora.org/ontology/incunabula#title | | | ?newValue0 rdf:type ?valueType0 ; | knora-base:isDeleted "false"^^xsd:boolean . | | | | ?newValue0 knora-base:valueHasString "A beautiful book" . | | | | ?newValue0 <http://www.knora.org/ontology/knora-base#attachedToUser> ?creatorIri ; | knora-base:hasPermissions "V knora-base:UnknownUser|M knora-base:ProjectMember" . | | | ?newValue0 knora-base:valueHasOrder ?nextOrder0 ; | knora-base:valueCreationDate ?currentTime . | | | | | ?resource ?property0 ?newValue0 . | | | | | # Value 1 | # Property: http://www.knora.org/ontology/incunabula#pubdate | | | ?newValue1 rdf:type ?valueType1 ; | knora-base:isDeleted "false"^^xsd:boolean . | | | | ?newValue1 knora-base:valueHasStartJDN 2457360 ; | knora-base:valueHasEndJDN 2457360 ; | knora-base:valueHasStartPrecision "DAY" ; | knora-base:valueHasEndPrecision "DAY" ; | knora-base:valueHasCalendar "GREGORIAN" ; | knora-base:valueHasString "2015-12-03" . | | | | ?newValue1 <http://www.knora.org/ontology/knora-base#attachedToUser> ?creatorIri ; | knora-base:hasPermissions "V knora-base:UnknownUser|M knora-base:ProjectMember" . | | | ?newValue1 knora-base:valueHasOrder ?nextOrder1 ; | knora-base:valueCreationDate ?currentTime . | | | | ?resource ?property1 ?newValue1 . | | | | | # Value 2 | # Property: http://www.knora.org/ontology/incunabula#citation | | | ?newValue2 rdf:type ?valueType2 ; | knora-base:isDeleted "false"^^xsd:boolean . | | | | ?newValue2 knora-base:valueHasString "noch ein letztes" . | | | | | ?newValue2 <http://www.knora.org/ontology/knora-base#attachedToUser> ?creatorIri ; | knora-base:hasPermissions "V knora-base:UnknownUser|M knora-base:ProjectMember" . | | | ?newValue2 knora-base:valueHasOrder ?nextOrder2 ; | knora-base:valueCreationDate ?currentTime . | | | | ?resource ?property2 ?newValue2 . | | | | | # Value 3 | # Property: http://www.knora.org/ontology/incunabula#citation | | | ?newValue3 rdf:type ?valueType3 ; | knora-base:isDeleted "false"^^xsd:boolean . | | | | ?newValue3 knora-base:valueHasString "ein Zitat" . | | | ?newValue3 <http://www.knora.org/ontology/knora-base#attachedToUser> ?creatorIri ; | knora-base:hasPermissions "V knora-base:UnknownUser|M knora-base:ProjectMember" . | | | ?newValue3 knora-base:valueHasOrder ?nextOrder3 ; | knora-base:valueCreationDate ?currentTime . | | | | ?resource ?property3 ?newValue3 . | | | | | # Value 4 | # Property: http://www.knora.org/ontology/incunabula#citation | | | ?newValue4 rdf:type ?valueType4 ; | knora-base:isDeleted "false"^^xsd:boolean . | | | | ?newValue4 knora-base:valueHasString "und noch eines" . | | | | ?newValue4 <http://www.knora.org/ontology/knora-base#attachedToUser> ?creatorIri ; | knora-base:hasPermissions "V knora-base:UnknownUser|M knora-base:ProjectMember" . | | | ?newValue4 knora-base:valueHasOrder ?nextOrder4 ; | knora-base:valueCreationDate ?currentTime . 
| | | | ?resource ?property4 ?newValue4 . | | | | | # Value 5 | # Property: http://www.knora.org/ontology/incunabula#citation | | | ?newValue5 rdf:type ?valueType5 ; | knora-base:isDeleted "false"^^xsd:boolean . | | | | ?newValue5 knora-base:valueHasString "This citation refers to another resource" . | | | | | ?newValue5 knora-base:valueHasStandoff | [ | | | rdf:type knora-base:StandoffVisualAttribute ; | knora-base:standoffHasAttribute "bold" ; | | | knora-base:standoffHasStart 5 ; | knora-base:standoffHasEnd 13 | ] . | | ?newValue5 knora-base:valueHasStandoff | [ | | | rdf:type knora-base:StandoffLink ; | knora-base:standoffHasAttribute "_link" ; | knora-base:standoffHasLink <http://data.knora.org/c5058f3a> ; | | | knora-base:standoffHasStart 32 ; | knora-base:standoffHasEnd 40 | ] . | | | | ?newValue5 <http://www.knora.org/ontology/knora-base#attachedToUser> ?creatorIri ; | knora-base:hasPermissions "V knora-base:UnknownUser|M knora-base:ProjectMember" . | | | ?newValue5 knora-base:valueHasOrder ?nextOrder5 ; | knora-base:valueCreationDate ?currentTime . | | | | ?resource ?property5 ?newValue5 . | | | | | # Value 6 | # Property: http://www.knora.org/ontology/incunabula#publoc | | | ?newValue6 rdf:type ?valueType6 ; | knora-base:isDeleted "false"^^xsd:boolean . | | | | ?newValue6 knora-base:valueHasString "Entenhausen" . | | | | | ?newValue6 <http://www.knora.org/ontology/knora-base#attachedToUser> ?creatorIri ; | knora-base:hasPermissions "V knora-base:UnknownUser|M knora-base:ProjectMember" . | | | ?newValue6 knora-base:valueHasOrder ?nextOrder6 ; | knora-base:valueCreationDate ?currentTime . | | | | | ?resource ?property6 ?newValue6 . | | | | | # Value 7 | # Property: http://www.knora.org/ontology/incunabula#publoc | | | ?newValue7 rdf:type ?valueType7 ; | knora-base:isDeleted "false"^^xsd:boolean . | | | | ?newValue7 knora-base:valueHasString "Bebenhausen" . | | | | ?newValue7 <http://www.knora.org/ontology/knora-base#attachedToUser> ?creatorIri ; | knora-base:hasPermissions "V knora-base:UnknownUser|M knora-base:ProjectMember" . | | | ?newValue7 knora-base:valueHasOrder ?nextOrder7 ; | knora-base:valueCreationDate ?currentTime . | | | | | ?resource ?property7 ?newValue7 . | | } |} | | | USING <http://www.ontotext.com/explicit> | |WHERE { | BIND(IRI("http://www.knora.org/data/incunabula") AS ?dataNamedGraph) | BIND(IRI("http://data.knora.org/tooManyPublocs") AS ?resource) | BIND(IRI("http://www.knora.org/ontology/incunabula#book") AS ?resourceClass) | BIND(IRI("http://data.knora.org/users/b83acc5f05") AS ?creatorIri) | BIND(IRI("http://data.knora.org/projects/77275339") AS ?projectIri) | BIND(str("Test-Book") AS ?label) | BIND(NOW() AS ?currentTime) | | | | # Value 0 | # Property: http://www.knora.org/ontology/incunabula#title | | BIND(IRI("http://www.knora.org/ontology/incunabula#title") AS ?property0) | BIND(IRI("http://data.knora.org/tooManyPublocs/values/IKVNJVSWTryEtK4i9OCSIQ") AS ?newValue0) | BIND(IRI("http://www.knora.org/ontology/knora-base#TextValue") AS ?valueType0) | | | | ?property0 knora-base:objectClassConstraint ?propertyRange0 . | ?valueType0 rdfs:subClassOf* ?propertyRange0 . | | | | ?resourceClass rdfs:subClassOf* ?restriction0 . | ?restriction0 a owl:Restriction . | ?restriction0 owl:onProperty ?property0 . 
| | | | | BIND(0 AS ?nextOrder0) | | | | | | | # Value 1 | # Property: http://www.knora.org/ontology/incunabula#pubdate | | BIND(IRI("http://www.knora.org/ontology/incunabula#pubdate") AS ?property1) | BIND(IRI("http://data.knora.org/tooManyPublocs/values/L4YSL2SeSkKVt-J9OQAMog") AS ?newValue1) | BIND(IRI("http://www.knora.org/ontology/knora-base#DateValue") AS ?valueType1) | | | | ?property1 knora-base:objectClassConstraint ?propertyRange1 . | ?valueType1 rdfs:subClassOf* ?propertyRange1 . | | | | ?resourceClass rdfs:subClassOf* ?restriction1 . | ?restriction1 a owl:Restriction . | ?restriction1 owl:onProperty ?property1 . | | | | BIND(0 AS ?nextOrder1) | | | | | | | # Value 2 | # Property: http://www.knora.org/ontology/incunabula#citation | | BIND(IRI("http://www.knora.org/ontology/incunabula#citation") AS ?property2) | BIND(IRI("http://data.knora.org/tooManyPublocs/values/oTvvcMRgR_CC-Os-61I-Qw") AS ?newValue2) | BIND(IRI("http://www.knora.org/ontology/knora-base#TextValue") AS ?valueType2) | | | | ?property2 knora-base:objectClassConstraint ?propertyRange2 . | ?valueType2 rdfs:subClassOf* ?propertyRange2 . | | | | ?resourceClass rdfs:subClassOf* ?restriction2 . | ?restriction2 a owl:Restriction . | ?restriction2 owl:onProperty ?property2 . | | | | BIND(0 AS ?nextOrder2) | | | | | | | # Value 3 | # Property: http://www.knora.org/ontology/incunabula#citation | | BIND(IRI("http://www.knora.org/ontology/incunabula#citation") AS ?property3) | BIND(IRI("http://data.knora.org/tooManyPublocs/values/Jvcncu3iSr2_fWdWdOfn-w") AS ?newValue3) | BIND(IRI("http://www.knora.org/ontology/knora-base#TextValue") AS ?valueType3) | | | | ?property3 knora-base:objectClassConstraint ?propertyRange3 . | ?valueType3 rdfs:subClassOf* ?propertyRange3 . | | | | ?resourceClass rdfs:subClassOf* ?restriction3 . | ?restriction3 a owl:Restriction . | ?restriction3 owl:onProperty ?property3 . | | | | BIND(1 AS ?nextOrder3) | | | | | | | # Value 4 | # Property: http://www.knora.org/ontology/incunabula#citation | | BIND(IRI("http://www.knora.org/ontology/incunabula#citation") AS ?property4) | BIND(IRI("http://data.knora.org/tooManyPublocs/values/7wJJcQLtS2mG_tyPKCe1Ig") AS ?newValue4) | BIND(IRI("http://www.knora.org/ontology/knora-base#TextValue") AS ?valueType4) | | | | ?property4 knora-base:objectClassConstraint ?propertyRange4 . | ?valueType4 rdfs:subClassOf* ?propertyRange4 . | | | | ?resourceClass rdfs:subClassOf* ?restriction4 . | ?restriction4 a owl:Restriction . | ?restriction4 owl:onProperty ?property4 . | | | BIND(2 AS ?nextOrder4) | | | | | | | # Value 5 | # Property: http://www.knora.org/ontology/incunabula#citation | | BIND(IRI("http://www.knora.org/ontology/incunabula#citation") AS ?property5) | BIND(IRI("http://data.knora.org/tooManyPublocs/values/y7zDf5oNSE6-9GNNgXSbwA") AS ?newValue5) | BIND(IRI("http://www.knora.org/ontology/knora-base#TextValue") AS ?valueType5) | | | | ?property5 knora-base:objectClassConstraint ?propertyRange5 . | ?valueType5 rdfs:subClassOf* ?propertyRange5 . | | | | ?resourceClass rdfs:subClassOf* ?restriction5 . | ?restriction5 a owl:Restriction . | ?restriction5 owl:onProperty ?property5 . 
| | | | BIND(3 AS ?nextOrder5) | | | | | | | # Value 6 | # Property: http://www.knora.org/ontology/incunabula#publoc | | BIND(IRI("http://www.knora.org/ontology/incunabula#publoc") AS ?property6) | BIND(IRI("http://data.knora.org/tooManyPublocs/values/1ryBgY4MSn2Y8K8QAPiJBw0") AS ?newValue6) | BIND(IRI("http://www.knora.org/ontology/knora-base#TextValue") AS ?valueType6) | | | | ?property6 knora-base:objectClassConstraint ?propertyRange6 . | ?valueType6 rdfs:subClassOf* ?propertyRange6 . | | | | ?resourceClass rdfs:subClassOf* ?restriction6 . | ?restriction6 a owl:Restriction . | ?restriction6 owl:onProperty ?property6 . | | | | BIND(0 AS ?nextOrder6) | | | # Value 7 | # Property: http://www.knora.org/ontology/incunabula#publoc | | BIND(IRI("http://www.knora.org/ontology/incunabula#publoc") AS ?property7) | BIND(IRI("http://data.knora.org/tooManyPublocs/values/1ryBgY4MSn2Y8K8QAPiJBw1") AS ?newValue7) | BIND(IRI("http://www.knora.org/ontology/knora-base#TextValue") AS ?valueType7) | | | | ?property7 knora-base:objectClassConstraint ?propertyRange7 . | ?valueType7 rdfs:subClassOf* ?propertyRange7 . | | | | ?resourceClass rdfs:subClassOf* ?restriction7 . | ?restriction7 a owl:Restriction . | ?restriction7 owl:onProperty ?property7 . | | | | BIND(1 AS ?nextOrder7) |} """.stripMargin private val tooManyLastModificationDates = """ |PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> |PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#> |PREFIX owl: <http://www.w3.org/2002/07/owl#> |PREFIX knora-base: <http://www.knora.org/ontology/knora-base#> | |INSERT { | GRAPH ?dataNamedGraph { | ?resource rdf:type ?resourceClass ; | knora-base:isDeleted "false"^^xsd:boolean ; | knora-base:lastModificationDate "2016-01-23T11:31:24Z"^^xsd:dateTimeStamp ; | knora-base:lastModificationDate ?currentTime ; | knora-base:attachedToUser ?creatorIri ; | knora-base:attachedToProject ?projectIri ; | rdfs:label ?label ; | knora-base:hasPermissions "V knora-base:UnknownUser|M knora-base:ProjectMember" ; | knora-base:creationDate ?currentTime . | | | | # Value 0 | # Property: http://www.knora.org/ontology/incunabula#title | | | ?newValue0 rdf:type ?valueType0 ; | knora-base:isDeleted "false"^^xsd:boolean . | | | | ?newValue0 knora-base:valueHasString "A beautiful book" . | | | | ?newValue0 <http://www.knora.org/ontology/knora-base#attachedToUser> ?creatorIri ; | knora-base:hasPermissions "V knora-base:UnknownUser|M knora-base:ProjectMember" . | | | ?newValue0 knora-base:valueHasOrder ?nextOrder0 ; | knora-base:valueCreationDate ?currentTime . | | | | ?resource ?property0 ?newValue0 . | | | | | # Value 1 | # Property: http://www.knora.org/ontology/incunabula#pubdate | | | ?newValue1 rdf:type ?valueType1 ; | knora-base:isDeleted "false"^^xsd:boolean . | | | | ?newValue1 knora-base:valueHasStartJDN 2457360 ; | knora-base:valueHasEndJDN 2457360 ; | knora-base:valueHasStartPrecision "DAY" ; | knora-base:valueHasEndPrecision "DAY" ; | knora-base:valueHasCalendar "GREGORIAN" ; | knora-base:valueHasString "2015-12-03" . | | | | | ?newValue1 <http://www.knora.org/ontology/knora-base#attachedToUser> ?creatorIri ; | knora-base:hasPermissions "V knora-base:UnknownUser|M knora-base:ProjectMember" . | | | ?newValue1 knora-base:valueHasOrder ?nextOrder1 ; | knora-base:valueCreationDate ?currentTime . | | | | | ?resource ?property1 ?newValue1 . | | | | | # Value 6 | # Property: http://www.knora.org/ontology/incunabula#publoc | | | ?newValue6 rdf:type ?valueType6 ; | knora-base:isDeleted "false"^^xsd:boolean . 
| | | | ?newValue6 knora-base:valueHasString "Entenhausen" . | | | | | | ?newValue6 <http://www.knora.org/ontology/knora-base#attachedToUser> ?creatorIri ; | knora-base:hasPermissions "V knora-base:UnknownUser|M knora-base:ProjectMember" . | | | ?newValue6 knora-base:valueHasOrder ?nextOrder6 ; | knora-base:valueCreationDate ?currentTime . | | | | ?resource ?property6 ?newValue6 . | | } |} | | | USING <http://www.ontotext.com/explicit> | |WHERE { | BIND(IRI("http://www.knora.org/data/incunabula") AS ?dataNamedGraph) | BIND(IRI("http://data.knora.org/tooManyLastModificationDates") AS ?resource) | BIND(IRI("http://www.knora.org/ontology/incunabula#book") AS ?resourceClass) | BIND(IRI("http://data.knora.org/users/b83acc5f05") AS ?creatorIri) | BIND(IRI("http://data.knora.org/projects/77275339") AS ?projectIri) | BIND(str("Test-Book") AS ?label) | BIND(NOW() AS ?currentTime) | | | | # Value 0 | # Property: http://www.knora.org/ontology/incunabula#title | | BIND(IRI("http://www.knora.org/ontology/incunabula#title") AS ?property0) | BIND(IRI("http://data.knora.org/tooManyLastModificationDates/values/IKVNJVSWTryEtK4i9OCSIQ") AS ?newValue0) | BIND(IRI("http://www.knora.org/ontology/knora-base#TextValue") AS ?valueType0) | | | | ?property0 knora-base:objectClassConstraint ?propertyRange0 . | ?valueType0 rdfs:subClassOf* ?propertyRange0 . | | | | ?resourceClass rdfs:subClassOf* ?restriction0 . | ?restriction0 a owl:Restriction . | ?restriction0 owl:onProperty ?property0 . | | | | BIND(0 AS ?nextOrder0) | | | | | | | # Value 1 | # Property: http://www.knora.org/ontology/incunabula#pubdate | | BIND(IRI("http://www.knora.org/ontology/incunabula#pubdate") AS ?property1) | BIND(IRI("http://data.knora.org/tooManyLastModificationDates/values/L4YSL2SeSkKVt-J9OQAMog") AS ?newValue1) | BIND(IRI("http://www.knora.org/ontology/knora-base#DateValue") AS ?valueType1) | | | | ?property1 knora-base:objectClassConstraint ?propertyRange1 . | ?valueType1 rdfs:subClassOf* ?propertyRange1 . | | | | ?resourceClass rdfs:subClassOf* ?restriction1 . | ?restriction1 a owl:Restriction . | ?restriction1 owl:onProperty ?property1 . | | | | BIND(0 AS ?nextOrder1) | | | | | | # Value 6 | # Property: http://www.knora.org/ontology/incunabula#publoc | | BIND(IRI("http://www.knora.org/ontology/incunabula#publoc") AS ?property6) | BIND(IRI("http://data.knora.org/tooManyLastModificationDates/values/1ryBgY4MSn2Y8K8QAPiJBw") AS ?newValue6) | BIND(IRI("http://www.knora.org/ontology/knora-base#TextValue") AS ?valueType6) | | | | ?property6 knora-base:objectClassConstraint ?propertyRange6 . | ?valueType6 rdfs:subClassOf* ?propertyRange6 . | | | | ?resourceClass rdfs:subClassOf* ?restriction6 . | ?restriction6 a owl:Restriction . | ?restriction6 owl:onProperty ?property6 . 
| | | | BIND(0 AS ?nextOrder6) | | | |} """.stripMargin private val wrongSubjectClass = """ |PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> |PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#> |PREFIX owl: <http://www.w3.org/2002/07/owl#> |PREFIX knora-base: <http://www.knora.org/ontology/knora-base#> | |INSERT { | GRAPH ?dataNamedGraph { | ?resource rdf:type ?resourceClass ; | knora-base:valueHasString "A resource is not allowed to have a valueHasString property" ; | knora-base:isDeleted "false"^^xsd:boolean ; | knora-base:lastModificationDate ?currentTime ; | knora-base:attachedToUser ?creatorIri ; | knora-base:attachedToProject ?projectIri ; | rdfs:label ?label ; | knora-base:hasPermissions "V knora-base:UnknownUser|M knora-base:ProjectMember" ; | knora-base:creationDate ?currentTime . | | | | # Value 0 | # Property: http://www.knora.org/ontology/incunabula#title | | | ?newValue0 rdf:type ?valueType0 ; | knora-base:isDeleted "false"^^xsd:boolean . | | | | ?newValue0 knora-base:valueHasString "A beautiful book" . | | | | | ?newValue0 <http://www.knora.org/ontology/knora-base#attachedToUser> ?creatorIri ; | knora-base:hasPermissions "V knora-base:UnknownUser|M knora-base:ProjectMember" . | | | ?newValue0 knora-base:valueHasOrder ?nextOrder0 ; | knora-base:valueCreationDate ?currentTime . | | | | ?resource ?property0 ?newValue0 . | | | | | # Value 1 | # Property: http://www.knora.org/ontology/incunabula#pubdate | | | ?newValue1 rdf:type ?valueType1 ; | knora-base:isDeleted "false"^^xsd:boolean . | | | | ?newValue1 knora-base:valueHasStartJDN 2457360 ; | knora-base:valueHasEndJDN 2457360 ; | knora-base:valueHasStartPrecision "DAY" ; | knora-base:valueHasEndPrecision "DAY" ; | knora-base:valueHasCalendar "GREGORIAN" ; | knora-base:valueHasString "2015-12-03" . | | | | ?newValue1 <http://www.knora.org/ontology/knora-base#attachedToUser> ?creatorIri ; | knora-base:hasPermissions "V knora-base:UnknownUser|M knora-base:ProjectMember" . | | | ?newValue1 knora-base:valueHasOrder ?nextOrder1 ; | knora-base:valueCreationDate ?currentTime . | | | | | ?resource ?property1 ?newValue1 . | | | | | # Value 6 | # Property: http://www.knora.org/ontology/incunabula#publoc | | | ?newValue6 rdf:type ?valueType6 ; | knora-base:isDeleted "false"^^xsd:boolean . | | | | ?newValue6 knora-base:valueHasString "Entenhausen" . | | | | | ?newValue6 <http://www.knora.org/ontology/knora-base#attachedToUser> ?creatorIri ; | knora-base:hasPermissions "V knora-base:UnknownUser|M knora-base:ProjectMember" . | | | ?newValue6 knora-base:valueHasOrder ?nextOrder6 ; | knora-base:valueCreationDate ?currentTime . | | | ?resource ?property6 ?newValue6 . 
| | | } |} | | | USING <http://www.ontotext.com/explicit> | |WHERE { | BIND(IRI("http://www.knora.org/data/incunabula") AS ?dataNamedGraph) | BIND(IRI("http://data.knora.org/wrongSubjectClass") AS ?resource) | BIND(IRI("http://www.knora.org/ontology/incunabula#book") AS ?resourceClass) | BIND(IRI("http://data.knora.org/users/b83acc5f05") AS ?creatorIri) | BIND(IRI("http://data.knora.org/projects/77275339") AS ?projectIri) | BIND(str("Test-Book") AS ?label) | BIND(NOW() AS ?currentTime) | | | | # Value 0 | # Property: http://www.knora.org/ontology/incunabula#title | | BIND(IRI("http://www.knora.org/ontology/incunabula#title") AS ?property0) | BIND(IRI("http://data.knora.org/wrongSubjectClass/values/IKVNJVSWTryEtK4i9OCSIQ") AS ?newValue0) | BIND(IRI("http://www.knora.org/ontology/knora-base#TextValue") AS ?valueType0) | | | | ?property0 knora-base:objectClassConstraint ?propertyRange0 . | ?valueType0 rdfs:subClassOf* ?propertyRange0 . | | | | ?resourceClass rdfs:subClassOf* ?restriction0 . | ?restriction0 a owl:Restriction . | ?restriction0 owl:onProperty ?property0 . | | | | BIND(0 AS ?nextOrder0) | | | | | | | # Value 1 | # Property: http://www.knora.org/ontology/incunabula#pubdate | | BIND(IRI("http://www.knora.org/ontology/incunabula#pubdate") AS ?property1) | BIND(IRI("http://data.knora.org/wrongSubjectClass/values/L4YSL2SeSkKVt-J9OQAMog") AS ?newValue1) | BIND(IRI("http://www.knora.org/ontology/knora-base#DateValue") AS ?valueType1) | | | | ?property1 knora-base:objectClassConstraint ?propertyRange1 . | ?valueType1 rdfs:subClassOf* ?propertyRange1 . | | | | ?resourceClass rdfs:subClassOf* ?restriction1 . | ?restriction1 a owl:Restriction . | ?restriction1 owl:onProperty ?property1 . | | | | BIND(0 AS ?nextOrder1) | | | | | | # Value 6 | # Property: http://www.knora.org/ontology/incunabula#publoc | | BIND(IRI("http://www.knora.org/ontology/incunabula#publoc") AS ?property6) | BIND(IRI("http://data.knora.org/wrongSubjectClass/values/1ryBgY4MSn2Y8K8QAPiJBw") AS ?newValue6) | BIND(IRI("http://www.knora.org/ontology/knora-base#TextValue") AS ?valueType6) | | | | ?property6 knora-base:objectClassConstraint ?propertyRange6 . | ?valueType6 rdfs:subClassOf* ?propertyRange6 . | | | | ?resourceClass rdfs:subClassOf* ?restriction6 . | ?restriction6 a owl:Restriction . | ?restriction6 owl:onProperty ?property6 . | | | | BIND(0 AS ?nextOrder6) | | | |} """.stripMargin private val wrongObjectClass = """ |PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> |PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#> |PREFIX owl: <http://www.w3.org/2002/07/owl#> |PREFIX knora-base: <http://www.knora.org/ontology/knora-base#> | |INSERT { | GRAPH ?dataNamedGraph { | ?resource rdf:type ?resourceClass ; | knora-base:isDeleted "false"^^xsd:boolean ; | knora-base:lastModificationDate ?currentTime ; | knora-base:attachedToUser ?creatorIri ; | knora-base:attachedToProject ?projectIri ; | rdfs:label ?label ; | knora-base:hasPermissions "V knora-base:UnknownUser|M knora-base:ProjectMember" ; | knora-base:creationDate ?currentTime . | | | | # Value 0 | # Property: http://www.knora.org/ontology/incunabula#title | | | ?newValue0 rdf:type ?valueType0 ; | knora-base:isDeleted "false"^^xsd:boolean . | | | | ?newValue0 knora-base:valueHasString "A beautiful book" . | | | | | ?newValue0 <http://www.knora.org/ontology/knora-base#attachedToUser> ?creatorIri ; | knora-base:hasPermissions "V knora-base:UnknownUser|M knora-base:ProjectMember" . 
| | | ?newValue0 knora-base:valueHasOrder ?nextOrder0 ; | knora-base:valueCreationDate ?currentTime . | | | | ?resource ?property1 ?newValue0 . # ?property0 and ?property1 are reversed to cause an error. | | | | | # Value 1 | # Property: http://www.knora.org/ontology/incunabula#pubdate | | | ?newValue1 rdf:type ?valueType1 ; | knora-base:isDeleted "false"^^xsd:boolean . | | | | ?newValue1 knora-base:valueHasStartJDN 2457360 ; | knora-base:valueHasEndJDN 2457360 ; | knora-base:valueHasStartPrecision "DAY" ; | knora-base:valueHasEndPrecision "DAY" ; | knora-base:valueHasCalendar "GREGORIAN" ; | knora-base:valueHasString "2015-12-03" . | | ?newValue1 <http://www.knora.org/ontology/knora-base#attachedToUser> ?creatorIri ; | knora-base:hasPermissions "V knora-base:UnknownUser|M knora-base:ProjectMember" . | | | ?newValue1 knora-base:valueHasOrder ?nextOrder1 ; | knora-base:valueCreationDate ?currentTime . | | | | | ?resource ?property0 ?newValue1 . # ?property0 and ?property1 are reversed to cause an error. | | | | | # Value 6 | # Property: http://www.knora.org/ontology/incunabula#publoc | | | ?newValue6 rdf:type ?valueType6 ; | knora-base:isDeleted "false"^^xsd:boolean . | | | | ?newValue6 knora-base:valueHasString "Entenhausen" . | | | | ?newValue6 <http://www.knora.org/ontology/knora-base#attachedToUser> ?creatorIri ; | knora-base:hasPermissions "V knora-base:UnknownUser|M knora-base:ProjectMember" . | | | ?newValue6 knora-base:valueHasOrder ?nextOrder6 ; | knora-base:valueCreationDate ?currentTime . | | | ?resource ?property6 ?newValue6 . | | } |} | | | USING <http://www.ontotext.com/explicit> | |WHERE { | BIND(IRI("http://www.knora.org/data/incunabula") AS ?dataNamedGraph) | BIND(IRI("http://data.knora.org/wrongObjectClass") AS ?resource) | BIND(IRI("http://www.knora.org/ontology/incunabula#book") AS ?resourceClass) | BIND(IRI("http://data.knora.org/users/b83acc5f05") AS ?creatorIri) | BIND(IRI("http://data.knora.org/projects/77275339") AS ?projectIri) | BIND(str("Test-Book") AS ?label) | BIND(NOW() AS ?currentTime) | | | | # Value 0 | # Property: http://www.knora.org/ontology/incunabula#title | | BIND(IRI("http://www.knora.org/ontology/incunabula#title") AS ?property0) | BIND(IRI("http://data.knora.org/wrongObjectClass/values/IKVNJVSWTryEtK4i9OCSIQ") AS ?newValue0) | BIND(IRI("http://www.knora.org/ontology/knora-base#TextValue") AS ?valueType0) | | | | ?property0 knora-base:objectClassConstraint ?propertyRange0 . | ?valueType0 rdfs:subClassOf* ?propertyRange0 . | | | | ?resourceClass rdfs:subClassOf* ?restriction0 . | ?restriction0 a owl:Restriction . | ?restriction0 owl:onProperty ?property0 . | | | | BIND(0 AS ?nextOrder0) | | | | | | | # Value 1 | # Property: http://www.knora.org/ontology/incunabula#pubdate | | BIND(IRI("http://www.knora.org/ontology/incunabula#pubdate") AS ?property1) | BIND(IRI("http://data.knora.org/wrongObjectClass/values/L4YSL2SeSkKVt-J9OQAMog") AS ?newValue1) | BIND(IRI("http://www.knora.org/ontology/knora-base#DateValue") AS ?valueType1) | | | | ?property1 knora-base:objectClassConstraint ?propertyRange1 . | ?valueType1 rdfs:subClassOf* ?propertyRange1 . | | | | ?resourceClass rdfs:subClassOf* ?restriction1 . | ?restriction1 a owl:Restriction . | ?restriction1 owl:onProperty ?property1 . 
| | | | BIND(0 AS ?nextOrder1) | | | | | | # Value 6 | # Property: http://www.knora.org/ontology/incunabula#publoc | | BIND(IRI("http://www.knora.org/ontology/incunabula#publoc") AS ?property6) | BIND(IRI("http://data.knora.org/wrongObjectClass/values/1ryBgY4MSn2Y8K8QAPiJBw") AS ?newValue6) | BIND(IRI("http://www.knora.org/ontology/knora-base#TextValue") AS ?valueType6) | | | | ?property6 knora-base:objectClassConstraint ?propertyRange6 . | ?valueType6 rdfs:subClassOf* ?propertyRange6 . | | | | ?resourceClass rdfs:subClassOf* ?restriction6 . | ?restriction6 a owl:Restriction . | ?restriction6 owl:onProperty ?property6 . | | | | BIND(0 AS ?nextOrder6) | | | |} """.stripMargin private val resourcePropWithNoCardinality = """ |PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> |PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#> |PREFIX owl: <http://www.w3.org/2002/07/owl#> |PREFIX knora-base: <http://www.knora.org/ontology/knora-base#> |PREFIX incunabula: <http://www.knora.org/ontology/incunabula#> |PREFIX salsah-gui: <http://www.knora.org/ontology/salsah-gui#> | |INSERT { | GRAPH ?dataNamedGraph { | | | # A property that incunabula:book has no cardinality for. | incunabula:unused rdf:type owl:ObjectProperty ; | rdfs:subPropertyOf knora-base:hasValue ; | rdfs:label "Unused property"@en ; | rdfs:comment "A property used only in tests"@en ; | knora-base:subjectClassConstraint incunabula:book ; | knora-base:objectClassConstraint knora-base:TextValue ; | salsah-gui:guiOrder "1"^^xsd:integer ; | salsah-gui:guiElement salsah-gui:SimpleText ; | salsah-gui:guiAttribute "min=4" , | "max=8" . | | | ?resource rdf:type ?resourceClass ; | knora-base:isDeleted "false"^^xsd:boolean ; | knora-base:lastModificationDate ?currentTime ; | knora-base:attachedToUser ?creatorIri ; | knora-base:attachedToProject ?projectIri ; | rdfs:label ?label ; | knora-base:hasPermissions "V knora-base:UnknownUser|M knora-base:ProjectMember" ; | knora-base:creationDate ?currentTime . | | | | # Value 0 | # Property: http://www.knora.org/ontology/incunabula#title | | | ?newValue0 rdf:type ?valueType0 ; | knora-base:isDeleted "false"^^xsd:boolean . | | | | ?newValue0 knora-base:valueHasString "A beautiful book" . | | | ?newValue0 <http://www.knora.org/ontology/knora-base#attachedToUser> ?creatorIri ; | knora-base:hasPermissions "V knora-base:UnknownUser|M knora-base:ProjectMember" . | | | ?newValue0 knora-base:valueHasOrder ?nextOrder0 ; | knora-base:valueCreationDate ?currentTime . | | | | ?resource ?property0 ?newValue0 . | | | | | # Value 1 | # Property: http://www.knora.org/ontology/incunabula#pubdate | | | ?newValue1 rdf:type ?valueType1 ; | knora-base:isDeleted "false"^^xsd:boolean . | | | | ?newValue1 knora-base:valueHasStartJDN 2457360 ; | knora-base:valueHasEndJDN 2457360 ; | knora-base:valueHasStartPrecision "DAY" ; | knora-base:valueHasEndPrecision "DAY" ; | knora-base:valueHasCalendar "GREGORIAN" ; | knora-base:valueHasString "2015-12-03" . | | | | ?newValue1 <http://www.knora.org/ontology/knora-base#attachedToUser> ?creatorIri ; | knora-base:hasPermissions "V knora-base:UnknownUser|M knora-base:ProjectMember" . | | | ?newValue1 knora-base:valueHasOrder ?nextOrder1 ; | knora-base:valueCreationDate ?currentTime . | | | | | ?resource ?property1 ?newValue1 . | | | | | # Value 6 | # Property: http://www.knora.org/ontology/incunabula#publoc | | | ?newValue6 rdf:type ?valueType6 ; | knora-base:isDeleted "false"^^xsd:boolean . | | | | ?newValue6 knora-base:valueHasString "Entenhausen" . 
| | | ?newValue6 <http://www.knora.org/ontology/knora-base#attachedToUser> ?creatorIri ; | knora-base:hasPermissions "V knora-base:UnknownUser|M knora-base:ProjectMember" . | | | ?newValue6 knora-base:valueHasOrder ?nextOrder6 ; | knora-base:valueCreationDate ?currentTime . | | | | | | ?resource ?property6 ?newValue6 . | | | | | # Value 7 (there's no cardinality for it, so it should cause an error) | # Property: http://www.knora.org/ontology/incunabula#unused | | | ?newValue7 rdf:type ?valueType7 ; | knora-base:isDeleted "false"^^xsd:boolean . | | | | ?newValue7 knora-base:valueHasString "recto" . | | | | | ?newValue7 <http://www.knora.org/ontology/knora-base#attachedToUser> ?creatorIri ; | knora-base:hasPermissions "V knora-base:UnknownUser|M knora-base:ProjectMember" . | | | ?newValue7 knora-base:valueHasOrder ?nextOrder7 ; | knora-base:valueCreationDate ?currentTime . | | | | | ?resource ?property7 ?newValue7 . | | | | } |} | | | USING <http://www.ontotext.com/explicit> | |WHERE { | BIND(IRI("http://www.knora.org/data/incunabula") AS ?dataNamedGraph) | BIND(IRI("http://data.knora.org/resourcePropWithNoCardinality") AS ?resource) | BIND(IRI("http://www.knora.org/ontology/incunabula#book") AS ?resourceClass) | BIND(IRI("http://data.knora.org/users/b83acc5f05") AS ?creatorIri) | BIND(IRI("http://data.knora.org/projects/77275339") AS ?projectIri) | BIND(str("Test-Book") AS ?label) | BIND(NOW() AS ?currentTime) | | | | # Value 0 | # Property: http://www.knora.org/ontology/incunabula#title | | BIND(IRI("http://www.knora.org/ontology/incunabula#title") AS ?property0) | BIND(IRI("http://data.knora.org/resourcePropWithNoCardinality/values/IKVNJVSWTryEtK4i9OCSIQ") AS ?newValue0) | BIND(IRI("http://www.knora.org/ontology/knora-base#TextValue") AS ?valueType0) | | | | ?property0 knora-base:objectClassConstraint ?propertyRange0 . | ?valueType0 rdfs:subClassOf* ?propertyRange0 . | | | | ?resourceClass rdfs:subClassOf* ?restriction0 . | ?restriction0 a owl:Restriction . | ?restriction0 owl:onProperty ?property0 . | | | | | BIND(0 AS ?nextOrder0) | | | | | | | # Value 1 | # Property: http://www.knora.org/ontology/incunabula#pubdate | | BIND(IRI("http://www.knora.org/ontology/incunabula#pubdate") AS ?property1) | BIND(IRI("http://data.knora.org/resourcePropWithNoCardinality/values/L4YSL2SeSkKVt-J9OQAMog") AS ?newValue1) | BIND(IRI("http://www.knora.org/ontology/knora-base#DateValue") AS ?valueType1) | | | | ?property1 knora-base:objectClassConstraint ?propertyRange1 . | ?valueType1 rdfs:subClassOf* ?propertyRange1 . | | | | ?resourceClass rdfs:subClassOf* ?restriction1 . | ?restriction1 a owl:Restriction . | ?restriction1 owl:onProperty ?property1 . | | | | | BIND(0 AS ?nextOrder1) | | | | | | # Value 6 | # Property: http://www.knora.org/ontology/incunabula#publoc | | BIND(IRI("http://www.knora.org/ontology/incunabula#publoc") AS ?property6) | BIND(IRI("http://data.knora.org/resourcePropWithNoCardinality/values/1ryBgY4MSn2Y8K8QAPiJBw") AS ?newValue6) | BIND(IRI("http://www.knora.org/ontology/knora-base#TextValue") AS ?valueType6) | | | | ?property6 knora-base:objectClassConstraint ?propertyRange6 . | ?valueType6 rdfs:subClassOf* ?propertyRange6 . | | | | ?resourceClass rdfs:subClassOf* ?restriction6 . | ?restriction6 a owl:Restriction . | ?restriction6 owl:onProperty ?property6 . 
| | | | | BIND(0 AS ?nextOrder6) | | | | | # Value 7 | # Property: http://www.knora.org/ontology/incunabula#unused | | BIND(IRI("http://www.knora.org/ontology/incunabula#unused") AS ?property7) | BIND(IRI("http://data.knora.org/resourcePropWithNoCardinality/values/nQ3tRObaQWe74WQv2_OdCg") AS ?newValue7) | BIND(IRI("http://www.knora.org/ontology/knora-base#TextValue") AS ?valueType7) | | | | | BIND(0 AS ?nextOrder7) |} """.stripMargin private val valuePropWithNoCardinality = """ |PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> |PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#> |PREFIX owl: <http://www.w3.org/2002/07/owl#> |PREFIX knora-base: <http://www.knora.org/ontology/knora-base#> | |INSERT { | GRAPH ?dataNamedGraph { | ?resource rdf:type ?resourceClass ; | knora-base:isDeleted "false"^^xsd:boolean ; | knora-base:lastModificationDate ?currentTime ; | knora-base:attachedToUser ?creatorIri ; | knora-base:attachedToProject ?projectIri ; | rdfs:label ?label ; | knora-base:hasPermissions "V knora-base:UnknownUser|M knora-base:ProjectMember" ; | knora-base:creationDate ?currentTime . | | | | # Value 0 | # Property: http://www.knora.org/ontology/incunabula#title | | | ?newValue0 rdf:type ?valueType0 ; | knora-base:isDeleted "false"^^xsd:boolean . | | | | ?newValue0 knora-base:valueHasString "A beautiful book" . | | | ?newValue0 <http://www.knora.org/ontology/knora-base#attachedToUser> ?creatorIri ; | knora-base:hasPermissions "V knora-base:UnknownUser|M knora-base:ProjectMember" . | | | ?newValue0 knora-base:valueHasOrder ?nextOrder0 ; | knora-base:valueCreationDate ?currentTime . | | | | ?resource ?property0 ?newValue0 . | | | | | # Value 1 | # Property: http://www.knora.org/ontology/incunabula#pubdate | | | ?newValue1 rdf:type ?valueType1 ; | knora-base:isDeleted "false"^^xsd:boolean . | | | | ?newValue1 knora-base:valueHasStartJDN 2457360 ; | knora-base:valueHasEndJDN 2457360 ; | knora-base:valueHasStartPrecision "DAY" ; | knora-base:valueHasEndPrecision "DAY" ; | knora-base:valueHasCalendar "GREGORIAN" ; | knora-base:valueHasString "2015-12-03" . | | | | ?newValue1 <http://www.knora.org/ontology/knora-base#attachedToUser> ?creatorIri ; | knora-base:hasPermissions "V knora-base:UnknownUser|M knora-base:ProjectMember" . | | | ?newValue1 knora-base:valueHasOrder ?nextOrder1 ; | knora-base:valueCreationDate ?currentTime . | | | | | ?resource ?property1 ?newValue1 . | | | | | # Value 6 | # Property: http://www.knora.org/ontology/incunabula#publoc | | | # A property that knora-base:TextValue has no cardinality for. | knora-base:valueHasTest rdf:type owl:DatatypeProperty ; | rdfs:subPropertyOf knora-base:valueHas ; | knora-base:subjectClassConstraint knora-base:TextValue ; | knora-base:objectDatatypeConstraint xsd:integer . | | | ?newValue6 rdf:type ?valueType6 ; | knora-base:isDeleted "false"^^xsd:boolean . | | | | ?newValue6 knora-base:valueHasString "Entenhausen" . | | ?newValue6 knora-base:valueHasTest "3"^^xsd:integer . # No cardinality for this property, so it should cause an error. | | ?newValue6 <http://www.knora.org/ontology/knora-base#attachedToUser> ?creatorIri ; | knora-base:hasPermissions "V knora-base:UnknownUser|M knora-base:ProjectMember" . | | | ?newValue6 knora-base:valueHasOrder ?nextOrder6 ; | knora-base:valueCreationDate ?currentTime . | | | | | | ?resource ?property6 ?newValue6 . 
| | } |} | | | USING <http://www.ontotext.com/explicit> | |WHERE { | BIND(IRI("http://www.knora.org/data/incunabula") AS ?dataNamedGraph) | BIND(IRI("http://data.knora.org/valuePropWithNoCardinality") AS ?resource) | BIND(IRI("http://www.knora.org/ontology/incunabula#book") AS ?resourceClass) | BIND(IRI("http://data.knora.org/users/b83acc5f05") AS ?creatorIri) | BIND(IRI("http://data.knora.org/projects/77275339") AS ?projectIri) | BIND(str("Test-Book") AS ?label) | BIND(NOW() AS ?currentTime) | | | | # Value 0 | # Property: http://www.knora.org/ontology/incunabula#title | | BIND(IRI("http://www.knora.org/ontology/incunabula#title") AS ?property0) | BIND(IRI("http://data.knora.org/valuePropWithNoCardinality/values/IKVNJVSWTryEtK4i9OCSIQ") AS ?newValue0) | BIND(IRI("http://www.knora.org/ontology/knora-base#TextValue") AS ?valueType0) | | | | ?property0 knora-base:objectClassConstraint ?propertyRange0 . | ?valueType0 rdfs:subClassOf* ?propertyRange0 . | | | | ?resourceClass rdfs:subClassOf* ?restriction0 . | ?restriction0 a owl:Restriction . | ?restriction0 owl:onProperty ?property0 . | | | | | BIND(0 AS ?nextOrder0) | | | | | | | # Value 1 | # Property: http://www.knora.org/ontology/incunabula#pubdate | | BIND(IRI("http://www.knora.org/ontology/incunabula#pubdate") AS ?property1) | BIND(IRI("http://data.knora.org/valuePropWithNoCardinality/values/L4YSL2SeSkKVt-J9OQAMog") AS ?newValue1) | BIND(IRI("http://www.knora.org/ontology/knora-base#DateValue") AS ?valueType1) | | | | ?property1 knora-base:objectClassConstraint ?propertyRange1 . | ?valueType1 rdfs:subClassOf* ?propertyRange1 . | | | | ?resourceClass rdfs:subClassOf* ?restriction1 . | ?restriction1 a owl:Restriction . | ?restriction1 owl:onProperty ?property1 . | | | | | BIND(0 AS ?nextOrder1) | | | | | | # Value 6 | # Property: http://www.knora.org/ontology/incunabula#publoc | | BIND(IRI("http://www.knora.org/ontology/incunabula#publoc") AS ?property6) | BIND(IRI("http://data.knora.org/valuePropWithNoCardinality/values/1ryBgY4MSn2Y8K8QAPiJBw") AS ?newValue6) | BIND(IRI("http://www.knora.org/ontology/knora-base#TextValue") AS ?valueType6) | | | | ?property6 knora-base:objectClassConstraint ?propertyRange6 . | ?valueType6 rdfs:subClassOf* ?propertyRange6 . | | | | ?resourceClass rdfs:subClassOf* ?restriction6 . | ?restriction6 a owl:Restriction . | ?restriction6 owl:onProperty ?property6 . | | | | | BIND(0 AS ?nextOrder6) |} """.stripMargin private val wrongLinkTargetClass = """ |PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> |PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#> |PREFIX owl: <http://www.w3.org/2002/07/owl#> |PREFIX xsd: <http://www.w3.org/2001/XMLSchema#> |PREFIX knora-base: <http://www.knora.org/ontology/knora-base#> | |INSERT { | GRAPH ?dataNamedGraph { | ?resource0 rdf:type ?resourceClass0 ; | knora-base:isDeleted false ; | knora-base:attachedToUser ?creatorIri ; | knora-base:attachedToProject ?projectIri ; | rdfs:label ?label0 ; | knora-base:hasPermissions "V knora-base:UnknownUser|M knora-base:ProjectMember" ; | knora-base:creationDate ?currentTime . | | | | # Value 0 | # Property: http://www.knora.org/ontology/anything#hasBlueThing | | # The property hasBlueThing has an objectClassConstraint of BlueThing, so using a Thing as a link target should fail. | | ?resource0 ?linkProperty0_0 ?linkTarget0_0 . 
| | | | ?newLinkValue0_0 rdf:type knora-base:LinkValue ; | rdf:subject ?resource0 ; | rdf:predicate ?linkProperty0_0 ; | rdf:object ?linkTarget0_0 ; | knora-base:valueHasString "http://data.knora.org/a-thing" ; | knora-base:valueHasRefCount 1 ; | | knora-base:valueHasOrder ?nextOrder0_0 ; | knora-base:isDeleted false ; | knora-base:valueCreationDate ?currentTime . | | ?newLinkValue0_0 knora-base:attachedToUser <http://data.knora.org/users/9XBCrDV3SRa7kS1WwynB4Q> ; | knora-base:hasPermissions "V knora-base:UnknownUser|M knora-base:ProjectMember" . | | | | | ?resource0 ?linkValueProperty0_0 ?newLinkValue0_0 . | | | | | } |} | | | USING <http://www.ontotext.com/explicit> | |WHERE { | BIND(IRI("http://www.knora.org/data/anything") AS ?dataNamedGraph) | BIND(IRI("http://data.knora.org/wrongTargetClass") AS ?resource0) | BIND(IRI("http://www.knora.org/ontology/anything#Thing") AS ?resourceClass0) | BIND(IRI("http://data.knora.org/users/9XBCrDV3SRa7kS1WwynB4Q") AS ?creatorIri) | BIND(IRI("http://data.knora.org/projects/anything") AS ?projectIri) | BIND(str("Test Thing") AS ?label0) | BIND(NOW() AS ?currentTime) | | | | # Value 0 | # Property: http://www.knora.org/ontology/anything#hasBlueThing | | BIND(IRI("http://www.knora.org/ontology/anything#hasBlueThing") AS ?linkProperty0_0) | BIND(IRI("http://www.knora.org/ontology/anything#hasBlueThingValue") AS ?linkValueProperty0_0) | BIND(IRI("http://data.knora.org/wrongTargetClass/values/GjV_4ayjRDebneEQM0zHuw") AS ?newLinkValue0_0) | BIND(IRI("http://data.knora.org/a-thing") AS ?linkTarget0) | | | | ?linkTarget0_0 rdf:type ?linkTargetClass0_0 ; | knora-base:isDeleted false . | ?linkTargetClass0_0 rdfs:subClassOf+ knora-base:Resource . | | | | ?resourceClass0 rdfs:subClassOf* ?restriction0_0 . | ?restriction0_0 a owl:Restriction . | ?restriction0_0 owl:onProperty ?linkProperty0_0 . | | | | | | | | | | BIND(0 AS ?nextOrder0_0) | | | |} """.stripMargin private val twoLabels = """ |PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> |PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#> |PREFIX owl: <http://www.w3.org/2002/07/owl#> |PREFIX xsd: <http://www.w3.org/2001/XMLSchema#> |PREFIX knora-base: <http://www.knora.org/ontology/knora-base#> | |INSERT { | GRAPH ?dataNamedGraph { | ?resource rdf:type ?resourceClass ; | knora-base:isDeleted false ; | knora-base:attachedToUser ?creatorIri ; | knora-base:attachedToProject ?projectIri ; | rdfs:label ?label; | rdfs:label "Second label not allowed" ; | knora-base:hasPermissions "V knora-base:UnknownUser|M knora-base:ProjectMember" ; | knora-base:creationDate ?currentTime . 
| } |} | | | USING <http://www.ontotext.com/explicit> | |WHERE { | BIND(IRI("http://www.knora.org/data/anything") AS ?dataNamedGraph) | BIND(IRI("http://data.knora.org/twoLabels") AS ?resource) | BIND(IRI("http://www.knora.org/ontology/anything#Thing") AS ?resourceClass) | BIND(IRI("http://data.knora.org/users/9XBCrDV3SRa7kS1WwynB4Q") AS ?creatorIri) | BIND(IRI("http://data.knora.org/projects/anything") AS ?projectIri) | BIND(str("Test Thing") AS ?label) | BIND(NOW() AS ?currentTime) | | | | # Value 0 | # Property: http://www.knora.org/ontology/anything#hasBlueThing | | BIND(IRI("http://www.knora.org/ontology/anything#hasBlueThing") AS ?linkProperty0) | BIND(IRI("http://www.knora.org/ontology/anything#hasBlueThingValue") AS ?linkValueProperty0) | BIND(IRI("http://data.knora.org/twoLabels/values/GjV_4ayjRDebneEQM0zHuw") AS ?newLinkValue0) | BIND(IRI("http://data.knora.org/a-thing") AS ?linkTarget0) |} """.stripMargin private val linkValueWithoutPermissions = """ |PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> |PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#> |PREFIX owl: <http://www.w3.org/2002/07/owl#> |PREFIX xsd: <http://www.w3.org/2001/XMLSchema#> |PREFIX knora-base: <http://www.knora.org/ontology/knora-base#> | |INSERT { | GRAPH ?dataNamedGraph { | ?resource rdf:type ?resourceClass ; | knora-base:isDeleted false ; | knora-base:attachedToUser ?creatorIri ; | knora-base:attachedToProject ?projectIri ; | rdfs:label ?label ; | knora-base:hasPermissions "V knora-base:UnknownUser|M knora-base:ProjectMember" ; | knora-base:creationDate ?currentTime . | | | | # Value 0 | # Property: http://www.knora.org/ontology/anything#hasThing | | ?resource ?linkProperty0 ?linkTarget0 . | | ?newLinkValue0 rdf:type knora-base:LinkValue ; | rdf:subject ?resource ; | rdf:predicate ?linkProperty0 ; | rdf:object ?linkTarget0 ; | knora-base:valueHasString "http://data.knora.org/a-thing" ; | knora-base:valueHasRefCount 1 ; | knora-base:valueHasOrder ?nextOrder0 ; | knora-base:isDeleted false ; | knora-base:valueCreationDate ?currentTime . | | ?newLinkValue0 knora-base:attachedToUser <http://data.knora.org/users/9XBCrDV3SRa7kS1WwynB4Q> . | ?resource ?linkValueProperty0 ?newLinkValue0 . | } |} | | | USING <http://www.ontotext.com/explicit> | |WHERE { | BIND(IRI("http://www.knora.org/data/anything") AS ?dataNamedGraph) | BIND(IRI("http://data.knora.org/missingValuePermissions") AS ?resource) | BIND(IRI("http://www.knora.org/ontology/anything#Thing") AS ?resourceClass) | BIND(IRI("http://data.knora.org/users/9XBCrDV3SRa7kS1WwynB4Q") AS ?creatorIri) | BIND(IRI("http://data.knora.org/projects/anything") AS ?projectIri) | BIND(str("Test Thing") AS ?label) | BIND(NOW() AS ?currentTime) | | # Value 0 | # Property: http://www.knora.org/ontology/anything#hasOtherThing | | BIND(IRI("http://www.knora.org/ontology/anything#hasOtherThing") AS ?linkProperty0) | BIND(IRI("http://www.knora.org/ontology/anything#hasOtherThingValue") AS ?linkValueProperty0) | BIND(IRI("http://data.knora.org/missingValuePermissions/values/GjV_4ayjRDebneEQM0zHuw") AS ?newLinkValue0) | BIND(IRI("http://data.knora.org/a-thing") AS ?linkTarget0) | | | | ?linkTarget0 rdf:type ?linkTargetClass0 ; | knora-base:isDeleted false . | ?linkTargetClass0 rdfs:subClassOf+ knora-base:Resource . | | | | ?resourceClass rdfs:subClassOf* ?restriction0 . | ?restriction0 a owl:Restriction . | ?restriction0 owl:onProperty ?linkProperty0 . | | | BIND(0 AS ?nextOrder0) | | | |} """.stripMargin }
nie-ine/Knora
webapi/src/test/scala/org/knora/webapi/store/triplestore/GraphDBConsistencyCheckingSpec.scala
Scala
agpl-3.0
105,485
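The update strings collected above are deliberately inconsistent fixtures: too many values for a cardinality, duplicate lastModificationDate or rdfs:label statements, wrong subject or object classes, properties with no cardinality, a link to the wrong target class, and a link value without permissions. A consistency-checking triplestore is expected to reject each of them. As a minimal sketch of how such a string could be exercised, the snippet below posts an update to a SPARQL endpoint and reports the status code; the endpoint URL and the use of java.net.http are assumptions for illustration, not taken from the spec itself.

import java.net.URI
import java.net.http.{HttpClient, HttpRequest, HttpResponse}

object ConsistencyCheckSketch {
  // Hypothetical endpoint of a repository with consistency checking enabled.
  private val updateEndpoint =
    "http://localhost:7200/repositories/knora-test/statements"

  /** Posts a SPARQL update; a consistency violation should surface as a non-2xx status. */
  def postUpdate(update: String): Int = {
    val client = HttpClient.newHttpClient()
    val request = HttpRequest.newBuilder()
      .uri(URI.create(updateEndpoint))
      .header("Content-Type", "application/sparql-update")
      .POST(HttpRequest.BodyPublishers.ofString(update))
      .build()
    client.send(request, HttpResponse.BodyHandlers.ofString()).statusCode()
  }
}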
/* * Copyright (c) 2014-2015 by its authors. Some rights reserved. * See the project homepage at: http://www.monifu.org * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package monifu.reactive.observers import java.util.concurrent.{CountDownLatch, TimeUnit} import minitest.TestSuite import monifu.concurrent.Scheduler import monifu.reactive.Ack.{Cancel, Continue} import monifu.reactive.OverflowStrategy.DropNew import monifu.reactive.exceptions.DummyException import monifu.reactive.{Subscriber, Ack, Observer} import scala.concurrent.{Future, Promise} object BufferDropNewConcurrencySuite extends TestSuite[Scheduler] { def tearDown(env: Scheduler) = () def setup() = { monifu.concurrent.Implicits.globalScheduler } test("should not lose events, test 1") { implicit s => var number = 0 val completed = new CountDownLatch(1) val underlying = new Observer[Int] { def onNext(elem: Int): Future[Ack] = { number += 1 Continue } def onError(ex: Throwable): Unit = { s.reportFailure(ex) } def onComplete(): Unit = { completed.countDown() } } val buffer = BufferedSubscriber[Int](Subscriber(underlying, s), DropNew(100000)) for (i <- 0 until 100000) buffer.onNext(i) buffer.onComplete() assert(completed.await(20, TimeUnit.SECONDS), "completed.await should have succeeded") assert(number == 100000) } test("should not lose events, test 2") { implicit s => var number = 0 val completed = new CountDownLatch(1) val underlying = new Observer[Int] { def onNext(elem: Int): Future[Ack] = { number += 1 Continue } def onError(ex: Throwable): Unit = { s.reportFailure(ex) } def onComplete(): Unit = { completed.countDown() } } val buffer = BufferedSubscriber[Int](Subscriber(underlying, s), DropNew(100000)) def loop(n: Int): Unit = if (n > 0) s.execute(new Runnable { def run() = { buffer.onNext(n); loop(n-1) } }) else buffer.onComplete() loop(10000) assert(completed.await(20, TimeUnit.SECONDS), "completed.await should have succeeded") assertEquals(number, 10000) } test("should drop incoming when over capacity") { implicit s => // repeating test 100 times because of problems for (_ <- 0 until 100) { var sum = 0 val completed = new CountDownLatch(1) val promise = Promise[Ack]() val underlying = new Observer[Int] { var received = 0 def onNext(elem: Int) = { sum += elem received += 1 promise.future } def onError(ex: Throwable): Unit = { s.reportFailure(ex) } def onComplete() = { completed.countDown() } } val buffer = BufferedSubscriber[Int](Subscriber(underlying, s), DropNew(5)) assertEquals(buffer.onNext(1), Continue) assertEquals(buffer.onNext(2), Continue) assertEquals(buffer.onNext(3), Continue) assertEquals(buffer.onNext(4), Continue) assertEquals(buffer.onNext(5), Continue) for (i <- 0 until 20) buffer.onNext(6 + i) buffer.onComplete() promise.success(Continue) assert(completed.await(5, TimeUnit.SECONDS), "wasCompleted.await should have succeeded") assertEquals(sum, 15) } } test("should send onError when empty") { implicit s => val latch = new CountDownLatch(1) val buffer = BufferedSubscriber[Int](new Subscriber[Int] { def onError(ex: 
Throwable) = { assert(ex.getMessage == "dummy") latch.countDown() } def onNext(elem: Int) = throw new IllegalStateException() def onComplete() = throw new IllegalStateException() val scheduler = s }, DropNew(5)) buffer.onError(new RuntimeException("dummy")) assert(latch.await(5, TimeUnit.SECONDS), "latch.await should have succeeded") val r = buffer.onNext(1) assertEquals(r, Cancel) } test("should send onError when in flight") { implicit s => val latch = new CountDownLatch(1) val buffer = BufferedSubscriber[Int](new Subscriber[Int] { def onError(ex: Throwable) = { assert(ex.getMessage == "dummy") latch.countDown() } def onNext(elem: Int) = Continue def onComplete() = throw new IllegalStateException() val scheduler = s }, DropNew(5)) buffer.onNext(1) buffer.onError(new RuntimeException("dummy")) assert(latch.await(5, TimeUnit.SECONDS), "latch.await should have succeeded") } test("should send onError when at capacity") { implicit s => val latch = new CountDownLatch(1) val promise = Promise[Ack]() val buffer = BufferedSubscriber[Int]( new Subscriber[Int] { def onError(ex: Throwable) = { assert(ex.getMessage == "dummy") latch.countDown() } def onNext(elem: Int) = promise.future def onComplete() = throw new IllegalStateException() val scheduler = s }, DropNew(5)) buffer.onNext(1) buffer.onNext(2) buffer.onNext(3) buffer.onNext(4) buffer.onNext(5) buffer.onError(DummyException("dummy")) promise.success(Continue) assert(latch.await(5, TimeUnit.SECONDS), "latch.await should have succeeded") } test("should send onComplete when empty") { implicit s => val latch = new CountDownLatch(1) val buffer = BufferedSubscriber[Int](new Subscriber[Int] { def onError(ex: Throwable) = throw new IllegalStateException() def onNext(elem: Int) = throw new IllegalStateException() def onComplete() = latch.countDown() val scheduler = s }, DropNew(5)) buffer.onComplete() assert(latch.await(5, TimeUnit.SECONDS), "latch.await should have succeeded") } test("should send onComplete when in flight") { implicit s => val latch = new CountDownLatch(1) val promise = Promise[Ack]() val buffer = BufferedSubscriber[Int](new Subscriber[Int] { def onError(ex: Throwable) = throw new IllegalStateException() def onNext(elem: Int) = promise.future def onComplete() = latch.countDown() val scheduler = s }, DropNew(5)) buffer.onNext(1) buffer.onComplete() assert(!latch.await(1, TimeUnit.SECONDS), "latch.await should have failed") promise.success(Continue) assert(latch.await(5, TimeUnit.SECONDS), "latch.await should have succeeded") } test("should send onComplete when at capacity") { implicit s => val latch = new CountDownLatch(1) val promise = Promise[Ack]() val buffer = BufferedSubscriber[Int](new Subscriber[Int] { def onError(ex: Throwable) = throw new IllegalStateException() def onNext(elem: Int) = promise.future def onComplete() = latch.countDown() val scheduler = s }, DropNew(5)) buffer.onNext(1) buffer.onNext(2) buffer.onNext(3) buffer.onNext(4) buffer.onComplete() assert(!latch.await(1, TimeUnit.SECONDS), "latch.await should have failed") promise.success(Continue) assert(latch.await(5, TimeUnit.SECONDS), "latch.await should have succeeded") } test("should do onComplete only after all the queue was drained") { implicit s => var sum = 0L val complete = new CountDownLatch(1) val startConsuming = Promise[Continue]() val buffer = BufferedSubscriber[Long](new Subscriber[Long] { def onNext(elem: Long) = { sum += elem startConsuming.future } def onError(ex: Throwable) = throw ex def onComplete() = complete.countDown() val scheduler = s }, 
DropNew(10000)) (0 until 9999).foreach(x => buffer.onNext(x)) buffer.onComplete() startConsuming.success(Continue) assert(complete.await(10, TimeUnit.SECONDS), "complete.await should have succeeded") assert(sum == (0 until 9999).sum) } test("should do onComplete only after all the queue was drained, test2") { implicit s => var sum = 0L val complete = new CountDownLatch(1) val buffer = BufferedSubscriber[Long](new Subscriber[Long] { def onNext(elem: Long) = { sum += elem Continue } def onError(ex: Throwable) = throw ex def onComplete() = complete.countDown() val scheduler = s }, DropNew(10000)) (0 until 9999).foreach(x => buffer.onNext(x)) buffer.onComplete() assert(complete.await(10, TimeUnit.SECONDS), "complete.await should have succeeded") assert(sum == (0 until 9999).sum) } test("should do onError only after the queue was drained") { implicit s => var sum = 0L val complete = new CountDownLatch(1) val startConsuming = Promise[Continue]() val buffer = BufferedSubscriber[Long](new Subscriber[Long] { def onNext(elem: Long) = { sum += elem startConsuming.future } def onError(ex: Throwable) = complete.countDown() def onComplete() = throw new IllegalStateException() val scheduler = s }, DropNew(10000)) (0 until 9999).foreach(x => buffer.onNext(x)) buffer.onError(new RuntimeException) startConsuming.success(Continue) assert(complete.await(10, TimeUnit.SECONDS), "complete.await should have succeeded") assertEquals(sum, (0 until 9999).sum) } test("should do onError only after all the queue was drained, test2") { implicit s => var sum = 0L val complete = new CountDownLatch(1) val buffer = BufferedSubscriber[Long](new Subscriber[Long] { def onNext(elem: Long) = { sum += elem Continue } def onError(ex: Throwable) = complete.countDown() def onComplete() = throw new IllegalStateException() val scheduler = s }, DropNew(10000)) (0 until 9999).foreach(x => buffer.onNext(x)) buffer.onError(new RuntimeException) assert(complete.await(10, TimeUnit.SECONDS), "complete.await should have succeeded") assertEquals(sum, (0 until 9999).sum) } }
virtualirfan/monifu
monifu/jvm/src/test/scala/monifu/reactive/observers/BufferDropNewConcurrencySuite.scala
Scala
apache-2.0
10,512
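Every test in the suite above follows the same pattern: wrap a plain Observer in a BufferedSubscriber configured with the DropNew overflow strategy, push events with onNext, and wait on a CountDownLatch for completion. A condensed sketch of that pattern, distilled from the calls the tests themselves make (the observer body and the element count are illustrative):

import java.util.concurrent.{CountDownLatch, TimeUnit}
import scala.concurrent.Future
import monifu.concurrent.Scheduler
import monifu.reactive.Ack.Continue
import monifu.reactive.OverflowStrategy.DropNew
import monifu.reactive.observers.BufferedSubscriber
import monifu.reactive.{Ack, Observer, Subscriber}

object DropNewSketch {
  def run(implicit s: Scheduler): Long = {
    val completed = new CountDownLatch(1)
    var sum = 0L

    val underlying = new Observer[Int] {
      def onNext(elem: Int): Future[Ack] = { sum += elem; Continue }
      def onError(ex: Throwable): Unit = s.reportFailure(ex)
      def onComplete(): Unit = completed.countDown()
    }

    // Capacity 5: elements arriving while the buffer is full are dropped.
    val buffer = BufferedSubscriber[Int](Subscriber(underlying, s), DropNew(5))
    (1 to 100).foreach(buffer.onNext)
    buffer.onComplete()
    completed.await(10, TimeUnit.SECONDS)
    sum
  }
}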
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // scalastyle:off println package org.apache.spark.examples.streaming import org.apache.kafka.clients.consumer.ConsumerConfig import org.apache.kafka.common.serialization.StringDeserializer import org.apache.spark.SparkConf import org.apache.spark.streaming._ import org.apache.spark.streaming.kafka010._ /** * Consumes messages from one or more topics in Kafka and does wordcount. * Usage: DirectKafkaWordCount <brokers> <topics> * <brokers> is a list of one or more Kafka brokers * <groupId> is a consumer group name to consume from topics * <topics> is a list of one or more kafka topics to consume from * * Example: * $ bin/run-example streaming.DirectKafkaWordCount broker1-host:port,broker2-host:port \ * consumer-group topic1,topic2 */ object DirectKafkaWordCount { def main(args: Array[String]) { if (args.length < 3) { System.err.println(s""" |Usage: DirectKafkaWordCount <brokers> <topics> | <brokers> is a list of one or more Kafka brokers | <groupId> is a consumer group name to consume from topics | <topics> is a list of one or more kafka topics to consume from | """.stripMargin) System.exit(1) } StreamingExamples.setStreamingLogLevels() val Array(brokers, groupId, topics) = args // Create context with 2 second batch interval val sparkConf = new SparkConf().setAppName("DirectKafkaWordCount") val ssc = new StreamingContext(sparkConf, Seconds(2)) // Create direct kafka stream with brokers and topics val topicsSet = topics.split(",").toSet val kafkaParams = Map[String, Object]( ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG -> brokers, ConsumerConfig.GROUP_ID_CONFIG -> groupId, ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG -> classOf[StringDeserializer], ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG -> classOf[StringDeserializer]) val messages = KafkaUtils.createDirectStream[String, String]( ssc, LocationStrategies.PreferConsistent, ConsumerStrategies.Subscribe[String, String](topicsSet, kafkaParams)) // Get the lines, split them into words, count the words and print val lines = messages.map(_.value) val words = lines.flatMap(_.split(" ")) val wordCounts = words.map(x => (x, 1L)).reduceByKey(_ + _) wordCounts.print() // Start the computation ssc.start() ssc.awaitTermination() } } // scalastyle:on println
lhfei/spark-in-action
spark-2.x/src/main/scala/org/apache/spark/examples/streaming/DirectKafkaWordCount.scala
Scala
apache-2.0
3,333
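The streaming job above is a per-batch word count: split each line, pair every word with 1L, and reduce by key. The same counting logic can be sanity-checked on plain Scala collections, which is sometimes a convenient way to reason about what reduceByKey yields for a single batch; this snippet is only an illustration and does not touch Spark:

object WordCountSketch extends App {
  val lines = Seq("spark streaming with kafka", "kafka topics and kafka brokers")

  // Mirrors lines.flatMap(_.split(" ")).map(x => (x, 1L)).reduceByKey(_ + _)
  val wordCounts: Map[String, Long] =
    lines
      .flatMap(_.split(" "))
      .map(word => (word, 1L))
      .groupBy(_._1)
      .map { case (word, pairs) => word -> pairs.map(_._2).sum }

  wordCounts.foreach(println) // e.g. (kafka,3)
}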
/* * Copyright 2019 Google LLC All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.cloud.bqsh case class QueryConfig( // Custom Options timeoutMinutes: Int = 60, parametersFromFile: Seq[String] = Seq.empty, createIfNeeded: Boolean = false, allowMultipleQueries: Boolean = false, // Standard Options allowLargeResults: Boolean = false, appendTable: Boolean = false, batch: Boolean = false, clusteringFields: Seq[String] = Seq.empty, destinationKmsKey: String = "", destinationSchema: String = "", destinationTable: String = "", dryRun: Boolean = false, externalTableDefinition: String = "", label: String = "", maximumBytesBilled: Long = -1, parameters: Seq[String] = Seq.empty, replace: Boolean = false, requireCache: Boolean = false, requirePartitionFilter: Boolean = true, schemaUpdateOption: Seq[String] = Seq.empty, timePartitioningExpiration: Long = -1, timePartitioningField: String = "", timePartitioningType: String = "", useCache: Boolean = true, useLegacySql: Boolean = false, // Global Options datasetId: String = "", debugMode: Boolean = false, jobId: String = "", jobProperties: Map[String,String] = Map.empty, location: String = "", projectId: String = "", sync: Boolean = true, statsTable: String = "" )
CloudVLab/professional-services
tools/bigquery-zos-mainframe-connector/src/main/scala/com/google/cloud/bqsh/QueryConfig.scala
Scala
apache-2.0
1,846
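Since every field of QueryConfig has a default, callers would typically override only a handful of settings with named arguments and derive variants with copy. A hypothetical construction (all values below are made up):

import com.google.cloud.bqsh.QueryConfig

object QueryConfigSketch {
  val dryRunConfig = QueryConfig(
    projectId = "my-project",
    datasetId = "my_dataset",
    destinationTable = "my_dataset.results",
    dryRun = true,
    timeoutMinutes = 30,
    maximumBytesBilled = 10L * 1024 * 1024 * 1024 // cap scans at 10 GiB
  )

  // Same settings, but actually run the query in batch mode.
  val batchRun = dryRunConfig.copy(dryRun = false, batch = true)
}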
package org.jetbrains.plugins.scala package lang package parser package parsing package top.params import com.intellij.lang.PsiBuilder import builder.ScalaPsiBuilder /** * @author Alexander Podkhalyuzin * Date: 08.02.2008 */ /* * ClassParamClauses ::= {ClassParamClause} * [[nl] '(' 'implicit' ClassParams ')'] */ object ClassParamClauses { def parse(builder: ScalaPsiBuilder): Boolean = { val classParamClausesMarker = builder.mark while (ClassParamClause parse builder) {} ImplicitClassParamClause parse builder classParamClausesMarker.done(ScalaElementTypes.PARAM_CLAUSES) true } }
consulo/consulo-scala
src/org/jetbrains/plugins/scala/lang/parser/parsing/top/params/ClassParamClauses.scala
Scala
apache-2.0
637
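The grammar comment in that parser describes zero or more ordinary class parameter clauses followed by an optional implicit clause. A concrete class declaration that exercises all three parts, useful for picturing what the PARAM_CLAUSES node has to cover:

// {ClassParamClause} [[nl] '(' 'implicit' ClassParams ')'] in practice:
class Wallet(owner: String)(initial: BigDecimal)(implicit currency: String) {
  def describe: String = s"$owner holds $initial $currency"
}

object WalletDemo extends App {
  implicit val ccy: String = "CHF"
  println(new Wallet("Ada")(BigDecimal(42)).describe)
}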
package org.littlewings.tweetbot.application.lilymyu.lyrics.service import org.junit.Test import org.littlewings.tweetbot.test.JaxrsServerTestSupport class LilymyuLyricsTweetServiceITests extends JaxrsServerTestSupport { @Test def tweet(): Unit = withServer { _ => injectedBean(classOf[LilymyuLyricsTweetService]).autoPickTweet() } }
kazuhira-r/tweet-bot
src/integration-test/scala/org/littlewings/tweetbot/application/lilymyu/lyrics/service/LilymyuLyricsTweetServiceITests.scala
Scala
mit
354
package org.faker import org.scalatest.{FlatSpec, Matchers} class PhoneNumberSpec extends FlatSpec with Matchers with FakerBehaviors { "phoneNumber" should behave like validResult(PhoneNumber.phoneNumber) "cellPhone" should behave like validResult(PhoneNumber.cellPhone) }
ralli/faker_scala
src/test/scala/org/faker/PhoneNumberSpec.scala
Scala
bsd-3-clause
281
package org.infinispan.spark.suites import org.apache.spark.SparkConf import org.infinispan.client.hotrod.{RemoteCache, Search} import org.infinispan.spark.config.ConnectorConfiguration import org.infinispan.spark.domain._ import org.infinispan.spark.rdd.InfinispanRDD import org.infinispan.spark.test._ import org.scalatest.{DoNotDiscover, FunSuite, Matchers} @DoNotDiscover class FilterByQueryProtoSuite extends FunSuite with Spark with MultipleServers with Matchers { override def getCacheType: CacheType.Value = CacheType.DISTRIBUTED override def getSparkConfig: SparkConf = { val config = super.getSparkConfig config.set("spark.serializer", "org.apache.spark.serializer.KryoSerializer") config } override def getConfiguration: ConnectorConfiguration = { new ConnectorConfiguration() .addProtoFile("test.proto", protoFile) .setAutoRegisterProto() .addMessageMarshaller(classOf[AddressMarshaller]) .addMessageMarshaller(classOf[PersonMarshaller]) } lazy val remoteCache: RemoteCache[Int, Person] = { val defaultCache = remoteCacheManager.getCache[Int, Person] (1 to 20).foreach { idx => defaultCache.put(idx, new Person(s"name$idx", idx, new Address(s"street$idx", idx, "N/A"))); } defaultCache } val protoFile = """ package org.infinispan.spark.domain; message Person { required string name = 1; optional int32 age = 2; optional Address address = 3; } message Address { required string street = 1; required int32 number = 2; required string country = 3; } """.stripMargin test("Filter by Query with proto file and provided marshallers") { val rdd = new InfinispanRDD[Int, Person](sc, getConfiguration) val query = Search.getQueryFactory(remoteCache).from(classOf[Person]).having("address.number").gt(10).build() val filteredRdd = rdd.filterByQuery[Person](query) val result = filteredRdd.values.collect() result.length shouldBe 10 result.sortWith(_.getName > _.getName).head.getName shouldBe "name20" } test("Filter by Query String") { val rdd = new InfinispanRDD[Int, Person](sc, getConfiguration) val filteredRdd = rdd.filterByQuery[Person]("From org.infinispan.spark.domain.Person p where p.address.number > 10") val result = filteredRdd.values.collect() result.length shouldBe 10 result.sortWith(_.getName > _.getName).head.getName shouldBe "name20" } }
galderz/infinispan-spark
src/test/scala/org/infinispan/spark/suites/FilterByQueryProtoSuite.scala
Scala
apache-2.0
2,569
/* * This file is part of the "silex" library of helpers for Apache Spark. * * Copyright (c) 2016 Red Hat, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License */ package com.redhat.et.silex.sample.split import scala.reflect.ClassTag import org.apache.spark.storage.StorageLevel import org.apache.spark.rdd.RDD import org.apache.spark.Logging /** * Enhances RDDs with methods for split-sampling * @tparam T The row type of the RDD * {{{ * // import conversions to enhance RDDs with split sampling * import com.redhat.et.silex.sample.split.implicits._ * * // obtain a sequence of 5 RDDs randomly split from RDD 'data', where each element * // has probability 1/5 of being assigned to each output. * val splits = data.splitSample(5) * * // randomly split data so that the second output has twice the probability of receiving * // a data element as the first, and the third output has three times the probability. * val splitsW = data.weightedSplitSample(Seq(1.0, 2.0, 3.0)) * }}} */ class SplitSampleRDDFunctions[T :ClassTag](self: RDD[T]) extends Logging with Serializable { import com.redhat.et.silex.rdd.multiplex.implicits._ import SplitSampleRDDFunctions.{defaultSL, find} /** * Split an RDD into `n` random subsets, where each row is assigned to an output with * equal probability 1/n. * @param n The number of output RDDs to split into * @param persist The storage level to use for persisting the intermediate result. * @param seed A random seed to use for sampling. Will be modified, deterministically, * by partition id. */ def splitSample(n: Int, persist: StorageLevel = defaultSL, seed: Long = scala.util.Random.nextLong): Seq[RDD[T]] = { require(n > 0, "n must be > 0") self.flatMuxPartitionsWithIndex(n, (id: Int, data: Iterator[T]) => { scala.util.Random.setSeed(id.toLong * seed) val samples = Vector.fill(n) { scala.collection.mutable.ArrayBuffer.empty[T] } data.foreach { e => samples(scala.util.Random.nextInt(n)) += e } samples }, persist) } /** * Split an RDD into weighted random subsets, where each row is assigned to an output (j) with * probability proportional to the corresponding jth weight. * @param weights A sequence of weights that determine the relative probabilities of * sampling into the corresponding RDD outputs. Weights will be normalized so that they * sum to 1. Individual weights must be strictly > 0. * @param persist The storage level to use for persisting the intermediate result. * @param seed A random seed to use for sampling. Will be modified, deterministically, * by partition id. 
*/ def weightedSplitSample(weights: Seq[Double], persist: StorageLevel = defaultSL, seed: Long = scala.util.Random.nextLong): Seq[RDD[T]] = { require(weights.length > 0, "weights must be non-empty") require(weights.forall(_ > 0.0), "weights must be > 0") val n = weights.length val z = weights.sum val w = weights.scan(0.0)(_ + _).map(_ / z).toVector self.flatMuxPartitionsWithIndex(n, (id: Int, data: Iterator[T]) => { scala.util.Random.setSeed(id.toLong * seed) val samples = Vector.fill(n) { scala.collection.mutable.ArrayBuffer.empty[T] } data.foreach { e => val x = scala.util.Random.nextDouble val j = find(x, w) samples(j) += e } samples }, persist) } } /** Definitions used by SplitSampleRDDFunctions instances */ object SplitSampleRDDFunctions { /** The default storage level used for intermediate sampling results */ val defaultSL = StorageLevel.MEMORY_ONLY private def find(x: Double, w: Seq[Double]) = { var (l, u) = (0, w.length - 1) if (x >= 1.0) u - 1 else { var m = (l + u) / 2 while (m > l) { if (x < w(m)) u = m else if (x >= w(m + 1)) l = m + 1 else { l = m; u = m + 1 } m = (l + u) / 2 } m } } } /** Implicit conversions to enhance RDDs with split sampling methods */ object implicits { import scala.language.implicitConversions implicit def splitSampleRDDFunctions[T :ClassTag](rdd: RDD[T]): SplitSampleRDDFunctions[T] = new SplitSampleRDDFunctions(rdd) }
erikerlandson/silex
src/main/scala/com/redhat/et/silex/sample/split.scala
Scala
apache-2.0
4,738
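weightedSplitSample first turns the weights into a normalized cumulative vector (weights.scan(0.0)(_ + _).map(_ / z)) and then, for every uniform draw, the private find helper binary-searches that vector for the output index. A small standalone check of the index selection, with find copied verbatim from the companion object above and hypothetical weights:

object WeightedSplitSketch extends App {
  // Copied from SplitSampleRDDFunctions.find for illustration.
  def find(x: Double, w: Seq[Double]): Int = {
    var (l, u) = (0, w.length - 1)
    if (x >= 1.0) u - 1
    else {
      var m = (l + u) / 2
      while (m > l) {
        if (x < w(m)) u = m
        else if (x >= w(m + 1)) l = m + 1
        else { l = m; u = m + 1 }
        m = (l + u) / 2
      }
      m
    }
  }

  val weights = Seq(1.0, 2.0, 3.0)            // hypothetical weights
  val z = weights.sum
  val w = weights.scan(0.0)(_ + _).map(_ / z) // cumulative: 0, 1/6, 3/6, 1
  // A draw of 0.4 lies between 1/6 and 3/6, so it goes to output index 1,
  // the output that was given weight 2.0.
  println(find(0.4, w.toVector))              // prints 1
}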
/** * Copyright (C) 2009-2011 the original author or authors. * See the notice.md file distributed with this work for additional * information regarding copyright ownership. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.fusesource.scalate.converter import _root_.org.junit.runner.RunWith import _root_.org.scalatest.junit.JUnitRunner import _root_.org.scalatest.FunSuite import _root_.java.io.File import _root_.org.fusesource.scalate._ /** * @version $Revision : 1.1 $ */ @RunWith(classOf[JUnitRunner]) class ConvertJspTest extends FunSuite { assertJustText("<foo/>") assertJustText("text <foo/> text") assertJustText("<curl value='/foo'/>") assertConvert( """<c:url value='/foo'/>""", """${uri("/foo")}""") assertConvert( """blah <c:url value='/foo'/> blah""", """blah ${uri("/foo")} blah""") assertConvert( """blah <c:url value='/foo/${x}/bar/${y}'/> blah""", """blah ${uri("/foo/" + x + "/bar/" + y)} blah""") assertConvert( """<a href="<c:url value='/foo'/>">body</a>""", """<a href="${uri("/foo")}">body</a>""") assertConvert( """something <c:out value="${foo}"/> or other""", """something ${foo} or other""") assertConvert( """something <c:out value="${foo}" escapeXml="true"/> or other""", """something ${escape(foo)} or other""") assertConvert( """something <c:out value="${foo}" escapeXml="false"/> or other""", """something ${unescape(foo)} or other""") assertConvert( """something <c:out value="${foo}" escapeXml="x"/> or other""", """something ${value(foo, x)} or other""") assertConvert( """foo <c:if test='${foo}'> a <c:if test='${bar}'> b </c:if> c </c:if> whatnot""", """foo #if(foo) a #if(bar) b #end c #end whatnot""") assertConvert( """foo <c:set var="x" value='${foo}'/> whatnot""", """foo #{ var x = foo }# whatnot""") assertConvert( """foo <c:if test="${it.language eq 'Cheese'}"> bar </c:if> whatnot""", """foo #if(it.getLanguage == "Cheese") bar #end whatnot""") assertConvert( """foo <c:if test='${foo}'> bar </c:if> whatnot""", """foo #if(foo) bar #end whatnot""") assertConvert( """ foo <c:if test="${x.y == 5}"> bar </c:if> whatnot""", """ foo #if(x.getY == 5) bar #end whatnot""") assertConvert( """ foo <c:forEach var="foo" items="${something.whatnot}"> blah ${foo.bar} </c:forEach> whatnot""", """ foo #for(foo <- something.getWhatnot) blah ${foo.getBar} #end whatnot""") assertConvert( """ foo <c:forEach var="i" begin="1" end="10"> blah ${i} </c:forEach> whatnot""", """ foo #for(i <- 1.to(10)) blah ${i} #end whatnot""") assertConvert( """ foo <c:forEach var="i" begin="1" end="10" step="3"> blah ${i} </c:forEach> whatnot""", """ foo #for(i <- 1.to(10, 3)) blah ${i} #end whatnot""") assertConvert( """ foo <c:choose> <c:when test="${x == 5}"> five </c:when> <c:when test="${x == 6}"> six </c:when> <c:otherwise> default </c:otherwise> </c:choose> whatnot""", """ foo #if(x == 5) five #elseif(x == 6) six #else default #end whatnot""") def assertJustText(jsp: String): String = { val result = convert(jsp) expect(jsp, "converting JSP: " + jsp){result} result } def assertConvert(jsp: String, ssp: String): 
String = { val result = convert(jsp) expect(ssp, "converting JSP: " + jsp){result} result } def convert(jsp: String): String = { println("Converting JSP: " + jsp) val converter = new JspConverter val result = converter.convert(jsp) println(" => " + result) println result } }
dnatic09/scalate
scalate-jsp-converter/src/test/scala/org/fusesource/scalate/converter/ConvertJspTest.scala
Scala
apache-2.0
4,135
/* * Copyright 2010 LinkedIn * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package kafka.consumer import java.util.Properties import kafka.utils.{ZKConfig, Utils} import kafka.api.OffsetRequest class ConsumerConfig(props: Properties) extends ZKConfig(props) { /** a string that uniquely identifies a set of consumers within the same consumer group */ val groupId = Utils.getString(props, "groupid") /** consumer id: generated automatically if not set. * Set this explicitly for only testing purpose. */ val consumerId: Option[String] = /** TODO: can be written better in scala 2.8 */ if (Utils.getString(props, "consumerid", null) != null) Some(Utils.getString(props, "consumerid")) else None /** the socket timeout for network requests */ val socketTimeoutMs = Utils.getInt(props, "socket.timeout.ms", 30 * 1000) /** the socket receive buffer for network requests */ val socketBufferSize = Utils.getInt(props, "socket.buffersize", 64*1024) /** the number of byes of messages to attempt to fetch */ val fetchSize = Utils.getInt(props, "fetch.size", 300 * 1024) /** the maximum allowable fetch size for a very large message */ val maxFetchSize: Int = fetchSize * 10 /** to avoid repeatedly polling a broker node which has no new data we will backoff every time we get an empty set from the broker*/ val backoffIncrementMs: Long = Utils.getInt(props, "backoff.increment.ms", 1000) /** if true, periodically commit to zookeeper the offset of messages already fetched by the consumer */ val autoCommit = Utils.getBoolean(props, "autocommit.enable", true) /** the frequency in ms that the consumer offsets are committed to zookeeper */ val autoCommitIntervalMs = Utils.getInt(props, "autocommit.interval.ms", 10 * 1000) /** max number of messages buffered for consumption */ val maxQueuedChunks = Utils.getInt(props, "queuedchunks.max", 100) /* what to do if an offset is out of range. smallest : automatically reset the offset to the smallest offset largest : automatically reset the offset to the largest offset anything else: throw exception to the consumer */ val autoOffsetReset = Utils.getString(props, "autooffset.reset", OffsetRequest.SMALLEST_TIME_STRING) /** throw a timeout exception to the consumer if no message is available for consumption after the specified interval */ val consumerTimeoutMs = Utils.getInt(props, "consumer.timeout.ms", -1) /* embed a consumer in the broker. e.g., topic1:1,topic2:1 */ val embeddedConsumerTopics = Utils.getString(props, "embeddedconsumer.topics", null) }
jinfei21/kafka
src/kafka/consumer/ConsumerConfig.scala
Scala
apache-2.0
3,127
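A hedged construction sketch for the config class above. The property keys and derived fields come straight from the source; the values, the object name, and the assumption that no further ZooKeeper properties are needed for this illustration are mine.

package kafka.consumer

import java.util.Properties

// Illustrative only: build a ConsumerConfig from Properties using keys that
// appear in the class above, then read back a few of the derived vals.
// ZKConfig (the superclass) may require additional ZooKeeper connection
// properties that are not shown here.
object ConsumerConfigExample {
  def main(args: Array[String]): Unit = {
    val props = new Properties()
    props.put("groupid", "example-group")           // consumer group id
    props.put("fetch.size", (512 * 1024).toString)  // override the 300KB default
    props.put("autocommit.enable", "false")         // disable periodic offset commits

    val config = new ConsumerConfig(props)
    println("groupId      = " + config.groupId)
    println("fetchSize    = " + config.fetchSize)
    println("maxFetchSize = " + config.maxFetchSize) // derived as fetchSize * 10
    println("autoCommit   = " + config.autoCommit)
  }
}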
/* * Test.scala * Test runner for com.cra.figaro.test.algorithm. * * Created By: Avi Pfeffer (apfeffer@cra.com) * Creation Date: Jan 1, 2009 * * Copyright 2013 Avrom J. Pfeffer and Charles River Analytics, Inc. * See http://www.cra.com or email figaro@cra.com for information. * * See http://www.github.com/p2t2/figaro for a copy of the software license. */ package com.cra.figaro.test.algorithm import com.cra.figaro.algorithm._ private object Test { def main(args: Array[String]) = { (new ValuesTest).execute() (new AlgorithmTest).execute() //(new ExpandTest).execute() (new AbstractionTest).execute() } }
agarbuno/figaro
Figaro/src/test/scala/com/cra/figaro/test/algorithm/Test.scala
Scala
bsd-3-clause
653
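The runner above simply executes a handful of suites from a main method; below is a self-contained sketch of the same pattern. The suite and its single test are invented for illustration, and only the execute() call mirrors the Figaro runner.

import org.scalatest.FunSuite

// Illustrative: a trivial suite plus a main that runs it directly, the same
// way Test.main above calls execute() on each algorithm suite.
class ExampleSuite extends FunSuite {
  test("addition works") {
    assert(1 + 1 == 2)
  }
}

object RunExampleSuite {
  def main(args: Array[String]): Unit = {
    (new ExampleSuite).execute()
  }
}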
package sparklyr import java.io.{ByteArrayOutputStream, OutputStream} import java.nio.channels.Channels import scala.collection.JavaConverters._ import org.apache.arrow.vector._ import org.apache.arrow.vector.ipc.ArrowStreamReader import org.apache.arrow.vector.ipc.message.MessageSerializer import org.apache.arrow.vector.ipc.WriteChannel import org.apache.arrow.vector.util.ByteArrayReadableSeekableByteChannel import org.apache.spark.TaskContext import org.apache.spark.api.java.JavaRDD import org.apache.spark.rdd.RDD import org.apache.spark.sql.catalyst.CatalystTypeConverters import org.apache.spark.sql.catalyst.encoders.RowEncoder import org.apache.spark.sql.catalyst.InternalRow import org.apache.spark.sql.{DataFrame, Dataset, Row, SQLContext} import org.apache.spark.sql.types._ import org.apache.spark.sql.execution.LogicalRDD import org.apache.spark.sql.util.ArrowUtils import org.apache.spark.sql.execution.arrow.ArrowWriter import org.apache.spark.sql.Row import org.apache.spark.sql.SparkSession import org.apache.spark.sql.util.__THIS_IS_THE_ROAD_TO_CLOWNTOWN__ArrowUtils import org.apache.spark.sql.vectorized.{ArrowColumnVector, ColumnarBatch, ColumnVector} import org.apache.spark.util.TaskCompletionListener /** * Iterator interface to iterate over Arrow record batches and return rows */ trait ArrowRowIterator extends Iterator[InternalRow] { } class ArrowConvertersImpl { def tryWithSafeFinally[T](block: => T)(finallyBlock: => Unit): T = { var originalThrowable: Throwable = null try { block } catch { case t: Throwable => // Purposefully not using NonFatal, because even fatal exceptions // we don't want to have our finallyBlock suppress originalThrowable = t throw originalThrowable } finally { try { finallyBlock } catch { case t: Throwable if (originalThrowable != null && originalThrowable != t) => originalThrowable.addSuppressed(t) throw originalThrowable } } } def toBatchIterator( rowIter: Iterator[Row], schema: StructType, maxRecordsPerBatch: Int, timeZoneId: String, context: TaskContext): Iterator[Array[Byte]] = { (new ArrowConvertersImpl()).toBatchIterator(rowIter, schema, maxRecordsPerBatch, timeZoneId, Option(context)) } def toBatchArray( rowIter: Iterator[Row], schema: StructType, timeZoneId: String, recordsPerBatch: Int) : Array[Byte] = { val batches: Iterator[Array[Byte]] = toBatchIterator( rowIter, schema, recordsPerBatch, timeZoneId, Option.empty ) val out = new ByteArrayOutputStream() val batchWriter = new ArrowBatchStreamWriter(schema, out, timeZoneId) batchWriter.writeBatches(batches) batchWriter.end() out.toByteArray() } /** * Maps Iterator from InternalRow to serialized ArrowRecordBatches. Limit ArrowRecordBatch size * in a batch by setting maxRecordsPerBatch or use 0 to fully consume rowIter. 
*/ def toBatchIterator( rowIter: Iterator[Row], schema: StructType, maxRecordsPerBatch: Int, timeZoneId: String, context: Option[TaskContext]): Iterator[Array[Byte]] = { val arrowSchema = org.apache.spark.sql.util.__THIS_IS_THE_ROAD_TO_CLOWNTOWN__ArrowUtils.toArrowSchema(schema, timeZoneId) val allocator = org.apache.spark.sql.util.__THIS_IS_THE_ROAD_TO_CLOWNTOWN__ArrowUtils.rootAllocator.newChildAllocator("toBatchIterator", 0, Long.MaxValue) val root = VectorSchemaRoot.create(arrowSchema, allocator) val unloader = new VectorUnloader(root) val arrowWriter = ArrowWriter.create(root) if (!context.isEmpty) { context.get.addTaskCompletionListener(new TaskCompletionListener { override def onTaskCompletion(context: TaskContext): Unit = { root.close() allocator.close() } }) } val toRow = RowEncoder(schema).createSerializer new Iterator[Array[Byte]] { override def hasNext: Boolean = rowIter.hasNext || { root.close() allocator.close() false } override def next(): Array[Byte] = { val out = new ByteArrayOutputStream() val writeChannel = new WriteChannel(Channels.newChannel(out)) tryWithSafeFinally { var rowCount = 0 while (rowIter.hasNext && (maxRecordsPerBatch <= 0 || rowCount < maxRecordsPerBatch)) { val row: Row = rowIter.next() val internalRow: InternalRow = toRow(row) arrowWriter.write(internalRow) rowCount += 1 } arrowWriter.finish() val batch = unloader.getRecordBatch() MessageSerializer.serialize(writeChannel, batch) batch.close() } { arrowWriter.reset() } out.toByteArray } } } def fromPayloadIterator( payloadIter: Iterator[Array[Byte]], context: TaskContext): ArrowRowIterator = { fromPayloadIterator(payloadIter, Option(context)) } /** * Maps Iterator from ArrowPayload to Row. Returns a pair containing the row iterator * and the schema from the first batch of Arrow data read. 
*/ def fromPayloadIterator( payloadIter: Iterator[Array[Byte]], context: Option[TaskContext]): ArrowRowIterator = { val allocator = org.apache.spark.sql.util.__THIS_IS_THE_ROAD_TO_CLOWNTOWN__ArrowUtils.rootAllocator.newChildAllocator("fromPayloadIterator", 0, Long.MaxValue) new ArrowRowIterator { private var reader: ArrowStreamReader = null private var rowIter = if (payloadIter.hasNext) nextBatch() else Iterator.empty if (!context.isEmpty) { context.get.addTaskCompletionListener(new TaskCompletionListener { override def onTaskCompletion(context: TaskContext): Unit = { closeReader() allocator.close() } }) } override def hasNext: Boolean = rowIter.hasNext || { closeReader() if (payloadIter.hasNext) { rowIter = nextBatch() true } else { allocator.close() false } } override def next(): InternalRow = rowIter.next() private def closeReader(): Unit = { if (reader != null) { reader.close() reader = null } } private def nextBatch(): Iterator[InternalRow] = { val in = new ByteArrayReadableSeekableByteChannel(payloadIter.next()) reader = new ArrowStreamReader(in, allocator) reader.loadNextBatch() // throws IOException val root = reader.getVectorSchemaRoot // throws IOException val columns = root.getFieldVectors.asScala.map { vector => new ArrowColumnVector(vector).asInstanceOf[ColumnVector] }.toArray val batch = new ColumnarBatch(columns) batch.setNumRows(root.getRowCount) batch.rowIterator().asScala } } } } object ArrowConverters { def fromPayloadArray( records: Array[Array[Byte]], schema: StructType): Iterator[Row] = { val context = TaskContext.get() val singleRecords: Iterator[Array[Byte]] = records.map(record => {record}).iterator val iter: ArrowRowIterator = (new ArrowConvertersImpl()).fromPayloadIterator(singleRecords, Option.empty) iter.map({ val converter = CatalystTypeConverters.createToScalaConverter(schema) converter(_).asInstanceOf[Row] }) } def toArrowDataset( df: DataFrame, sparkSession: SparkSession, timeZoneId: String): Dataset[Array[Byte]] = { val schema = df.schema val maxRecordsPerBatch = sparkSession.sessionState.conf.arrowMaxRecordsPerBatch val encoder = org.apache.spark.sql.Encoders.BINARY df.mapPartitions( iter => (new ArrowConvertersImpl()).toBatchIterator(iter, schema, maxRecordsPerBatch, timeZoneId, TaskContext.get()) )(encoder) } def toArrowStream( df: DataFrame, timeZoneId: String, batchIter: Iterator[Array[Byte]]) : Array[Byte] = { val out = new ByteArrayOutputStream() val batchWriter = new ArrowBatchStreamWriter(df.schema, out, timeZoneId) batchWriter.writeOneBatch(batchIter) batchWriter.end() out.toByteArray() } def toArrowBatchRdd( df: DataFrame, sparkSession: SparkSession, timeZoneId: String): Array[Byte] = { val batches: Array[Array[Byte]] = toArrowDataset(df, sparkSession, timeZoneId).collect() val out = new ByteArrayOutputStream() val batchWriter = new ArrowBatchStreamWriter(df.schema, out, timeZoneId) batchWriter.writeBatches(batches.iterator) batchWriter.end() out.toByteArray() } def toDataFrame( payloadRDD: JavaRDD[Array[Byte]], schema: StructType, sparkSession: SparkSession): DataFrame = { toDataFrame(payloadRDD.rdd, schema, sparkSession) } def toDataFrame( payloadRDD: RDD[Array[Byte]], schema: StructType, sparkSession: SparkSession): DataFrame = { val rdd = payloadRDD.mapPartitions { iter => val converters = new ArrowConvertersImpl() val context = TaskContext.get() converters.fromPayloadIterator(iter, context) } val logger = new Logger("Arrow", 0) val invoke = new Invoke() var streaming: Boolean = false invoke.invoke( sparkSession.sqlContext.getClass, "", 
sparkSession.sqlContext, "internalCreateDataFrame", Array(rdd, schema, streaming.asInstanceOf[Object]), logger ).asInstanceOf[DataFrame] } }
rstudio/sparklyr
java/spark-3.0.0/arrowconverters.scala
Scala
apache-2.0
9,606
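A rough round-trip sketch against the converters above, assuming a local SparkSession and the Spark 3.0.x / Arrow versions this file targets (see the spark-3.0.0 path). toArrowBatchRdd, toArrowDataset, and toDataFrame are taken from the ArrowConverters object above; the session setup and sample data are illustrative.

package sparklyr

import org.apache.spark.sql.SparkSession

// Illustrative sketch: serialize a small DataFrame into an Arrow stream and
// rebuild a DataFrame from the per-partition Arrow batches.
object ArrowRoundTripExample {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder().master("local[*]").appName("arrow-example").getOrCreate()
    import spark.implicits._

    val df = Seq((1, "a"), (2, "b"), (3, "c")).toDF("id", "label")

    // DataFrame -> one serialized Arrow stream (all batches concatenated)
    val streamBytes: Array[Byte] = ArrowConverters.toArrowBatchRdd(df, spark, "UTC")
    println("serialized " + streamBytes.length + " bytes of Arrow data")

    // DataFrame -> RDD of Arrow batches -> DataFrame again
    val batchRdd = ArrowConverters.toArrowDataset(df, spark, "UTC").rdd
    val roundTripped = ArrowConverters.toDataFrame(batchRdd, df.schema, spark)
    roundTripped.show()

    spark.stop()
  }
}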
/** * Intel Intrinsics for Lightweight Modular Staging Framework * https://github.com/ivtoskov/lms-intrinsics * Department of Computer Science, ETH Zurich, Switzerland * __ _ __ _ _ * / /____ ___ _____ (_)____ / /_ _____ (_)____ _____ (_)_____ _____ * / // __ `__ \\ / ___/______ / // __ \\ / __// ___// // __ \\ / ___// // ___// ___/ * / // / / / / /(__ )/_____// // / / // /_ / / / // / / /(__ )/ // /__ (__ ) * /_//_/ /_/ /_//____/ /_//_/ /_/ \\__//_/ /_//_/ /_//____//_/ \\___//____/ * * Copyright (C) 2017 Ivaylo Toskov (itoskov@ethz.ch) * Alen Stojanov (astojanov@inf.ethz.ch) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package ch.ethz.acl.intrinsics import ch.ethz.acl.intrinsics.MicroArchType._ import ch.ethz.acl.passera.unsigned.{UByte, UInt, ULong, UShort} import scala.reflect.SourceContext import scala.language.higherKinds trait SSE200 extends IntrinsicsBase { /** * Store 64-bit integer "a" into memory using a non-temporal hint to minimize * cache pollution. If the cache line containing address "mem_addr" is already in * the cache, the cache will be updated. * mem_addr: __int64*, a: __int64, mem_addrOffset: int */ case class MM_STREAM_SI64[A[_], U:Integral](mem_addr: Exp[A[Long]], a: Exp[Long], mem_addrOffset: Exp[U])(implicit val cont: Container[A]) extends PointerIntrinsicsDef[U, Unit] { val category = List(IntrinsicsCategory.Store) val intrinsicType = List(IntrinsicsType.Integer) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Unpack and interleave 8-bit integers from the low half of "a" and "b", and * store the results in "dst". * a: __m128i, b: __m128i */ case class MM_UNPACKLO_EPI8(a: Exp[__m128i], b: Exp[__m128i]) extends IntrinsicsDef[__m128i] { val category = List(IntrinsicsCategory.Swizzle) val intrinsicType = List(IntrinsicsType.Integer) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Compare packed unsigned 8-bit integers in "a" and "b", and store packed * minimum values in "dst". * a: __m128i, b: __m128i */ case class MM_MIN_EPU8(a: Exp[__m128i], b: Exp[__m128i]) extends IntrinsicsDef[__m128i] { val category = List(IntrinsicsCategory.SpecialMathFunctions) val intrinsicType = List(IntrinsicsType.Integer) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Compare packed double-precision (64-bit) floating-point elements in "a" and * "b", and store packed minimum values in "dst". * a: __m128d, b: __m128d */ case class MM_MIN_PD(a: Exp[__m128d], b: Exp[__m128d]) extends IntrinsicsDef[__m128d] { val category = List(IntrinsicsCategory.SpecialMathFunctions) val intrinsicType = List(IntrinsicsType.FloatingPoint) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Move the lower double-precision (64-bit) floating-point element from "b" to * the lower element of "dst", and copy the upper element from "a" to the upper * element of "dst". 
* a: __m128d, b: __m128d */ case class MM_MOVE_SD(a: Exp[__m128d], b: Exp[__m128d]) extends IntrinsicsDef[__m128d] { val category = List(IntrinsicsCategory.Move) val intrinsicType = List(IntrinsicsType.FloatingPoint) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Compare the lower double-precision (64-bit) floating-point element in "a" and * "b" for greater-than-or-equal, and return the boolean result (0 or 1). This * instruction will not signal an exception for QNaNs. * a: __m128d, b: __m128d */ case class MM_UCOMIGE_SD(a: Exp[__m128d], b: Exp[__m128d]) extends IntrinsicsDef[Int] { val category = List(IntrinsicsCategory.Compare) val intrinsicType = List(IntrinsicsType.FloatingPoint) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Copy double-precision (64-bit) floating-point element "a" to the lower element * of "dst", and zero the upper element. * a: double */ case class MM_SET_SD(a: Exp[Double]) extends IntrinsicsDef[__m128d] { val category = List(IntrinsicsCategory.IntrinsicsSet) val intrinsicType = List(IntrinsicsType.FloatingPoint) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Compare the lower double-precision (64-bit) floating-point element in "a" and * "b" for not-equal, and return the boolean result (0 or 1). This instruction * will not signal an exception for QNaNs. * a: __m128d, b: __m128d */ case class MM_UCOMINEQ_SD(a: Exp[__m128d], b: Exp[__m128d]) extends IntrinsicsDef[Int] { val category = List(IntrinsicsCategory.Compare) val intrinsicType = List(IntrinsicsType.FloatingPoint) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Shift "a" right by "imm8" bytes while shifting in zeros, and store the results * in "dst". * a: __m128i, imm8: int */ case class MM_BSRLI_SI128(a: Exp[__m128i], imm8: Exp[Int]) extends IntrinsicsDef[__m128i] { val category = List(IntrinsicsCategory.Shift) val intrinsicType = List(IntrinsicsType.Integer) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Convert packed double-precision (64-bit) floating-point elements in "a" to * packed single-precision (32-bit) floating-point elements, and store the * results in "dst". * a: __m128d */ case class MM_CVTPD_PS(a: Exp[__m128d]) extends IntrinsicsDef[__m128] { val category = List(IntrinsicsCategory.Convert) val intrinsicType = List(IntrinsicsType.FloatingPoint) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Convert the lower double-precision (64-bit) floating-point element in "a" to a * 32-bit integer, and store the result in "dst". * a: __m128d */ case class MM_CVTSD_SI32(a: Exp[__m128d]) extends IntrinsicsDef[Int] { val category = List(IntrinsicsCategory.Convert) val intrinsicType = List(IntrinsicsType.FloatingPoint, IntrinsicsType.Integer) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Copy 64-bit integer "a" to the lower element of "dst", and zero the upper * element. * a: __int64 */ case class MM_CVTSI64X_SI128(a: Exp[Long]) extends IntrinsicsDef[__m128i] { val category = List(IntrinsicsCategory.Convert) val intrinsicType = List(IntrinsicsType.Integer) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Shift packed 16-bit integers in "a" right by "count" while shifting in sign * bits, and store the results in "dst". 
* a: __m128i, count: __m128i */ case class MM_SRA_EPI16(a: Exp[__m128i], count: Exp[__m128i]) extends IntrinsicsDef[__m128i] { val category = List(IntrinsicsCategory.Shift) val intrinsicType = List(IntrinsicsType.Integer) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Compare packed 8-bit integers in "a" and "b" for less-than, and store the * results in "dst". Note: This intrinsic emits the pcmpgtb instruction with the * order of the operands switched. * a: __m128i, b: __m128i */ case class MM_CMPLT_EPI8(a: Exp[__m128i], b: Exp[__m128i]) extends IntrinsicsDef[__m128i] { val category = List(IntrinsicsCategory.Compare) val intrinsicType = List(IntrinsicsType.Integer) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Store 128-bits of integer data from "a" into memory. * "mem_addr" must be * aligned on a 16-byte boundary or a general-protection exception may be * generated. * mem_addr: __m128i*, a: __m128i, mem_addrOffset: int */ case class MM_STORE_SI128[A[_], U:Integral](mem_addr: Exp[A[__m128i]], a: Exp[__m128i], mem_addrOffset: Exp[U])(implicit val cont: Container[A]) extends PointerIntrinsicsDef[U, Unit] { val category = List(IntrinsicsCategory.Store) val intrinsicType = List(IntrinsicsType.Integer) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Invalidate and flush the cache line that contains "p" from all levels of the * cache hierarchy. * p: void const*, pOffset: int */ case class MM_CLFLUSH[A[_], T:Typ, U:Integral](p: Exp[A[T]], pOffset: Exp[U])(implicit val cont: Container[A]) extends VoidPointerIntrinsicsDef[T, U, Unit] { val category = List(IntrinsicsCategory.GeneralSupport) val intrinsicType = List() val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Shift packed 32-bit integers in "a" right by "count" while shifting in sign * bits, and store the results in "dst". * a: __m128i, count: __m128i */ case class MM_SRA_EPI32(a: Exp[__m128i], count: Exp[__m128i]) extends IntrinsicsDef[__m128i] { val category = List(IntrinsicsCategory.Shift) val intrinsicType = List(IntrinsicsType.Integer) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Shift packed 32-bit integers in "a" right by "imm8" while shifting in sign * bits, and store the results in "dst". * a: __m128i, imm8: int */ case class MM_SRAI_EPI32(a: Exp[__m128i], imm8: Exp[Int]) extends IntrinsicsDef[__m128i] { val category = List(IntrinsicsCategory.Shift) val intrinsicType = List(IntrinsicsType.Integer) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Shift "a" right by "imm8" bytes while shifting in zeros, and store the results * in "dst". * a: __m128i, imm8: int */ case class MM_SRLI_SI128(a: Exp[__m128i], imm8: Exp[Int]) extends IntrinsicsDef[__m128i] { val category = List(IntrinsicsCategory.Shift) val intrinsicType = List(IntrinsicsType.Integer) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Broadcast double-precision (64-bit) floating-point value "a" to all elements * of "dst". 
* a: double */ case class MM_SET_PD1(a: Exp[Double]) extends IntrinsicsDef[__m128d] { val category = List(IntrinsicsCategory.IntrinsicsSet) val intrinsicType = List(IntrinsicsType.FloatingPoint) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Compare the lower double-precision (64-bit) floating-point element in "a" and * "b" for not-equal, and return the boolean result (0 or 1). * a: __m128d, b: __m128d */ case class MM_COMINEQ_SD(a: Exp[__m128d], b: Exp[__m128d]) extends IntrinsicsDef[Int] { val category = List(IntrinsicsCategory.Compare) val intrinsicType = List(IntrinsicsType.FloatingPoint) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Compare the lower double-precision (64-bit) floating-point elements in "a" and * "b" for not-less-than, store the result in the lower element of "dst", and * copy the upper element from "a" to the upper element of "dst". * a: __m128d, b: __m128d */ case class MM_CMPNLT_SD(a: Exp[__m128d], b: Exp[__m128d]) extends IntrinsicsDef[__m128d] { val category = List(IntrinsicsCategory.Compare) val intrinsicType = List(IntrinsicsType.FloatingPoint) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Copy the lower 32-bit integer in "a" to "dst". * a: __m512i */ case class MM512_CVTSI512_SI32(a: Exp[__m512i]) extends IntrinsicsDef[Int] { val category = List(IntrinsicsCategory.Convert) val intrinsicType = List(IntrinsicsType.Integer) val performance = Map.empty[MicroArchType, Performance] val header = "immintrin.h" } /** * Convert packed single-precision (32-bit) floating-point elements in "a" to * packed 32-bit integers with truncation, and store the results in "dst". * a: __m128 */ case class MM_CVTTPS_EPI32(a: Exp[__m128]) extends IntrinsicsDef[__m128i] { val category = List(IntrinsicsCategory.Convert) val intrinsicType = List(IntrinsicsType.FloatingPoint, IntrinsicsType.Integer) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Broadcast 64-bit integer "a" to all elements of "dst". * a: __m64 */ case class MM_SET1_EPI64(a: Exp[__m64]) extends IntrinsicsDef[__m128i] { val category = List(IntrinsicsCategory.IntrinsicsSet) val intrinsicType = List(IntrinsicsType.Integer) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Convert the lower double-precision (64-bit) floating-point element in "a" to a * 64-bit integer, and store the result in "dst". * a: __m128d */ case class MM_CVTSD_SI64X(a: Exp[__m128d]) extends IntrinsicsDef[Long] { val category = List(IntrinsicsCategory.Convert) val intrinsicType = List(IntrinsicsType.FloatingPoint, IntrinsicsType.Integer) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Shift packed 16-bit integers in "a" left by "count" while shifting in zeros, * and store the results in "dst". * a: __m128i, count: __m128i */ case class MM_SLL_EPI16(a: Exp[__m128i], count: Exp[__m128i]) extends IntrinsicsDef[__m128i] { val category = List(IntrinsicsCategory.Shift) val intrinsicType = List(IntrinsicsType.Integer) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Compute the bitwise OR of packed double-precision (64-bit) floating-point * elements in "a" and "b", and store the results in "dst". 
* a: __m128d, b: __m128d */ case class MM_OR_PD(a: Exp[__m128d], b: Exp[__m128d]) extends IntrinsicsDef[__m128d] { val category = List(IntrinsicsCategory.Logical) val intrinsicType = List(IntrinsicsType.FloatingPoint) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Load 128-bits of integer data from memory into "dst". * "mem_addr" must be * aligned on a 16-byte boundary or a general-protection exception may be * generated. * mem_addr: __m128i const*, mem_addrOffset: int */ case class MM_LOAD_SI128[A[_], U:Integral](mem_addr: Exp[A[__m128i]], mem_addrOffset: Exp[U])(implicit val cont: Container[A]) extends PointerIntrinsicsDef[U, __m128i] { val category = List(IntrinsicsCategory.Load) val intrinsicType = List(IntrinsicsType.Integer) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Add packed 32-bit integers in "a" and "b", and store the results in "dst". * a: __m128i, b: __m128i */ case class MM_ADD_EPI32(a: Exp[__m128i], b: Exp[__m128i]) extends IntrinsicsDef[__m128i] { val category = List(IntrinsicsCategory.Arithmetic) val intrinsicType = List(IntrinsicsType.Integer) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Subtract packed double-precision (64-bit) floating-point elements in "b" from * packed double-precision (64-bit) floating-point elements in "a", and store the * results in "dst". * a: __m128d, b: __m128d */ case class MM_SUB_PD(a: Exp[__m128d], b: Exp[__m128d]) extends IntrinsicsDef[__m128d] { val category = List(IntrinsicsCategory.Arithmetic) val intrinsicType = List(IntrinsicsType.FloatingPoint) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Unpack and interleave double-precision (64-bit) floating-point elements from * the high half of "a" and "b", and store the results in "dst". * a: __m128d, b: __m128d */ case class MM_UNPACKHI_PD(a: Exp[__m128d], b: Exp[__m128d]) extends IntrinsicsDef[__m128d] { val category = List(IntrinsicsCategory.Swizzle) val intrinsicType = List(IntrinsicsType.FloatingPoint) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Multiply the packed 16-bit integers in "a" and "b", producing intermediate * 32-bit integers, and store the low 16 bits of the intermediate integers in * "dst". * a: __m128i, b: __m128i */ case class MM_MULLO_EPI16(a: Exp[__m128i], b: Exp[__m128i]) extends IntrinsicsDef[__m128i] { val category = List(IntrinsicsCategory.Arithmetic) val intrinsicType = List(IntrinsicsType.Integer) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Set packed double-precision (64-bit) floating-point elements in "dst" with the * supplied values. * e1: double, e0: double */ case class MM_SET_PD(e1: Exp[Double], e0: Exp[Double]) extends IntrinsicsDef[__m128d] { val category = List(IntrinsicsCategory.IntrinsicsSet) val intrinsicType = List(IntrinsicsType.FloatingPoint) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Compare packed double-precision (64-bit) floating-point elements in "a" and * "b" for equality, and store the results in "dst". 
* a: __m128d, b: __m128d */ case class MM_CMPEQ_PD(a: Exp[__m128d], b: Exp[__m128d]) extends IntrinsicsDef[__m128d] { val category = List(IntrinsicsCategory.Compare) val intrinsicType = List(IntrinsicsType.FloatingPoint) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Compare packed 8-bit integers in "a" and "b" for greater-than, and store the * results in "dst". * a: __m128i, b: __m128i */ case class MM_CMPGT_EPI8(a: Exp[__m128i], b: Exp[__m128i]) extends IntrinsicsDef[__m128i] { val category = List(IntrinsicsCategory.Compare) val intrinsicType = List(IntrinsicsType.Integer) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Compute the bitwise AND of 128 bits (representing integer data) in "a" and * "b", and store the result in "dst". * a: __m128i, b: __m128i */ case class MM_AND_SI128(a: Exp[__m128i], b: Exp[__m128i]) extends IntrinsicsDef[__m128i] { val category = List(IntrinsicsCategory.Logical) val intrinsicType = List(IntrinsicsType.Integer) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Compare the lower double-precision (64-bit) floating-point elements in "a" and * "b" for equality, store the result in the lower element of "dst", and copy the * upper element from "a" to the upper element of "dst". * a: __m128d, b: __m128d */ case class MM_CMPEQ_SD(a: Exp[__m128d], b: Exp[__m128d]) extends IntrinsicsDef[__m128d] { val category = List(IntrinsicsCategory.Compare) val intrinsicType = List(IntrinsicsType.FloatingPoint) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Load 64-bit integer from memory into the first element of "dst". * mem_addr: __m128i const*, mem_addrOffset: int */ case class MM_LOADL_EPI64[A[_], U:Integral](mem_addr: Exp[A[__m128i]], mem_addrOffset: Exp[U])(implicit val cont: Container[A]) extends PointerIntrinsicsDef[U, __m128i] { val category = List(IntrinsicsCategory.Load) val intrinsicType = List(IntrinsicsType.Integer) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Shift packed 16-bit integers in "a" right by "imm8" while shifting in sign * bits, and store the results in "dst". * a: __m128i, imm8: int */ case class MM_SRAI_EPI16(a: Exp[__m128i], imm8: Exp[Int]) extends IntrinsicsDef[__m128i] { val category = List(IntrinsicsCategory.Shift) val intrinsicType = List(IntrinsicsType.Integer) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Convert packed 16-bit integers from "a" and "b" to packed 8-bit integers using * signed saturation, and store the results in "dst". * a: __m128i, b: __m128i */ case class MM_PACKS_EPI16(a: Exp[__m128i], b: Exp[__m128i]) extends IntrinsicsDef[__m128i] { val category = List(IntrinsicsCategory.Miscellaneous) val intrinsicType = List(IntrinsicsType.Integer) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Compare packed 32-bit integers in "a" and "b" for less-than, and store the * results in "dst". Note: This intrinsic emits the pcmpgtd instruction with the * order of the operands switched. 
* a: __m128i, b: __m128i */ case class MM_CMPLT_EPI32(a: Exp[__m128i], b: Exp[__m128i]) extends IntrinsicsDef[__m128i] { val category = List(IntrinsicsCategory.Compare) val intrinsicType = List(IntrinsicsType.Integer) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Compute the bitwise OR of 128 bits (representing integer data) in "a" and "b", * and store the result in "dst". * a: __m128i, b: __m128i */ case class MM_OR_SI128(a: Exp[__m128i], b: Exp[__m128i]) extends IntrinsicsDef[__m128i] { val category = List(IntrinsicsCategory.Logical) val intrinsicType = List(IntrinsicsType.Integer) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Compute the square root of packed double-precision (64-bit) floating-point * elements in "a", and store the results in "dst". * a: __m128d */ case class MM_SQRT_PD(a: Exp[__m128d]) extends IntrinsicsDef[__m128d] { val category = List(IntrinsicsCategory.ElementaryMathFunctions) val intrinsicType = List(IntrinsicsType.FloatingPoint) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Compare the lower double-precision (64-bit) floating-point elements in "a" and * "b" for not-equal, store the result in the lower element of "dst", and copy * the upper element from "a" to the upper element of "dst". * a: __m128d, b: __m128d */ case class MM_CMPNEQ_SD(a: Exp[__m128d], b: Exp[__m128d]) extends IntrinsicsDef[__m128d] { val category = List(IntrinsicsCategory.Compare) val intrinsicType = List(IntrinsicsType.FloatingPoint) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Broadcast double-precision (64-bit) floating-point value "a" to all elements * of "dst". * a: double */ case class MM_SET1_PD(a: Exp[Double]) extends IntrinsicsDef[__m128d] { val category = List(IntrinsicsCategory.IntrinsicsSet) val intrinsicType = List(IntrinsicsType.FloatingPoint) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Subtract packed 64-bit integers in "b" from packed 64-bit integers in "a", and * store the results in "dst". * a: __m128i, b: __m128i */ case class MM_SUB_EPI64(a: Exp[__m128i], b: Exp[__m128i]) extends IntrinsicsDef[__m128i] { val category = List(IntrinsicsCategory.Arithmetic) val intrinsicType = List(IntrinsicsType.Integer) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Unpack and interleave 32-bit integers from the low half of "a" and "b", and * store the results in "dst". * a: __m128i, b: __m128i */ case class MM_UNPACKLO_EPI32(a: Exp[__m128i], b: Exp[__m128i]) extends IntrinsicsDef[__m128i] { val category = List(IntrinsicsCategory.Swizzle) val intrinsicType = List(IntrinsicsType.Integer) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Set packed double-precision (64-bit) floating-point elements in "dst" with the * supplied values in reverse order. * e1: double, e0: double */ case class MM_SETR_PD(e1: Exp[Double], e0: Exp[Double]) extends IntrinsicsDef[__m128d] { val category = List(IntrinsicsCategory.IntrinsicsSet) val intrinsicType = List(IntrinsicsType.FloatingPoint) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Store the lower double-precision (64-bit) floating-point element from "a" into * memory. 
* mem_addr: double*, a: __m128d, mem_addrOffset: int */ case class MM_STOREL_PD[A[_], U:Integral](mem_addr: Exp[A[Double]], a: Exp[__m128d], mem_addrOffset: Exp[U])(implicit val cont: Container[A]) extends PointerIntrinsicsDef[U, Unit] { val category = List(IntrinsicsCategory.Store) val intrinsicType = List(IntrinsicsType.FloatingPoint) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Cast vector of type __m128 to type __m128i. This intrinsic is only used for * compilation and does not generate any instructions, thus it has zero latency. * a: __m128 */ case class MM_CASTPS_SI128(a: Exp[__m128]) extends IntrinsicsDef[__m128i] { val category = List(IntrinsicsCategory.Cast) val intrinsicType = List(IntrinsicsType.FloatingPoint, IntrinsicsType.Integer) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Store the lower double-precision (64-bit) floating-point element from "a" into * 2 contiguous elements in memory. "mem_addr" must be aligned on a 16-byte * boundary or a general-protection exception may be generated. * mem_addr: double*, a: __m128d, mem_addrOffset: int */ case class MM_STORE_PD1[A[_], U:Integral](mem_addr: Exp[A[Double]], a: Exp[__m128d], mem_addrOffset: Exp[U])(implicit val cont: Container[A]) extends PointerIntrinsicsDef[U, Unit] { val category = List(IntrinsicsCategory.Store) val intrinsicType = List(IntrinsicsType.FloatingPoint) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Compute the bitwise NOT of 128 bits (representing integer data) in "a" and * then AND with "b", and store the result in "dst". * a: __m128i, b: __m128i */ case class MM_ANDNOT_SI128(a: Exp[__m128i], b: Exp[__m128i]) extends IntrinsicsDef[__m128i] { val category = List(IntrinsicsCategory.Logical) val intrinsicType = List(IntrinsicsType.Integer) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Broadcast 8-bit integer "a" to all elements of "dst". This intrinsic may * generate "vpbroadcastb". * a: char */ case class MM_SET1_EPI8(a: Exp[Byte]) extends IntrinsicsDef[__m128i] { val category = List(IntrinsicsCategory.IntrinsicsSet) val intrinsicType = List(IntrinsicsType.Integer) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Store 2 double-precision (64-bit) floating-point elements from "a" into memory * in reverse order. * "mem_addr" must be aligned on a 16-byte boundary or a * general-protection exception may be generated. * mem_addr: double*, a: __m128d, mem_addrOffset: int */ case class MM_STORER_PD[A[_], U:Integral](mem_addr: Exp[A[Double]], a: Exp[__m128d], mem_addrOffset: Exp[U])(implicit val cont: Container[A]) extends PointerIntrinsicsDef[U, Unit] { val category = List(IntrinsicsCategory.Store) val intrinsicType = List(IntrinsicsType.FloatingPoint) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Subtract the lower double-precision (64-bit) floating-point element in "b" * from the lower double-precision (64-bit) floating-point element in "a", store * the result in the lower element of "dst", and copy the upper element from "a" * to the upper element of "dst". 
* a: __m128d, b: __m128d */ case class MM_SUB_SD(a: Exp[__m128d], b: Exp[__m128d]) extends IntrinsicsDef[__m128d] { val category = List(IntrinsicsCategory.Arithmetic) val intrinsicType = List(IntrinsicsType.FloatingPoint) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Multiply packed double-precision (64-bit) floating-point elements in "a" and * "b", and store the results in "dst". * a: __m128d, b: __m128d */ case class MM_MUL_PD(a: Exp[__m128d], b: Exp[__m128d]) extends IntrinsicsDef[__m128d] { val category = List(IntrinsicsCategory.Arithmetic) val intrinsicType = List(IntrinsicsType.FloatingPoint) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Compare packed double-precision (64-bit) floating-point elements in "a" and * "b" for greater-than-or-equal, and store the results in "dst". * a: __m128d, b: __m128d */ case class MM_CMPGE_PD(a: Exp[__m128d], b: Exp[__m128d]) extends IntrinsicsDef[__m128d] { val category = List(IntrinsicsCategory.Compare) val intrinsicType = List(IntrinsicsType.FloatingPoint) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Shift packed 32-bit integers in "a" right by "count" while shifting in zeros, * and store the results in "dst". * a: __m128i, count: __m128i */ case class MM_SRL_EPI32(a: Exp[__m128i], count: Exp[__m128i]) extends IntrinsicsDef[__m128i] { val category = List(IntrinsicsCategory.Shift) val intrinsicType = List(IntrinsicsType.Integer) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Compare the lower double-precision (64-bit) floating-point element in "a" and * "b" for less-than, and return the boolean result (0 or 1). * a: __m128d, b: __m128d */ case class MM_COMILT_SD(a: Exp[__m128d], b: Exp[__m128d]) extends IntrinsicsDef[Int] { val category = List(IntrinsicsCategory.Compare) val intrinsicType = List(IntrinsicsType.FloatingPoint) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Unpack and interleave double-precision (64-bit) floating-point elements from * the low half of "a" and "b", and store the results in "dst". * a: __m128d, b: __m128d */ case class MM_UNPACKLO_PD(a: Exp[__m128d], b: Exp[__m128d]) extends IntrinsicsDef[__m128d] { val category = List(IntrinsicsCategory.Swizzle) val intrinsicType = List(IntrinsicsType.FloatingPoint) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Shuffle 32-bit integers in "a" using the control in "imm8", and store the * results in "dst". * a: __m128i, imm8: int */ case class MM_SHUFFLE_EPI32(a: Exp[__m128i], imm8: Exp[Int]) extends IntrinsicsDef[__m128i] { val category = List(IntrinsicsCategory.Swizzle) val intrinsicType = List(IntrinsicsType.Integer) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Shift packed 64-bit integers in "a" right by "count" while shifting in zeros, * and store the results in "dst". * a: __m128i, count: __m128i */ case class MM_SRL_EPI64(a: Exp[__m128i], count: Exp[__m128i]) extends IntrinsicsDef[__m128i] { val category = List(IntrinsicsCategory.Shift) val intrinsicType = List(IntrinsicsType.Integer) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Shift packed 16-bit integers in "a" right by "count" while shifting in zeros, * and store the results in "dst". 
* a: __m128i, count: __m128i */ case class MM_SRL_EPI16(a: Exp[__m128i], count: Exp[__m128i]) extends IntrinsicsDef[__m128i] { val category = List(IntrinsicsCategory.Shift) val intrinsicType = List(IntrinsicsType.Integer) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Convert packed double-precision (64-bit) floating-point elements in "a" to * packed 32-bit integers, and store the results in "dst". * a: __m128d */ case class MM_CVTPD_PI32(a: Exp[__m128d]) extends IntrinsicsDef[__m64] { val category = List(IntrinsicsCategory.Convert) val intrinsicType = List(IntrinsicsType.FloatingPoint, IntrinsicsType.Integer) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Compare the lower double-precision (64-bit) floating-point element in "a" and * "b" for greater-than, and return the boolean result (0 or 1). * a: __m128d, b: __m128d */ case class MM_COMIGT_SD(a: Exp[__m128d], b: Exp[__m128d]) extends IntrinsicsDef[Int] { val category = List(IntrinsicsCategory.Compare) val intrinsicType = List(IntrinsicsType.FloatingPoint) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Shuffle 16-bit integers in the low 64 bits of "a" using the control in "imm8". * Store the results in the low 64 bits of "dst", with the high 64 bits being * copied from from "a" to "dst". * a: __m128i, imm8: int */ case class MM_SHUFFLELO_EPI16(a: Exp[__m128i], imm8: Exp[Int]) extends IntrinsicsDef[__m128i] { val category = List(IntrinsicsCategory.Swizzle) val intrinsicType = List(IntrinsicsType.Integer) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Compare packed 16-bit integers in "a" and "b" for equality, and store the * results in "dst". * a: __m128i, b: __m128i */ case class MM_CMPEQ_EPI16(a: Exp[__m128i], b: Exp[__m128i]) extends IntrinsicsDef[__m128i] { val category = List(IntrinsicsCategory.Compare) val intrinsicType = List(IntrinsicsType.Integer) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Copy the lower 64-bit integer in "a" to the lower element of "dst", and zero * the upper element. * a: __m128i */ case class MM_MOVE_EPI64(a: Exp[__m128i]) extends IntrinsicsDef[__m128i] { val category = List(IntrinsicsCategory.Move) val intrinsicType = List(IntrinsicsType.Integer) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Load 128-bits (composed of 2 packed double-precision (64-bit) floating-point * elements) from memory into "dst". * "mem_addr" must be aligned on a 16-byte * boundary or a general-protection exception may be generated. * mem_addr: double const*, mem_addrOffset: int */ case class MM_LOAD_PD[A[_], U:Integral](mem_addr: Exp[A[Double]], mem_addrOffset: Exp[U])(implicit val cont: Container[A]) extends PointerIntrinsicsDef[U, __m128d] { val category = List(IntrinsicsCategory.Load) val intrinsicType = List(IntrinsicsType.FloatingPoint) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Add packed unsigned 16-bit integers in "a" and "b" using saturation, and store * the results in "dst". 
* a: __m128i, b: __m128i */ case class MM_ADDS_EPU16(a: Exp[__m128i], b: Exp[__m128i]) extends IntrinsicsDef[__m128i] { val category = List(IntrinsicsCategory.Arithmetic) val intrinsicType = List(IntrinsicsType.Integer) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Unpack and interleave 32-bit integers from the high half of "a" and "b", and * store the results in "dst". * a: __m128i, b: __m128i */ case class MM_UNPACKHI_EPI32(a: Exp[__m128i], b: Exp[__m128i]) extends IntrinsicsDef[__m128i] { val category = List(IntrinsicsCategory.Swizzle) val intrinsicType = List(IntrinsicsType.Integer) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Copy the lower 64-bit integer in "a" to "dst". * a: __m128i */ case class MM_MOVEPI64_PI64(a: Exp[__m128i]) extends IntrinsicsDef[__m64] { val category = List(IntrinsicsCategory.Miscellaneous) val intrinsicType = List(IntrinsicsType.Integer) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Convert packed double-precision (64-bit) floating-point elements in "a" to * packed 32-bit integers with truncation, and store the results in "dst". * a: __m128d */ case class MM_CVTTPD_EPI32(a: Exp[__m128d]) extends IntrinsicsDef[__m128i] { val category = List(IntrinsicsCategory.Convert) val intrinsicType = List(IntrinsicsType.FloatingPoint, IntrinsicsType.Integer) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Unpack and interleave 64-bit integers from the high half of "a" and "b", and * store the results in "dst". * a: __m128i, b: __m128i */ case class MM_UNPACKHI_EPI64(a: Exp[__m128i], b: Exp[__m128i]) extends IntrinsicsDef[__m128i] { val category = List(IntrinsicsCategory.Swizzle) val intrinsicType = List(IntrinsicsType.Integer) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Compare packed 32-bit integers in "a" and "b" for equality, and store the * results in "dst". * a: __m128i, b: __m128i */ case class MM_CMPEQ_EPI32(a: Exp[__m128i], b: Exp[__m128i]) extends IntrinsicsDef[__m128i] { val category = List(IntrinsicsCategory.Compare) val intrinsicType = List(IntrinsicsType.Integer) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Convert packed single-precision (32-bit) floating-point elements in "a" to * packed 32-bit integers, and store the results in "dst". * a: __m128 */ case class MM_CVTPS_EPI32(a: Exp[__m128]) extends IntrinsicsDef[__m128i] { val category = List(IntrinsicsCategory.Convert) val intrinsicType = List(IntrinsicsType.FloatingPoint, IntrinsicsType.Integer) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Convert the 64-bit integer "b" to a double-precision (64-bit) floating-point * element, store the result in the lower element of "dst", and copy the upper * element from "a" to the upper element of "dst". * a: __m128d, b: __int64 */ case class MM_CVTSI64_SD(a: Exp[__m128d], b: Exp[Long]) extends IntrinsicsDef[__m128d] { val category = List(IntrinsicsCategory.Convert) val intrinsicType = List(IntrinsicsType.FloatingPoint, IntrinsicsType.Integer) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Shift packed 32-bit integers in "a" left by "imm8" while shifting in zeros, * and store the results in "dst". 
* a: __m128i, imm8: int */ case class MM_SLLI_EPI32(a: Exp[__m128i], imm8: Exp[Int]) extends IntrinsicsDef[__m128i] { val category = List(IntrinsicsCategory.Shift) val intrinsicType = List(IntrinsicsType.Integer) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Copy the lower 64-bit integer in "a" to "dst". * a: __m128i */ case class MM_CVTSI128_SI64(a: Exp[__m128i]) extends IntrinsicsDef[Long] { val category = List(IntrinsicsCategory.Convert) val intrinsicType = List(IntrinsicsType.Integer) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Provide a hint to the processor that the code sequence is a spin-wait loop. * This can help improve the performance and power consumption of spin-wait * loops. */ case class MM_PAUSE() extends IntrinsicsDef[Unit] { val category = List(IntrinsicsCategory.GeneralSupport) val intrinsicType = List() val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Copy the lower double-precision (64-bit) floating-point element of "a" to * "dst". * a: __m512d */ case class MM512_CVTSD_F64(a: Exp[__m512d]) extends IntrinsicsDef[Double] { val category = List(IntrinsicsCategory.Convert) val intrinsicType = List(IntrinsicsType.FloatingPoint) val performance = Map.empty[MicroArchType, Performance] val header = "immintrin.h" } /** * Cast vector of type __m128 to type __m128d. This intrinsic is only used for * compilation and does not generate any instructions, thus it has zero latency. * a: __m128 */ case class MM_CASTPS_PD(a: Exp[__m128]) extends IntrinsicsDef[__m128d] { val category = List(IntrinsicsCategory.Cast) val intrinsicType = List(IntrinsicsType.FloatingPoint) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Cast vector of type __m128i to type __m128d. This intrinsic is only used for * compilation and does not generate any instructions, thus it has zero latency. * a: __m128i */ case class MM_CASTSI128_PD(a: Exp[__m128i]) extends IntrinsicsDef[__m128d] { val category = List(IntrinsicsCategory.Cast) val intrinsicType = List(IntrinsicsType.FloatingPoint) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Compare packed double-precision (64-bit) floating-point elements in "a" and * "b" for less-than, and store the results in "dst". * a: __m128d, b: __m128d */ case class MM_CMPLT_PD(a: Exp[__m128d], b: Exp[__m128d]) extends IntrinsicsDef[__m128d] { val category = List(IntrinsicsCategory.Compare) val intrinsicType = List(IntrinsicsType.FloatingPoint) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Shift packed 32-bit integers in "a" right by "imm8" while shifting in zeros, * and store the results in "dst". * a: __m128i, imm8: int */ case class MM_SRLI_EPI32(a: Exp[__m128i], imm8: Exp[Int]) extends IntrinsicsDef[__m128i] { val category = List(IntrinsicsCategory.Shift) val intrinsicType = List(IntrinsicsType.Integer) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Compare the lower double-precision (64-bit) floating-point element in "a" and * "b" for less-than-or-equal, and return the boolean result (0 or 1). This * instruction will not signal an exception for QNaNs. 
* a: __m128d, b: __m128d */ case class MM_UCOMILE_SD(a: Exp[__m128d], b: Exp[__m128d]) extends IntrinsicsDef[Int] { val category = List(IntrinsicsCategory.Compare) val intrinsicType = List(IntrinsicsType.FloatingPoint) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Convert the 32-bit integer "b" to a double-precision (64-bit) floating-point * element, store the result in the lower element of "dst", and copy the upper * element from "a" to the upper element of "dst". * a: __m128d, b: int */ case class MM_CVTSI32_SD(a: Exp[__m128d], b: Exp[Int]) extends IntrinsicsDef[__m128d] { val category = List(IntrinsicsCategory.Convert) val intrinsicType = List(IntrinsicsType.FloatingPoint) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Compute the bitwise XOR of packed double-precision (64-bit) floating-point * elements in "a" and "b", and store the results in "dst". * a: __m128d, b: __m128d */ case class MM_XOR_PD(a: Exp[__m128d], b: Exp[__m128d]) extends IntrinsicsDef[__m128d] { val category = List(IntrinsicsCategory.Logical) val intrinsicType = List(IntrinsicsType.FloatingPoint) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Multiply the low unsigned 32-bit integers from each packed 64-bit element in * "a" and "b", and store the unsigned 64-bit results in "dst". * a: __m128i, b: __m128i */ case class MM_MUL_EPU32(a: Exp[__m128i], b: Exp[__m128i]) extends IntrinsicsDef[__m128i] { val category = List(IntrinsicsCategory.Arithmetic) val intrinsicType = List(IntrinsicsType.Integer) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Compute the bitwise AND of packed double-precision (64-bit) floating-point * elements in "a" and "b", and store the results in "dst". * a: __m128d, b: __m128d */ case class MM_AND_PD(a: Exp[__m128d], b: Exp[__m128d]) extends IntrinsicsDef[__m128d] { val category = List(IntrinsicsCategory.Logical) val intrinsicType = List(IntrinsicsType.FloatingPoint) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Average packed unsigned 16-bit integers in "a" and "b", and store the results * in "dst". * a: __m128i, b: __m128i */ case class MM_AVG_EPU16(a: Exp[__m128i], b: Exp[__m128i]) extends IntrinsicsDef[__m128i] { val category = List(IntrinsicsCategory.ProbabilityStatistics) val intrinsicType = List(IntrinsicsType.Integer) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Shuffle double-precision (64-bit) floating-point elements using the control in * "imm8", and store the results in "dst". * a: __m128d, b: __m128d, imm8: int */ case class MM_SHUFFLE_PD(a: Exp[__m128d], b: Exp[__m128d], imm8: Exp[Int]) extends IntrinsicsDef[__m128d] { val category = List(IntrinsicsCategory.Swizzle) val intrinsicType = List(IntrinsicsType.FloatingPoint) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Convert packed double-precision (64-bit) floating-point elements in "a" to * packed 32-bit integers, and store the results in "dst". 
* a: __m128d */ case class MM_CVTPD_EPI32(a: Exp[__m128d]) extends IntrinsicsDef[__m128i] { val category = List(IntrinsicsCategory.Convert) val intrinsicType = List(IntrinsicsType.FloatingPoint, IntrinsicsType.Integer) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Multiply packed signed 16-bit integers in "a" and "b", producing intermediate * signed 32-bit integers. Horizontally add adjacent pairs of intermediate 32-bit * integers, and pack the results in "dst". * a: __m128i, b: __m128i */ case class MM_MADD_EPI16(a: Exp[__m128i], b: Exp[__m128i]) extends IntrinsicsDef[__m128i] { val category = List(IntrinsicsCategory.Arithmetic) val intrinsicType = List(IntrinsicsType.Integer) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Set packed 64-bit integers in "dst" with the supplied values in reverse order. * e1: __m64, e0: __m64 */ case class MM_SETR_EPI64(e1: Exp[__m64], e0: Exp[__m64]) extends IntrinsicsDef[__m128i] { val category = List(IntrinsicsCategory.IntrinsicsSet) val intrinsicType = List(IntrinsicsType.Integer) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Compare packed 16-bit integers in "a" and "b", and store packed minimum values * in "dst". * a: __m128i, b: __m128i */ case class MM_MIN_EPI16(a: Exp[__m128i], b: Exp[__m128i]) extends IntrinsicsDef[__m128i] { val category = List(IntrinsicsCategory.SpecialMathFunctions) val intrinsicType = List(IntrinsicsType.Integer) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Copy the 64-bit integer "a" to the lower element of "dst", and zero the upper * element. * a: __m64 */ case class MM_MOVPI64_EPI64(a: Exp[__m64]) extends IntrinsicsDef[__m128i] { val category = List(IntrinsicsCategory.Move) val intrinsicType = List(IntrinsicsType.Integer) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Copy the lower 32-bit integer in "a" to "dst". * a: __m128i */ case class MM_CVTSI128_SI32(a: Exp[__m128i]) extends IntrinsicsDef[Int] { val category = List(IntrinsicsCategory.Convert) val intrinsicType = List(IntrinsicsType.Integer) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Multiply the packed 16-bit integers in "a" and "b", producing intermediate * 32-bit integers, and store the high 16 bits of the intermediate integers in * "dst". * a: __m128i, b: __m128i */ case class MM_MULHI_EPI16(a: Exp[__m128i], b: Exp[__m128i]) extends IntrinsicsDef[__m128i] { val category = List(IntrinsicsCategory.Arithmetic) val intrinsicType = List(IntrinsicsType.Integer) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Copy the lower 64-bit integer in "a" to "dst". * a: __m128i */ case class MM_CVTSI128_SI64X(a: Exp[__m128i]) extends IntrinsicsDef[Long] { val category = List(IntrinsicsCategory.Convert) val intrinsicType = List(IntrinsicsType.Integer) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Compare packed 16-bit integers in "a" and "b" for less-than, and store the * results in "dst". Note: This intrinsic emits the pcmpgtw instruction with the * order of the operands switched. 
* a: __m128i, b: __m128i */ case class MM_CMPLT_EPI16(a: Exp[__m128i], b: Exp[__m128i]) extends IntrinsicsDef[__m128i] { val category = List(IntrinsicsCategory.Compare) val intrinsicType = List(IntrinsicsType.Integer) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Convert the lower double-precision (64-bit) floating-point element in "b" to a * single-precision (32-bit) floating-point element, store the result in the * lower element of "dst", and copy the upper element from "a" to the upper * element of "dst". * a: __m128, b: __m128d */ case class MM_CVTSD_SS(a: Exp[__m128], b: Exp[__m128d]) extends IntrinsicsDef[__m128] { val category = List(IntrinsicsCategory.Convert) val intrinsicType = List(IntrinsicsType.FloatingPoint) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Subtract packed 16-bit integers in "b" from packed 16-bit integers in "a", and * store the results in "dst". * a: __m128i, b: __m128i */ case class MM_SUB_EPI16(a: Exp[__m128i], b: Exp[__m128i]) extends IntrinsicsDef[__m128i] { val category = List(IntrinsicsCategory.Arithmetic) val intrinsicType = List(IntrinsicsType.Integer) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Copy the lower double-precision (64-bit) floating-point element of "a" to * "dst". * a: __m128d */ case class MM_CVTSD_F64(a: Exp[__m128d]) extends IntrinsicsDef[Double] { val category = List(IntrinsicsCategory.Convert) val intrinsicType = List(IntrinsicsType.FloatingPoint) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Compare the lower double-precision (64-bit) floating-point elements in "a" and * "b" for not-greater-than, store the result in the lower element of "dst", and * copy the upper element from "a" to the upper element of "dst". * a: __m128d, b: __m128d */ case class MM_CMPNGT_SD(a: Exp[__m128d], b: Exp[__m128d]) extends IntrinsicsDef[__m128d] { val category = List(IntrinsicsCategory.Compare) val intrinsicType = List(IntrinsicsType.FloatingPoint) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Add packed 64-bit integers in "a" and "b", and store the results in "dst". * a: __m128i, b: __m128i */ case class MM_ADD_EPI64(a: Exp[__m128i], b: Exp[__m128i]) extends IntrinsicsDef[__m128i] { val category = List(IntrinsicsCategory.Arithmetic) val intrinsicType = List(IntrinsicsType.Integer) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Conditionally store 8-bit integer elements from "a" into memory using "mask" * (elements are not stored when the highest bit is not set in the corresponding * element) and a non-temporal memory hint. "mem_addr" does not need to be * aligned on any particular boundary. * a: __m128i, mask: __m128i, mem_addr: char*, mem_addrOffset: int */ case class MM_MASKMOVEU_SI128[A[_], U:Integral](a: Exp[__m128i], mask: Exp[__m128i], mem_addr: Exp[A[Byte]], mem_addrOffset: Exp[U])(implicit val cont: Container[A]) extends PointerIntrinsicsDef[U, Unit] { val category = List(IntrinsicsCategory.Store) val intrinsicType = List(IntrinsicsType.Integer) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Compare the lower double-precision (64-bit) floating-point elements in "a" and * "b", store the minimum value in the lower element of "dst", and copy the upper * element from "a" to the upper element of "dst". 
* a: __m128d, b: __m128d */ case class MM_MIN_SD(a: Exp[__m128d], b: Exp[__m128d]) extends IntrinsicsDef[__m128d] { val category = List(IntrinsicsCategory.SpecialMathFunctions) val intrinsicType = List(IntrinsicsType.FloatingPoint) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Set packed 8-bit integers in "dst" with the supplied values in reverse order. * e15: char, e14: char, e13: char, e12: char, e11: char, e10: char, e9: char, e8: char, e7: char, e6: char, e5: char, e4: char, e3: char, e2: char, e1: char, e0: char */ case class MM_SETR_EPI8(e15: Exp[Byte], e14: Exp[Byte], e13: Exp[Byte], e12: Exp[Byte], e11: Exp[Byte], e10: Exp[Byte], e9: Exp[Byte], e8: Exp[Byte], e7: Exp[Byte], e6: Exp[Byte], e5: Exp[Byte], e4: Exp[Byte], e3: Exp[Byte], e2: Exp[Byte], e1: Exp[Byte], e0: Exp[Byte]) extends IntrinsicsDef[__m128i] { val category = List(IntrinsicsCategory.IntrinsicsSet) val intrinsicType = List(IntrinsicsType.Integer) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Copy "a" to "dst", and insert the 16-bit integer "i" into "dst" at the * location specified by "imm8". * a: __m128i, i: int, imm8: int */ case class MM_INSERT_EPI16(a: Exp[__m128i], i: Exp[Int], imm8: Exp[Int]) extends IntrinsicsDef[__m128i] { val category = List(IntrinsicsCategory.Swizzle) val intrinsicType = List(IntrinsicsType.Integer) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Unpack and interleave 16-bit integers from the low half of "a" and "b", and * store the results in "dst". * a: __m128i, b: __m128i */ case class MM_UNPACKLO_EPI16(a: Exp[__m128i], b: Exp[__m128i]) extends IntrinsicsDef[__m128i] { val category = List(IntrinsicsCategory.Swizzle) val intrinsicType = List(IntrinsicsType.Integer) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Compare the lower double-precision (64-bit) floating-point elements in "a" and * "b" for greater-than-or-equal, store the result in the lower element of "dst", * and copy the upper element from "a" to the upper element of "dst". * a: __m128d, b: __m128d */ case class MM_CMPGE_SD(a: Exp[__m128d], b: Exp[__m128d]) extends IntrinsicsDef[__m128d] { val category = List(IntrinsicsCategory.Compare) val intrinsicType = List(IntrinsicsType.FloatingPoint) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Load a double-precision (64-bit) floating-point element from memory into the * lower of "dst", and zero the upper element. "mem_addr" does not need to be * aligned on any particular boundary. * mem_addr: double const*, mem_addrOffset: int */ case class MM_LOAD_SD[A[_], U:Integral](mem_addr: Exp[A[Double]], mem_addrOffset: Exp[U])(implicit val cont: Container[A]) extends PointerIntrinsicsDef[U, __m128d] { val category = List(IntrinsicsCategory.Load) val intrinsicType = List(IntrinsicsType.FloatingPoint) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Subtract packed unsigned 8-bit integers in "b" from packed unsigned 8-bit * integers in "a" using saturation, and store the results in "dst". 
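   *
   * Added illustrative note: unsigned saturation clamps at 0 instead of
   * wrapping around, e.g. 10 - 20 yields 0 while 200 - 50 yields 150.
   * Assuming Exp[__m128i] values a and b are in scope:
   *   val d = _mm_subs_epu8(a, b)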
* a: __m128i, b: __m128i */ case class MM_SUBS_EPU8(a: Exp[__m128i], b: Exp[__m128i]) extends IntrinsicsDef[__m128i] { val category = List(IntrinsicsCategory.Arithmetic) val intrinsicType = List(IntrinsicsType.Integer) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Store 64-bit integer from the first element of "a" into memory. * mem_addr: __m128i*, a: __m128i, mem_addrOffset: int */ case class MM_STOREL_EPI64[A[_], U:Integral](mem_addr: Exp[A[__m128i]], a: Exp[__m128i], mem_addrOffset: Exp[U])(implicit val cont: Container[A]) extends PointerIntrinsicsDef[U, Unit] { val category = List(IntrinsicsCategory.Store) val intrinsicType = List(IntrinsicsType.Integer) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Compare packed double-precision (64-bit) floating-point elements in "a" and * "b" for greater-than, and store the results in "dst". * a: __m128d, b: __m128d */ case class MM_CMPGT_PD(a: Exp[__m128d], b: Exp[__m128d]) extends IntrinsicsDef[__m128d] { val category = List(IntrinsicsCategory.Compare) val intrinsicType = List(IntrinsicsType.FloatingPoint) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Shift packed 64-bit integers in "a" left by "imm8" while shifting in zeros, * and store the results in "dst". * a: __m128i, imm8: int */ case class MM_SLLI_EPI64(a: Exp[__m128i], imm8: Exp[Int]) extends IntrinsicsDef[__m128i] { val category = List(IntrinsicsCategory.Shift) val intrinsicType = List(IntrinsicsType.Integer) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Shift "a" left by "imm8" bytes while shifting in zeros, and store the results * in "dst". * a: __m128i, imm8: int */ case class MM_BSLLI_SI128(a: Exp[__m128i], imm8: Exp[Int]) extends IntrinsicsDef[__m128i] { val category = List(IntrinsicsCategory.Shift) val intrinsicType = List(IntrinsicsType.Integer) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Divide the lower double-precision (64-bit) floating-point element in "a" by * the lower double-precision (64-bit) floating-point element in "b", store the * result in the lower element of "dst", and copy the upper element from "a" to * the upper element of "dst". * a: __m128d, b: __m128d */ case class MM_DIV_SD(a: Exp[__m128d], b: Exp[__m128d]) extends IntrinsicsDef[__m128d] { val category = List(IntrinsicsCategory.Arithmetic) val intrinsicType = List(IntrinsicsType.FloatingPoint) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Subtract packed 8-bit integers in "b" from packed 8-bit integers in "a", and * store the results in "dst". * a: __m128i, b: __m128i */ case class MM_SUB_EPI8(a: Exp[__m128i], b: Exp[__m128i]) extends IntrinsicsDef[__m128i] { val category = List(IntrinsicsCategory.Arithmetic) val intrinsicType = List(IntrinsicsType.Integer) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Set packed 16-bit integers in "dst" with the supplied values. 
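   *
   * Added illustrative note: the last argument "e0" becomes the lowest 16-bit
   * lane of the result and the first argument "e7" the highest, mirroring the
   * C intrinsic's argument convention.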
* e7: short, e6: short, e5: short, e4: short, e3: short, e2: short, e1: short, e0: short */ case class MM_SET_EPI16(e7: Exp[Short], e6: Exp[Short], e5: Exp[Short], e4: Exp[Short], e3: Exp[Short], e2: Exp[Short], e1: Exp[Short], e0: Exp[Short]) extends IntrinsicsDef[__m128i] { val category = List(IntrinsicsCategory.IntrinsicsSet) val intrinsicType = List(IntrinsicsType.Integer) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Broadcast 64-bit integer "a" to all elements of "dst". This intrinsic may * generate the "vpbroadcastq". * a: __int64 */ case class MM_SET1_EPI64X(a: Exp[Long]) extends IntrinsicsDef[__m128i] { val category = List(IntrinsicsCategory.IntrinsicsSet) val intrinsicType = List(IntrinsicsType.Integer) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Load 128-bits of integer data from memory into "dst". * "mem_addr" does not * need to be aligned on any particular boundary. * mem_addr: __m128i const*, mem_addrOffset: int */ case class MM_LOADU_SI128[A[_], U:Integral](mem_addr: Exp[A[__m128i]], mem_addrOffset: Exp[U])(implicit val cont: Container[A]) extends PointerIntrinsicsDef[U, __m128i] { val category = List(IntrinsicsCategory.Load) val intrinsicType = List(IntrinsicsType.Integer) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Convert the lower double-precision (64-bit) floating-point element in "a" to a * 32-bit integer with truncation, and store the result in "dst". * a: __m128d */ case class MM_CVTTSD_SI32(a: Exp[__m128d]) extends IntrinsicsDef[Int] { val category = List(IntrinsicsCategory.Convert) val intrinsicType = List(IntrinsicsType.FloatingPoint, IntrinsicsType.Integer) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Shift "a" left by "imm8" bytes while shifting in zeros, and store the results * in "dst". * a: __m128i, imm8: int */ case class MM_SLLI_SI128(a: Exp[__m128i], imm8: Exp[Int]) extends IntrinsicsDef[__m128i] { val category = List(IntrinsicsCategory.Shift) val intrinsicType = List(IntrinsicsType.Integer) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Shift packed 16-bit integers in "a" left by "imm8" while shifting in zeros, * and store the results in "dst". * a: __m128i, imm8: int */ case class MM_SLLI_EPI16(a: Exp[__m128i], imm8: Exp[Int]) extends IntrinsicsDef[__m128i] { val category = List(IntrinsicsCategory.Shift) val intrinsicType = List(IntrinsicsType.Integer) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Compare the lower double-precision (64-bit) floating-point elements in "a" and * "b" to see if either is NaN, store the result in the lower element of "dst", * and copy the upper element from "a" to the upper element of "dst". * a: __m128d, b: __m128d */ case class MM_CMPUNORD_SD(a: Exp[__m128d], b: Exp[__m128d]) extends IntrinsicsDef[__m128d] { val category = List(IntrinsicsCategory.Compare) val intrinsicType = List(IntrinsicsType.FloatingPoint) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Return vector of type __m128d with undefined elements. 
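   *
   * Added illustrative note: this is typically used to obtain a vector without
   * paying for initialization when every lane will be overwritten before it is
   * read, e.g. via the wrapper defined further below:
   *   val tmp = _mm_undefined_pd()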
*/ case class MM_UNDEFINED_PD() extends IntrinsicsDef[__m128d] { val category = List(IntrinsicsCategory.GeneralSupport) val intrinsicType = List(IntrinsicsType.FloatingPoint) val performance = Map.empty[MicroArchType, Performance] val header = "immintrin.h" } /** * Load 128-bits (composed of 2 packed double-precision (64-bit) floating-point * elements) from memory into "dst". * "mem_addr" does not need to be aligned on * any particular boundary. * mem_addr: double const*, mem_addrOffset: int */ case class MM_LOADU_PD[A[_], U:Integral](mem_addr: Exp[A[Double]], mem_addrOffset: Exp[U])(implicit val cont: Container[A]) extends PointerIntrinsicsDef[U, __m128d] { val category = List(IntrinsicsCategory.Load) val intrinsicType = List(IntrinsicsType.FloatingPoint) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Convert the lower single-precision (32-bit) floating-point element in "b" to a * double-precision (64-bit) floating-point element, store the result in the * lower element of "dst", and copy the upper element from "a" to the upper * element of "dst". * a: __m128d, b: __m128 */ case class MM_CVTSS_SD(a: Exp[__m128d], b: Exp[__m128]) extends IntrinsicsDef[__m128d] { val category = List(IntrinsicsCategory.Convert) val intrinsicType = List(IntrinsicsType.FloatingPoint) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Add packed 16-bit integers in "a" and "b" using saturation, and store the * results in "dst". * a: __m128i, b: __m128i */ case class MM_ADDS_EPI16(a: Exp[__m128i], b: Exp[__m128i]) extends IntrinsicsDef[__m128i] { val category = List(IntrinsicsCategory.Arithmetic) val intrinsicType = List(IntrinsicsType.Integer) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Cast vector of type __m128d to type __m128i. This intrinsic is only used for * compilation and does not generate any instructions, thus it has zero latency. * a: __m128d */ case class MM_CASTPD_SI128(a: Exp[__m128d]) extends IntrinsicsDef[__m128i] { val category = List(IntrinsicsCategory.Cast) val intrinsicType = List(IntrinsicsType.FloatingPoint, IntrinsicsType.Integer) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Unpack and interleave 16-bit integers from the high half of "a" and "b", and * store the results in "dst". * a: __m128i, b: __m128i */ case class MM_UNPACKHI_EPI16(a: Exp[__m128i], b: Exp[__m128i]) extends IntrinsicsDef[__m128i] { val category = List(IntrinsicsCategory.Swizzle) val intrinsicType = List(IntrinsicsType.Integer) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Compare the lower double-precision (64-bit) floating-point elements in "a" and * "b" for greater-than, store the result in the lower element of "dst", and copy * the upper element from "a" to the upper element of "dst". * a: __m128d, b: __m128d */ case class MM_CMPGT_SD(a: Exp[__m128d], b: Exp[__m128d]) extends IntrinsicsDef[__m128d] { val category = List(IntrinsicsCategory.Compare) val intrinsicType = List(IntrinsicsType.FloatingPoint) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Convert the lower double-precision (64-bit) floating-point element in "a" to a * 64-bit integer with truncation, and store the result in "dst". 
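   *
   * Added illustrative note: truncation rounds toward zero, so a lower element
   * of 3.7 converts to 3 and -3.7 converts to -3. Assuming an Exp[__m128d]
   * value a is in scope:
   *   val i = _mm_cvttsd_si64x(a)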
* a: __m128d */ case class MM_CVTTSD_SI64X(a: Exp[__m128d]) extends IntrinsicsDef[Long] { val category = List(IntrinsicsCategory.Convert) val intrinsicType = List(IntrinsicsType.FloatingPoint, IntrinsicsType.Integer) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Compare the lower double-precision (64-bit) floating-point elements in "a" and * "b" for less-than, store the result in the lower element of "dst", and copy * the upper element from "a" to the upper element of "dst". * a: __m128d, b: __m128d */ case class MM_CMPLT_SD(a: Exp[__m128d], b: Exp[__m128d]) extends IntrinsicsDef[__m128d] { val category = List(IntrinsicsCategory.Compare) val intrinsicType = List(IntrinsicsType.FloatingPoint) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Copy the lower double-precision (64-bit) floating-point element of "a" to * "dst". * a: __m256d */ case class MM256_CVTSD_F64(a: Exp[__m256d]) extends IntrinsicsDef[Double] { val category = List(IntrinsicsCategory.Convert) val intrinsicType = List(IntrinsicsType.FloatingPoint) val performance = Map.empty[MicroArchType, Performance] val header = "immintrin.h" } /** * Add packed 8-bit integers in "a" and "b", and store the results in "dst". * a: __m128i, b: __m128i */ case class MM_ADD_EPI8(a: Exp[__m128i], b: Exp[__m128i]) extends IntrinsicsDef[__m128i] { val category = List(IntrinsicsCategory.Arithmetic) val intrinsicType = List(IntrinsicsType.Integer) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Compare packed 32-bit integers in "a" and "b" for greater-than, and store the * results in "dst". * a: __m128i, b: __m128i */ case class MM_CMPGT_EPI32(a: Exp[__m128i], b: Exp[__m128i]) extends IntrinsicsDef[__m128i] { val category = List(IntrinsicsCategory.Compare) val intrinsicType = List(IntrinsicsType.Integer) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Unpack and interleave 64-bit integers from the low half of "a" and "b", and * store the results in "dst". * a: __m128i, b: __m128i */ case class MM_UNPACKLO_EPI64(a: Exp[__m128i], b: Exp[__m128i]) extends IntrinsicsDef[__m128i] { val category = List(IntrinsicsCategory.Swizzle) val intrinsicType = List(IntrinsicsType.Integer) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Load a double-precision (64-bit) floating-point element from memory into the * lower element of "dst", and copy the upper element from "a" to "dst". * "mem_addr" does not need to be aligned on any particular boundary. * a: __m128d, mem_addr: double const*, mem_addrOffset: int */ case class MM_LOADL_PD[A[_], U:Integral](a: Exp[__m128d], mem_addr: Exp[A[Double]], mem_addrOffset: Exp[U])(implicit val cont: Container[A]) extends PointerIntrinsicsDef[U, __m128d] { val category = List(IntrinsicsCategory.Load) val intrinsicType = List(IntrinsicsType.FloatingPoint) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Set packed 16-bit integers in "dst" with the supplied values in reverse order. 
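   *
   * Added illustrative note: "reverse order" means the first supplied argument
   * ends up in the lowest 16-bit lane, i.e. the lane layout is the mirror
   * image of _mm_set_epi16 called with the same argument list.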
* e7: short, e6: short, e5: short, e4: short, e3: short, e2: short, e1: short, e0: short */ case class MM_SETR_EPI16(e7: Exp[Short], e6: Exp[Short], e5: Exp[Short], e4: Exp[Short], e3: Exp[Short], e2: Exp[Short], e1: Exp[Short], e0: Exp[Short]) extends IntrinsicsDef[__m128i] { val category = List(IntrinsicsCategory.IntrinsicsSet) val intrinsicType = List(IntrinsicsType.Integer) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Subtract packed 16-bit integers in "b" from packed 16-bit integers in "a" * using saturation, and store the results in "dst". * a: __m128i, b: __m128i */ case class MM_SUBS_EPI16(a: Exp[__m128i], b: Exp[__m128i]) extends IntrinsicsDef[__m128i] { val category = List(IntrinsicsCategory.Arithmetic) val intrinsicType = List(IntrinsicsType.Integer) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Multiply the lower double-precision (64-bit) floating-point element in "a" and * "b", store the result in the lower element of "dst", and copy the upper * element from "a" to the upper element of "dst". * a: __m128d, b: __m128d */ case class MM_MUL_SD(a: Exp[__m128d], b: Exp[__m128d]) extends IntrinsicsDef[__m128d] { val category = List(IntrinsicsCategory.Arithmetic) val intrinsicType = List(IntrinsicsType.FloatingPoint) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Store 128-bits (composed of 2 packed double-precision (64-bit) floating-point * elements) from "a" into memory using a non-temporal memory hint. * "mem_addr" * must be aligned on a 16-byte boundary or a general-protection exception may be * generated. * mem_addr: double*, a: __m128d, mem_addrOffset: int */ case class MM_STREAM_PD[A[_], U:Integral](mem_addr: Exp[A[Double]], a: Exp[__m128d], mem_addrOffset: Exp[U])(implicit val cont: Container[A]) extends PointerIntrinsicsDef[U, Unit] { val category = List(IntrinsicsCategory.Store) val intrinsicType = List(IntrinsicsType.FloatingPoint) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Convert the lower double-precision (64-bit) floating-point element in "a" to a * 64-bit integer with truncation, and store the result in "dst". * a: __m128d */ case class MM_CVTTSD_SI64(a: Exp[__m128d]) extends IntrinsicsDef[Long] { val category = List(IntrinsicsCategory.Convert) val intrinsicType = List(IntrinsicsType.FloatingPoint, IntrinsicsType.Integer) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Create mask from the most significant bit of each 8-bit element in "a", and * store the result in "dst". * a: __m128i */ case class MM_MOVEMASK_EPI8(a: Exp[__m128i]) extends IntrinsicsDef[Int] { val category = List(IntrinsicsCategory.Miscellaneous) val intrinsicType = List(IntrinsicsType.Integer) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Convert packed 32-bit integers in "a" to packed double-precision (64-bit) * floating-point elements, and store the results in "dst". * a: __m64 */ case class MM_CVTPI32_PD(a: Exp[__m64]) extends IntrinsicsDef[__m128d] { val category = List(IntrinsicsCategory.Convert) val intrinsicType = List(IntrinsicsType.FloatingPoint) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Store 128-bits (composed of 2 packed double-precision (64-bit) floating-point * elements) from "a" into memory. 
* "mem_addr" must be aligned on a 16-byte * boundary or a general-protection exception may be generated. * mem_addr: double*, a: __m128d, mem_addrOffset: int */ case class MM_STORE_PD[A[_], U:Integral](mem_addr: Exp[A[Double]], a: Exp[__m128d], mem_addrOffset: Exp[U])(implicit val cont: Container[A]) extends PointerIntrinsicsDef[U, Unit] { val category = List(IntrinsicsCategory.Store) val intrinsicType = List(IntrinsicsType.FloatingPoint) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Unpack and interleave 8-bit integers from the high half of "a" and "b", and * store the results in "dst". * a: __m128i, b: __m128i */ case class MM_UNPACKHI_EPI8(a: Exp[__m128i], b: Exp[__m128i]) extends IntrinsicsDef[__m128i] { val category = List(IntrinsicsCategory.Swizzle) val intrinsicType = List(IntrinsicsType.Integer) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Compute the bitwise XOR of 128 bits (representing integer data) in "a" and * "b", and store the result in "dst". * a: __m128i, b: __m128i */ case class MM_XOR_SI128(a: Exp[__m128i], b: Exp[__m128i]) extends IntrinsicsDef[__m128i] { val category = List(IntrinsicsCategory.Logical) val intrinsicType = List(IntrinsicsType.Integer) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Convert packed 32-bit integers in "a" to packed single-precision (32-bit) * floating-point elements, and store the results in "dst". * a: __m128i */ case class MM_CVTEPI32_PS(a: Exp[__m128i]) extends IntrinsicsDef[__m128] { val category = List(IntrinsicsCategory.Convert) val intrinsicType = List(IntrinsicsType.FloatingPoint, IntrinsicsType.Integer) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Convert the lower double-precision (64-bit) floating-point element in "a" to a * 64-bit integer, and store the result in "dst". * a: __m128d */ case class MM_CVTSD_SI64(a: Exp[__m128d]) extends IntrinsicsDef[Long] { val category = List(IntrinsicsCategory.Convert) val intrinsicType = List(IntrinsicsType.FloatingPoint, IntrinsicsType.Integer) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Perform a serializing operation on all load-from-memory instructions that were * issued prior to this instruction. Guarantees that every load instruction that * precedes, in program order, is globally visible before any load instruction * which follows the fence in program order. */ case class MM_LFENCE() extends IntrinsicsDef[Unit] { val category = List(IntrinsicsCategory.GeneralSupport) val intrinsicType = List() val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Copy the lower 32-bit integer in "a" to "dst". * a: __m256i */ case class MM256_CVTSI256_SI32(a: Exp[__m256i]) extends IntrinsicsDef[Int] { val category = List(IntrinsicsCategory.Convert) val intrinsicType = List(IntrinsicsType.Integer) val performance = Map.empty[MicroArchType, Performance] val header = "immintrin.h" } /** * Convert packed 16-bit integers from "a" and "b" to packed 8-bit integers using * unsigned saturation, and store the results in "dst". 
* a: __m128i, b: __m128i */ case class MM_PACKUS_EPI16(a: Exp[__m128i], b: Exp[__m128i]) extends IntrinsicsDef[__m128i] { val category = List(IntrinsicsCategory.Miscellaneous) val intrinsicType = List(IntrinsicsType.Integer) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Store 128-bits of integer data from "a" into memory. * "mem_addr" does not need * to be aligned on any particular boundary. * mem_addr: __m128i*, a: __m128i, mem_addrOffset: int */ case class MM_STOREU_SI128[A[_], U:Integral](mem_addr: Exp[A[__m128i]], a: Exp[__m128i], mem_addrOffset: Exp[U])(implicit val cont: Container[A]) extends PointerIntrinsicsDef[U, Unit] { val category = List(IntrinsicsCategory.Store) val intrinsicType = List(IntrinsicsType.Integer) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Convert packed 32-bit integers from "a" and "b" to packed 16-bit integers * using signed saturation, and store the results in "dst". * a: __m128i, b: __m128i */ case class MM_PACKS_EPI32(a: Exp[__m128i], b: Exp[__m128i]) extends IntrinsicsDef[__m128i] { val category = List(IntrinsicsCategory.Miscellaneous) val intrinsicType = List(IntrinsicsType.Integer) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Compare packed double-precision (64-bit) floating-point elements in "a" and * "b" to see if either is NaN, and store the results in "dst". * a: __m128d, b: __m128d */ case class MM_CMPUNORD_PD(a: Exp[__m128d], b: Exp[__m128d]) extends IntrinsicsDef[__m128d] { val category = List(IntrinsicsCategory.Compare) val intrinsicType = List(IntrinsicsType.FloatingPoint) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Compare the lower double-precision (64-bit) floating-point element in "a" and * "b" for greater-than-or-equal, and return the boolean result (0 or 1). * a: __m128d, b: __m128d */ case class MM_COMIGE_SD(a: Exp[__m128d], b: Exp[__m128d]) extends IntrinsicsDef[Int] { val category = List(IntrinsicsCategory.Compare) val intrinsicType = List(IntrinsicsType.FloatingPoint) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Store the upper double-precision (64-bit) floating-point element from "a" into * memory. * mem_addr: double*, a: __m128d, mem_addrOffset: int */ case class MM_STOREH_PD[A[_], U:Integral](mem_addr: Exp[A[Double]], a: Exp[__m128d], mem_addrOffset: Exp[U])(implicit val cont: Container[A]) extends PointerIntrinsicsDef[U, Unit] { val category = List(IntrinsicsCategory.Store) val intrinsicType = List(IntrinsicsType.FloatingPoint) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Compare packed double-precision (64-bit) floating-point elements in "a" and * "b" for not-less-than, and store the results in "dst". * a: __m128d, b: __m128d */ case class MM_CMPNLT_PD(a: Exp[__m128d], b: Exp[__m128d]) extends IntrinsicsDef[__m128d] { val category = List(IntrinsicsCategory.Compare) val intrinsicType = List(IntrinsicsType.FloatingPoint) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Extract a 16-bit integer from "a", selected with "imm8", and store the result * in the lower element of "dst". 
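   *
   * Added illustrative note: "imm8" selects one of the eight 16-bit lanes
   * (0 = lowest) and the selected value is zero-extended into the returned
   * Int. Assuming an Exp[__m128i] value a and a lifted Exp[Int] literal are
   * in scope:
   *   val lane3 = _mm_extract_epi16(a, imm3)   // imm3: Exp[Int] holding 3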
* a: __m128i, imm8: int */ case class MM_EXTRACT_EPI16(a: Exp[__m128i], imm8: Exp[Int]) extends IntrinsicsDef[Int] { val category = List(IntrinsicsCategory.Swizzle) val intrinsicType = List(IntrinsicsType.Integer) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Compute the absolute differences of packed unsigned 8-bit integers in "a" and * "b", then horizontally sum each consecutive 8 differences to produce two * unsigned 16-bit integers, and pack these unsigned 16-bit integers in the low * 16 bits of 64-bit elements in "dst". * a: __m128i, b: __m128i */ case class MM_SAD_EPU8(a: Exp[__m128i], b: Exp[__m128i]) extends IntrinsicsDef[__m128i] { val category = List(IntrinsicsCategory.Arithmetic, IntrinsicsCategory.Miscellaneous) val intrinsicType = List(IntrinsicsType.Integer) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Add packed 8-bit integers in "a" and "b" using saturation, and store the * results in "dst". * a: __m128i, b: __m128i */ case class MM_ADDS_EPI8(a: Exp[__m128i], b: Exp[__m128i]) extends IntrinsicsDef[__m128i] { val category = List(IntrinsicsCategory.Arithmetic) val intrinsicType = List(IntrinsicsType.Integer) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Compare packed 8-bit integers in "a" and "b" for equality, and store the * results in "dst". * a: __m128i, b: __m128i */ case class MM_CMPEQ_EPI8(a: Exp[__m128i], b: Exp[__m128i]) extends IntrinsicsDef[__m128i] { val category = List(IntrinsicsCategory.Compare) val intrinsicType = List(IntrinsicsType.Integer) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Compare the lower double-precision (64-bit) floating-point element in "a" and * "b" for less-than, and return the boolean result (0 or 1). This instruction * will not signal an exception for QNaNs. * a: __m128d, b: __m128d */ case class MM_UCOMILT_SD(a: Exp[__m128d], b: Exp[__m128d]) extends IntrinsicsDef[Int] { val category = List(IntrinsicsCategory.Compare) val intrinsicType = List(IntrinsicsType.FloatingPoint) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Store 128-bits of integer data from "a" into memory using a non-temporal * memory hint. * "mem_addr" must be aligned on a 16-byte boundary or a * general-protection exception may be generated. * mem_addr: __m128i*, a: __m128i, mem_addrOffset: int */ case class MM_STREAM_SI128[A[_], U:Integral](mem_addr: Exp[A[__m128i]], a: Exp[__m128i], mem_addrOffset: Exp[U])(implicit val cont: Container[A]) extends PointerIntrinsicsDef[U, Unit] { val category = List(IntrinsicsCategory.Store) val intrinsicType = List(IntrinsicsType.Integer) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Shift packed 64-bit integers in "a" right by "imm8" while shifting in zeros, * and store the results in "dst". * a: __m128i, imm8: int */ case class MM_SRLI_EPI64(a: Exp[__m128i], imm8: Exp[Int]) extends IntrinsicsDef[__m128i] { val category = List(IntrinsicsCategory.Shift) val intrinsicType = List(IntrinsicsType.Integer) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Convert packed 32-bit integers in "a" to packed double-precision (64-bit) * floating-point elements, and store the results in "dst". 
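   *
   * Added illustrative note: only the two lowest 32-bit integers of "a" are
   * converted, e.g. integer lanes 7 and -2 become the doubles 7.0 and -2.0.
   * Assuming an Exp[__m128i] value a is in scope:
   *   val d = _mm_cvtepi32_pd(a)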
* a: __m128i */ case class MM_CVTEPI32_PD(a: Exp[__m128i]) extends IntrinsicsDef[__m128d] { val category = List(IntrinsicsCategory.Convert) val intrinsicType = List(IntrinsicsType.FloatingPoint, IntrinsicsType.Integer) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Add packed unsigned 8-bit integers in "a" and "b" using saturation, and store * the results in "dst". * a: __m128i, b: __m128i */ case class MM_ADDS_EPU8(a: Exp[__m128i], b: Exp[__m128i]) extends IntrinsicsDef[__m128i] { val category = List(IntrinsicsCategory.Arithmetic) val intrinsicType = List(IntrinsicsType.Integer) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Multiply the low unsigned 32-bit integers from "a" and "b", and store the * unsigned 64-bit result in "dst". * a: __m64, b: __m64 */ case class MM_MUL_SU32(a: Exp[__m64], b: Exp[__m64]) extends IntrinsicsDef[__m64] { val category = List(IntrinsicsCategory.Arithmetic) val intrinsicType = List(IntrinsicsType.Integer) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Set packed 32-bit integers in "dst" with the supplied values in reverse order. * e3: int, e2: int, e1: int, e0: int */ case class MM_SETR_EPI32(e3: Exp[Int], e2: Exp[Int], e1: Exp[Int], e0: Exp[Int]) extends IntrinsicsDef[__m128i] { val category = List(IntrinsicsCategory.IntrinsicsSet) val intrinsicType = List(IntrinsicsType.Integer) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } /** * Compare packed double-precision (64-bit) floating-point elements in "a" and * "b" for less-than-or-equal, and store the results in "dst". * a: __m128d, b: __m128d */ case class MM_CMPLE_PD(a: Exp[__m128d], b: Exp[__m128d]) extends IntrinsicsDef[__m128d] { val category = List(IntrinsicsCategory.Compare) val intrinsicType = List(IntrinsicsType.FloatingPoint) val performance = Map.empty[MicroArchType, Performance] val header = "emmintrin.h" } def _mm_stream_si64[A[_], U:Integral](mem_addr: Exp[A[Long]], a: Exp[Long], mem_addrOffset: Exp[U])(implicit cont: Container[A]): Exp[Unit] = { cont.write(mem_addr)(MM_STREAM_SI64(mem_addr, a, mem_addrOffset)(implicitly[Integral[U]], cont)) } def _mm_unpacklo_epi8(a: Exp[__m128i], b: Exp[__m128i]): Exp[__m128i] = { MM_UNPACKLO_EPI8(a, b) } def _mm_min_epu8(a: Exp[__m128i], b: Exp[__m128i]): Exp[__m128i] = { MM_MIN_EPU8(a, b) } def _mm_min_pd(a: Exp[__m128d], b: Exp[__m128d]): Exp[__m128d] = { MM_MIN_PD(a, b) } def _mm_move_sd(a: Exp[__m128d], b: Exp[__m128d]): Exp[__m128d] = { MM_MOVE_SD(a, b) } def _mm_ucomige_sd(a: Exp[__m128d], b: Exp[__m128d]): Exp[Int] = { MM_UCOMIGE_SD(a, b) } def _mm_set_sd(a: Exp[Double]): Exp[__m128d] = { MM_SET_SD(a) } def _mm_ucomineq_sd(a: Exp[__m128d], b: Exp[__m128d]): Exp[Int] = { MM_UCOMINEQ_SD(a, b) } def _mm_bsrli_si128(a: Exp[__m128i], imm8: Exp[Int]): Exp[__m128i] = { MM_BSRLI_SI128(a, imm8) } def _mm_cvtpd_ps(a: Exp[__m128d]): Exp[__m128] = { MM_CVTPD_PS(a) } def _mm_cvtsd_si32(a: Exp[__m128d]): Exp[Int] = { MM_CVTSD_SI32(a) } def _mm_cvtsi64x_si128(a: Exp[Long]): Exp[__m128i] = { MM_CVTSI64X_SI128(a) } def _mm_sra_epi16(a: Exp[__m128i], count: Exp[__m128i]): Exp[__m128i] = { MM_SRA_EPI16(a, count) } def _mm_cmplt_epi8(a: Exp[__m128i], b: Exp[__m128i]): Exp[__m128i] = { MM_CMPLT_EPI8(a, b) } def _mm_store_si128[A[_], U:Integral](mem_addr: Exp[A[__m128i]], a: Exp[__m128i], mem_addrOffset: Exp[U])(implicit cont: Container[A]): Exp[Unit] = { cont.write(mem_addr)(MM_STORE_SI128(mem_addr, a, 
mem_addrOffset)(implicitly[Integral[U]], cont)) } def _mm_clflush[A[_], T:Typ, U:Integral](p: Exp[A[T]], pOffset: Exp[U])(implicit cont: Container[A]): Exp[Unit] = { cont.write(p)(MM_CLFLUSH(p, pOffset)(typ[T], implicitly[Integral[U]], cont)) } def _mm_sra_epi32(a: Exp[__m128i], count: Exp[__m128i]): Exp[__m128i] = { MM_SRA_EPI32(a, count) } def _mm_srai_epi32(a: Exp[__m128i], imm8: Exp[Int]): Exp[__m128i] = { MM_SRAI_EPI32(a, imm8) } def _mm_srli_si128(a: Exp[__m128i], imm8: Exp[Int]): Exp[__m128i] = { MM_SRLI_SI128(a, imm8) } def _mm_set_pd1(a: Exp[Double]): Exp[__m128d] = { MM_SET_PD1(a) } def _mm_comineq_sd(a: Exp[__m128d], b: Exp[__m128d]): Exp[Int] = { MM_COMINEQ_SD(a, b) } def _mm_cmpnlt_sd(a: Exp[__m128d], b: Exp[__m128d]): Exp[__m128d] = { MM_CMPNLT_SD(a, b) } def _mm512_cvtsi512_si32(a: Exp[__m512i]): Exp[Int] = { MM512_CVTSI512_SI32(a) } def _mm_cvttps_epi32(a: Exp[__m128]): Exp[__m128i] = { MM_CVTTPS_EPI32(a) } def _mm_set1_epi64(a: Exp[__m64]): Exp[__m128i] = { MM_SET1_EPI64(a) } def _mm_cvtsd_si64x(a: Exp[__m128d]): Exp[Long] = { MM_CVTSD_SI64X(a) } def _mm_sll_epi16(a: Exp[__m128i], count: Exp[__m128i]): Exp[__m128i] = { MM_SLL_EPI16(a, count) } def _mm_or_pd(a: Exp[__m128d], b: Exp[__m128d]): Exp[__m128d] = { MM_OR_PD(a, b) } def _mm_load_si128[A[_], U:Integral](mem_addr: Exp[A[__m128i]], mem_addrOffset: Exp[U])(implicit cont: Container[A]): Exp[__m128i] = { cont.read(mem_addr)(MM_LOAD_SI128(mem_addr, mem_addrOffset)(implicitly[Integral[U]], cont)) } def _mm_add_epi32(a: Exp[__m128i], b: Exp[__m128i]): Exp[__m128i] = { MM_ADD_EPI32(a, b) } def _mm_sub_pd(a: Exp[__m128d], b: Exp[__m128d]): Exp[__m128d] = { MM_SUB_PD(a, b) } def _mm_unpackhi_pd(a: Exp[__m128d], b: Exp[__m128d]): Exp[__m128d] = { MM_UNPACKHI_PD(a, b) } def _mm_mullo_epi16(a: Exp[__m128i], b: Exp[__m128i]): Exp[__m128i] = { MM_MULLO_EPI16(a, b) } def _mm_set_pd(e1: Exp[Double], e0: Exp[Double]): Exp[__m128d] = { MM_SET_PD(e1, e0) } def _mm_cmpeq_pd(a: Exp[__m128d], b: Exp[__m128d]): Exp[__m128d] = { MM_CMPEQ_PD(a, b) } def _mm_cmpgt_epi8(a: Exp[__m128i], b: Exp[__m128i]): Exp[__m128i] = { MM_CMPGT_EPI8(a, b) } def _mm_and_si128(a: Exp[__m128i], b: Exp[__m128i]): Exp[__m128i] = { MM_AND_SI128(a, b) } def _mm_cmpeq_sd(a: Exp[__m128d], b: Exp[__m128d]): Exp[__m128d] = { MM_CMPEQ_SD(a, b) } def _mm_loadl_epi64[A[_], U:Integral](mem_addr: Exp[A[__m128i]], mem_addrOffset: Exp[U])(implicit cont: Container[A]): Exp[__m128i] = { cont.read(mem_addr)(MM_LOADL_EPI64(mem_addr, mem_addrOffset)(implicitly[Integral[U]], cont)) } def _mm_srai_epi16(a: Exp[__m128i], imm8: Exp[Int]): Exp[__m128i] = { MM_SRAI_EPI16(a, imm8) } def _mm_packs_epi16(a: Exp[__m128i], b: Exp[__m128i]): Exp[__m128i] = { MM_PACKS_EPI16(a, b) } def _mm_cmplt_epi32(a: Exp[__m128i], b: Exp[__m128i]): Exp[__m128i] = { MM_CMPLT_EPI32(a, b) } def _mm_or_si128(a: Exp[__m128i], b: Exp[__m128i]): Exp[__m128i] = { MM_OR_SI128(a, b) } def _mm_sqrt_pd(a: Exp[__m128d]): Exp[__m128d] = { MM_SQRT_PD(a) } def _mm_cmpneq_sd(a: Exp[__m128d], b: Exp[__m128d]): Exp[__m128d] = { MM_CMPNEQ_SD(a, b) } def _mm_set1_pd(a: Exp[Double]): Exp[__m128d] = { MM_SET1_PD(a) } def _mm_sub_epi64(a: Exp[__m128i], b: Exp[__m128i]): Exp[__m128i] = { MM_SUB_EPI64(a, b) } def _mm_unpacklo_epi32(a: Exp[__m128i], b: Exp[__m128i]): Exp[__m128i] = { MM_UNPACKLO_EPI32(a, b) } def _mm_setr_pd(e1: Exp[Double], e0: Exp[Double]): Exp[__m128d] = { MM_SETR_PD(e1, e0) } def _mm_storel_pd[A[_], U:Integral](mem_addr: Exp[A[Double]], a: Exp[__m128d], mem_addrOffset: Exp[U])(implicit cont: Container[A]): 
Exp[Unit] = { cont.write(mem_addr)(MM_STOREL_PD(mem_addr, a, mem_addrOffset)(implicitly[Integral[U]], cont)) } def _mm_castps_si128(a: Exp[__m128]): Exp[__m128i] = { MM_CASTPS_SI128(a) } def _mm_store_pd1[A[_], U:Integral](mem_addr: Exp[A[Double]], a: Exp[__m128d], mem_addrOffset: Exp[U])(implicit cont: Container[A]): Exp[Unit] = { cont.write(mem_addr)(MM_STORE_PD1(mem_addr, a, mem_addrOffset)(implicitly[Integral[U]], cont)) } def _mm_andnot_si128(a: Exp[__m128i], b: Exp[__m128i]): Exp[__m128i] = { MM_ANDNOT_SI128(a, b) } def _mm_set1_epi8(a: Exp[Byte]): Exp[__m128i] = { MM_SET1_EPI8(a) } def _mm_storer_pd[A[_], U:Integral](mem_addr: Exp[A[Double]], a: Exp[__m128d], mem_addrOffset: Exp[U])(implicit cont: Container[A]): Exp[Unit] = { cont.write(mem_addr)(MM_STORER_PD(mem_addr, a, mem_addrOffset)(implicitly[Integral[U]], cont)) } def _mm_sub_sd(a: Exp[__m128d], b: Exp[__m128d]): Exp[__m128d] = { MM_SUB_SD(a, b) } def _mm_mul_pd(a: Exp[__m128d], b: Exp[__m128d]): Exp[__m128d] = { MM_MUL_PD(a, b) } def _mm_cmpge_pd(a: Exp[__m128d], b: Exp[__m128d]): Exp[__m128d] = { MM_CMPGE_PD(a, b) } def _mm_srl_epi32(a: Exp[__m128i], count: Exp[__m128i]): Exp[__m128i] = { MM_SRL_EPI32(a, count) } def _mm_comilt_sd(a: Exp[__m128d], b: Exp[__m128d]): Exp[Int] = { MM_COMILT_SD(a, b) } def _mm_unpacklo_pd(a: Exp[__m128d], b: Exp[__m128d]): Exp[__m128d] = { MM_UNPACKLO_PD(a, b) } def _mm_shuffle_epi32(a: Exp[__m128i], imm8: Exp[Int]): Exp[__m128i] = { MM_SHUFFLE_EPI32(a, imm8) } def _mm_srl_epi64(a: Exp[__m128i], count: Exp[__m128i]): Exp[__m128i] = { MM_SRL_EPI64(a, count) } def _mm_srl_epi16(a: Exp[__m128i], count: Exp[__m128i]): Exp[__m128i] = { MM_SRL_EPI16(a, count) } def _mm_cvtpd_pi32(a: Exp[__m128d]): Exp[__m64] = { MM_CVTPD_PI32(a) } def _mm_comigt_sd(a: Exp[__m128d], b: Exp[__m128d]): Exp[Int] = { MM_COMIGT_SD(a, b) } def _mm_shufflelo_epi16(a: Exp[__m128i], imm8: Exp[Int]): Exp[__m128i] = { MM_SHUFFLELO_EPI16(a, imm8) } def _mm_cmpeq_epi16(a: Exp[__m128i], b: Exp[__m128i]): Exp[__m128i] = { MM_CMPEQ_EPI16(a, b) } def _mm_move_epi64(a: Exp[__m128i]): Exp[__m128i] = { MM_MOVE_EPI64(a) } def _mm_load_pd[A[_], U:Integral](mem_addr: Exp[A[Double]], mem_addrOffset: Exp[U])(implicit cont: Container[A]): Exp[__m128d] = { cont.read(mem_addr)(MM_LOAD_PD(mem_addr, mem_addrOffset)(implicitly[Integral[U]], cont)) } def _mm_adds_epu16(a: Exp[__m128i], b: Exp[__m128i]): Exp[__m128i] = { MM_ADDS_EPU16(a, b) } def _mm_unpackhi_epi32(a: Exp[__m128i], b: Exp[__m128i]): Exp[__m128i] = { MM_UNPACKHI_EPI32(a, b) } def _mm_movepi64_pi64(a: Exp[__m128i]): Exp[__m64] = { MM_MOVEPI64_PI64(a) } def _mm_cvttpd_epi32(a: Exp[__m128d]): Exp[__m128i] = { MM_CVTTPD_EPI32(a) } def _mm_unpackhi_epi64(a: Exp[__m128i], b: Exp[__m128i]): Exp[__m128i] = { MM_UNPACKHI_EPI64(a, b) } def _mm_cmpeq_epi32(a: Exp[__m128i], b: Exp[__m128i]): Exp[__m128i] = { MM_CMPEQ_EPI32(a, b) } def _mm_cvtps_epi32(a: Exp[__m128]): Exp[__m128i] = { MM_CVTPS_EPI32(a) } def _mm_cvtsi64_sd(a: Exp[__m128d], b: Exp[Long]): Exp[__m128d] = { MM_CVTSI64_SD(a, b) } def _mm_slli_epi32(a: Exp[__m128i], imm8: Exp[Int]): Exp[__m128i] = { MM_SLLI_EPI32(a, imm8) } def _mm_cvtsi128_si64(a: Exp[__m128i]): Exp[Long] = { MM_CVTSI128_SI64(a) } def _mm_pause(): Exp[Unit] = { reflectEffect(MM_PAUSE()) } def _mm512_cvtsd_f64(a: Exp[__m512d]): Exp[Double] = { MM512_CVTSD_F64(a) } def _mm_castps_pd(a: Exp[__m128]): Exp[__m128d] = { MM_CASTPS_PD(a) } def _mm_castsi128_pd(a: Exp[__m128i]): Exp[__m128d] = { MM_CASTSI128_PD(a) } def _mm_cmplt_pd(a: Exp[__m128d], b: Exp[__m128d]): 
Exp[__m128d] = { MM_CMPLT_PD(a, b) } def _mm_srli_epi32(a: Exp[__m128i], imm8: Exp[Int]): Exp[__m128i] = { MM_SRLI_EPI32(a, imm8) } def _mm_ucomile_sd(a: Exp[__m128d], b: Exp[__m128d]): Exp[Int] = { MM_UCOMILE_SD(a, b) } def _mm_cvtsi32_sd(a: Exp[__m128d], b: Exp[Int]): Exp[__m128d] = { MM_CVTSI32_SD(a, b) } def _mm_xor_pd(a: Exp[__m128d], b: Exp[__m128d]): Exp[__m128d] = { MM_XOR_PD(a, b) } def _mm_mul_epu32(a: Exp[__m128i], b: Exp[__m128i]): Exp[__m128i] = { MM_MUL_EPU32(a, b) } def _mm_and_pd(a: Exp[__m128d], b: Exp[__m128d]): Exp[__m128d] = { MM_AND_PD(a, b) } def _mm_avg_epu16(a: Exp[__m128i], b: Exp[__m128i]): Exp[__m128i] = { MM_AVG_EPU16(a, b) } def _mm_shuffle_pd(a: Exp[__m128d], b: Exp[__m128d], imm8: Exp[Int]): Exp[__m128d] = { MM_SHUFFLE_PD(a, b, imm8) } def _mm_cvtpd_epi32(a: Exp[__m128d]): Exp[__m128i] = { MM_CVTPD_EPI32(a) } def _mm_madd_epi16(a: Exp[__m128i], b: Exp[__m128i]): Exp[__m128i] = { MM_MADD_EPI16(a, b) } def _mm_setr_epi64(e1: Exp[__m64], e0: Exp[__m64]): Exp[__m128i] = { MM_SETR_EPI64(e1, e0) } def _mm_min_epi16(a: Exp[__m128i], b: Exp[__m128i]): Exp[__m128i] = { MM_MIN_EPI16(a, b) } def _mm_movpi64_epi64(a: Exp[__m64]): Exp[__m128i] = { MM_MOVPI64_EPI64(a) } def _mm_cvtsi128_si32(a: Exp[__m128i]): Exp[Int] = { MM_CVTSI128_SI32(a) } def _mm_mulhi_epi16(a: Exp[__m128i], b: Exp[__m128i]): Exp[__m128i] = { MM_MULHI_EPI16(a, b) } def _mm_cvtsi128_si64x(a: Exp[__m128i]): Exp[Long] = { MM_CVTSI128_SI64X(a) } def _mm_cmplt_epi16(a: Exp[__m128i], b: Exp[__m128i]): Exp[__m128i] = { MM_CMPLT_EPI16(a, b) } def _mm_cvtsd_ss(a: Exp[__m128], b: Exp[__m128d]): Exp[__m128] = { MM_CVTSD_SS(a, b) } def _mm_sub_epi16(a: Exp[__m128i], b: Exp[__m128i]): Exp[__m128i] = { MM_SUB_EPI16(a, b) } def _mm_cvtsd_f64(a: Exp[__m128d]): Exp[Double] = { MM_CVTSD_F64(a) } def _mm_cmpngt_sd(a: Exp[__m128d], b: Exp[__m128d]): Exp[__m128d] = { MM_CMPNGT_SD(a, b) } def _mm_add_epi64(a: Exp[__m128i], b: Exp[__m128i]): Exp[__m128i] = { MM_ADD_EPI64(a, b) } def _mm_maskmoveu_si128[A[_], U:Integral](a: Exp[__m128i], mask: Exp[__m128i], mem_addr: Exp[A[Byte]], mem_addrOffset: Exp[U])(implicit cont: Container[A]): Exp[Unit] = { cont.write(mem_addr)(MM_MASKMOVEU_SI128(a, mask, mem_addr, mem_addrOffset)(implicitly[Integral[U]], cont)) } def _mm_min_sd(a: Exp[__m128d], b: Exp[__m128d]): Exp[__m128d] = { MM_MIN_SD(a, b) } def _mm_setr_epi8(e15: Exp[Byte], e14: Exp[Byte], e13: Exp[Byte], e12: Exp[Byte], e11: Exp[Byte], e10: Exp[Byte], e9: Exp[Byte], e8: Exp[Byte], e7: Exp[Byte], e6: Exp[Byte], e5: Exp[Byte], e4: Exp[Byte], e3: Exp[Byte], e2: Exp[Byte], e1: Exp[Byte], e0: Exp[Byte]): Exp[__m128i] = { MM_SETR_EPI8(e15, e14, e13, e12, e11, e10, e9, e8, e7, e6, e5, e4, e3, e2, e1, e0) } def _mm_insert_epi16(a: Exp[__m128i], i: Exp[Int], imm8: Exp[Int]): Exp[__m128i] = { MM_INSERT_EPI16(a, i, imm8) } def _mm_unpacklo_epi16(a: Exp[__m128i], b: Exp[__m128i]): Exp[__m128i] = { MM_UNPACKLO_EPI16(a, b) } def _mm_cmpge_sd(a: Exp[__m128d], b: Exp[__m128d]): Exp[__m128d] = { MM_CMPGE_SD(a, b) } def _mm_load_sd[A[_], U:Integral](mem_addr: Exp[A[Double]], mem_addrOffset: Exp[U])(implicit cont: Container[A]): Exp[__m128d] = { cont.read(mem_addr)(MM_LOAD_SD(mem_addr, mem_addrOffset)(implicitly[Integral[U]], cont)) } def _mm_subs_epu8(a: Exp[__m128i], b: Exp[__m128i]): Exp[__m128i] = { MM_SUBS_EPU8(a, b) } def _mm_storel_epi64[A[_], U:Integral](mem_addr: Exp[A[__m128i]], a: Exp[__m128i], mem_addrOffset: Exp[U])(implicit cont: Container[A]): Exp[Unit] = { cont.write(mem_addr)(MM_STOREL_EPI64(mem_addr, a, 
mem_addrOffset)(implicitly[Integral[U]], cont)) } def _mm_cmpgt_pd(a: Exp[__m128d], b: Exp[__m128d]): Exp[__m128d] = { MM_CMPGT_PD(a, b) } def _mm_slli_epi64(a: Exp[__m128i], imm8: Exp[Int]): Exp[__m128i] = { MM_SLLI_EPI64(a, imm8) } def _mm_bslli_si128(a: Exp[__m128i], imm8: Exp[Int]): Exp[__m128i] = { MM_BSLLI_SI128(a, imm8) } def _mm_div_sd(a: Exp[__m128d], b: Exp[__m128d]): Exp[__m128d] = { MM_DIV_SD(a, b) } def _mm_sub_epi8(a: Exp[__m128i], b: Exp[__m128i]): Exp[__m128i] = { MM_SUB_EPI8(a, b) } def _mm_set_epi16(e7: Exp[Short], e6: Exp[Short], e5: Exp[Short], e4: Exp[Short], e3: Exp[Short], e2: Exp[Short], e1: Exp[Short], e0: Exp[Short]): Exp[__m128i] = { MM_SET_EPI16(e7, e6, e5, e4, e3, e2, e1, e0) } def _mm_set1_epi64x(a: Exp[Long]): Exp[__m128i] = { MM_SET1_EPI64X(a) } def _mm_loadu_si128[A[_], U:Integral](mem_addr: Exp[A[__m128i]], mem_addrOffset: Exp[U])(implicit cont: Container[A]): Exp[__m128i] = { cont.read(mem_addr)(MM_LOADU_SI128(mem_addr, mem_addrOffset)(implicitly[Integral[U]], cont)) } def _mm_cvttsd_si32(a: Exp[__m128d]): Exp[Int] = { MM_CVTTSD_SI32(a) } def _mm_slli_si128(a: Exp[__m128i], imm8: Exp[Int]): Exp[__m128i] = { MM_SLLI_SI128(a, imm8) } def _mm_slli_epi16(a: Exp[__m128i], imm8: Exp[Int]): Exp[__m128i] = { MM_SLLI_EPI16(a, imm8) } def _mm_cmpunord_sd(a: Exp[__m128d], b: Exp[__m128d]): Exp[__m128d] = { MM_CMPUNORD_SD(a, b) } def _mm_undefined_pd(): Exp[__m128d] = { MM_UNDEFINED_PD() } def _mm_loadu_pd[A[_], U:Integral](mem_addr: Exp[A[Double]], mem_addrOffset: Exp[U])(implicit cont: Container[A]): Exp[__m128d] = { cont.read(mem_addr)(MM_LOADU_PD(mem_addr, mem_addrOffset)(implicitly[Integral[U]], cont)) } def _mm_cvtss_sd(a: Exp[__m128d], b: Exp[__m128]): Exp[__m128d] = { MM_CVTSS_SD(a, b) } def _mm_adds_epi16(a: Exp[__m128i], b: Exp[__m128i]): Exp[__m128i] = { MM_ADDS_EPI16(a, b) } def _mm_castpd_si128(a: Exp[__m128d]): Exp[__m128i] = { MM_CASTPD_SI128(a) } def _mm_unpackhi_epi16(a: Exp[__m128i], b: Exp[__m128i]): Exp[__m128i] = { MM_UNPACKHI_EPI16(a, b) } def _mm_cmpgt_sd(a: Exp[__m128d], b: Exp[__m128d]): Exp[__m128d] = { MM_CMPGT_SD(a, b) } def _mm_cvttsd_si64x(a: Exp[__m128d]): Exp[Long] = { MM_CVTTSD_SI64X(a) } def _mm_cmplt_sd(a: Exp[__m128d], b: Exp[__m128d]): Exp[__m128d] = { MM_CMPLT_SD(a, b) } def _mm256_cvtsd_f64(a: Exp[__m256d]): Exp[Double] = { MM256_CVTSD_F64(a) } def _mm_add_epi8(a: Exp[__m128i], b: Exp[__m128i]): Exp[__m128i] = { MM_ADD_EPI8(a, b) } def _mm_cmpgt_epi32(a: Exp[__m128i], b: Exp[__m128i]): Exp[__m128i] = { MM_CMPGT_EPI32(a, b) } def _mm_unpacklo_epi64(a: Exp[__m128i], b: Exp[__m128i]): Exp[__m128i] = { MM_UNPACKLO_EPI64(a, b) } def _mm_loadl_pd[A[_], U:Integral](a: Exp[__m128d], mem_addr: Exp[A[Double]], mem_addrOffset: Exp[U])(implicit cont: Container[A]): Exp[__m128d] = { cont.read(mem_addr)(MM_LOADL_PD(a, mem_addr, mem_addrOffset)(implicitly[Integral[U]], cont)) } def _mm_setr_epi16(e7: Exp[Short], e6: Exp[Short], e5: Exp[Short], e4: Exp[Short], e3: Exp[Short], e2: Exp[Short], e1: Exp[Short], e0: Exp[Short]): Exp[__m128i] = { MM_SETR_EPI16(e7, e6, e5, e4, e3, e2, e1, e0) } def _mm_subs_epi16(a: Exp[__m128i], b: Exp[__m128i]): Exp[__m128i] = { MM_SUBS_EPI16(a, b) } def _mm_mul_sd(a: Exp[__m128d], b: Exp[__m128d]): Exp[__m128d] = { MM_MUL_SD(a, b) } def _mm_stream_pd[A[_], U:Integral](mem_addr: Exp[A[Double]], a: Exp[__m128d], mem_addrOffset: Exp[U])(implicit cont: Container[A]): Exp[Unit] = { cont.write(mem_addr)(MM_STREAM_PD(mem_addr, a, mem_addrOffset)(implicitly[Integral[U]], cont)) } def _mm_cvttsd_si64(a: Exp[__m128d]): 
Exp[Long] = { MM_CVTTSD_SI64(a) } def _mm_movemask_epi8(a: Exp[__m128i]): Exp[Int] = { MM_MOVEMASK_EPI8(a) } def _mm_cvtpi32_pd(a: Exp[__m64]): Exp[__m128d] = { MM_CVTPI32_PD(a) } def _mm_store_pd[A[_], U:Integral](mem_addr: Exp[A[Double]], a: Exp[__m128d], mem_addrOffset: Exp[U])(implicit cont: Container[A]): Exp[Unit] = { cont.write(mem_addr)(MM_STORE_PD(mem_addr, a, mem_addrOffset)(implicitly[Integral[U]], cont)) } def _mm_unpackhi_epi8(a: Exp[__m128i], b: Exp[__m128i]): Exp[__m128i] = { MM_UNPACKHI_EPI8(a, b) } def _mm_xor_si128(a: Exp[__m128i], b: Exp[__m128i]): Exp[__m128i] = { MM_XOR_SI128(a, b) } def _mm_cvtepi32_ps(a: Exp[__m128i]): Exp[__m128] = { MM_CVTEPI32_PS(a) } def _mm_cvtsd_si64(a: Exp[__m128d]): Exp[Long] = { MM_CVTSD_SI64(a) } def _mm_lfence(): Exp[Unit] = { reflectEffect(MM_LFENCE()) } def _mm256_cvtsi256_si32(a: Exp[__m256i]): Exp[Int] = { MM256_CVTSI256_SI32(a) } def _mm_packus_epi16(a: Exp[__m128i], b: Exp[__m128i]): Exp[__m128i] = { MM_PACKUS_EPI16(a, b) } def _mm_storeu_si128[A[_], U:Integral](mem_addr: Exp[A[__m128i]], a: Exp[__m128i], mem_addrOffset: Exp[U])(implicit cont: Container[A]): Exp[Unit] = { cont.write(mem_addr)(MM_STOREU_SI128(mem_addr, a, mem_addrOffset)(implicitly[Integral[U]], cont)) } def _mm_packs_epi32(a: Exp[__m128i], b: Exp[__m128i]): Exp[__m128i] = { MM_PACKS_EPI32(a, b) } def _mm_cmpunord_pd(a: Exp[__m128d], b: Exp[__m128d]): Exp[__m128d] = { MM_CMPUNORD_PD(a, b) } def _mm_comige_sd(a: Exp[__m128d], b: Exp[__m128d]): Exp[Int] = { MM_COMIGE_SD(a, b) } def _mm_storeh_pd[A[_], U:Integral](mem_addr: Exp[A[Double]], a: Exp[__m128d], mem_addrOffset: Exp[U])(implicit cont: Container[A]): Exp[Unit] = { cont.write(mem_addr)(MM_STOREH_PD(mem_addr, a, mem_addrOffset)(implicitly[Integral[U]], cont)) } def _mm_cmpnlt_pd(a: Exp[__m128d], b: Exp[__m128d]): Exp[__m128d] = { MM_CMPNLT_PD(a, b) } def _mm_extract_epi16(a: Exp[__m128i], imm8: Exp[Int]): Exp[Int] = { MM_EXTRACT_EPI16(a, imm8) } def _mm_sad_epu8(a: Exp[__m128i], b: Exp[__m128i]): Exp[__m128i] = { MM_SAD_EPU8(a, b) } def _mm_adds_epi8(a: Exp[__m128i], b: Exp[__m128i]): Exp[__m128i] = { MM_ADDS_EPI8(a, b) } def _mm_cmpeq_epi8(a: Exp[__m128i], b: Exp[__m128i]): Exp[__m128i] = { MM_CMPEQ_EPI8(a, b) } def _mm_ucomilt_sd(a: Exp[__m128d], b: Exp[__m128d]): Exp[Int] = { MM_UCOMILT_SD(a, b) } def _mm_stream_si128[A[_], U:Integral](mem_addr: Exp[A[__m128i]], a: Exp[__m128i], mem_addrOffset: Exp[U])(implicit cont: Container[A]): Exp[Unit] = { cont.write(mem_addr)(MM_STREAM_SI128(mem_addr, a, mem_addrOffset)(implicitly[Integral[U]], cont)) } def _mm_srli_epi64(a: Exp[__m128i], imm8: Exp[Int]): Exp[__m128i] = { MM_SRLI_EPI64(a, imm8) } def _mm_cvtepi32_pd(a: Exp[__m128i]): Exp[__m128d] = { MM_CVTEPI32_PD(a) } def _mm_adds_epu8(a: Exp[__m128i], b: Exp[__m128i]): Exp[__m128i] = { MM_ADDS_EPU8(a, b) } def _mm_mul_su32(a: Exp[__m64], b: Exp[__m64]): Exp[__m64] = { MM_MUL_SU32(a, b) } def _mm_setr_epi32(e3: Exp[Int], e2: Exp[Int], e1: Exp[Int], e0: Exp[Int]): Exp[__m128i] = { MM_SETR_EPI32(e3, e2, e1, e0) } def _mm_cmple_pd(a: Exp[__m128d], b: Exp[__m128d]): Exp[__m128d] = { MM_CMPLE_PD(a, b) } override def mirror[A:Typ](e: Def[A], f: Transformer)(implicit pos: SourceContext): Exp[A] = (e match { case iDef@MM_STREAM_SI64 (mem_addr, a, mem_addrOffset) => _mm_stream_si64(iDef.cont.applyTransformer(mem_addr, f), iDef.cont.applyTransformer(a, f), iDef.cont.applyTransformer(mem_addrOffset, f))(iDef.integralType, iDef.cont) case MM_UNPACKLO_EPI8 (a, b) => _mm_unpacklo_epi8(f(a), f(b)) case MM_MIN_EPU8 (a, b) => 
_mm_min_epu8(f(a), f(b)) case MM_MIN_PD (a, b) => _mm_min_pd(f(a), f(b)) case MM_MOVE_SD (a, b) => _mm_move_sd(f(a), f(b)) case MM_UCOMIGE_SD (a, b) => _mm_ucomige_sd(f(a), f(b)) case MM_SET_SD (a) => _mm_set_sd(f(a)) case MM_UCOMINEQ_SD (a, b) => _mm_ucomineq_sd(f(a), f(b)) case MM_BSRLI_SI128 (a, imm8) => _mm_bsrli_si128(f(a), f(imm8)) case MM_CVTPD_PS (a) => _mm_cvtpd_ps(f(a)) case MM_CVTSD_SI32 (a) => _mm_cvtsd_si32(f(a)) case MM_CVTSI64X_SI128 (a) => _mm_cvtsi64x_si128(f(a)) case MM_SRA_EPI16 (a, count) => _mm_sra_epi16(f(a), f(count)) case MM_CMPLT_EPI8 (a, b) => _mm_cmplt_epi8(f(a), f(b)) case iDef@MM_STORE_SI128 (mem_addr, a, mem_addrOffset) => _mm_store_si128(iDef.cont.applyTransformer(mem_addr, f), iDef.cont.applyTransformer(a, f), iDef.cont.applyTransformer(mem_addrOffset, f))(iDef.integralType, iDef.cont) case iDef@MM_CLFLUSH (p, pOffset) => _mm_clflush(iDef.cont.applyTransformer(p, f), iDef.cont.applyTransformer(pOffset, f))(iDef.voidType, iDef.integralType, iDef.cont) case MM_SRA_EPI32 (a, count) => _mm_sra_epi32(f(a), f(count)) case MM_SRAI_EPI32 (a, imm8) => _mm_srai_epi32(f(a), f(imm8)) case MM_SRLI_SI128 (a, imm8) => _mm_srli_si128(f(a), f(imm8)) case MM_SET_PD1 (a) => _mm_set_pd1(f(a)) case MM_COMINEQ_SD (a, b) => _mm_comineq_sd(f(a), f(b)) case MM_CMPNLT_SD (a, b) => _mm_cmpnlt_sd(f(a), f(b)) case MM512_CVTSI512_SI32 (a) => _mm512_cvtsi512_si32(f(a)) case MM_CVTTPS_EPI32 (a) => _mm_cvttps_epi32(f(a)) case MM_SET1_EPI64 (a) => _mm_set1_epi64(f(a)) case MM_CVTSD_SI64X (a) => _mm_cvtsd_si64x(f(a)) case MM_SLL_EPI16 (a, count) => _mm_sll_epi16(f(a), f(count)) case MM_OR_PD (a, b) => _mm_or_pd(f(a), f(b)) case iDef@MM_LOAD_SI128 (mem_addr, mem_addrOffset) => _mm_load_si128(iDef.cont.applyTransformer(mem_addr, f), iDef.cont.applyTransformer(mem_addrOffset, f))(iDef.integralType, iDef.cont) case MM_ADD_EPI32 (a, b) => _mm_add_epi32(f(a), f(b)) case MM_SUB_PD (a, b) => _mm_sub_pd(f(a), f(b)) case MM_UNPACKHI_PD (a, b) => _mm_unpackhi_pd(f(a), f(b)) case MM_MULLO_EPI16 (a, b) => _mm_mullo_epi16(f(a), f(b)) case MM_SET_PD (e1, e0) => _mm_set_pd(f(e1), f(e0)) case MM_CMPEQ_PD (a, b) => _mm_cmpeq_pd(f(a), f(b)) case MM_CMPGT_EPI8 (a, b) => _mm_cmpgt_epi8(f(a), f(b)) case MM_AND_SI128 (a, b) => _mm_and_si128(f(a), f(b)) case MM_CMPEQ_SD (a, b) => _mm_cmpeq_sd(f(a), f(b)) case iDef@MM_LOADL_EPI64 (mem_addr, mem_addrOffset) => _mm_loadl_epi64(iDef.cont.applyTransformer(mem_addr, f), iDef.cont.applyTransformer(mem_addrOffset, f))(iDef.integralType, iDef.cont) case MM_SRAI_EPI16 (a, imm8) => _mm_srai_epi16(f(a), f(imm8)) case MM_PACKS_EPI16 (a, b) => _mm_packs_epi16(f(a), f(b)) case MM_CMPLT_EPI32 (a, b) => _mm_cmplt_epi32(f(a), f(b)) case MM_OR_SI128 (a, b) => _mm_or_si128(f(a), f(b)) case MM_SQRT_PD (a) => _mm_sqrt_pd(f(a)) case MM_CMPNEQ_SD (a, b) => _mm_cmpneq_sd(f(a), f(b)) case MM_SET1_PD (a) => _mm_set1_pd(f(a)) case MM_SUB_EPI64 (a, b) => _mm_sub_epi64(f(a), f(b)) case MM_UNPACKLO_EPI32 (a, b) => _mm_unpacklo_epi32(f(a), f(b)) case MM_SETR_PD (e1, e0) => _mm_setr_pd(f(e1), f(e0)) case iDef@MM_STOREL_PD (mem_addr, a, mem_addrOffset) => _mm_storel_pd(iDef.cont.applyTransformer(mem_addr, f), iDef.cont.applyTransformer(a, f), iDef.cont.applyTransformer(mem_addrOffset, f))(iDef.integralType, iDef.cont) case MM_CASTPS_SI128 (a) => _mm_castps_si128(f(a)) case iDef@MM_STORE_PD1 (mem_addr, a, mem_addrOffset) => _mm_store_pd1(iDef.cont.applyTransformer(mem_addr, f), iDef.cont.applyTransformer(a, f), iDef.cont.applyTransformer(mem_addrOffset, f))(iDef.integralType, iDef.cont) case 
MM_ANDNOT_SI128 (a, b) => _mm_andnot_si128(f(a), f(b)) case MM_SET1_EPI8 (a) => _mm_set1_epi8(f(a)) case iDef@MM_STORER_PD (mem_addr, a, mem_addrOffset) => _mm_storer_pd(iDef.cont.applyTransformer(mem_addr, f), iDef.cont.applyTransformer(a, f), iDef.cont.applyTransformer(mem_addrOffset, f))(iDef.integralType, iDef.cont) case MM_SUB_SD (a, b) => _mm_sub_sd(f(a), f(b)) case MM_MUL_PD (a, b) => _mm_mul_pd(f(a), f(b)) case MM_CMPGE_PD (a, b) => _mm_cmpge_pd(f(a), f(b)) case MM_SRL_EPI32 (a, count) => _mm_srl_epi32(f(a), f(count)) case MM_COMILT_SD (a, b) => _mm_comilt_sd(f(a), f(b)) case MM_UNPACKLO_PD (a, b) => _mm_unpacklo_pd(f(a), f(b)) case MM_SHUFFLE_EPI32 (a, imm8) => _mm_shuffle_epi32(f(a), f(imm8)) case MM_SRL_EPI64 (a, count) => _mm_srl_epi64(f(a), f(count)) case MM_SRL_EPI16 (a, count) => _mm_srl_epi16(f(a), f(count)) case MM_CVTPD_PI32 (a) => _mm_cvtpd_pi32(f(a)) case MM_COMIGT_SD (a, b) => _mm_comigt_sd(f(a), f(b)) case MM_SHUFFLELO_EPI16 (a, imm8) => _mm_shufflelo_epi16(f(a), f(imm8)) case MM_CMPEQ_EPI16 (a, b) => _mm_cmpeq_epi16(f(a), f(b)) case MM_MOVE_EPI64 (a) => _mm_move_epi64(f(a)) case iDef@MM_LOAD_PD (mem_addr, mem_addrOffset) => _mm_load_pd(iDef.cont.applyTransformer(mem_addr, f), iDef.cont.applyTransformer(mem_addrOffset, f))(iDef.integralType, iDef.cont) case MM_ADDS_EPU16 (a, b) => _mm_adds_epu16(f(a), f(b)) case MM_UNPACKHI_EPI32 (a, b) => _mm_unpackhi_epi32(f(a), f(b)) case MM_MOVEPI64_PI64 (a) => _mm_movepi64_pi64(f(a)) case MM_CVTTPD_EPI32 (a) => _mm_cvttpd_epi32(f(a)) case MM_UNPACKHI_EPI64 (a, b) => _mm_unpackhi_epi64(f(a), f(b)) case MM_CMPEQ_EPI32 (a, b) => _mm_cmpeq_epi32(f(a), f(b)) case MM_CVTPS_EPI32 (a) => _mm_cvtps_epi32(f(a)) case MM_CVTSI64_SD (a, b) => _mm_cvtsi64_sd(f(a), f(b)) case MM_SLLI_EPI32 (a, imm8) => _mm_slli_epi32(f(a), f(imm8)) case MM_CVTSI128_SI64 (a) => _mm_cvtsi128_si64(f(a)) case MM_PAUSE () => _mm_pause() case MM512_CVTSD_F64 (a) => _mm512_cvtsd_f64(f(a)) case MM_CASTPS_PD (a) => _mm_castps_pd(f(a)) case MM_CASTSI128_PD (a) => _mm_castsi128_pd(f(a)) case MM_CMPLT_PD (a, b) => _mm_cmplt_pd(f(a), f(b)) case MM_SRLI_EPI32 (a, imm8) => _mm_srli_epi32(f(a), f(imm8)) case MM_UCOMILE_SD (a, b) => _mm_ucomile_sd(f(a), f(b)) case MM_CVTSI32_SD (a, b) => _mm_cvtsi32_sd(f(a), f(b)) case MM_XOR_PD (a, b) => _mm_xor_pd(f(a), f(b)) case MM_MUL_EPU32 (a, b) => _mm_mul_epu32(f(a), f(b)) case MM_AND_PD (a, b) => _mm_and_pd(f(a), f(b)) case MM_AVG_EPU16 (a, b) => _mm_avg_epu16(f(a), f(b)) case MM_SHUFFLE_PD (a, b, imm8) => _mm_shuffle_pd(f(a), f(b), f(imm8)) case MM_CVTPD_EPI32 (a) => _mm_cvtpd_epi32(f(a)) case MM_MADD_EPI16 (a, b) => _mm_madd_epi16(f(a), f(b)) case MM_SETR_EPI64 (e1, e0) => _mm_setr_epi64(f(e1), f(e0)) case MM_MIN_EPI16 (a, b) => _mm_min_epi16(f(a), f(b)) case MM_MOVPI64_EPI64 (a) => _mm_movpi64_epi64(f(a)) case MM_CVTSI128_SI32 (a) => _mm_cvtsi128_si32(f(a)) case MM_MULHI_EPI16 (a, b) => _mm_mulhi_epi16(f(a), f(b)) case MM_CVTSI128_SI64X (a) => _mm_cvtsi128_si64x(f(a)) case MM_CMPLT_EPI16 (a, b) => _mm_cmplt_epi16(f(a), f(b)) case MM_CVTSD_SS (a, b) => _mm_cvtsd_ss(f(a), f(b)) case MM_SUB_EPI16 (a, b) => _mm_sub_epi16(f(a), f(b)) case MM_CVTSD_F64 (a) => _mm_cvtsd_f64(f(a)) case MM_CMPNGT_SD (a, b) => _mm_cmpngt_sd(f(a), f(b)) case MM_ADD_EPI64 (a, b) => _mm_add_epi64(f(a), f(b)) case iDef@MM_MASKMOVEU_SI128 (a, mask, mem_addr, mem_addrOffset) => _mm_maskmoveu_si128(iDef.cont.applyTransformer(a, f), iDef.cont.applyTransformer(mask, f), iDef.cont.applyTransformer(mem_addr, f), iDef.cont.applyTransformer(mem_addrOffset, 
f))(iDef.integralType, iDef.cont) case MM_MIN_SD (a, b) => _mm_min_sd(f(a), f(b)) case MM_SETR_EPI8 (e15, e14, e13, e12, e11, e10, e9, e8, e7, e6, e5, e4, e3, e2, e1, e0) => _mm_setr_epi8(f(e15), f(e14), f(e13), f(e12), f(e11), f(e10), f(e9), f(e8), f(e7), f(e6), f(e5), f(e4), f(e3), f(e2), f(e1), f(e0)) case MM_INSERT_EPI16 (a, i, imm8) => _mm_insert_epi16(f(a), f(i), f(imm8)) case MM_UNPACKLO_EPI16 (a, b) => _mm_unpacklo_epi16(f(a), f(b)) case MM_CMPGE_SD (a, b) => _mm_cmpge_sd(f(a), f(b)) case iDef@MM_LOAD_SD (mem_addr, mem_addrOffset) => _mm_load_sd(iDef.cont.applyTransformer(mem_addr, f), iDef.cont.applyTransformer(mem_addrOffset, f))(iDef.integralType, iDef.cont) case MM_SUBS_EPU8 (a, b) => _mm_subs_epu8(f(a), f(b)) case iDef@MM_STOREL_EPI64 (mem_addr, a, mem_addrOffset) => _mm_storel_epi64(iDef.cont.applyTransformer(mem_addr, f), iDef.cont.applyTransformer(a, f), iDef.cont.applyTransformer(mem_addrOffset, f))(iDef.integralType, iDef.cont) case MM_CMPGT_PD (a, b) => _mm_cmpgt_pd(f(a), f(b)) case MM_SLLI_EPI64 (a, imm8) => _mm_slli_epi64(f(a), f(imm8)) case MM_BSLLI_SI128 (a, imm8) => _mm_bslli_si128(f(a), f(imm8)) case MM_DIV_SD (a, b) => _mm_div_sd(f(a), f(b)) case MM_SUB_EPI8 (a, b) => _mm_sub_epi8(f(a), f(b)) case MM_SET_EPI16 (e7, e6, e5, e4, e3, e2, e1, e0) => _mm_set_epi16(f(e7), f(e6), f(e5), f(e4), f(e3), f(e2), f(e1), f(e0)) case MM_SET1_EPI64X (a) => _mm_set1_epi64x(f(a)) case iDef@MM_LOADU_SI128 (mem_addr, mem_addrOffset) => _mm_loadu_si128(iDef.cont.applyTransformer(mem_addr, f), iDef.cont.applyTransformer(mem_addrOffset, f))(iDef.integralType, iDef.cont) case MM_CVTTSD_SI32 (a) => _mm_cvttsd_si32(f(a)) case MM_SLLI_SI128 (a, imm8) => _mm_slli_si128(f(a), f(imm8)) case MM_SLLI_EPI16 (a, imm8) => _mm_slli_epi16(f(a), f(imm8)) case MM_CMPUNORD_SD (a, b) => _mm_cmpunord_sd(f(a), f(b)) case MM_UNDEFINED_PD () => _mm_undefined_pd() case iDef@MM_LOADU_PD (mem_addr, mem_addrOffset) => _mm_loadu_pd(iDef.cont.applyTransformer(mem_addr, f), iDef.cont.applyTransformer(mem_addrOffset, f))(iDef.integralType, iDef.cont) case MM_CVTSS_SD (a, b) => _mm_cvtss_sd(f(a), f(b)) case MM_ADDS_EPI16 (a, b) => _mm_adds_epi16(f(a), f(b)) case MM_CASTPD_SI128 (a) => _mm_castpd_si128(f(a)) case MM_UNPACKHI_EPI16 (a, b) => _mm_unpackhi_epi16(f(a), f(b)) case MM_CMPGT_SD (a, b) => _mm_cmpgt_sd(f(a), f(b)) case MM_CVTTSD_SI64X (a) => _mm_cvttsd_si64x(f(a)) case MM_CMPLT_SD (a, b) => _mm_cmplt_sd(f(a), f(b)) case MM256_CVTSD_F64 (a) => _mm256_cvtsd_f64(f(a)) case MM_ADD_EPI8 (a, b) => _mm_add_epi8(f(a), f(b)) case MM_CMPGT_EPI32 (a, b) => _mm_cmpgt_epi32(f(a), f(b)) case MM_UNPACKLO_EPI64 (a, b) => _mm_unpacklo_epi64(f(a), f(b)) case iDef@MM_LOADL_PD (a, mem_addr, mem_addrOffset) => _mm_loadl_pd(iDef.cont.applyTransformer(a, f), iDef.cont.applyTransformer(mem_addr, f), iDef.cont.applyTransformer(mem_addrOffset, f))(iDef.integralType, iDef.cont) case MM_SETR_EPI16 (e7, e6, e5, e4, e3, e2, e1, e0) => _mm_setr_epi16(f(e7), f(e6), f(e5), f(e4), f(e3), f(e2), f(e1), f(e0)) case MM_SUBS_EPI16 (a, b) => _mm_subs_epi16(f(a), f(b)) case MM_MUL_SD (a, b) => _mm_mul_sd(f(a), f(b)) case iDef@MM_STREAM_PD (mem_addr, a, mem_addrOffset) => _mm_stream_pd(iDef.cont.applyTransformer(mem_addr, f), iDef.cont.applyTransformer(a, f), iDef.cont.applyTransformer(mem_addrOffset, f))(iDef.integralType, iDef.cont) case MM_CVTTSD_SI64 (a) => _mm_cvttsd_si64(f(a)) case MM_MOVEMASK_EPI8 (a) => _mm_movemask_epi8(f(a)) case MM_CVTPI32_PD (a) => _mm_cvtpi32_pd(f(a)) case iDef@MM_STORE_PD (mem_addr, a, mem_addrOffset) => 
_mm_store_pd(iDef.cont.applyTransformer(mem_addr, f), iDef.cont.applyTransformer(a, f), iDef.cont.applyTransformer(mem_addrOffset, f))(iDef.integralType, iDef.cont) case MM_UNPACKHI_EPI8 (a, b) => _mm_unpackhi_epi8(f(a), f(b)) case MM_XOR_SI128 (a, b) => _mm_xor_si128(f(a), f(b)) case MM_CVTEPI32_PS (a) => _mm_cvtepi32_ps(f(a)) case MM_CVTSD_SI64 (a) => _mm_cvtsd_si64(f(a)) case MM_LFENCE () => _mm_lfence() case MM256_CVTSI256_SI32 (a) => _mm256_cvtsi256_si32(f(a)) case MM_PACKUS_EPI16 (a, b) => _mm_packus_epi16(f(a), f(b)) case iDef@MM_STOREU_SI128 (mem_addr, a, mem_addrOffset) => _mm_storeu_si128(iDef.cont.applyTransformer(mem_addr, f), iDef.cont.applyTransformer(a, f), iDef.cont.applyTransformer(mem_addrOffset, f))(iDef.integralType, iDef.cont) case MM_PACKS_EPI32 (a, b) => _mm_packs_epi32(f(a), f(b)) case MM_CMPUNORD_PD (a, b) => _mm_cmpunord_pd(f(a), f(b)) case MM_COMIGE_SD (a, b) => _mm_comige_sd(f(a), f(b)) case iDef@MM_STOREH_PD (mem_addr, a, mem_addrOffset) => _mm_storeh_pd(iDef.cont.applyTransformer(mem_addr, f), iDef.cont.applyTransformer(a, f), iDef.cont.applyTransformer(mem_addrOffset, f))(iDef.integralType, iDef.cont) case MM_CMPNLT_PD (a, b) => _mm_cmpnlt_pd(f(a), f(b)) case MM_EXTRACT_EPI16 (a, imm8) => _mm_extract_epi16(f(a), f(imm8)) case MM_SAD_EPU8 (a, b) => _mm_sad_epu8(f(a), f(b)) case MM_ADDS_EPI8 (a, b) => _mm_adds_epi8(f(a), f(b)) case MM_CMPEQ_EPI8 (a, b) => _mm_cmpeq_epi8(f(a), f(b)) case MM_UCOMILT_SD (a, b) => _mm_ucomilt_sd(f(a), f(b)) case iDef@MM_STREAM_SI128 (mem_addr, a, mem_addrOffset) => _mm_stream_si128(iDef.cont.applyTransformer(mem_addr, f), iDef.cont.applyTransformer(a, f), iDef.cont.applyTransformer(mem_addrOffset, f))(iDef.integralType, iDef.cont) case MM_SRLI_EPI64 (a, imm8) => _mm_srli_epi64(f(a), f(imm8)) case MM_CVTEPI32_PD (a) => _mm_cvtepi32_pd(f(a)) case MM_ADDS_EPU8 (a, b) => _mm_adds_epu8(f(a), f(b)) case MM_MUL_SU32 (a, b) => _mm_mul_su32(f(a), f(b)) case MM_SETR_EPI32 (e3, e2, e1, e0) => _mm_setr_epi32(f(e3), f(e2), f(e1), f(e0)) case MM_CMPLE_PD (a, b) => _mm_cmple_pd(f(a), f(b)) case Reflect(iDef@MM_STREAM_SI64 (mem_addr, a, mem_addrOffset), u, es) => reflectMirrored(Reflect(MM_STREAM_SI64 (iDef.cont.applyTransformer(mem_addr, f), iDef.cont.applyTransformer(a, f), iDef.cont.applyTransformer(mem_addrOffset, f))(iDef.integralType, iDef.cont), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(MM_UNPACKLO_EPI8 (a, b), u, es) => reflectMirrored(Reflect(MM_UNPACKLO_EPI8 (f(a), f(b)), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(MM_MIN_EPU8 (a, b), u, es) => reflectMirrored(Reflect(MM_MIN_EPU8 (f(a), f(b)), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(MM_MIN_PD (a, b), u, es) => reflectMirrored(Reflect(MM_MIN_PD (f(a), f(b)), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(MM_MOVE_SD (a, b), u, es) => reflectMirrored(Reflect(MM_MOVE_SD (f(a), f(b)), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(MM_UCOMIGE_SD (a, b), u, es) => reflectMirrored(Reflect(MM_UCOMIGE_SD (f(a), f(b)), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(MM_SET_SD (a), u, es) => reflectMirrored(Reflect(MM_SET_SD (f(a)), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(MM_UCOMINEQ_SD (a, b), u, es) => reflectMirrored(Reflect(MM_UCOMINEQ_SD (f(a), f(b)), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(MM_BSRLI_SI128 (a, imm8), u, es) => reflectMirrored(Reflect(MM_BSRLI_SI128 (f(a), f(imm8)), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(MM_CVTPD_PS (a), u, es) => reflectMirrored(Reflect(MM_CVTPD_PS (f(a)), 
mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(MM_CVTSD_SI32 (a), u, es) => reflectMirrored(Reflect(MM_CVTSD_SI32 (f(a)), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(MM_CVTSI64X_SI128 (a), u, es) => reflectMirrored(Reflect(MM_CVTSI64X_SI128 (f(a)), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(MM_SRA_EPI16 (a, count), u, es) => reflectMirrored(Reflect(MM_SRA_EPI16 (f(a), f(count)), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(MM_CMPLT_EPI8 (a, b), u, es) => reflectMirrored(Reflect(MM_CMPLT_EPI8 (f(a), f(b)), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(iDef@MM_STORE_SI128 (mem_addr, a, mem_addrOffset), u, es) => reflectMirrored(Reflect(MM_STORE_SI128 (iDef.cont.applyTransformer(mem_addr, f), iDef.cont.applyTransformer(a, f), iDef.cont.applyTransformer(mem_addrOffset, f))(iDef.integralType, iDef.cont), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(iDef@MM_CLFLUSH (p, pOffset), u, es) => reflectMirrored(Reflect(MM_CLFLUSH (iDef.cont.applyTransformer(p, f), iDef.cont.applyTransformer(pOffset, f))(iDef.voidType, iDef.integralType, iDef.cont), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(MM_SRA_EPI32 (a, count), u, es) => reflectMirrored(Reflect(MM_SRA_EPI32 (f(a), f(count)), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(MM_SRAI_EPI32 (a, imm8), u, es) => reflectMirrored(Reflect(MM_SRAI_EPI32 (f(a), f(imm8)), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(MM_SRLI_SI128 (a, imm8), u, es) => reflectMirrored(Reflect(MM_SRLI_SI128 (f(a), f(imm8)), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(MM_SET_PD1 (a), u, es) => reflectMirrored(Reflect(MM_SET_PD1 (f(a)), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(MM_COMINEQ_SD (a, b), u, es) => reflectMirrored(Reflect(MM_COMINEQ_SD (f(a), f(b)), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(MM_CMPNLT_SD (a, b), u, es) => reflectMirrored(Reflect(MM_CMPNLT_SD (f(a), f(b)), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(MM512_CVTSI512_SI32 (a), u, es) => reflectMirrored(Reflect(MM512_CVTSI512_SI32 (f(a)), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(MM_CVTTPS_EPI32 (a), u, es) => reflectMirrored(Reflect(MM_CVTTPS_EPI32 (f(a)), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(MM_SET1_EPI64 (a), u, es) => reflectMirrored(Reflect(MM_SET1_EPI64 (f(a)), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(MM_CVTSD_SI64X (a), u, es) => reflectMirrored(Reflect(MM_CVTSD_SI64X (f(a)), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(MM_SLL_EPI16 (a, count), u, es) => reflectMirrored(Reflect(MM_SLL_EPI16 (f(a), f(count)), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(MM_OR_PD (a, b), u, es) => reflectMirrored(Reflect(MM_OR_PD (f(a), f(b)), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(iDef@MM_LOAD_SI128 (mem_addr, mem_addrOffset), u, es) => reflectMirrored(Reflect(MM_LOAD_SI128 (iDef.cont.applyTransformer(mem_addr, f), iDef.cont.applyTransformer(mem_addrOffset, f))(iDef.integralType, iDef.cont), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(MM_ADD_EPI32 (a, b), u, es) => reflectMirrored(Reflect(MM_ADD_EPI32 (f(a), f(b)), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(MM_SUB_PD (a, b), u, es) => reflectMirrored(Reflect(MM_SUB_PD (f(a), f(b)), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(MM_UNPACKHI_PD (a, b), u, es) => reflectMirrored(Reflect(MM_UNPACKHI_PD (f(a), f(b)), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(MM_MULLO_EPI16 (a, b), u, es) => 
reflectMirrored(Reflect(MM_MULLO_EPI16 (f(a), f(b)), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(MM_SET_PD (e1, e0), u, es) => reflectMirrored(Reflect(MM_SET_PD (f(e1), f(e0)), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(MM_CMPEQ_PD (a, b), u, es) => reflectMirrored(Reflect(MM_CMPEQ_PD (f(a), f(b)), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(MM_CMPGT_EPI8 (a, b), u, es) => reflectMirrored(Reflect(MM_CMPGT_EPI8 (f(a), f(b)), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(MM_AND_SI128 (a, b), u, es) => reflectMirrored(Reflect(MM_AND_SI128 (f(a), f(b)), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(MM_CMPEQ_SD (a, b), u, es) => reflectMirrored(Reflect(MM_CMPEQ_SD (f(a), f(b)), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(iDef@MM_LOADL_EPI64 (mem_addr, mem_addrOffset), u, es) => reflectMirrored(Reflect(MM_LOADL_EPI64 (iDef.cont.applyTransformer(mem_addr, f), iDef.cont.applyTransformer(mem_addrOffset, f))(iDef.integralType, iDef.cont), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(MM_SRAI_EPI16 (a, imm8), u, es) => reflectMirrored(Reflect(MM_SRAI_EPI16 (f(a), f(imm8)), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(MM_PACKS_EPI16 (a, b), u, es) => reflectMirrored(Reflect(MM_PACKS_EPI16 (f(a), f(b)), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(MM_CMPLT_EPI32 (a, b), u, es) => reflectMirrored(Reflect(MM_CMPLT_EPI32 (f(a), f(b)), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(MM_OR_SI128 (a, b), u, es) => reflectMirrored(Reflect(MM_OR_SI128 (f(a), f(b)), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(MM_SQRT_PD (a), u, es) => reflectMirrored(Reflect(MM_SQRT_PD (f(a)), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(MM_CMPNEQ_SD (a, b), u, es) => reflectMirrored(Reflect(MM_CMPNEQ_SD (f(a), f(b)), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(MM_SET1_PD (a), u, es) => reflectMirrored(Reflect(MM_SET1_PD (f(a)), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(MM_SUB_EPI64 (a, b), u, es) => reflectMirrored(Reflect(MM_SUB_EPI64 (f(a), f(b)), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(MM_UNPACKLO_EPI32 (a, b), u, es) => reflectMirrored(Reflect(MM_UNPACKLO_EPI32 (f(a), f(b)), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(MM_SETR_PD (e1, e0), u, es) => reflectMirrored(Reflect(MM_SETR_PD (f(e1), f(e0)), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(iDef@MM_STOREL_PD (mem_addr, a, mem_addrOffset), u, es) => reflectMirrored(Reflect(MM_STOREL_PD (iDef.cont.applyTransformer(mem_addr, f), iDef.cont.applyTransformer(a, f), iDef.cont.applyTransformer(mem_addrOffset, f))(iDef.integralType, iDef.cont), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(MM_CASTPS_SI128 (a), u, es) => reflectMirrored(Reflect(MM_CASTPS_SI128 (f(a)), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(iDef@MM_STORE_PD1 (mem_addr, a, mem_addrOffset), u, es) => reflectMirrored(Reflect(MM_STORE_PD1 (iDef.cont.applyTransformer(mem_addr, f), iDef.cont.applyTransformer(a, f), iDef.cont.applyTransformer(mem_addrOffset, f))(iDef.integralType, iDef.cont), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(MM_ANDNOT_SI128 (a, b), u, es) => reflectMirrored(Reflect(MM_ANDNOT_SI128 (f(a), f(b)), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(MM_SET1_EPI8 (a), u, es) => reflectMirrored(Reflect(MM_SET1_EPI8 (f(a)), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(iDef@MM_STORER_PD (mem_addr, a, mem_addrOffset), u, es) => reflectMirrored(Reflect(MM_STORER_PD 
(iDef.cont.applyTransformer(mem_addr, f), iDef.cont.applyTransformer(a, f), iDef.cont.applyTransformer(mem_addrOffset, f))(iDef.integralType, iDef.cont), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(MM_SUB_SD (a, b), u, es) => reflectMirrored(Reflect(MM_SUB_SD (f(a), f(b)), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(MM_MUL_PD (a, b), u, es) => reflectMirrored(Reflect(MM_MUL_PD (f(a), f(b)), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(MM_CMPGE_PD (a, b), u, es) => reflectMirrored(Reflect(MM_CMPGE_PD (f(a), f(b)), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(MM_SRL_EPI32 (a, count), u, es) => reflectMirrored(Reflect(MM_SRL_EPI32 (f(a), f(count)), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(MM_COMILT_SD (a, b), u, es) => reflectMirrored(Reflect(MM_COMILT_SD (f(a), f(b)), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(MM_UNPACKLO_PD (a, b), u, es) => reflectMirrored(Reflect(MM_UNPACKLO_PD (f(a), f(b)), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(MM_SHUFFLE_EPI32 (a, imm8), u, es) => reflectMirrored(Reflect(MM_SHUFFLE_EPI32 (f(a), f(imm8)), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(MM_SRL_EPI64 (a, count), u, es) => reflectMirrored(Reflect(MM_SRL_EPI64 (f(a), f(count)), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(MM_SRL_EPI16 (a, count), u, es) => reflectMirrored(Reflect(MM_SRL_EPI16 (f(a), f(count)), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(MM_CVTPD_PI32 (a), u, es) => reflectMirrored(Reflect(MM_CVTPD_PI32 (f(a)), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(MM_COMIGT_SD (a, b), u, es) => reflectMirrored(Reflect(MM_COMIGT_SD (f(a), f(b)), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(MM_SHUFFLELO_EPI16 (a, imm8), u, es) => reflectMirrored(Reflect(MM_SHUFFLELO_EPI16 (f(a), f(imm8)), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(MM_CMPEQ_EPI16 (a, b), u, es) => reflectMirrored(Reflect(MM_CMPEQ_EPI16 (f(a), f(b)), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(MM_MOVE_EPI64 (a), u, es) => reflectMirrored(Reflect(MM_MOVE_EPI64 (f(a)), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(iDef@MM_LOAD_PD (mem_addr, mem_addrOffset), u, es) => reflectMirrored(Reflect(MM_LOAD_PD (iDef.cont.applyTransformer(mem_addr, f), iDef.cont.applyTransformer(mem_addrOffset, f))(iDef.integralType, iDef.cont), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(MM_ADDS_EPU16 (a, b), u, es) => reflectMirrored(Reflect(MM_ADDS_EPU16 (f(a), f(b)), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(MM_UNPACKHI_EPI32 (a, b), u, es) => reflectMirrored(Reflect(MM_UNPACKHI_EPI32 (f(a), f(b)), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(MM_MOVEPI64_PI64 (a), u, es) => reflectMirrored(Reflect(MM_MOVEPI64_PI64 (f(a)), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(MM_CVTTPD_EPI32 (a), u, es) => reflectMirrored(Reflect(MM_CVTTPD_EPI32 (f(a)), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(MM_UNPACKHI_EPI64 (a, b), u, es) => reflectMirrored(Reflect(MM_UNPACKHI_EPI64 (f(a), f(b)), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(MM_CMPEQ_EPI32 (a, b), u, es) => reflectMirrored(Reflect(MM_CMPEQ_EPI32 (f(a), f(b)), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(MM_CVTPS_EPI32 (a), u, es) => reflectMirrored(Reflect(MM_CVTPS_EPI32 (f(a)), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(MM_CVTSI64_SD (a, b), u, es) => reflectMirrored(Reflect(MM_CVTSI64_SD (f(a), f(b)), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case 
Reflect(MM_SLLI_EPI32 (a, imm8), u, es) => reflectMirrored(Reflect(MM_SLLI_EPI32 (f(a), f(imm8)), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(MM_CVTSI128_SI64 (a), u, es) => reflectMirrored(Reflect(MM_CVTSI128_SI64 (f(a)), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(MM_PAUSE (), u, es) => reflectMirrored(Reflect(MM_PAUSE (), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(MM512_CVTSD_F64 (a), u, es) => reflectMirrored(Reflect(MM512_CVTSD_F64 (f(a)), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(MM_CASTPS_PD (a), u, es) => reflectMirrored(Reflect(MM_CASTPS_PD (f(a)), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(MM_CASTSI128_PD (a), u, es) => reflectMirrored(Reflect(MM_CASTSI128_PD (f(a)), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(MM_CMPLT_PD (a, b), u, es) => reflectMirrored(Reflect(MM_CMPLT_PD (f(a), f(b)), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(MM_SRLI_EPI32 (a, imm8), u, es) => reflectMirrored(Reflect(MM_SRLI_EPI32 (f(a), f(imm8)), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(MM_UCOMILE_SD (a, b), u, es) => reflectMirrored(Reflect(MM_UCOMILE_SD (f(a), f(b)), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(MM_CVTSI32_SD (a, b), u, es) => reflectMirrored(Reflect(MM_CVTSI32_SD (f(a), f(b)), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(MM_XOR_PD (a, b), u, es) => reflectMirrored(Reflect(MM_XOR_PD (f(a), f(b)), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(MM_MUL_EPU32 (a, b), u, es) => reflectMirrored(Reflect(MM_MUL_EPU32 (f(a), f(b)), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(MM_AND_PD (a, b), u, es) => reflectMirrored(Reflect(MM_AND_PD (f(a), f(b)), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(MM_AVG_EPU16 (a, b), u, es) => reflectMirrored(Reflect(MM_AVG_EPU16 (f(a), f(b)), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(MM_SHUFFLE_PD (a, b, imm8), u, es) => reflectMirrored(Reflect(MM_SHUFFLE_PD (f(a), f(b), f(imm8)), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(MM_CVTPD_EPI32 (a), u, es) => reflectMirrored(Reflect(MM_CVTPD_EPI32 (f(a)), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(MM_MADD_EPI16 (a, b), u, es) => reflectMirrored(Reflect(MM_MADD_EPI16 (f(a), f(b)), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(MM_SETR_EPI64 (e1, e0), u, es) => reflectMirrored(Reflect(MM_SETR_EPI64 (f(e1), f(e0)), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(MM_MIN_EPI16 (a, b), u, es) => reflectMirrored(Reflect(MM_MIN_EPI16 (f(a), f(b)), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(MM_MOVPI64_EPI64 (a), u, es) => reflectMirrored(Reflect(MM_MOVPI64_EPI64 (f(a)), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(MM_CVTSI128_SI32 (a), u, es) => reflectMirrored(Reflect(MM_CVTSI128_SI32 (f(a)), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(MM_MULHI_EPI16 (a, b), u, es) => reflectMirrored(Reflect(MM_MULHI_EPI16 (f(a), f(b)), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(MM_CVTSI128_SI64X (a), u, es) => reflectMirrored(Reflect(MM_CVTSI128_SI64X (f(a)), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(MM_CMPLT_EPI16 (a, b), u, es) => reflectMirrored(Reflect(MM_CMPLT_EPI16 (f(a), f(b)), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(MM_CVTSD_SS (a, b), u, es) => reflectMirrored(Reflect(MM_CVTSD_SS (f(a), f(b)), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(MM_SUB_EPI16 (a, b), u, es) => reflectMirrored(Reflect(MM_SUB_EPI16 (f(a), f(b)), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case 
Reflect(MM_CVTSD_F64 (a), u, es) => reflectMirrored(Reflect(MM_CVTSD_F64 (f(a)), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(MM_CMPNGT_SD (a, b), u, es) => reflectMirrored(Reflect(MM_CMPNGT_SD (f(a), f(b)), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(MM_ADD_EPI64 (a, b), u, es) => reflectMirrored(Reflect(MM_ADD_EPI64 (f(a), f(b)), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(iDef@MM_MASKMOVEU_SI128 (a, mask, mem_addr, mem_addrOffset), u, es) => reflectMirrored(Reflect(MM_MASKMOVEU_SI128 (iDef.cont.applyTransformer(a, f), iDef.cont.applyTransformer(mask, f), iDef.cont.applyTransformer(mem_addr, f), iDef.cont.applyTransformer(mem_addrOffset, f))(iDef.integralType, iDef.cont), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(MM_MIN_SD (a, b), u, es) => reflectMirrored(Reflect(MM_MIN_SD (f(a), f(b)), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(MM_SETR_EPI8 (e15, e14, e13, e12, e11, e10, e9, e8, e7, e6, e5, e4, e3, e2, e1, e0), u, es) => reflectMirrored(Reflect(MM_SETR_EPI8 (f(e15), f(e14), f(e13), f(e12), f(e11), f(e10), f(e9), f(e8), f(e7), f(e6), f(e5), f(e4), f(e3), f(e2), f(e1), f(e0)), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(MM_INSERT_EPI16 (a, i, imm8), u, es) => reflectMirrored(Reflect(MM_INSERT_EPI16 (f(a), f(i), f(imm8)), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(MM_UNPACKLO_EPI16 (a, b), u, es) => reflectMirrored(Reflect(MM_UNPACKLO_EPI16 (f(a), f(b)), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(MM_CMPGE_SD (a, b), u, es) => reflectMirrored(Reflect(MM_CMPGE_SD (f(a), f(b)), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(iDef@MM_LOAD_SD (mem_addr, mem_addrOffset), u, es) => reflectMirrored(Reflect(MM_LOAD_SD (iDef.cont.applyTransformer(mem_addr, f), iDef.cont.applyTransformer(mem_addrOffset, f))(iDef.integralType, iDef.cont), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(MM_SUBS_EPU8 (a, b), u, es) => reflectMirrored(Reflect(MM_SUBS_EPU8 (f(a), f(b)), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(iDef@MM_STOREL_EPI64 (mem_addr, a, mem_addrOffset), u, es) => reflectMirrored(Reflect(MM_STOREL_EPI64 (iDef.cont.applyTransformer(mem_addr, f), iDef.cont.applyTransformer(a, f), iDef.cont.applyTransformer(mem_addrOffset, f))(iDef.integralType, iDef.cont), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(MM_CMPGT_PD (a, b), u, es) => reflectMirrored(Reflect(MM_CMPGT_PD (f(a), f(b)), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(MM_SLLI_EPI64 (a, imm8), u, es) => reflectMirrored(Reflect(MM_SLLI_EPI64 (f(a), f(imm8)), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(MM_BSLLI_SI128 (a, imm8), u, es) => reflectMirrored(Reflect(MM_BSLLI_SI128 (f(a), f(imm8)), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(MM_DIV_SD (a, b), u, es) => reflectMirrored(Reflect(MM_DIV_SD (f(a), f(b)), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(MM_SUB_EPI8 (a, b), u, es) => reflectMirrored(Reflect(MM_SUB_EPI8 (f(a), f(b)), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(MM_SET_EPI16 (e7, e6, e5, e4, e3, e2, e1, e0), u, es) => reflectMirrored(Reflect(MM_SET_EPI16 (f(e7), f(e6), f(e5), f(e4), f(e3), f(e2), f(e1), f(e0)), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(MM_SET1_EPI64X (a), u, es) => reflectMirrored(Reflect(MM_SET1_EPI64X (f(a)), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(iDef@MM_LOADU_SI128 (mem_addr, mem_addrOffset), u, es) => reflectMirrored(Reflect(MM_LOADU_SI128 (iDef.cont.applyTransformer(mem_addr, f), 
iDef.cont.applyTransformer(mem_addrOffset, f))(iDef.integralType, iDef.cont), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(MM_CVTTSD_SI32 (a), u, es) => reflectMirrored(Reflect(MM_CVTTSD_SI32 (f(a)), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(MM_SLLI_SI128 (a, imm8), u, es) => reflectMirrored(Reflect(MM_SLLI_SI128 (f(a), f(imm8)), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(MM_SLLI_EPI16 (a, imm8), u, es) => reflectMirrored(Reflect(MM_SLLI_EPI16 (f(a), f(imm8)), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(MM_CMPUNORD_SD (a, b), u, es) => reflectMirrored(Reflect(MM_CMPUNORD_SD (f(a), f(b)), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(MM_UNDEFINED_PD (), u, es) => reflectMirrored(Reflect(MM_UNDEFINED_PD (), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(iDef@MM_LOADU_PD (mem_addr, mem_addrOffset), u, es) => reflectMirrored(Reflect(MM_LOADU_PD (iDef.cont.applyTransformer(mem_addr, f), iDef.cont.applyTransformer(mem_addrOffset, f))(iDef.integralType, iDef.cont), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(MM_CVTSS_SD (a, b), u, es) => reflectMirrored(Reflect(MM_CVTSS_SD (f(a), f(b)), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(MM_ADDS_EPI16 (a, b), u, es) => reflectMirrored(Reflect(MM_ADDS_EPI16 (f(a), f(b)), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(MM_CASTPD_SI128 (a), u, es) => reflectMirrored(Reflect(MM_CASTPD_SI128 (f(a)), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(MM_UNPACKHI_EPI16 (a, b), u, es) => reflectMirrored(Reflect(MM_UNPACKHI_EPI16 (f(a), f(b)), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(MM_CMPGT_SD (a, b), u, es) => reflectMirrored(Reflect(MM_CMPGT_SD (f(a), f(b)), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(MM_CVTTSD_SI64X (a), u, es) => reflectMirrored(Reflect(MM_CVTTSD_SI64X (f(a)), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(MM_CMPLT_SD (a, b), u, es) => reflectMirrored(Reflect(MM_CMPLT_SD (f(a), f(b)), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(MM256_CVTSD_F64 (a), u, es) => reflectMirrored(Reflect(MM256_CVTSD_F64 (f(a)), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(MM_ADD_EPI8 (a, b), u, es) => reflectMirrored(Reflect(MM_ADD_EPI8 (f(a), f(b)), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(MM_CMPGT_EPI32 (a, b), u, es) => reflectMirrored(Reflect(MM_CMPGT_EPI32 (f(a), f(b)), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(MM_UNPACKLO_EPI64 (a, b), u, es) => reflectMirrored(Reflect(MM_UNPACKLO_EPI64 (f(a), f(b)), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(iDef@MM_LOADL_PD (a, mem_addr, mem_addrOffset), u, es) => reflectMirrored(Reflect(MM_LOADL_PD (iDef.cont.applyTransformer(a, f), iDef.cont.applyTransformer(mem_addr, f), iDef.cont.applyTransformer(mem_addrOffset, f))(iDef.integralType, iDef.cont), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(MM_SETR_EPI16 (e7, e6, e5, e4, e3, e2, e1, e0), u, es) => reflectMirrored(Reflect(MM_SETR_EPI16 (f(e7), f(e6), f(e5), f(e4), f(e3), f(e2), f(e1), f(e0)), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(MM_SUBS_EPI16 (a, b), u, es) => reflectMirrored(Reflect(MM_SUBS_EPI16 (f(a), f(b)), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(MM_MUL_SD (a, b), u, es) => reflectMirrored(Reflect(MM_MUL_SD (f(a), f(b)), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(iDef@MM_STREAM_PD (mem_addr, a, mem_addrOffset), u, es) => reflectMirrored(Reflect(MM_STREAM_PD (iDef.cont.applyTransformer(mem_addr, f), 
iDef.cont.applyTransformer(a, f), iDef.cont.applyTransformer(mem_addrOffset, f))(iDef.integralType, iDef.cont), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(MM_CVTTSD_SI64 (a), u, es) => reflectMirrored(Reflect(MM_CVTTSD_SI64 (f(a)), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(MM_MOVEMASK_EPI8 (a), u, es) => reflectMirrored(Reflect(MM_MOVEMASK_EPI8 (f(a)), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(MM_CVTPI32_PD (a), u, es) => reflectMirrored(Reflect(MM_CVTPI32_PD (f(a)), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(iDef@MM_STORE_PD (mem_addr, a, mem_addrOffset), u, es) => reflectMirrored(Reflect(MM_STORE_PD (iDef.cont.applyTransformer(mem_addr, f), iDef.cont.applyTransformer(a, f), iDef.cont.applyTransformer(mem_addrOffset, f))(iDef.integralType, iDef.cont), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(MM_UNPACKHI_EPI8 (a, b), u, es) => reflectMirrored(Reflect(MM_UNPACKHI_EPI8 (f(a), f(b)), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(MM_XOR_SI128 (a, b), u, es) => reflectMirrored(Reflect(MM_XOR_SI128 (f(a), f(b)), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(MM_CVTEPI32_PS (a), u, es) => reflectMirrored(Reflect(MM_CVTEPI32_PS (f(a)), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(MM_CVTSD_SI64 (a), u, es) => reflectMirrored(Reflect(MM_CVTSD_SI64 (f(a)), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(MM_LFENCE (), u, es) => reflectMirrored(Reflect(MM_LFENCE (), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(MM256_CVTSI256_SI32 (a), u, es) => reflectMirrored(Reflect(MM256_CVTSI256_SI32 (f(a)), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(MM_PACKUS_EPI16 (a, b), u, es) => reflectMirrored(Reflect(MM_PACKUS_EPI16 (f(a), f(b)), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(iDef@MM_STOREU_SI128 (mem_addr, a, mem_addrOffset), u, es) => reflectMirrored(Reflect(MM_STOREU_SI128 (iDef.cont.applyTransformer(mem_addr, f), iDef.cont.applyTransformer(a, f), iDef.cont.applyTransformer(mem_addrOffset, f))(iDef.integralType, iDef.cont), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(MM_PACKS_EPI32 (a, b), u, es) => reflectMirrored(Reflect(MM_PACKS_EPI32 (f(a), f(b)), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(MM_CMPUNORD_PD (a, b), u, es) => reflectMirrored(Reflect(MM_CMPUNORD_PD (f(a), f(b)), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(MM_COMIGE_SD (a, b), u, es) => reflectMirrored(Reflect(MM_COMIGE_SD (f(a), f(b)), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(iDef@MM_STOREH_PD (mem_addr, a, mem_addrOffset), u, es) => reflectMirrored(Reflect(MM_STOREH_PD (iDef.cont.applyTransformer(mem_addr, f), iDef.cont.applyTransformer(a, f), iDef.cont.applyTransformer(mem_addrOffset, f))(iDef.integralType, iDef.cont), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(MM_CMPNLT_PD (a, b), u, es) => reflectMirrored(Reflect(MM_CMPNLT_PD (f(a), f(b)), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(MM_EXTRACT_EPI16 (a, imm8), u, es) => reflectMirrored(Reflect(MM_EXTRACT_EPI16 (f(a), f(imm8)), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(MM_SAD_EPU8 (a, b), u, es) => reflectMirrored(Reflect(MM_SAD_EPU8 (f(a), f(b)), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(MM_ADDS_EPI8 (a, b), u, es) => reflectMirrored(Reflect(MM_ADDS_EPI8 (f(a), f(b)), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(MM_CMPEQ_EPI8 (a, b), u, es) => reflectMirrored(Reflect(MM_CMPEQ_EPI8 (f(a), f(b)), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case 
Reflect(MM_UCOMILT_SD (a, b), u, es) => reflectMirrored(Reflect(MM_UCOMILT_SD (f(a), f(b)), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(iDef@MM_STREAM_SI128 (mem_addr, a, mem_addrOffset), u, es) => reflectMirrored(Reflect(MM_STREAM_SI128 (iDef.cont.applyTransformer(mem_addr, f), iDef.cont.applyTransformer(a, f), iDef.cont.applyTransformer(mem_addrOffset, f))(iDef.integralType, iDef.cont), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(MM_SRLI_EPI64 (a, imm8), u, es) => reflectMirrored(Reflect(MM_SRLI_EPI64 (f(a), f(imm8)), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(MM_CVTEPI32_PD (a), u, es) => reflectMirrored(Reflect(MM_CVTEPI32_PD (f(a)), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(MM_ADDS_EPU8 (a, b), u, es) => reflectMirrored(Reflect(MM_ADDS_EPU8 (f(a), f(b)), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(MM_MUL_SU32 (a, b), u, es) => reflectMirrored(Reflect(MM_MUL_SU32 (f(a), f(b)), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(MM_SETR_EPI32 (e3, e2, e1, e0), u, es) => reflectMirrored(Reflect(MM_SETR_EPI32 (f(e3), f(e2), f(e1), f(e0)), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case Reflect(MM_CMPLE_PD (a, b), u, es) => reflectMirrored(Reflect(MM_CMPLE_PD (f(a), f(b)), mapOver(f,u), f(es)))(mtype(typ[A]), pos) case _ => super.mirror(e, f) }).asInstanceOf[Exp[A]] // why?? } trait CGenSSE200 extends CGenIntrinsics { val IR: SSE2 import IR._ override def emitNode(sym: Sym[Any], rhs: Def[Any]) = rhs match { case iDef@MM_STREAM_SI64(mem_addr, a, mem_addrOffset) => headers += iDef.header stream.println(s"_mm_stream_si64((__int64*) (${quote(mem_addr) + (if(mem_addrOffset == Const(0)) "" else " + " + quote(mem_addrOffset))}), ${quote(a)});") case iDef@MM_UNPACKLO_EPI8(a, b) => headers += iDef.header emitValDef(sym, s"_mm_unpacklo_epi8(${quote(a)}, ${quote(b)})") case iDef@MM_MIN_EPU8(a, b) => headers += iDef.header emitValDef(sym, s"_mm_min_epu8(${quote(a)}, ${quote(b)})") case iDef@MM_MIN_PD(a, b) => headers += iDef.header emitValDef(sym, s"_mm_min_pd(${quote(a)}, ${quote(b)})") case iDef@MM_MOVE_SD(a, b) => headers += iDef.header emitValDef(sym, s"_mm_move_sd(${quote(a)}, ${quote(b)})") case iDef@MM_UCOMIGE_SD(a, b) => headers += iDef.header emitValDef(sym, s"_mm_ucomige_sd(${quote(a)}, ${quote(b)})") case iDef@MM_SET_SD(a) => headers += iDef.header emitValDef(sym, s"_mm_set_sd(${quote(a)})") case iDef@MM_UCOMINEQ_SD(a, b) => headers += iDef.header emitValDef(sym, s"_mm_ucomineq_sd(${quote(a)}, ${quote(b)})") case iDef@MM_BSRLI_SI128(a, imm8) => headers += iDef.header emitValDef(sym, s"_mm_bsrli_si128(${quote(a)}, ${quote(imm8)})") case iDef@MM_CVTPD_PS(a) => headers += iDef.header emitValDef(sym, s"_mm_cvtpd_ps(${quote(a)})") case iDef@MM_CVTSD_SI32(a) => headers += iDef.header emitValDef(sym, s"_mm_cvtsd_si32(${quote(a)})") case iDef@MM_CVTSI64X_SI128(a) => headers += iDef.header emitValDef(sym, s"_mm_cvtsi64x_si128(${quote(a)})") case iDef@MM_SRA_EPI16(a, count) => headers += iDef.header emitValDef(sym, s"_mm_sra_epi16(${quote(a)}, ${quote(count)})") case iDef@MM_CMPLT_EPI8(a, b) => headers += iDef.header emitValDef(sym, s"_mm_cmplt_epi8(${quote(a)}, ${quote(b)})") case iDef@MM_STORE_SI128(mem_addr, a, mem_addrOffset) => headers += iDef.header stream.println(s"_mm_store_si128((__m128i*) (${quote(mem_addr) + (if(mem_addrOffset == Const(0)) "" else " + " + quote(mem_addrOffset))}), ${quote(a)});") case iDef@MM_CLFLUSH(p, pOffset) => headers += iDef.header stream.println(s"_mm_clflush((void const*) (${quote(p) + (if(pOffset == 
Const(0)) "" else " + " + quote(pOffset))}));") case iDef@MM_SRA_EPI32(a, count) => headers += iDef.header emitValDef(sym, s"_mm_sra_epi32(${quote(a)}, ${quote(count)})") case iDef@MM_SRAI_EPI32(a, imm8) => headers += iDef.header emitValDef(sym, s"_mm_srai_epi32(${quote(a)}, ${quote(imm8)})") case iDef@MM_SRLI_SI128(a, imm8) => headers += iDef.header emitValDef(sym, s"_mm_srli_si128(${quote(a)}, ${quote(imm8)})") case iDef@MM_SET_PD1(a) => headers += iDef.header emitValDef(sym, s"_mm_set_pd1(${quote(a)})") case iDef@MM_COMINEQ_SD(a, b) => headers += iDef.header emitValDef(sym, s"_mm_comineq_sd(${quote(a)}, ${quote(b)})") case iDef@MM_CMPNLT_SD(a, b) => headers += iDef.header emitValDef(sym, s"_mm_cmpnlt_sd(${quote(a)}, ${quote(b)})") case iDef@MM512_CVTSI512_SI32(a) => headers += iDef.header emitValDef(sym, s"_mm512_cvtsi512_si32(${quote(a)})") case iDef@MM_CVTTPS_EPI32(a) => headers += iDef.header emitValDef(sym, s"_mm_cvttps_epi32(${quote(a)})") case iDef@MM_SET1_EPI64(a) => headers += iDef.header emitValDef(sym, s"_mm_set1_epi64(${quote(a)})") case iDef@MM_CVTSD_SI64X(a) => headers += iDef.header emitValDef(sym, s"_mm_cvtsd_si64x(${quote(a)})") case iDef@MM_SLL_EPI16(a, count) => headers += iDef.header emitValDef(sym, s"_mm_sll_epi16(${quote(a)}, ${quote(count)})") case iDef@MM_OR_PD(a, b) => headers += iDef.header emitValDef(sym, s"_mm_or_pd(${quote(a)}, ${quote(b)})") case iDef@MM_LOAD_SI128(mem_addr, mem_addrOffset) => headers += iDef.header emitValDef(sym, s"_mm_load_si128((__m128i const*) (${quote(mem_addr) + (if(mem_addrOffset == Const(0)) "" else " + " + quote(mem_addrOffset))}))") case iDef@MM_ADD_EPI32(a, b) => headers += iDef.header emitValDef(sym, s"_mm_add_epi32(${quote(a)}, ${quote(b)})") case iDef@MM_SUB_PD(a, b) => headers += iDef.header emitValDef(sym, s"_mm_sub_pd(${quote(a)}, ${quote(b)})") case iDef@MM_UNPACKHI_PD(a, b) => headers += iDef.header emitValDef(sym, s"_mm_unpackhi_pd(${quote(a)}, ${quote(b)})") case iDef@MM_MULLO_EPI16(a, b) => headers += iDef.header emitValDef(sym, s"_mm_mullo_epi16(${quote(a)}, ${quote(b)})") case iDef@MM_SET_PD(e1, e0) => headers += iDef.header emitValDef(sym, s"_mm_set_pd(${quote(e1)}, ${quote(e0)})") case iDef@MM_CMPEQ_PD(a, b) => headers += iDef.header emitValDef(sym, s"_mm_cmpeq_pd(${quote(a)}, ${quote(b)})") case iDef@MM_CMPGT_EPI8(a, b) => headers += iDef.header emitValDef(sym, s"_mm_cmpgt_epi8(${quote(a)}, ${quote(b)})") case iDef@MM_AND_SI128(a, b) => headers += iDef.header emitValDef(sym, s"_mm_and_si128(${quote(a)}, ${quote(b)})") case iDef@MM_CMPEQ_SD(a, b) => headers += iDef.header emitValDef(sym, s"_mm_cmpeq_sd(${quote(a)}, ${quote(b)})") case iDef@MM_LOADL_EPI64(mem_addr, mem_addrOffset) => headers += iDef.header emitValDef(sym, s"_mm_loadl_epi64((__m128i const*) (${quote(mem_addr) + (if(mem_addrOffset == Const(0)) "" else " + " + quote(mem_addrOffset))}))") case iDef@MM_SRAI_EPI16(a, imm8) => headers += iDef.header emitValDef(sym, s"_mm_srai_epi16(${quote(a)}, ${quote(imm8)})") case iDef@MM_PACKS_EPI16(a, b) => headers += iDef.header emitValDef(sym, s"_mm_packs_epi16(${quote(a)}, ${quote(b)})") case iDef@MM_CMPLT_EPI32(a, b) => headers += iDef.header emitValDef(sym, s"_mm_cmplt_epi32(${quote(a)}, ${quote(b)})") case iDef@MM_OR_SI128(a, b) => headers += iDef.header emitValDef(sym, s"_mm_or_si128(${quote(a)}, ${quote(b)})") case iDef@MM_SQRT_PD(a) => headers += iDef.header emitValDef(sym, s"_mm_sqrt_pd(${quote(a)})") case iDef@MM_CMPNEQ_SD(a, b) => headers += iDef.header emitValDef(sym, s"_mm_cmpneq_sd(${quote(a)}, 
${quote(b)})") case iDef@MM_SET1_PD(a) => headers += iDef.header emitValDef(sym, s"_mm_set1_pd(${quote(a)})") case iDef@MM_SUB_EPI64(a, b) => headers += iDef.header emitValDef(sym, s"_mm_sub_epi64(${quote(a)}, ${quote(b)})") case iDef@MM_UNPACKLO_EPI32(a, b) => headers += iDef.header emitValDef(sym, s"_mm_unpacklo_epi32(${quote(a)}, ${quote(b)})") case iDef@MM_SETR_PD(e1, e0) => headers += iDef.header emitValDef(sym, s"_mm_setr_pd(${quote(e1)}, ${quote(e0)})") case iDef@MM_STOREL_PD(mem_addr, a, mem_addrOffset) => headers += iDef.header stream.println(s"_mm_storel_pd((double*) (${quote(mem_addr) + (if(mem_addrOffset == Const(0)) "" else " + " + quote(mem_addrOffset))}), ${quote(a)});") case iDef@MM_CASTPS_SI128(a) => headers += iDef.header emitValDef(sym, s"_mm_castps_si128(${quote(a)})") case iDef@MM_STORE_PD1(mem_addr, a, mem_addrOffset) => headers += iDef.header stream.println(s"_mm_store_pd1((double*) (${quote(mem_addr) + (if(mem_addrOffset == Const(0)) "" else " + " + quote(mem_addrOffset))}), ${quote(a)});") case iDef@MM_ANDNOT_SI128(a, b) => headers += iDef.header emitValDef(sym, s"_mm_andnot_si128(${quote(a)}, ${quote(b)})") case iDef@MM_SET1_EPI8(a) => headers += iDef.header emitValDef(sym, s"_mm_set1_epi8(${quote(a)})") case iDef@MM_STORER_PD(mem_addr, a, mem_addrOffset) => headers += iDef.header stream.println(s"_mm_storer_pd((double*) (${quote(mem_addr) + (if(mem_addrOffset == Const(0)) "" else " + " + quote(mem_addrOffset))}), ${quote(a)});") case iDef@MM_SUB_SD(a, b) => headers += iDef.header emitValDef(sym, s"_mm_sub_sd(${quote(a)}, ${quote(b)})") case iDef@MM_MUL_PD(a, b) => headers += iDef.header emitValDef(sym, s"_mm_mul_pd(${quote(a)}, ${quote(b)})") case iDef@MM_CMPGE_PD(a, b) => headers += iDef.header emitValDef(sym, s"_mm_cmpge_pd(${quote(a)}, ${quote(b)})") case iDef@MM_SRL_EPI32(a, count) => headers += iDef.header emitValDef(sym, s"_mm_srl_epi32(${quote(a)}, ${quote(count)})") case iDef@MM_COMILT_SD(a, b) => headers += iDef.header emitValDef(sym, s"_mm_comilt_sd(${quote(a)}, ${quote(b)})") case iDef@MM_UNPACKLO_PD(a, b) => headers += iDef.header emitValDef(sym, s"_mm_unpacklo_pd(${quote(a)}, ${quote(b)})") case iDef@MM_SHUFFLE_EPI32(a, imm8) => headers += iDef.header emitValDef(sym, s"_mm_shuffle_epi32(${quote(a)}, ${quote(imm8)})") case iDef@MM_SRL_EPI64(a, count) => headers += iDef.header emitValDef(sym, s"_mm_srl_epi64(${quote(a)}, ${quote(count)})") case iDef@MM_SRL_EPI16(a, count) => headers += iDef.header emitValDef(sym, s"_mm_srl_epi16(${quote(a)}, ${quote(count)})") case iDef@MM_CVTPD_PI32(a) => headers += iDef.header emitValDef(sym, s"_mm_cvtpd_pi32(${quote(a)})") case iDef@MM_COMIGT_SD(a, b) => headers += iDef.header emitValDef(sym, s"_mm_comigt_sd(${quote(a)}, ${quote(b)})") case iDef@MM_SHUFFLELO_EPI16(a, imm8) => headers += iDef.header emitValDef(sym, s"_mm_shufflelo_epi16(${quote(a)}, ${quote(imm8)})") case iDef@MM_CMPEQ_EPI16(a, b) => headers += iDef.header emitValDef(sym, s"_mm_cmpeq_epi16(${quote(a)}, ${quote(b)})") case iDef@MM_MOVE_EPI64(a) => headers += iDef.header emitValDef(sym, s"_mm_move_epi64(${quote(a)})") case iDef@MM_LOAD_PD(mem_addr, mem_addrOffset) => headers += iDef.header emitValDef(sym, s"_mm_load_pd((double const*) (${quote(mem_addr) + (if(mem_addrOffset == Const(0)) "" else " + " + quote(mem_addrOffset))}))") case iDef@MM_ADDS_EPU16(a, b) => headers += iDef.header emitValDef(sym, s"_mm_adds_epu16(${quote(a)}, ${quote(b)})") case iDef@MM_UNPACKHI_EPI32(a, b) => headers += iDef.header emitValDef(sym, s"_mm_unpackhi_epi32(${quote(a)}, 
${quote(b)})") case iDef@MM_MOVEPI64_PI64(a) => headers += iDef.header emitValDef(sym, s"_mm_movepi64_pi64(${quote(a)})") case iDef@MM_CVTTPD_EPI32(a) => headers += iDef.header emitValDef(sym, s"_mm_cvttpd_epi32(${quote(a)})") case iDef@MM_UNPACKHI_EPI64(a, b) => headers += iDef.header emitValDef(sym, s"_mm_unpackhi_epi64(${quote(a)}, ${quote(b)})") case iDef@MM_CMPEQ_EPI32(a, b) => headers += iDef.header emitValDef(sym, s"_mm_cmpeq_epi32(${quote(a)}, ${quote(b)})") case iDef@MM_CVTPS_EPI32(a) => headers += iDef.header emitValDef(sym, s"_mm_cvtps_epi32(${quote(a)})") case iDef@MM_CVTSI64_SD(a, b) => headers += iDef.header emitValDef(sym, s"_mm_cvtsi64_sd(${quote(a)}, ${quote(b)})") case iDef@MM_SLLI_EPI32(a, imm8) => headers += iDef.header emitValDef(sym, s"_mm_slli_epi32(${quote(a)}, ${quote(imm8)})") case iDef@MM_CVTSI128_SI64(a) => headers += iDef.header emitValDef(sym, s"_mm_cvtsi128_si64(${quote(a)})") case iDef@MM_PAUSE() => headers += iDef.header stream.println(s"_mm_pause();") case iDef@MM512_CVTSD_F64(a) => headers += iDef.header emitValDef(sym, s"_mm512_cvtsd_f64(${quote(a)})") case iDef@MM_CASTPS_PD(a) => headers += iDef.header emitValDef(sym, s"_mm_castps_pd(${quote(a)})") case iDef@MM_CASTSI128_PD(a) => headers += iDef.header emitValDef(sym, s"_mm_castsi128_pd(${quote(a)})") case iDef@MM_CMPLT_PD(a, b) => headers += iDef.header emitValDef(sym, s"_mm_cmplt_pd(${quote(a)}, ${quote(b)})") case iDef@MM_SRLI_EPI32(a, imm8) => headers += iDef.header emitValDef(sym, s"_mm_srli_epi32(${quote(a)}, ${quote(imm8)})") case iDef@MM_UCOMILE_SD(a, b) => headers += iDef.header emitValDef(sym, s"_mm_ucomile_sd(${quote(a)}, ${quote(b)})") case iDef@MM_CVTSI32_SD(a, b) => headers += iDef.header emitValDef(sym, s"_mm_cvtsi32_sd(${quote(a)}, ${quote(b)})") case iDef@MM_XOR_PD(a, b) => headers += iDef.header emitValDef(sym, s"_mm_xor_pd(${quote(a)}, ${quote(b)})") case iDef@MM_MUL_EPU32(a, b) => headers += iDef.header emitValDef(sym, s"_mm_mul_epu32(${quote(a)}, ${quote(b)})") case iDef@MM_AND_PD(a, b) => headers += iDef.header emitValDef(sym, s"_mm_and_pd(${quote(a)}, ${quote(b)})") case iDef@MM_AVG_EPU16(a, b) => headers += iDef.header emitValDef(sym, s"_mm_avg_epu16(${quote(a)}, ${quote(b)})") case iDef@MM_SHUFFLE_PD(a, b, imm8) => headers += iDef.header emitValDef(sym, s"_mm_shuffle_pd(${quote(a)}, ${quote(b)}, ${quote(imm8)})") case iDef@MM_CVTPD_EPI32(a) => headers += iDef.header emitValDef(sym, s"_mm_cvtpd_epi32(${quote(a)})") case iDef@MM_MADD_EPI16(a, b) => headers += iDef.header emitValDef(sym, s"_mm_madd_epi16(${quote(a)}, ${quote(b)})") case iDef@MM_SETR_EPI64(e1, e0) => headers += iDef.header emitValDef(sym, s"_mm_setr_epi64(${quote(e1)}, ${quote(e0)})") case iDef@MM_MIN_EPI16(a, b) => headers += iDef.header emitValDef(sym, s"_mm_min_epi16(${quote(a)}, ${quote(b)})") case iDef@MM_MOVPI64_EPI64(a) => headers += iDef.header emitValDef(sym, s"_mm_movpi64_epi64(${quote(a)})") case iDef@MM_CVTSI128_SI32(a) => headers += iDef.header emitValDef(sym, s"_mm_cvtsi128_si32(${quote(a)})") case iDef@MM_MULHI_EPI16(a, b) => headers += iDef.header emitValDef(sym, s"_mm_mulhi_epi16(${quote(a)}, ${quote(b)})") case iDef@MM_CVTSI128_SI64X(a) => headers += iDef.header emitValDef(sym, s"_mm_cvtsi128_si64x(${quote(a)})") case iDef@MM_CMPLT_EPI16(a, b) => headers += iDef.header emitValDef(sym, s"_mm_cmplt_epi16(${quote(a)}, ${quote(b)})") case iDef@MM_CVTSD_SS(a, b) => headers += iDef.header emitValDef(sym, s"_mm_cvtsd_ss(${quote(a)}, ${quote(b)})") case iDef@MM_SUB_EPI16(a, b) => headers += iDef.header 
emitValDef(sym, s"_mm_sub_epi16(${quote(a)}, ${quote(b)})") case iDef@MM_CVTSD_F64(a) => headers += iDef.header emitValDef(sym, s"_mm_cvtsd_f64(${quote(a)})") case iDef@MM_CMPNGT_SD(a, b) => headers += iDef.header emitValDef(sym, s"_mm_cmpngt_sd(${quote(a)}, ${quote(b)})") case iDef@MM_ADD_EPI64(a, b) => headers += iDef.header emitValDef(sym, s"_mm_add_epi64(${quote(a)}, ${quote(b)})") case iDef@MM_MASKMOVEU_SI128(a, mask, mem_addr, mem_addrOffset) => headers += iDef.header stream.println(s"_mm_maskmoveu_si128(${quote(a)}, ${quote(mask)}, (char*) (${quote(mem_addr) + (if(mem_addrOffset == Const(0)) "" else " + " + quote(mem_addrOffset))}));") case iDef@MM_MIN_SD(a, b) => headers += iDef.header emitValDef(sym, s"_mm_min_sd(${quote(a)}, ${quote(b)})") case iDef@MM_SETR_EPI8(e15, e14, e13, e12, e11, e10, e9, e8, e7, e6, e5, e4, e3, e2, e1, e0) => headers += iDef.header emitValDef(sym, s"_mm_setr_epi8(${quote(e15)}, ${quote(e14)}, ${quote(e13)}, ${quote(e12)}, ${quote(e11)}, ${quote(e10)}, ${quote(e9)}, ${quote(e8)}, ${quote(e7)}, ${quote(e6)}, ${quote(e5)}, ${quote(e4)}, ${quote(e3)}, ${quote(e2)}, ${quote(e1)}, ${quote(e0)})") case iDef@MM_INSERT_EPI16(a, i, imm8) => headers += iDef.header emitValDef(sym, s"_mm_insert_epi16(${quote(a)}, ${quote(i)}, ${quote(imm8)})") case iDef@MM_UNPACKLO_EPI16(a, b) => headers += iDef.header emitValDef(sym, s"_mm_unpacklo_epi16(${quote(a)}, ${quote(b)})") case iDef@MM_CMPGE_SD(a, b) => headers += iDef.header emitValDef(sym, s"_mm_cmpge_sd(${quote(a)}, ${quote(b)})") case iDef@MM_LOAD_SD(mem_addr, mem_addrOffset) => headers += iDef.header emitValDef(sym, s"_mm_load_sd((double const*) (${quote(mem_addr) + (if(mem_addrOffset == Const(0)) "" else " + " + quote(mem_addrOffset))}))") case iDef@MM_SUBS_EPU8(a, b) => headers += iDef.header emitValDef(sym, s"_mm_subs_epu8(${quote(a)}, ${quote(b)})") case iDef@MM_STOREL_EPI64(mem_addr, a, mem_addrOffset) => headers += iDef.header stream.println(s"_mm_storel_epi64((__m128i*) (${quote(mem_addr) + (if(mem_addrOffset == Const(0)) "" else " + " + quote(mem_addrOffset))}), ${quote(a)});") case iDef@MM_CMPGT_PD(a, b) => headers += iDef.header emitValDef(sym, s"_mm_cmpgt_pd(${quote(a)}, ${quote(b)})") case iDef@MM_SLLI_EPI64(a, imm8) => headers += iDef.header emitValDef(sym, s"_mm_slli_epi64(${quote(a)}, ${quote(imm8)})") case iDef@MM_BSLLI_SI128(a, imm8) => headers += iDef.header emitValDef(sym, s"_mm_bslli_si128(${quote(a)}, ${quote(imm8)})") case iDef@MM_DIV_SD(a, b) => headers += iDef.header emitValDef(sym, s"_mm_div_sd(${quote(a)}, ${quote(b)})") case iDef@MM_SUB_EPI8(a, b) => headers += iDef.header emitValDef(sym, s"_mm_sub_epi8(${quote(a)}, ${quote(b)})") case iDef@MM_SET_EPI16(e7, e6, e5, e4, e3, e2, e1, e0) => headers += iDef.header emitValDef(sym, s"_mm_set_epi16(${quote(e7)}, ${quote(e6)}, ${quote(e5)}, ${quote(e4)}, ${quote(e3)}, ${quote(e2)}, ${quote(e1)}, ${quote(e0)})") case iDef@MM_SET1_EPI64X(a) => headers += iDef.header emitValDef(sym, s"_mm_set1_epi64x(${quote(a)})") case iDef@MM_LOADU_SI128(mem_addr, mem_addrOffset) => headers += iDef.header emitValDef(sym, s"_mm_loadu_si128((__m128i const*) (${quote(mem_addr) + (if(mem_addrOffset == Const(0)) "" else " + " + quote(mem_addrOffset))}))") case iDef@MM_CVTTSD_SI32(a) => headers += iDef.header emitValDef(sym, s"_mm_cvttsd_si32(${quote(a)})") case iDef@MM_SLLI_SI128(a, imm8) => headers += iDef.header emitValDef(sym, s"_mm_slli_si128(${quote(a)}, ${quote(imm8)})") case iDef@MM_SLLI_EPI16(a, imm8) => headers += iDef.header emitValDef(sym, 
s"_mm_slli_epi16(${quote(a)}, ${quote(imm8)})") case iDef@MM_CMPUNORD_SD(a, b) => headers += iDef.header emitValDef(sym, s"_mm_cmpunord_sd(${quote(a)}, ${quote(b)})") case iDef@MM_UNDEFINED_PD() => headers += iDef.header emitValDef(sym, s"_mm_undefined_pd()") case iDef@MM_LOADU_PD(mem_addr, mem_addrOffset) => headers += iDef.header emitValDef(sym, s"_mm_loadu_pd((double const*) (${quote(mem_addr) + (if(mem_addrOffset == Const(0)) "" else " + " + quote(mem_addrOffset))}))") case iDef@MM_CVTSS_SD(a, b) => headers += iDef.header emitValDef(sym, s"_mm_cvtss_sd(${quote(a)}, ${quote(b)})") case iDef@MM_ADDS_EPI16(a, b) => headers += iDef.header emitValDef(sym, s"_mm_adds_epi16(${quote(a)}, ${quote(b)})") case iDef@MM_CASTPD_SI128(a) => headers += iDef.header emitValDef(sym, s"_mm_castpd_si128(${quote(a)})") case iDef@MM_UNPACKHI_EPI16(a, b) => headers += iDef.header emitValDef(sym, s"_mm_unpackhi_epi16(${quote(a)}, ${quote(b)})") case iDef@MM_CMPGT_SD(a, b) => headers += iDef.header emitValDef(sym, s"_mm_cmpgt_sd(${quote(a)}, ${quote(b)})") case iDef@MM_CVTTSD_SI64X(a) => headers += iDef.header emitValDef(sym, s"_mm_cvttsd_si64x(${quote(a)})") case iDef@MM_CMPLT_SD(a, b) => headers += iDef.header emitValDef(sym, s"_mm_cmplt_sd(${quote(a)}, ${quote(b)})") case iDef@MM256_CVTSD_F64(a) => headers += iDef.header emitValDef(sym, s"_mm256_cvtsd_f64(${quote(a)})") case iDef@MM_ADD_EPI8(a, b) => headers += iDef.header emitValDef(sym, s"_mm_add_epi8(${quote(a)}, ${quote(b)})") case iDef@MM_CMPGT_EPI32(a, b) => headers += iDef.header emitValDef(sym, s"_mm_cmpgt_epi32(${quote(a)}, ${quote(b)})") case iDef@MM_UNPACKLO_EPI64(a, b) => headers += iDef.header emitValDef(sym, s"_mm_unpacklo_epi64(${quote(a)}, ${quote(b)})") case iDef@MM_LOADL_PD(a, mem_addr, mem_addrOffset) => headers += iDef.header emitValDef(sym, s"_mm_loadl_pd(${quote(a)}, (double const*) (${quote(mem_addr) + (if(mem_addrOffset == Const(0)) "" else " + " + quote(mem_addrOffset))}))") case iDef@MM_SETR_EPI16(e7, e6, e5, e4, e3, e2, e1, e0) => headers += iDef.header emitValDef(sym, s"_mm_setr_epi16(${quote(e7)}, ${quote(e6)}, ${quote(e5)}, ${quote(e4)}, ${quote(e3)}, ${quote(e2)}, ${quote(e1)}, ${quote(e0)})") case iDef@MM_SUBS_EPI16(a, b) => headers += iDef.header emitValDef(sym, s"_mm_subs_epi16(${quote(a)}, ${quote(b)})") case iDef@MM_MUL_SD(a, b) => headers += iDef.header emitValDef(sym, s"_mm_mul_sd(${quote(a)}, ${quote(b)})") case iDef@MM_STREAM_PD(mem_addr, a, mem_addrOffset) => headers += iDef.header stream.println(s"_mm_stream_pd((double*) (${quote(mem_addr) + (if(mem_addrOffset == Const(0)) "" else " + " + quote(mem_addrOffset))}), ${quote(a)});") case iDef@MM_CVTTSD_SI64(a) => headers += iDef.header emitValDef(sym, s"_mm_cvttsd_si64(${quote(a)})") case iDef@MM_MOVEMASK_EPI8(a) => headers += iDef.header emitValDef(sym, s"_mm_movemask_epi8(${quote(a)})") case iDef@MM_CVTPI32_PD(a) => headers += iDef.header emitValDef(sym, s"_mm_cvtpi32_pd(${quote(a)})") case iDef@MM_STORE_PD(mem_addr, a, mem_addrOffset) => headers += iDef.header stream.println(s"_mm_store_pd((double*) (${quote(mem_addr) + (if(mem_addrOffset == Const(0)) "" else " + " + quote(mem_addrOffset))}), ${quote(a)});") case iDef@MM_UNPACKHI_EPI8(a, b) => headers += iDef.header emitValDef(sym, s"_mm_unpackhi_epi8(${quote(a)}, ${quote(b)})") case iDef@MM_XOR_SI128(a, b) => headers += iDef.header emitValDef(sym, s"_mm_xor_si128(${quote(a)}, ${quote(b)})") case iDef@MM_CVTEPI32_PS(a) => headers += iDef.header emitValDef(sym, s"_mm_cvtepi32_ps(${quote(a)})") case 
iDef@MM_CVTSD_SI64(a) => headers += iDef.header emitValDef(sym, s"_mm_cvtsd_si64(${quote(a)})") case iDef@MM_LFENCE() => headers += iDef.header stream.println(s"_mm_lfence();") case iDef@MM256_CVTSI256_SI32(a) => headers += iDef.header emitValDef(sym, s"_mm256_cvtsi256_si32(${quote(a)})") case iDef@MM_PACKUS_EPI16(a, b) => headers += iDef.header emitValDef(sym, s"_mm_packus_epi16(${quote(a)}, ${quote(b)})") case iDef@MM_STOREU_SI128(mem_addr, a, mem_addrOffset) => headers += iDef.header stream.println(s"_mm_storeu_si128((__m128i*) (${quote(mem_addr) + (if(mem_addrOffset == Const(0)) "" else " + " + quote(mem_addrOffset))}), ${quote(a)});") case iDef@MM_PACKS_EPI32(a, b) => headers += iDef.header emitValDef(sym, s"_mm_packs_epi32(${quote(a)}, ${quote(b)})") case iDef@MM_CMPUNORD_PD(a, b) => headers += iDef.header emitValDef(sym, s"_mm_cmpunord_pd(${quote(a)}, ${quote(b)})") case iDef@MM_COMIGE_SD(a, b) => headers += iDef.header emitValDef(sym, s"_mm_comige_sd(${quote(a)}, ${quote(b)})") case iDef@MM_STOREH_PD(mem_addr, a, mem_addrOffset) => headers += iDef.header stream.println(s"_mm_storeh_pd((double*) (${quote(mem_addr) + (if(mem_addrOffset == Const(0)) "" else " + " + quote(mem_addrOffset))}), ${quote(a)});") case iDef@MM_CMPNLT_PD(a, b) => headers += iDef.header emitValDef(sym, s"_mm_cmpnlt_pd(${quote(a)}, ${quote(b)})") case iDef@MM_EXTRACT_EPI16(a, imm8) => headers += iDef.header emitValDef(sym, s"_mm_extract_epi16(${quote(a)}, ${quote(imm8)})") case iDef@MM_SAD_EPU8(a, b) => headers += iDef.header emitValDef(sym, s"_mm_sad_epu8(${quote(a)}, ${quote(b)})") case iDef@MM_ADDS_EPI8(a, b) => headers += iDef.header emitValDef(sym, s"_mm_adds_epi8(${quote(a)}, ${quote(b)})") case iDef@MM_CMPEQ_EPI8(a, b) => headers += iDef.header emitValDef(sym, s"_mm_cmpeq_epi8(${quote(a)}, ${quote(b)})") case iDef@MM_UCOMILT_SD(a, b) => headers += iDef.header emitValDef(sym, s"_mm_ucomilt_sd(${quote(a)}, ${quote(b)})") case iDef@MM_STREAM_SI128(mem_addr, a, mem_addrOffset) => headers += iDef.header stream.println(s"_mm_stream_si128((__m128i*) (${quote(mem_addr) + (if(mem_addrOffset == Const(0)) "" else " + " + quote(mem_addrOffset))}), ${quote(a)});") case iDef@MM_SRLI_EPI64(a, imm8) => headers += iDef.header emitValDef(sym, s"_mm_srli_epi64(${quote(a)}, ${quote(imm8)})") case iDef@MM_CVTEPI32_PD(a) => headers += iDef.header emitValDef(sym, s"_mm_cvtepi32_pd(${quote(a)})") case iDef@MM_ADDS_EPU8(a, b) => headers += iDef.header emitValDef(sym, s"_mm_adds_epu8(${quote(a)}, ${quote(b)})") case iDef@MM_MUL_SU32(a, b) => headers += iDef.header emitValDef(sym, s"_mm_mul_su32(${quote(a)}, ${quote(b)})") case iDef@MM_SETR_EPI32(e3, e2, e1, e0) => headers += iDef.header emitValDef(sym, s"_mm_setr_epi32(${quote(e3)}, ${quote(e2)}, ${quote(e1)}, ${quote(e0)})") case iDef@MM_CMPLE_PD(a, b) => headers += iDef.header emitValDef(sym, s"_mm_cmple_pd(${quote(a)}, ${quote(b)})") case _ => super.emitNode(sym, rhs) } }
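// --- Editor's illustration (not part of the generated SSE200.scala above) ---
// A minimal, self-contained sketch of the addressing convention that every
// load/store case in CGenSSE200 follows: the generated C pointer expression is
// `mem_addr` alone when the offset is the constant 0, and `mem_addr + offset`
// otherwise. The names below (AddressEmitSketch, Expr, quoteExpr, address) are
// hypothetical stand-ins for the LMS Exp/Const/quote machinery and only show
// the string-building logic, not the real code generator.
object AddressEmitSketch {
  sealed trait Expr
  case class Const(i: Int) extends Expr      // a known constant, e.g. offset 0
  case class Sym(name: String) extends Expr  // a symbolic value, e.g. a loop index

  // Stand-in for `quote`: render an expression as a C token.
  def quoteExpr(e: Expr): String = e match {
    case Const(i) => i.toString
    case Sym(n)   => n
  }

  // Mirrors `${quote(mem_addr) + (if (mem_addrOffset == Const(0)) "" else " + " + quote(mem_addrOffset))}`.
  def address(memAddr: Expr, offset: Expr): String =
    quoteExpr(memAddr) + (offset match {
      case Const(0) => ""
      case other    => " + " + quoteExpr(other)
    })

  def main(args: Array[String]): Unit = {
    // Offset 0 is folded away entirely:
    println(s"_mm_store_si128((__m128i*) (${address(Sym("buf"), Const(0))}), v);")
    // A symbolic offset is emitted as pointer arithmetic:
    println(s"_mm_store_si128((__m128i*) (${address(Sym("buf"), Sym("i"))}), v);")
  }
}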
ivtoskov/lms-intrinsics
src/main/scala/ch/ethz/acl/intrinsics/SSE200.scala
Scala
apache-2.0
181,634
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.spark.sql.catalyst.csv

import scala.util.Try

import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.sources
import org.apache.spark.sql.types.{BooleanType, StructType}

/**
 * An instance of the class compiles filters to predicates and allows applying
 * the predicates to an internal row with partially initialized values
 * converted from parsed CSV fields.
 *
 * @param filters The filters pushed down to the CSV datasource.
 * @param requiredSchema The schema with only the fields requested by the upper layer.
 */
class CSVFilters(filters: Seq[sources.Filter], requiredSchema: StructType) {
  /**
   * Filters converted to predicates and grouped by the maximum field index
   * in the read schema. For example, if a filter refers to 2 attributes
   * attrA with field index 5 and attrB with field index 10 in the read schema:
   *   0 === $"attrA" or $"attrB" < 100
   * the filter is compiled to a predicate and placed in the `predicates`
   * array at position 10. In this way, if there is a row with initialized
   * fields from index 0 to 10, the predicate can be applied to the row
   * to check whether the row should be skipped or not.
   * Multiple predicates with the same maximum reference index are combined
   * by the `And` expression.
   */
  private val predicates: Array[BasePredicate] = {
    val len = requiredSchema.fields.length
    val groupedPredicates = Array.fill[BasePredicate](len)(null)
    if (SQLConf.get.csvFilterPushDown) {
      val groupedFilters = Array.fill(len)(Seq.empty[sources.Filter])
      for (filter <- filters) {
        val refs = filter.references
        val index = if (refs.isEmpty) {
          // For example, AlwaysTrue and AlwaysFalse don't have any references.
          // Filters w/o refs always return the same result. Taking into account
          // that predicates are combined via And, we can apply such filters only
          // once at position 0.
          0
        } else {
          // readSchema must contain attributes of all filters.
          // Accordingly, fieldIndex() always returns a valid index.
          refs.map(requiredSchema.fieldIndex).max
        }
        groupedFilters(index) :+= filter
      }
      if (len > 0 && !groupedFilters(0).isEmpty) {
        // We assume that filters w/o refs like AlwaysTrue and AlwaysFalse
        // can be evaluated faster than others. We put them in front of others.
        val (literals, others) = groupedFilters(0).partition(_.references.isEmpty)
        groupedFilters(0) = literals ++ others
      }
      for (i <- 0 until len) {
        if (!groupedFilters(i).isEmpty) {
          val reducedExpr = groupedFilters(i)
            .flatMap(CSVFilters.filterToExpression(_, toRef))
            .reduce(And)
          groupedPredicates(i) = Predicate.create(reducedExpr)
        }
      }
    }
    groupedPredicates
  }

  /**
   * Applies all filters that refer to row fields at the positions from 0 to index.
* @param row The internal row to check. * @param index Maximum field index. The function assumes that all fields * from 0 to index position are set. * @return false iff row fields at the position from 0 to index pass filters * or there are no applicable filters * otherwise false if at least one of the filters returns false. */ def skipRow(row: InternalRow, index: Int): Boolean = { val predicate = predicates(index) predicate != null && !predicate.eval(row) } // Finds a filter attribute in the read schema and converts it to a `BoundReference` private def toRef(attr: String): Option[BoundReference] = { requiredSchema.getFieldIndex(attr).map { index => val field = requiredSchema(index) BoundReference(requiredSchema.fieldIndex(attr), field.dataType, field.nullable) } } } object CSVFilters { private def checkFilterRefs(filter: sources.Filter, schema: StructType): Boolean = { val fieldNames = schema.fields.map(_.name).toSet filter.references.forall(fieldNames.contains(_)) } /** * Returns the filters currently supported by CSV datasource. * @param filters The filters pushed down to CSV datasource. * @param schema data schema of CSV files. * @return a sub-set of `filters` that can be handled by CSV datasource. */ def pushedFilters(filters: Array[sources.Filter], schema: StructType): Array[sources.Filter] = { filters.filter(checkFilterRefs(_, schema)) } private def zip[A, B](a: Option[A], b: Option[B]): Option[(A, B)] = { a.zip(b).headOption } private def toLiteral(value: Any): Option[Literal] = { Try(Literal(value)).toOption } /** * Converts a filter to an expression and binds it to row positions. * * @param filter The filter to convert. * @param toRef The function converts a filter attribute to a bound reference. * @return some expression with resolved attributes or None if the conversion * of the given filter to an expression is impossible. 
*/ def filterToExpression( filter: sources.Filter, toRef: String => Option[BoundReference]): Option[Expression] = { def zipAttributeAndValue(name: String, value: Any): Option[(BoundReference, Literal)] = { zip(toRef(name), toLiteral(value)) } def translate(filter: sources.Filter): Option[Expression] = filter match { case sources.And(left, right) => zip(translate(left), translate(right)).map(And.tupled) case sources.Or(left, right) => zip(translate(left), translate(right)).map(Or.tupled) case sources.Not(child) => translate(child).map(Not) case sources.EqualTo(attribute, value) => zipAttributeAndValue(attribute, value).map(EqualTo.tupled) case sources.EqualNullSafe(attribute, value) => zipAttributeAndValue(attribute, value).map(EqualNullSafe.tupled) case sources.IsNull(attribute) => toRef(attribute).map(IsNull) case sources.IsNotNull(attribute) => toRef(attribute).map(IsNotNull) case sources.In(attribute, values) => val literals = values.toSeq.flatMap(toLiteral) if (literals.length == values.length) { toRef(attribute).map(In(_, literals)) } else { None } case sources.GreaterThan(attribute, value) => zipAttributeAndValue(attribute, value).map(GreaterThan.tupled) case sources.GreaterThanOrEqual(attribute, value) => zipAttributeAndValue(attribute, value).map(GreaterThanOrEqual.tupled) case sources.LessThan(attribute, value) => zipAttributeAndValue(attribute, value).map(LessThan.tupled) case sources.LessThanOrEqual(attribute, value) => zipAttributeAndValue(attribute, value).map(LessThanOrEqual.tupled) case sources.StringContains(attribute, value) => zipAttributeAndValue(attribute, value).map(Contains.tupled) case sources.StringStartsWith(attribute, value) => zipAttributeAndValue(attribute, value).map(StartsWith.tupled) case sources.StringEndsWith(attribute, value) => zipAttributeAndValue(attribute, value).map(EndsWith.tupled) case sources.AlwaysTrue() => Some(Literal(true, BooleanType)) case sources.AlwaysFalse() => Some(Literal(false, BooleanType)) } translate(filter) } }
goldmedal/spark
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/csv/CSVFilters.scala
Scala
apache-2.0
8,252
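A minimal usage sketch for the CSVFilters class above (not part of the goldmedal/spark sources). It assumes Spark's internal Catalyst types are accessible and that CSV filter pushdown (SQLConf.get.csvFilterPushDown) is enabled; the schema, filter and row values are illustrative.

import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.sources
import org.apache.spark.sql.types.{IntegerType, StringType, StructField, StructType}
import org.apache.spark.unsafe.types.UTF8String

val schema = StructType(Seq(
  StructField("id", IntegerType),
  StructField("name", StringType)))

// Keep only the filters whose references all appear in the read schema.
val pushed = CSVFilters.pushedFilters(Array(sources.EqualTo("id", 1)), schema)

// Predicates are grouped by the maximum field index they reference (see the class doc above).
val csvFilters = new CSVFilters(pushed, schema)

// A row whose fields 0..1 have already been converted from parsed CSV tokens.
val row = InternalRow(1, UTF8String.fromString("alice"))

// false means the row passes every predicate applicable up to field index 1 and is kept;
// with pushdown disabled no predicates are compiled and skipRow always returns false.
val skip = csvFilters.skipRow(row, index = 1)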
package org.jetbrains.plugins.scala.testingSupport.test import com.intellij.codeInsight.TestFrameworks import com.intellij.execution.TestStateStorage import com.intellij.execution.lineMarker.{ExecutorAction, RunLineMarkerContributor} import com.intellij.execution.testframework.TestIconMapper import com.intellij.icons.AllIcons import com.intellij.openapi.project.Project import com.intellij.psi.{PsiClass, PsiElement, PsiMethod} import com.intellij.psi.impl.source.tree.LeafPsiElement import com.intellij.psi.util.PsiTreeUtil import com.intellij.testIntegration.{TestFramework, TestRunLineMarkerProvider} import com.intellij.util.Function import org.jetbrains.plugins.scala.lang.lexer.ScalaTokenTypes import org.jetbrains.plugins.scala.testingSupport.test.scalatest.ScalaTestTestFramework import org.jetbrains.plugins.scala.testingSupport.test.specs2.Specs2TestFramework import org.jetbrains.plugins.scala.testingSupport.test.utest.UTestTestFramework /** * Created by Roman.Shein on 02.11.2016. * <p/> * Mostly copy-paste from [[com.intellij.testIntegration.TestRunLineMarkerProvider]], inaviodable due to private methods. */ class ScalaTestRunLineMarkerProvider extends TestRunLineMarkerProvider { override def getInfo(e: PsiElement): RunLineMarkerContributor.Info = { if (isIdentifier(e)) { val element: PsiElement = e.getParent element match { case cl: PsiClass => val framework: TestFramework = TestFrameworks.detectFramework(cl) if (framework != null && framework.isTestClass(cl)) { val url = framework match { case _: UTestTestFramework | _: ScalaTestTestFramework | _: Specs2TestFramework => //TODO do nothing for now; gutter icons for classes require proper url processing for each framework return null case _ => "java:suite://" + cl.getQualifiedName } return getInfo(url, e.getProject, isClass = true) } case _: PsiMethod => val psiClass: PsiClass = PsiTreeUtil.getParentOfType(element, classOf[PsiClass]) if (psiClass != null) { val framework: TestFramework = TestFrameworks.detectFramework(psiClass) if (framework != null && framework.isTestMethod(element)) { val url = framework match { case _: UTestTestFramework | _: ScalaTestTestFramework | _: Specs2TestFramework => //TODO do nothing for now; gutter icons for methods require proper url processing for each framework return null case _ => "java:test://" + psiClass.getQualifiedName + "." 
+ element.asInstanceOf[PsiMethod].getName } return getInfo(url, e.getProject, isClass = false) } } case _ => } } null } override def isIdentifier(e: PsiElement): Boolean = e match { case l: LeafPsiElement => l.getElementType == ScalaTokenTypes.tIDENTIFIER case _ => false } private def getInfo(url: String, project: Project, isClass: Boolean) = { import com.intellij.execution.testframework.sm.runner.states.TestStateInfo.Magnitude._ def defaultIcon = if (isClass) AllIcons.RunConfigurations.TestState.Run_run else AllIcons.RunConfigurations.TestState.Run val icon = Option(TestStateStorage.getInstance(project).getState(url)) .map(state => TestIconMapper.getMagnitude(state.magnitude)) .map { case ERROR_INDEX | FAILED_INDEX => AllIcons.RunConfigurations.TestState.Red2 case PASSED_INDEX | COMPLETE_INDEX => AllIcons.RunConfigurations.TestState.Green2 case _ => defaultIcon }.getOrElse(defaultIcon) new RunLineMarkerContributor.Info(icon, ScalaTestRunLineMarkerProvider.TOOLTIP_PROVIDER, ExecutorAction.getActions(1): _*) } } object ScalaTestRunLineMarkerProvider { val TOOLTIP_PROVIDER: Function[PsiElement, String] = new Function[PsiElement, String] { override def fun(param: PsiElement): String = "Run Test" } }
jastice/intellij-scala
scala/scala-impl/src/org/jetbrains/plugins/scala/testingSupport/test/ScalaTestRunLineMarkerProvider.scala
Scala
apache-2.0
4,082
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.spark.sql.execution.local

import org.apache.spark.sql.SQLConf
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.expressions.Attribute

/**
 * An operator that scans some local data collection in the form of Scala Seq.
 */
case class SeqScanNode(conf: SQLConf, output: Seq[Attribute], data: Seq[InternalRow])
  extends LeafLocalNode(conf) {

  private[this] var iterator: Iterator[InternalRow] = _
  private[this] var currentRow: InternalRow = _

  override def open(): Unit = {
    iterator = data.iterator
  }

  override def next(): Boolean = {
    if (iterator.hasNext) {
      currentRow = iterator.next()
      true
    } else {
      false
    }
  }

  override def fetch(): InternalRow = currentRow

  override def close(): Unit = {
    // Do nothing
  }
}
chenc10/Spark-PAF
sql/core/src/main/scala/org/apache/spark/sql/execution/local/SeqScanNode.scala
Scala
apache-2.0
1,607
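A small sketch of driving the open/next/fetch/close contract that SeqScanNode implements above. It is illustrative only and assumes an SQLConf instance (here called conf) is available, for example from the surrounding SQLContext.

import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.expressions.AttributeReference
import org.apache.spark.sql.types.IntegerType

val output = Seq(AttributeReference("i", IntegerType)())
val data   = (1 to 3).map(i => InternalRow(i))

val node = SeqScanNode(conf, output, data)   // conf: an available SQLConf (assumption)

node.open()
while (node.next()) {      // advances to the next row, if any
  val row = node.fetch()   // the current row is valid until next() is called again
  println(row.getInt(0))
}
node.close()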
/* * Copyright (c) 2011 Sony Pictures Imageworks Inc. * * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions * are met: * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the * distribution. Neither the name of Sony Pictures Imageworks nor the * names of its contributors may be used to endorse or promote * products derived from this software without specific prior written * permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE * COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED * OF THE POSSIBILITY OF SUCH DAMAGE. */ package com.imageworks.migration.tests.alter_column import com.imageworks.migration.{ IrreversibleMigrationException, Limit, Migration, VarcharType } class Migrate_20110214060042_AlterColumn extends Migration { val tableName = "scala_migrations_altering" def up() { alterColumn(tableName, "name", VarcharType, Limit(255)) } def down() { throw new IrreversibleMigrationException } }
imageworks/scala-migrations
src/test/scala/com/imageworks/migration/tests/alter_column/Migrate_20110214060042_AlterColumn.scala
Scala
bsd-3-clause
2,003
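The migration above deliberately throws IrreversibleMigrationException in down(). As a purely hypothetical sketch, if the previous column definition were known (say VARCHAR(100)), a reversible variant could restore it in down(); the previous limit below is an assumption, not something taken from the test suite.

import com.imageworks.migration.{Limit, Migration, VarcharType}

class Migrate_20110214060043_ReversibleAlterColumn extends Migration {
  val tableName = "scala_migrations_altering"

  def up() {
    alterColumn(tableName, "name", VarcharType, Limit(255))
  }

  def down() {
    // Hypothetical: assumes the column was VARCHAR(100) before up() ran.
    alterColumn(tableName, "name", VarcharType, Limit(100))
  }
}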
package io.eels.component.avro import org.apache.avro.{Schema, SchemaBuilder} import org.scalatest.{Matchers, WordSpec} class ConverterTest extends WordSpec with Matchers { "Converter" should { "convert to long" in { AvroSerializer(SchemaBuilder.builder().longType()).serialize("123") shouldBe 123l AvroSerializer(SchemaBuilder.builder().longType()).serialize(14555) shouldBe 14555l } "convert to String" in { AvroSerializer(SchemaBuilder.builder().stringType()).serialize(123l) shouldBe "123" AvroSerializer(SchemaBuilder.builder().stringType).serialize(124) shouldBe "124" AvroSerializer(SchemaBuilder.builder().stringType).serialize("Qweqwe") shouldBe "Qweqwe" } "convert to boolean" in { AvroSerializer(SchemaBuilder.builder().booleanType).serialize(true) shouldBe true AvroSerializer(SchemaBuilder.builder().booleanType).serialize(false) shouldBe false AvroSerializer(SchemaBuilder.builder().booleanType).serialize("true") shouldBe true AvroSerializer(SchemaBuilder.builder().booleanType()).serialize("false") shouldBe false } "convert to Double" in { AvroSerializer(SchemaBuilder.builder().doubleType).serialize("213.4") shouldBe 213.4d AvroSerializer(SchemaBuilder.builder().doubleType).serialize("345.11") shouldBe 345.11d AvroSerializer(SchemaBuilder.builder().doubleType()).serialize(345) shouldBe 345.0 } } }
stheppi/eel
eel-components/src/test/scala/io/eels/component/avro/ConverterTest.scala
Scala
apache-2.0
1,431
// Databricks notebook source
// MAGIC %md
// MAGIC ScaDaMaLe Course [site](https://lamastex.github.io/scalable-data-science/sds/3/x/) and [book](https://lamastex.github.io/ScaDaMaLe/index.html)

// COMMAND ----------

// MAGIC %md
// MAGIC # [The Alternating Least Squares method (ALS)]()

// COMMAND ----------

// MAGIC %md
// MAGIC
// MAGIC ## The ALS algorithm
// MAGIC
// MAGIC
// MAGIC The ALS algorithm was proposed in 2008 by *F. Zhang, E. Shang, Y. Xu and X. Wu* in a paper titled: **Large-scale Parallel Collaborative Filtering for the Netflix Prize** [(paper)](https://link.springer.com/chapter/10.1007/978-3-540-68880-8_32). We will briefly describe the main ideas behind the ALS algorithm.
// MAGIC #### What are we learning?
// MAGIC In order to find the missing values of the rating matrix R, the authors of the ALS algorithm considered approximating this matrix by a product of two tall matrices U and M of low rank. In other words, the goal is to find a low-rank approximation of the ratings matrix R:
// MAGIC
// MAGIC $$
// MAGIC R \\approx U M^\\top = \\begin{bmatrix} u_1 & \\dots & u_N \\end{bmatrix}^\\top \\begin{bmatrix}
// MAGIC m_1 & \\dots & m_M \\end{bmatrix} \\qquad \\text{where} \\qquad U \\in \\mathbb{R}^{N \\times K}, M \\in \\mathbb{R}^{M \\times K}
// MAGIC $$
// MAGIC
// MAGIC Intuitively we think of U (resp. M) as a matrix of users' features (resp. movies' features) and we may rewrite this approximation entrywise as
// MAGIC
// MAGIC $$
// MAGIC \\forall i,j \\qquad r_{i,j} \\approx u_i^\\top m_j.
// MAGIC $$
// MAGIC
// MAGIC #### The loss function
// MAGIC If all entries of the rating matrix R were known, one could use an SVD decomposition to reconstruct U and M. However, not all ratings are known, therefore one has to learn the matrices U and M. The authors of the paper proposed to minimize the following loss, which corresponds to the sum of squared errors with a Tikhonov regularization that weighs the users matrix U (resp. the movies matrix M) using Gamma_U (resp. Gamma_M):
// MAGIC
// MAGIC $$
// MAGIC \\mathcal{L}_{U,M}^{weighted}(R) = \\sum_{(i,j)\\in S}
// MAGIC (r_{i,j} - u_i^\\top m_j)^2 + \\lambda \\Vert M \\Gamma_m \\Vert^2 + \\lambda \\Vert U \\Gamma_u \\Vert^2
// MAGIC $$
// MAGIC
// MAGIC where S corresponds to the set of known ratings and \\lambda is a regularization parameter. In fact this loss corresponds to the Alternating Least Squares with Weighted Regularization (ALS-WR). We will be using a variant of that algorithm, a.k.a. the ALS algorithm, which corresponds to minimizing the following slightly similar loss without weighting:
// MAGIC
// MAGIC $$
// MAGIC \\mathcal{L}_{U,M}(R) = \\sum_{(i,j)\\in S}
// MAGIC (r_{i,j} - u_i^\\top m_j)^2 + \\lambda \\Vert M \\Vert^2 + \\lambda \\Vert U \\Vert^2
// MAGIC $$
// MAGIC
// MAGIC and the goal of the algorithm is to find a candidate (U,M) that attains
// MAGIC
// MAGIC $$
// MAGIC \\min_{U,M} \\mathcal{L}_{U,M}(R)
// MAGIC $$
// MAGIC
// MAGIC #### The ALS algorithm
// MAGIC The authors' approach to solving the aforementioned minimization problem is as follows:
// MAGIC - **Step 1.** Initialize matrix M by assigning the average rating for that movie as the first row and small random numbers for the remaining entries.
// MAGIC - **Step 2.** Fix M, solve for U by minimizing the aforementioned loss.
// MAGIC - **Step 3.** Fix U, solve for M by minimizing the aforementioned loss similarly.
// MAGIC - **Step 4.** Repeat Steps 2 and 3 until a stopping criterion is satisfied.
// MAGIC
// MAGIC Note that when one of the matrices is fixed, say M, the loss becomes quadratic in U and the solution corresponds to that of the least squares.
// MAGIC
// MAGIC #### Key parameters of the algorithm
// MAGIC The key parameters of the algorithm are the **rank K**, the **regularization parameter lambda**, and the **number of iterations** before stopping the algorithm. Indeed, since we do not have full knowledge of the matrix R, we do not know its rank. To find the best rank we will use cross-validation and dedicate part of the data to that. There is no straightforward way of choosing the regularization parameter; we will base our choice on reported values that work for the considered datasets. As for the number of iterations, we will proceed similarly.
// MAGIC
// MAGIC
// MAGIC #### Practically speaking
// MAGIC We will use the following mllib library in Scala, which contains classes dedicated to recommendation systems
// MAGIC (See [http://spark.apache.org/docs/latest/api/scala/index.html#org.apache.spark.mllib.recommendation.ALS](http://spark.apache.org/docs/latest/api/scala/index.html#org.apache.spark.mllib.recommendation.ALS)). More specifically, it contains the ALS class which allows for using the ALS algorithm as described earlier.

// COMMAND ----------

import org.apache.spark.mllib.recommendation.ALS
import org.apache.spark.mllib.recommendation.MatrixFactorizationModel
import org.apache.spark.mllib.recommendation.Rating

// COMMAND ----------

// MAGIC %md
// MAGIC # [On a small dataset]()
// MAGIC
// MAGIC This part of the notebook is borrowed from the notebook on the ALS we had in the course.

// COMMAND ----------

display(dbutils.fs.ls("/databricks-datasets/cs100/lab4/data-001/")) // The data is already here

// COMMAND ----------

// MAGIC %md
// MAGIC ### Loading the data
// MAGIC We read in each of the files and create an RDD consisting of parsed lines.
// MAGIC Each line in the ratings dataset (`ratings.dat.gz`) is formatted as:
// MAGIC `UserID::MovieID::Rating::Timestamp`
// MAGIC Each line in the movies (`movies.dat`) dataset is formatted as:
// MAGIC `MovieID::Title::Genres`
// MAGIC The `Genres` field has the format
// MAGIC `Genres1|Genres2|Genres3|...`
// MAGIC The format of these files is uniform and simple, so we can use `split()`.
// MAGIC
// MAGIC Parsing the two files yields two RDDs
// MAGIC
// MAGIC * For each line in the ratings dataset, we create a tuple of (UserID, MovieID, Rating). We drop the timestamp because we do not need it for this exercise.
// MAGIC * For each line in the movies dataset, we create a tuple of (MovieID, Title). We drop the Genres because we do not need them for this exercise.
// COMMAND ---------- // take a peek at what's in the rating file sc.textFile("/databricks-datasets/cs100/lab4/data-001/ratings.dat.gz").map { line => line.split("::") }.take(5) // COMMAND ---------- val timedRatingsRDD = sc.textFile("/databricks-datasets/cs100/lab4/data-001/ratings.dat.gz").map { line => val fields = line.split("::") // format: (timestamp % 10, Rating(userId, movieId, rating)) (fields(3).toLong % 10, Rating(fields(0).toInt, fields(1).toInt, fields(2).toDouble)) } timedRatingsRDD.take(10).map(println) // COMMAND ---------- val ratingsRDD = sc.textFile("/databricks-datasets/cs100/lab4/data-001/ratings.dat.gz").map { line => val fields = line.split("::") // format: Rating(userId, movieId, rating) Rating(fields(0).toInt, fields(1).toInt, fields(2).toDouble) } ratingsRDD.take(10).map(println) // COMMAND ---------- val movies = sc.textFile("/databricks-datasets/cs100/lab4/data-001/movies.dat").map { line => val fields = line.split("::") // format: (movieId, movieName) (fields(0).toInt, fields(1)) }.collect.toMap // COMMAND ---------- // MAGIC %md // MAGIC Let's make a data frame to visually explore the data next. // COMMAND ---------- sc.textFile("/databricks-datasets/cs100/lab4/data-001/ratings.dat.gz").map { line => line.split("::") }.take(5) // COMMAND ---------- val timedRatingsDF = sc.textFile("/databricks-datasets/cs100/lab4/data-001/ratings.dat.gz").map { line => val fields = line.split("::") // format: (timestamp % 10, Rating(userId, movieId, rating)) (fields(3).toLong, fields(0).toInt, fields(1).toInt, fields(2).toDouble) }.toDF("timestamp", "userId", "movieId", "rating") display(timedRatingsDF) // COMMAND ---------- // MAGIC %md // MAGIC Here we simply check the size of the datasets we are using // COMMAND ---------- val numRatings = ratingsRDD.count val numUsers = ratingsRDD.map(_.user).distinct.count val numMovies = ratingsRDD.map(_.product).distinct.count println("Got " + numRatings + " ratings from " + numUsers + " users on " + numMovies + " movies.") // COMMAND ---------- // MAGIC %md // MAGIC Now that we have the dataset we need, let's make a recommender system. // MAGIC // MAGIC **Creating a Training Set, test Set and Validation Set** // MAGIC // MAGIC Before we jump into using machine learning, we need to break up the `ratingsRDD` dataset into three pieces: // MAGIC // MAGIC * A training set (RDD), which we will use to train models // MAGIC * A validation set (RDD), which we will use to choose the best model // MAGIC * A test set (RDD), which we will use for our experiments // MAGIC // MAGIC To randomly split the dataset into the multiple groups, we can use the `randomSplit()` transformation. `randomSplit()` takes a set of splits and seed and returns multiple RDDs. // COMMAND ---------- val Array(trainingRDD, validationRDD, testRDD) = ratingsRDD.randomSplit(Array(0.60, 0.20, 0.20), 0L) // let's find the exact sizes we have next println(" training data size = " + trainingRDD.count() + ", validation data size = " + validationRDD.count() + ", test data size = " + testRDD.count() + ".") // COMMAND ---------- // MAGIC %md // MAGIC After splitting the dataset, your training set has about 293,000 entries and the validation and test sets each have about 97,000 entries (the exact number of entries in each dataset varies slightly due to the random nature of the `randomSplit()` transformation. 
// COMMAND ---------- // let's find the exact sizes we have next println(" training data size = " + trainingRDD.count() + ", validation data size = " + validationRDD.count() + ", test data size = " + testRDD.count() + ".") // COMMAND ---------- // MAGIC %md // MAGIC ## Training the recommender system // COMMAND ---------- // Build the recommendation model using ALS by fitting to the validation data // just trying three different hyper-parameter (rank) values to optimise over val ranks = List(4, 8, 12); var rank=0; for ( rank <- ranks ){ val numIterations = 10 val regularizationParameter = 0.01 val model = ALS.train(trainingRDD, rank, numIterations, regularizationParameter) // Evaluate the model on test data val usersProductsValidate = validationRDD.map { case Rating(user, product, rate) => (user, product) } // get the predictions on test data val predictions = model.predict(usersProductsValidate) .map { case Rating(user, product, rate) => ((user, product), rate) } // find the actual ratings and join with predictions val ratesAndPreds = validationRDD.map { case Rating(user, product, rate) => ((user, product), rate) }.join(predictions) val MSE = ratesAndPreds.map { case ((user, product), (r1, r2)) => val err = (r1 - r2) err * err }.mean() println("rank and Mean Squared Error = " + rank + " and " + MSE) } // end of loop over ranks // COMMAND ---------- // MAGIC %md // MAGIC Here we have the best model // COMMAND ---------- val rank = 4 val numIterations = 10 val regularizationParameter = 0.01 val model = ALS.train(trainingRDD, rank, numIterations, regularizationParameter) // Evaluate the model on test data val usersProductsTest = testRDD.map { case Rating(user, product, rate) => (user, product) } // get the predictions on test data val predictions = model.predict(usersProductsTest) .map { case Rating(user, product, rate) => ((user, product), rate) } // find the actual ratings and join with predictions val ratesAndPreds = testRDD.map { case Rating(user, product, rate) => ((user, product), rate) }.join(predictions) val MSE = ratesAndPreds.map { case ((user, product), (r1, r2)) => val err = (r1 - r2) err * err }.mean() println("rank and Mean Squared Error for test data = " + rank + " and " + MSE) // COMMAND ---------- // MAGIC // MAGIC %md // MAGIC // MAGIC # [On a large dataset - Netflix dataset]() // MAGIC // MAGIC ## Loading the data // MAGIC // MAGIC Netflix held a competition to improve recommendation systems. The dataset can be found in [kaggle](https://www.kaggle.com/netflix-inc/netflix-prize-data). Briefly speaking, the dataset contains users' ratings to movies, with 480189 users and 17770 movies. Ratings are given on an integral scale from 1 to 5. The first step is to download the data and store it in databricks. Originally, the dataset is plit into four files each with the following format: // MAGIC // MAGIC ``` // MAGIC MovieID: // MAGIC UserID, rating, date // MAGIC . // MAGIC . // MAGIC . // MAGIC MovieID: // MAGIC UserID, rating, date // MAGIC . // MAGIC . // MAGIC . // MAGIC ``` // MAGIC // MAGIC We process these files so that each line has the format `MovieID, UserID, rating, date` // COMMAND ---------- // Path where the data is stored display(dbutils.fs.ls("/FileStore/tables/Netflix")) // COMMAND ---------- // MAGIC %md Let us load first the movie titles. 
// COMMAND ---------- // Create a Movie class case class Movie(movieID: Int, year: Int, tilte: String) // Load the movie titles in an RDD val moviesTitlesRDD: RDD[Movie] = sc.textFile("/FileStore/tables/Netflix/movie_titles.csv").map { line => val fields = line.split(",") // format: Rating(movieId, year, title) Movie(fields(0).toInt, fields(1).toInt, fields(2)) } // Print the titles of the first 3 movies moviesTitlesRDD.take(5).foreach(println) // COMMAND ---------- import org.apache.spark.mllib.recommendation.ALS import org.apache.spark.mllib.recommendation.MatrixFactorizationModel import org.apache.spark.mllib.recommendation.Rating // COMMAND ---------- val RatingsRDD_1 = sc.textFile("/FileStore/tables/Netflix/formatted_combined_data_1_txt.gz").map { line => val fields = line.split(",") // format: Rating(userId, movieId, rating)) Rating(fields(1).toInt, fields(0).toInt, fields(2).toDouble) } val RatingsRDD_2 = sc.textFile("/FileStore/tables/Netflix/formatted_combined_data_2_txt.gz").map { line => val fields = line.split(",") // format: Rating(userId, movieId, rating)) Rating(fields(1).toInt, fields(0).toInt, fields(2).toDouble) } val RatingsRDD_3 = sc.textFile("/FileStore/tables/Netflix/formatted_combined_data_3_txt.gz").map { line => val fields = line.split(",") // format: Rating(userId, movieId, rating)) Rating(fields(1).toInt, fields(0).toInt, fields(2).toDouble) } val RatingsRDD_4 = sc.textFile("/FileStore/tables/Netflix/formatted_combined_data_4_txt.gz").map { line => val fields = line.split(",") // format: Rating(userId, movieId, rating)) Rating(fields(1).toInt, fields(0).toInt, fields(2).toDouble) } RatingsRDD_4.take(5).foreach(println) // COMMAND ---------- // Concatenating the ratings RDDs (could not find a nice way of doing this) val r1 = RatingsRDD_1.union(RatingsRDD_2) val r2 = r1.union(RatingsRDD_3) val RatingsRDD = r2.union(RatingsRDD_4) RatingsRDD.take(5).foreach(println) // COMMAND ---------- // MAGIC %md // MAGIC Let us put our dataset in a dataframe to visulaize it more nicely // COMMAND ---------- val RatingsDF = RatingsRDD.toDF display(RatingsDF) // COMMAND ---------- // MAGIC %md // MAGIC ## Training the movie recommender system // MAGIC // MAGIC In the training process we will start by splitting the dataset into // MAGIC - a training set (60%) // MAGIC - a validation set (20%) // MAGIC - a test set (20%) // COMMAND ---------- // Splitting the dataset val Array(trainingRDD, validationRDD, testRDD) = RatingsRDD.randomSplit(Array(0.60, 0.20, 0.20), 0L) // COMMAND ---------- // MAGIC %md // MAGIC After splitting the dataset, your training set has about 60,288,922 entries and the validation and test sets each have about 20,097,527 entries (the exact number of entries in each dataset varies slightly due to the random nature of the `randomSplit()` transformation. 
// COMMAND ---------- // let's find the exact sizes we have next println(" training data size = " + trainingRDD.count() + ", validation data size = " + validationRDD.count() + ", test data size = " + testRDD.count() + ".") // COMMAND ---------- // Build the recommendation model using ALS by fitting to the validation data // just trying three different hyper-parameter (rank) values to optimise over val ranks = List(50, 100, 150, 300, 400, 500); var rank=0; for ( rank <- ranks ){ val numIterations = 12 val regularizationParameter = 0.05 val model = ALS.train(trainingRDD, rank, numIterations, regularizationParameter) // Evaluate the model on test data val usersProductsValidate = validationRDD.map { case Rating(user, product, rate) => (user, product) } // get the predictions on test data val predictions = model.predict(usersProductsValidate) .map { case Rating(user, product, rate) => ((user, product), rate) } // find the actual ratings and join with predictions val ratesAndPreds = validationRDD.map { case Rating(user, product, rate) => ((user, product), rate) }.join(predictions) val MSE = ratesAndPreds.map { case ((user, product), (r1, r2)) => val err = (r1 - r2) err * err }.mean() println("rank and Mean Squared Error = " + rank + " and " + MSE) } // end of loop over ranks
lamastex/scalable-data-science
dbcArchives/2021/000_0-sds-3-x-projects/student-project-18_group-ProjectRL/01_The_ALS_method.scala
Scala
unlicense
18,206
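A short sketch of serving recommendations from the model trained in the notebook above. recommendProducts and predict are standard MatrixFactorizationModel methods; the user and movie ids are placeholders.

val userId = 42                                 // placeholder user id
val topN = model.recommendProducts(userId, 10)  // Array[Rating], highest predicted ratings first

topN.foreach { case Rating(user, movieId, score) =>
  // In the small-dataset section above, `movies(movieId)` maps an id back to its title.
  println(s"movie $movieId -> predicted rating $score")
}

// Predicted rating for a single (user, movie) pair:
val oneScore = model.predict(userId, 123)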
package com.adlawson.json4s import org.json4s._ /** * Serialize scala Types and Type Aliases * * A is the Type Alias * B is a constructor type for the type alias * * The constructor type is needed only during the [de]serialization stages * and isn't exposed in any other way. * * Example: * ``` * import com.adlawson.json4s._ * import org.json4s.DefaultFormats * * type FooBar = Foo with Bar * * case class Foo(a: Int) * trait Bar { def b: Int } * * // Dirty type alias constructor * private case class _FooBar(a: Int, b: Int) * * // Serializer * object FooBarSerializer extends TypeSerializer[FooBar, _FooBar] { * override def wrap(f: FooBar) = _FooBar(f.a, f.b) * override def unwrap(f: _FooBar) = new Foo(f.a) with Bar { val b = f.b } * } * * // Add to implicit formats * implicit val format = DefaultFormats + FooBarSerializer * ``` */ abstract class TypeSerializer[A : Manifest, B : Manifest] extends Serializer[A] { def unwrap(b: B): A def wrap(a: A): B protected val Class = implicitly[Manifest[A]].runtimeClass def deserialize(implicit format: Formats): PartialFunction[(TypeInfo, JValue), A] = { case (TypeInfo(Class, _), json) => json match { case JNull => null.asInstanceOf[A] case value: JValue if (value.extractOpt[B].isDefined) => unwrap(value.extract[B]) case value => throw new MappingException(s"Can't convert $value to $Class") } } def serialize(implicit format: Formats): PartialFunction[Any, JValue] = { case a: A => Extraction.decompose(wrap(a)) } }
adlawson/scala-json4s
src/main/scala/TypeSerializer.scala
Scala
mit
1,583
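A round-trip sketch that reuses the FooBar example from the class documentation above. It assumes the json4s native backend and only illustrates how the custom serializer participates in write/extract.

import org.json4s.DefaultFormats
import org.json4s.native.JsonMethods.parse
import org.json4s.native.Serialization.write

implicit val formats = DefaultFormats + FooBarSerializer

val fooBar: FooBar = new Foo(1) with Bar { val b = 2 }

val json = write(fooBar)               // serialized through wrap(...): {"a":1,"b":2}
val back = parse(json).extract[FooBar] // deserialized through unwrap(...)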
package com.haskforce.tooling.ghcPkg

final class CachedPkgs(
  val toMap: Map[String, CachedPkgs.Versions]
) extends AnyVal {

  def named(name: String): Option[CachedPkgs.Versions] = toMap.get(name)

  def add(pkg: Pkg): CachedPkgs = {
    new CachedPkgs(
      toMap.updated(
        pkg.name,
        toMap.getOrElse(pkg.name, CachedPkgs.Versions.empty).add(pkg)
      )
    )
  }
}

object CachedPkgs {
  def empty: CachedPkgs = new CachedPkgs(Map.empty)

  def fromIterator(it: Iterator[Pkg]): CachedPkgs = {
    it.foldLeft(empty)((pkgs, pkg) => pkgs.add(pkg))
  }

  final class Versions(val toMap: Map[String, Pkg]) extends AnyVal {
    def versioned(version: String): Option[Pkg] = toMap.get(version)

    def add(pkg: Pkg): Versions = {
      new Versions(toMap.updated(pkg.version, pkg))
    }

    def head: Option[Pkg] = toMap.headOption.map(_._2)
  }

  object Versions {
    def empty: Versions = new Versions(Map.empty)
  }
}
carymrobbins/intellij-haskforce
src/com/haskforce/tooling/ghcPkg/CachedPkgs.scala
Scala
apache-2.0
944
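A usage sketch for CachedPkgs above. The Pkg constructor shown here is an assumption; the code above only relies on Pkg exposing name and version.

val pkgs = CachedPkgs.fromIterator(Iterator(
  Pkg("text", "1.2.4.0"),          // assumed constructor shape
  Pkg("text", "1.2.3.1"),
  Pkg("bytestring", "0.10.10.0")
))

// Look up a specific version, or fall back to any cached version of the package.
val exact: Option[Pkg] = pkgs.named("text").flatMap(_.versioned("1.2.3.1"))
val any:   Option[Pkg] = pkgs.named("text").flatMap(_.head)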
package com.twitter.gizzard.nameserver import org.specs.mock.{ClassMocker, JMocker} import com.twitter.conversions.time._ import com.twitter.gizzard.thrift.{JobInjectorService, TThreadServer, JobInjector} import com.twitter.gizzard.ConfiguredSpecification object JobRelaySpec extends ConfiguredSpecification { val relay = new JobRelayFactory(2, 1.second)(Map( "normal" -> Seq(Host("localhost1", 8000, "normal", HostStatus.Normal), Host("localhost2", 8000, "normal", HostStatus.Normal)), "blackholed" -> Seq(Host("localhost3", 8000, "blackholed", HostStatus.Blackholed), Host("localhost4", 8000, "blackholed", HostStatus.Blackholed)), "blocked" -> Seq(Host("localhost5", 8000, "blocked", HostStatus.Blocked), Host("localhost6", 8000, "blocked", HostStatus.Blocked)), "blackholedAndBlocked" -> Seq(Host("localhost11", 8000, "blackholedAndBlocked", HostStatus.Blackholed), Host("localhost12", 8000, "blackholedAndBlocked", HostStatus.Blocked)), "blockedAndNormal" -> Seq(Host("localhost7", 8000, "blockedAndNormal", HostStatus.Blocked), Host("localhost8", 8000, "blockedAndNormal", HostStatus.Normal)), "blackholedAndNormal" -> Seq(Host("localhost9", 8000, "blackholedAndNormal", HostStatus.Blackholed), Host("localhost10", 8000, "blackholedAndNormal", HostStatus.Normal)), "mixed" -> Seq(Host("localhost13", 8000, "mixed", HostStatus.Blackholed), Host("localhost14", 8000, "mixed", HostStatus.Blocked), Host("localhost15", 8000, "mixed", HostStatus.Normal)) )) "JobRelay" should { "return a list of online and blocked clusters" in { relay.clusters must haveTheSameElementsAs(List( "normal", "blocked", "blackholedAndBlocked", "blockedAndNormal", "blackholedAndNormal", "mixed" )) } "return a normal relay cluster for a set of online hosts" in { relay("normal") must notBe(NullJobRelayCluster) relay("normal") must notHaveClass[BlockedJobRelayCluster] } "return a null relay cluster for a nonexistent cluster" in { relay("nonexistent") mustEqual NullJobRelayCluster } "return a null relay cluster for a set of blackholed hosts" in { relay("blackholed") mustEqual NullJobRelayCluster } "return a blocked relay cluster for a set of blocked hosts" in { relay("blocked") must haveClass[BlockedJobRelayCluster] } "return a normal relay cluster for a mixed set of online/blackholed/blocked hosts" in { relay("mixed") must notBe(NullJobRelayCluster) relay("mixed") must notHaveClass[BlockedJobRelayCluster] relay("blockedAndNormal") must notBe(NullJobRelayCluster) relay("blockedAndNormal") must notHaveClass[BlockedJobRelayCluster] relay("blackholedAndNormal") must notBe(NullJobRelayCluster) relay("blackholedAndNormal") must notHaveClass[BlockedJobRelayCluster] } "return a blocked relay cluster for a mixed set of blocked/blackholed hosts" in { relay("blackholedAndBlocked") must haveClass[BlockedJobRelayCluster] } } }
kmiku7/gizzard
src/test/scala/com/twitter/gizzard/nameserver/JobRelaySpec.scala
Scala
apache-2.0
3,133
// Copyright: 2010 - 2016 https://github.com/ensime/ensime-server/graphs // Licence: http://www.gnu.org/licenses/gpl-3.0.en.html package org.ensime.sexp.formats import BigIntConvertor._ import org.scalacheck.{ Arbitrary, Gen } import org.scalatest._ import org.scalatest.prop.GeneratorDrivenPropertyChecks import scala.collection.immutable.BitSet class BigIntConvertorSpec extends FunSpec { private val examples = List( BitSet() -> BigInt(0), BitSet(0) -> BigInt(1), BitSet(1) -> BigInt(2), BitSet(64) -> BigInt("18446744073709551616"), BitSet(0, 64) -> BigInt("18446744073709551617"), BitSet(1, 64) -> BigInt("18446744073709551618") ) it("should convert basic BigSet to BitInt") { examples foreach { case (bitset, bigint) => assert(fromBitSet(bitset) === bigint) } } it("should convert basic BigInt to BitSet") { examples foreach { case (bitset, bigint) => assert(toBitSet(bigint) === bitset) } } } class BigIntConvertorCheck extends FunSpec with GeneratorDrivenPropertyChecks { def positiveIntStream: Arbitrary[Stream[Int]] = Arbitrary { Gen.containerOf[Stream, Int](Gen.chooseNum(0, 2 * Short.MaxValue)) } implicit def arbitraryBitSet: Arbitrary[BitSet] = Arbitrary { for (seq <- positiveIntStream.arbitrary) yield BitSet(seq: _*) } it("should round-trip BigInt <=> BitSet") { forAll { (bigint: BigInt) => whenever(bigint >= 0) { // the exact rules for which negative numbers are allowed // seems to be quite complex, but certainly it is sometimes // valid. assert(fromBitSet(toBitSet(bigint)) === bigint) } } } it("should round-trip BitSet <=> BigInt") { forAll { (bitset: BitSet) => assert(toBitSet(fromBitSet(bitset)) === bitset) } } }
j-mckitrick/ensime-sbt
src/sbt-test/ensime-sbt/ensime-server/s-express/src/test/scala/org/ensime/sexp/formats/BigIntConvertorSpec.scala
Scala
apache-2.0
1,805
package dregex

import org.scalatest.funsuite.AnyFunSuite

class EmbeddedFlagTest extends AnyFunSuite {

  test("embedded flags") {
    // OK
    Regex.compile("(?x)a")

    // flags in the middle
    intercept[InvalidRegexException] {
      Regex.compile(" (?x)a")
    }
    intercept[InvalidRegexException] {
      Regex.compile("(?x)a(?x)")
    }

    // unknown flag
    intercept[InvalidRegexException] {
      Regex.compile("(?w)a")
    }
  }
}
marianobarrios/dregex
src/test/scala/dregex/EmbeddedFlagTest.scala
Scala
bsd-2-clause
452
package ch.epfl.bluebrain.nexus.iam.client import akka.actor.ActorSystem import akka.http.scaladsl.client.RequestBuilding._ import akka.http.scaladsl.model.Uri.Query import akka.http.scaladsl.model._ import akka.http.scaladsl.model.headers.OAuth2BearerToken import akka.http.scaladsl.unmarshalling.FromEntityUnmarshaller import akka.stream.scaladsl.Sink import akka.stream.{ActorMaterializer, Materializer} import cats.effect.{Effect, IO, LiftIO} import cats.syntax.applicativeError._ import cats.syntax.apply._ import cats.syntax.flatMap._ import cats.syntax.functor._ import ch.epfl.bluebrain.nexus.commons.http.HttpClient import ch.epfl.bluebrain.nexus.commons.http.HttpClient._ import ch.epfl.bluebrain.nexus.commons.http.JsonLdCirceSupport._ import ch.epfl.bluebrain.nexus.commons.rdf.syntax._ import ch.epfl.bluebrain.nexus.iam.client.IamClientError.{Forbidden, Unauthorized, UnknownError, UnmarshallingError} import ch.epfl.bluebrain.nexus.iam.client.config.IamClientConfig import ch.epfl.bluebrain.nexus.iam.client.types._ import ch.epfl.bluebrain.nexus.iam.client.types.events.Event import ch.epfl.bluebrain.nexus.iam.client.types.events.Event.{AclEvent, PermissionsEvent, RealmEvent} import ch.epfl.bluebrain.nexus.rdf.Iri.{AbsoluteIri, Path} import io.circe.generic.auto._ import io.circe.syntax._ import io.circe.{DecodingFailure, Json, ParsingFailure} import journal.Logger import scala.concurrent.{ExecutionContext, ExecutionContextExecutor, Future} import scala.reflect.ClassTag class IamClient[F[_]] private[client] ( source: EventSource[Event], config: IamClientConfig, aclsClient: HttpClient[F, AccessControlLists], callerClient: HttpClient[F, Caller], permissionsClient: HttpClient[F, Permissions], jsonClient: HttpClient[F, Json] )(implicit F: Effect[F], mt: Materializer) { /** * Retrieve the current ''acls'' for some particular ''path''. * * @param path the target resource * @param ancestors matches only the exact ''path'' (false) or its ancestors also (true) * @param self matches only the caller identities * @param credentials an optionally available token */ def acls(path: Path, ancestors: Boolean = false, self: Boolean = false)( implicit credentials: Option[AuthToken]): F[AccessControlLists] = { val endpoint = config.aclsIri + path val req = requestFrom(endpoint, Query("ancestors" -> ancestors.toString, "self" -> self.toString)) aclsClient(req) } /** * Retrieve the identities on a [[Caller]] object from the implicitly optional [[AuthToken]] * */ def identities(implicit credentials: Option[AuthToken]): F[Caller] = { credentials .map(_ => callerClient(requestFrom(config.identitiesIri))) .getOrElse(F.pure(Caller.anonymous)) } /** * Fetch available permissions. * * @param credentials an optionally available token * @return available permissions */ def permissions(implicit credentials: Option[AuthToken]): F[Set[Permission]] = permissionsClient(requestFrom(config.permissionsIri)).map(_.permissions) /** * Replace ACL at a given path. 
* * @param path [[Path]] for which to replace the ACL * @param acl updated [[AccessControlList]] * @param rev current revision * @param credentials an optionally available token */ def putAcls(path: Path, acl: AccessControlList, rev: Option[Long] = None)( implicit credentials: Option[AuthToken]): F[Unit] = { implicit val _ = config val endpoint = config.aclsIri + path val entity = HttpEntity(ContentTypes.`application/json`, acl.asJson.noSpaces) val query = rev.map(r => Query("rev" -> r.toString)).getOrElse(Query.Empty) val request = Put(endpoint.toAkkaUri.withQuery(query), entity) val requestWithCredentials = credentials.map(token => request.addCredentials(OAuth2BearerToken(token.value))).getOrElse(request) jsonClient(requestWithCredentials) *> F.unit } /** * Checks the presence of a specific ''permission'' on a particular ''path''. * * @param path the target resource * @param permission the permission to check * @param credentials an optionally available token */ def hasPermission(path: Path, permission: Permission)(implicit credentials: Option[AuthToken]): F[Boolean] = acls(path, ancestors = true, self = true).flatMap { acls => val found = acls.value.exists { case (_, acl) => acl.value.permissions.contains(permission) } if (found) F.pure(true) else F.pure(false) } /** * It applies the provided function ''f'' to the ACLs Server-sent events (SSE) * * @param f the function that gets executed when a new [[AclEvent]] appears * @param offset the optional offset from where to start streaming the events */ def aclEvents(f: AclEvent => F[Unit], offset: Option[String] = None)(implicit cred: Option[AuthToken]): Unit = { val pf: PartialFunction[Event, F[Unit]] = { case ev: AclEvent => f(ev) } events(config.aclsIri + "events", pf, offset) } /** * It applies the provided function ''f'' to the Permissions Server-sent events (SSE) * * @param f the function that gets executed when a new [[PermissionsEvent]] appears * @param offset the optional offset from where to start streaming the events */ def permissionEvents(f: PermissionsEvent => F[Unit], offset: Option[String] = None)( implicit cred: Option[AuthToken]): Unit = { val pf: PartialFunction[Event, F[Unit]] = { case ev: PermissionsEvent => f(ev) } events(config.permissionsIri + "events", pf, offset) } /** * It applies the provided function ''f'' to the Realms Server-sent events (SSE) * * @param f the function that gets executed when a new [[RealmEvent]] appears * @param offset the optional offset from where to start streaming the events */ def realmEvents(f: RealmEvent => F[Unit], offset: Option[String] = None)(implicit cred: Option[AuthToken]): Unit = { val pf: PartialFunction[Event, F[Unit]] = { case ev: RealmEvent => f(ev) } events(config.realmsIri + "events", pf, offset) } /** * It applies the provided function ''f'' to the Server-sent events (SSE) * * @param f the function that gets executed when a new [[Event]] appears * @param offset the optional offset from where to start streaming the events */ def events(f: Event => F[Unit], offset: Option[String] = None)(implicit cred: Option[AuthToken]): Unit = { val pf: PartialFunction[Event, F[Unit]] = { case ev: Event => f(ev) } events(config.internalIri + "events", pf, offset) } private def events(iri: AbsoluteIri, f: PartialFunction[Event, F[Unit]], offset: Option[String])( implicit cred: Option[AuthToken]): Unit = source(iri, offset) .mapAsync(1) { event => f.lift(event) match { case Some(evaluated) => F.toIO(evaluated).unsafeToFuture() case _ => Future.unit } } .to(Sink.ignore) 
.mapMaterializedValue(_ => ()) .run() private def requestFrom(iri: AbsoluteIri, query: Query = Query.Empty)(implicit credentials: Option[AuthToken]) = { val request = Get(iri.toAkkaUri.withQuery(query)) credentials.map(token => request.addCredentials(OAuth2BearerToken(token.value))).getOrElse(request) } } // $COVERAGE-OFF$ object IamClient { private def httpClient[F[_], A: ClassTag]( implicit L: LiftIO[F], F: Effect[F], ec: ExecutionContext, mt: Materializer, cl: UntypedHttpClient[F], um: FromEntityUnmarshaller[A] ): HttpClient[F, A] = new HttpClient[F, A] { private val logger = Logger(s"IamHttpClient[${implicitly[ClassTag[A]]}]") override def apply(req: HttpRequest): F[A] = cl.apply(req).flatMap { resp => resp.status match { case StatusCodes.Unauthorized => cl.toString(resp.entity).flatMap { entityAsString => F.raiseError[A](Unauthorized(entityAsString)) } case StatusCodes.Forbidden => logger.error(s"Received Forbidden when accessing '${req.method.name()} ${req.uri.toString()}'.") cl.toString(resp.entity).flatMap { entityAsString => F.raiseError[A](Forbidden(entityAsString)) } case other if other.isSuccess() => val value = L.liftIO(IO.fromFuture(IO(um(resp.entity)))) value.recoverWith { case pf: ParsingFailure => logger.error( s"Failed to parse a successful response of '${req.method.name()} ${req.getUri().toString}'.") F.raiseError[A](UnmarshallingError(pf.getMessage())) case df: DecodingFailure => logger.error( s"Failed to decode a successful response of '${req.method.name()} ${req.getUri().toString}'.") F.raiseError(UnmarshallingError(df.getMessage())) } case other => cl.toString(resp.entity).flatMap { entityAsString => logger.error( s"Received '${other.value}' when accessing '${req.method.name()} ${req.uri.toString()}', response entity as string: '$entityAsString.'") F.raiseError[A](UnknownError(other, entityAsString)) } } } override def discardBytes(entity: HttpEntity): F[HttpMessage.DiscardedEntity] = cl.discardBytes(entity) override def toString(entity: HttpEntity): F[String] = cl.toString(entity) } /** * Constructs an ''IamClient[F]'' from implicitly available instances of [[IamClientConfig]], [[ActorSystem]] and [[Effect]]. * * @tparam F the effect type * @return a new [[IamClient]] */ final def apply[F[_]: Effect](implicit config: IamClientConfig, as: ActorSystem): IamClient[F] = { implicit val mt: ActorMaterializer = ActorMaterializer() implicit val ec: ExecutionContextExecutor = as.dispatcher implicit val ucl: UntypedHttpClient[F] = HttpClient.untyped[F] val aclsClient: HttpClient[F, AccessControlLists] = httpClient[F, AccessControlLists] val callerClient: HttpClient[F, Caller] = httpClient[F, Caller] val permissionsClient: HttpClient[F, Permissions] = httpClient[F, Permissions] val jsonClient: HttpClient[F, Json] = httpClient[F, Json] val sse: EventSource[Event] = EventSource[Event](config) new IamClient(sse, config, aclsClient, callerClient, permissionsClient, jsonClient) } } // $COVERAGE-ON$
hygt/nexus-iam
client/src/main/scala/ch/epfl/bluebrain/nexus/iam/client/IamClient.scala
Scala
apache-2.0
10,660
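A sketch of wiring an IamClient[IO] and querying it. Here iamConfig, path and readPermission stand in for values built elsewhere, and the AuthToken constructor is assumed to take the raw token string; only the method calls themselves (identities, acls, hasPermission) come from the class above.

import akka.actor.ActorSystem
import cats.effect.IO
import ch.epfl.bluebrain.nexus.iam.client.IamClient
import ch.epfl.bluebrain.nexus.iam.client.types.AuthToken

implicit val system: ActorSystem = ActorSystem("iam-client")
implicit val config = iamConfig                                 // an IamClientConfig built elsewhere (assumption)
implicit val token: Option[AuthToken] = Some(AuthToken("..."))  // raw bearer token value (assumption)

val client = IamClient[IO]

val program = for {
  caller  <- client.identities                                  // resolves the caller behind the token
  acl     <- client.acls(path, ancestors = true, self = true)   // `path` built elsewhere (assumption)
  allowed <- client.hasPermission(path, readPermission)         // `readPermission` built elsewhere (assumption)
} yield allowed

val allowed: Boolean = program.unsafeRunSync()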
import java.lang.RuntimeException import org.apache.spark.SparkContext import org.apache.spark.SparkConf import org.apache.spark.sql.hive.HiveContext import scala.sys.process._ import scalax.file.{Path} import scalax.io.StandardOpenOption._ import scopt.OptionParser import org.apache.hadoop.io._ import org.apache.spark.sql.types.{StructType,StructField,StringType}; import org.apache.spark.sql.Row; object CaffeAppSeq { private case class Params( input: String = null, output: String = null ) def classifyImage(binaryFile :(Text, BytesWritable)): (String, String) = { print("The filename is: " + binaryFile._1 + " !!!\\n") val tmpDir = Path.createTempDirectory(prefix = "caffe", deleteOnExit = false) val f = tmpDir \\ "caffe.img" val fOut = f.outputStream(WriteTruncate:_*) fOut.write(binaryFile._2.getBytes) //Call Docker container val cmd ="docker run --rm -v " + tmpDir.path + ":/data nlesc/imagenet1000 data/caffe.img" var output = "" try { output = cmd.!! } catch { case msg: RuntimeException => { println(msg) println(cmd) } } tmpDir.deleteRecursively() return (binaryFile._1.toString, output) } def main(args: Array[String]) { val defaultParams = Params() val parser = new OptionParser[Params]("CaffeApp") { head("CaffeApp: categorize images.") arg[String]("<input>") .text("sequence file with filename and filecontent of the images.") .required() .action((x, c) => c.copy(input = x)) arg[String]("<output>") .text("orc file with filename and labels") .required() .action((x, c) => c.copy(output = x)) } parser.parse(args, defaultParams).map { params => run(params) }.getOrElse { parser.showUsageAsError sys.exit(1) } } def run(params: Params) { val conf = new SparkConf().setAppName("Caffe App") val sc = new SparkContext(conf) val sqlContext = new HiveContext(sc) val images = sc.sequenceFile(params.input, classOf[Text], classOf[BytesWritable]) val labels = images.map(image => this.classifyImage(image)).map(row => Row(row._1, row._2)) val schemaString = "filename labels" val labelsSchema = StructType( schemaString.split(" ").map(fieldName => StructField(fieldName, StringType, true))) sqlContext.createDataFrame(labels, labelsSchema).write.format("orc").mode("append").save(params.output) } }
nlesc-sherlock/spark-docker
caffeseq/src/main/scala/CaffeAppSeq.scala
Scala
apache-2.0
2,379
/*
 * SPDX-License-Identifier: Apache-2.0
 * Copyright 2016-2020 Daniel Urban and contributors listed in NOTICE.txt
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package dev.tauri.choam
package kcas

final object BenchmarkAccess {

  def setBirthEpochOpaque[T, M <: IBRManaged[T, M]](b: IBRManaged[T, M], e: Long): Unit = {
    b.setBirthEpochOpaque(e)
  }
}
durban/exp-reagents
bench/src/main/scala/dev/tauri/choam/kcas/BenchmarkAccess.scala
Scala
apache-2.0
876
package com.caibowen.prma.logger.logback import ch.qos.logback.classic.spi.{ILoggingEvent, IThrowableProxy, StackTraceElementProxy} import com.caibowen.prma.api.model.ExceptionVO import com.caibowen.prma.core.filter.StrFilter import scala.collection.immutable.{Vector, List} import scala.collection.mutable.ArrayBuffer /** * @author BowenCai * @since 06/12/2014. */ class FilteredAdaptor(private[this] val classFilter: StrFilter, private[this] val stackTraceFilter: StrFilter) extends LogbackEventAdaptor { private def takeClass = (name: String)=>classFilter.accept(name) != 1 private def takeStackTrace = (sp: StackTraceElementProxy)=>stackTraceFilter.accept(sp.getStackTraceElement.getClassName) != 1 override def getExcepts(event: ILoggingEvent): Vector[ExceptionVO] = { val px = event.getThrowableProxy if (px == null || !takeClass(px.getClassName)) return null val toExceptVO = (px: IThrowableProxy, start: Int) => { val stps = px.getStackTraceElementProxyArray val len = stps.length - start val stacks = if (len > 0) stps.take(stps.length - start) .filter(takeStackTrace) .map(_.getStackTraceElement) .toVector else null.asInstanceOf[Vector[StackTraceElement]] new ExceptionVO(px.getClassName, px.getMessage, stacks) } var cause = px.getCause if (cause == null || !takeClass(cause.getClassName)) return Vector(toExceptVO(px, 0)) val buf = Vector.newBuilder[ExceptionVO] buf.sizeHint(16) val cs = cause.getCommonFrames do { buf += toExceptVO(cause, cs) cause = cause.getCause } while (cause != null && takeClass(cause.getClassName)) buf.result() } }
xkommando/PRMA
logger/src/main/scala/com/caibowen/prma/logger/logback/FilteredAdaptor.scala
Scala
lgpl-3.0
1,743
package jgo.tools.compiler
package transl
package func

import interm._
import instr._

trait Returns extends FuncTranslBase {
  protected override def translateInstr(i: Instr): Unit = i match {
    case Return      => gen.returnValue() //hopefully, this works.
    case ValueReturn => gen.returnValue()

    case _ => super.translateInstr(i)
  }
}
thomasmodeneis/jgo
src/src/main/scala/jgo/tools/compiler/transl/func/Returns.scala
Scala
gpl-3.0
352
package net.sansa_stack.rdf.spark.io import java.net.URI import scala.reflect.ClassTag import com.google.common.base.Predicates import com.google.common.collect.Iterators import net.sansa_stack.rdf.benchmark.io.ReadableByteChannelFromIterator import net.sansa_stack.rdf.common.io.riot.lang.LangNTriplesSkipBad import net.sansa_stack.rdf.common.io.riot.tokens.TokenizerTextForgiving import org.apache.jena.atlas.io.PeekReader import org.apache.jena.atlas.iterator.IteratorResourceClosing import org.apache.jena.graph.Triple import org.apache.jena.riot.{ RIOT, SysRIOT } import org.apache.jena.riot.SysRIOT.fmtMessage import org.apache.jena.riot.lang.RiotParsers import org.apache.jena.riot.system._ import org.apache.spark.rdd.RDD import org.apache.spark.sql.SparkSession import org.slf4j.{ Logger, LoggerFactory } /** * An N-Triples reader. One triple per line is assumed. * * @author Lorenz Buehmann */ object NTripleReader { /** * Loads N-Triples data from a file or directory into an RDD. * * @param session the Spark session * @param path the path to the N-Triples file(s) * @return the RDD of triples */ def load(session: SparkSession, path: URI): RDD[Triple] = { load(session, path.toString) } /** * Loads N-Triples data from a set of files or directories into an RDD. * The path can also contain multiple paths * and even wildcards, e.g. * `"/my/dir1,/my/paths/part-00[0-5]*,/another/dir,/a/specific/file"` * * @param session the Spark session * @param paths the path to the N-Triples file(s) * @return the RDD of triples */ def load(session: SparkSession, paths: Seq[URI]): RDD[Triple] = { load(session, paths.mkString(",")) } /** * Loads N-Triples data from a file or directory into an RDD. * The path can also contain multiple paths * and even wildcards, e.g. * `"/my/dir1,/my/paths/part-00[0-5]*,/another/dir,/a/specific/file"` * * === Handling of errors=== * * By default, it stops once a parse error occurs, i.e. a [[org.apache.jena.riot.RiotException]] will be thrown * generated by the underlying parser. * * The following options exist: * - STOP the whole data loading process will be stopped and a `org.apache.jena.net.sansa_stack.rdf.spark.riot.RiotException` will be thrown * - SKIP the line will be skipped but the data loading process will continue, an error message will be logged * * * ===Handling of warnings=== * * If the additional checking of RDF terms is enabled, warnings during parsing can occur. For example, * a wrong lexical form of a literal w.r.t. to its datatype will lead to a warning. * * The following can be done with those warnings: * - IGNORE the warning will just be logged to the configured logger * - STOP similar to the error handling mode, the whole data loading process will be stopped and a * [[org.apache.jena.riot.RiotException]] will be thrown * - SKIP similar to the error handling mode, the line will be skipped but the data loading process will continue * * * ===Checking of RDF terms=== * Set whether to perform checking of NTriples - defaults to no checking. * * Checking adds warnings over and above basic syntax errors. * This can also be used to turn warnings into exceptions if the option `stopOnWarnings` is set to STOP or SKIP. * * - IRIs - whether IRIs confirm to all the rules of the IRI scheme * - Literals: whether the lexical form conforms to the rules for the datatype. * - Triples: check slots have a valid kind of RDF term (parsers usually make this a syntax error anyway). * * * See also the optional `errorLog` argument to control the output. The default is to log. 
* * * @param session the Spark session * @param path the path to the N-Triples file(s) * @param stopOnBadTerm stop parsing on encountering a bad RDF term * @param stopOnWarnings stop parsing on encountering a warning * @param checkRDFTerms run with checking of literals and IRIs either on or off * @param errorLog the logger used for error message handling * @return the RDD of triples */ def load(session: SparkSession, path: String, stopOnBadTerm: ErrorParseMode.Value = ErrorParseMode.STOP, stopOnWarnings: WarningParseMode.Value = WarningParseMode.IGNORE, checkRDFTerms: Boolean = false, errorLog: Logger = ErrorHandlerFactory.stdLogger): RDD[Triple] = { // parse the text file first val rdd = session.sparkContext .textFile(path, minPartitions = 20) val strict = stopOnBadTerm == ErrorParseMode.STOP && stopOnWarnings == WarningParseMode.STOP // create the error handler profile val profileWrapper = NonSerializableObjectWrapper { val errorHandler = if (strict) { ErrorHandlerFactory.errorHandlerStrict(errorLog) } else { if (stopOnBadTerm == ErrorParseMode.STOP) { if (stopOnWarnings == WarningParseMode.STOP || stopOnWarnings == WarningParseMode.SKIP) { ErrorHandlerFactory.errorHandlerStrict(errorLog) } else { ErrorHandlerFactory.errorHandlerStd(errorLog) } } else { // ErrorHandlerFactory.errorHandlerWarn new CustomErrorHandler() } } new ParserProfileStd(RiotLib.factoryRDF, errorHandler, IRIResolver.create, PrefixMapFactory.createForInput, RIOT.getContext.copy, checkRDFTerms || strict, strict) } import scala.collection.JavaConverters._ // parse each partition rdd.mapPartitions(p => { // convert iterator to input stream val input = ReadableByteChannelFromIterator.toInputStream(p.asJava) // create the parsing iterator val it = if (stopOnBadTerm == ErrorParseMode.STOP || stopOnWarnings == WarningParseMode.STOP) { // this is the default behaviour of Jena, i.e. 
once a parse error occurs the whole process stops RiotParsers.createIteratorNTriples(input, null, profileWrapper.get) } else { // here we "simply" skip illegal triples // we need a custom tokenizer val tokenizer = new TokenizerTextForgiving(PeekReader.makeUTF8(input)) tokenizer.setErrorHandler(ErrorHandlerFactory.errorHandlerWarn) // which is used by a custom N-Triples iterator val it = new LangNTriplesSkipBad(tokenizer, profileWrapper.get, null) // filter out null values Iterators.filter(it, Predicates.notNull[Triple]()) } new IteratorResourceClosing[Triple](it, input).asScala }) } def main(args: Array[String]): Unit = { if (args.length == 0) { println("Usage: NTripleReader <PATH_TO_FILE>") System.exit(1) } val path = args(0) val sparkSession = SparkSession.builder .master("local") .appName("N-Triples reader") .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer") // .config("spark.kryo.registrationRequired", "true") // .config("spark.eventLog.enabled", "true") // .config("spark.kryo.registrator", String.join(", ", // "net.sansa_stack.rdf.spark.io.JenaKryoRegistrator")) .config("spark.default.parallelism", "4") .config("spark.sql.shuffle.partitions", "4") .getOrCreate() val rdd = NTripleReader.load( sparkSession, path, stopOnBadTerm = ErrorParseMode.SKIP, stopOnWarnings = WarningParseMode.SKIP, checkRDFTerms = true, LoggerFactory.getLogger("errorLog")) println(rdd.count()) println("result:\\n" + rdd.take(1000).map { _.toString.replaceAll("[\\\\x00-\\\\x1f]", "???") }.mkString("\\n")) } } object ErrorParseMode extends Enumeration { val STOP, SKIP = Value } object WarningParseMode extends Enumeration { val STOP, SKIP, IGNORE = Value } private class NonSerializableObjectWrapper[T: ClassTag](constructor: => T) extends AnyRef with Serializable { @transient private lazy val instance: T = constructor def get: T = instance } private object NonSerializableObjectWrapper { def apply[T: ClassTag](constructor: => T): NonSerializableObjectWrapper[T] = new NonSerializableObjectWrapper[T](constructor) } /** * A custom error handler that doesn't throw an exception on fatal parse errors. This allows for simply skipping those * triples instead of aborting the whole parse process. * * @param log an optional logger */ class CustomErrorHandler(val log: Logger = SysRIOT.getLogger) extends ErrorHandler { /** report a warning */ def logWarning(message: String, line: Long, col: Long): Unit = { if (log != null) log.warn(fmtMessage(message, line, col)) } /** report an error */ def logError(message: String, line: Long, col: Long): Unit = { if (log != null) log.error(fmtMessage(message, line, col)) } /** report a catastrophic error */ def logFatal(message: String, line: Long, col: Long): Unit = { if (log != null) logError(message, line, col) } override def warning(message: String, line: Long, col: Long): Unit = logWarning(message, line, col) override def error(message: String, line: Long, col: Long): Unit = logError(message, line, col) override def fatal(message: String, line: Long, col: Long): Unit = logFatal(message, line, col) } // sealed trait ErrorParseMode { // case object STOP extends ErrorParseMode // case object SKIP extends ErrorParseMode // } // sealed trait WarningParseMode { // case object STOP extends WarningParseMode // case object SKIP extends WarningParseMode // case object IGNORE extends WarningParseMode // } // @enum trait ErrorParseMode { // object STOP // object SKIP // }
SANSA-Stack/Spark-RDF
sansa-rdf-spark/src/main/scala/net/sansa_stack/rdf/spark/io/NTripleReader.scala
Scala
gpl-3.0
9,746
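A minimal usage sketch for the reader above, assuming a local SparkSession and a hypothetical input path; it uses only the load() signature and the parse-mode enumerations defined in the file.

import org.apache.spark.sql.SparkSession
import org.slf4j.LoggerFactory
import net.sansa_stack.rdf.spark.io.{ ErrorParseMode, NTripleReader, WarningParseMode }

object NTripleReaderUsageSketch {
  def main(args: Array[String]): Unit = {
    val session = SparkSession.builder().master("local[*]").appName("nt-loader-sketch").getOrCreate()

    // Hypothetical input path; by default load() is strict and fails on the first bad line.
    val path = "/data/example.nt"

    // Lenient variant: skip bad lines and log term-level warnings instead of failing.
    val triples = NTripleReader.load(
      session, path,
      stopOnBadTerm = ErrorParseMode.SKIP,
      stopOnWarnings = WarningParseMode.SKIP,
      checkRDFTerms = true,
      errorLog = LoggerFactory.getLogger("nt-errors"))

    println(s"loaded ${triples.count()} triples")
    session.stop()
  }
}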
package com.productfoundry.akka.cqrs.process import akka.actor.ActorRef import com.productfoundry.akka.cqrs._ import com.productfoundry.akka.cqrs.publish.EventPublication import com.productfoundry.akka.messaging.ConfirmDelivery import com.productfoundry.support.EntityTestSupport import org.scalatest.prop.GeneratorDrivenPropertyChecks class ProcessManagerSpec extends EntityTestSupport with GeneratorDrivenPropertyChecks with Fixtures { implicit def DummyProcessManagerIdResolution = DummyProcessManager.idResolution implicit def DummyProcessManagerFactory = DummyProcessManager.factory() implicit val supervisorFactory = entityContext.entitySupervisorFactory[DummyProcessManager] val supervisor: ActorRef = EntitySupervisor.forType[DummyProcessManager] "Event publications" must { "be received" in new ProcessManagerFixture { forAll { commit: Commit => val publications = createUniquePublications(commit) publications.foreach { publication => supervisor ! publication } val events = receiveN(publications.size).map(_.asInstanceOf[AggregateEvent]) publications.map(_.eventRecord.event) should contain theSameElementsAs events } expectNoMsg() } "be confirmed" in new ProcessManagerFixture { var nextDeliveryId = 1L forAll { commit: Commit => val publications = createUniquePublications(commit) publications.foreach { publication => supervisor ! publication.requestConfirmation(nextDeliveryId) nextDeliveryId = nextDeliveryId + 1 } if (publications.nonEmpty) { val results = receiveN(publications.size * 2) val events = results.filter(p => classOf[AggregateEvent].isAssignableFrom(p.getClass)) publications.map(_.eventRecord.event) should contain theSameElementsAs events val confirmations = results.filter(p => classOf[ConfirmDelivery].isAssignableFrom(p.getClass)) confirmations.size should be(events.size) } } expectNoMsg() } "be deduplicated" in new ProcessManagerFixture { forAll { commit: Commit => val publications = createUniquePublications(commit) publications.foreach { publication => supervisor ! publication supervisor ! publication } val events = receiveN(publications.size) publications.map(_.eventRecord.event) should contain theSameElementsAs events } expectNoMsg() } } trait ProcessManagerFixture { system.eventStream.subscribe(self, classOf[Any]) def createUniquePublications(commit: Commit): Seq[EventPublication] = { commit.records.map(eventRecord => EventPublication(eventRecord)).groupBy(_.eventRecord.tag).map(_._2.head).toSeq } } }
Product-Foundry/akka-cqrs
core/src/test/scala/com/productfoundry/akka/cqrs/process/ProcessManagerSpec.scala
Scala
apache-2.0
2,818
package com.dt.scala.implicits /** * @author Wang Jialin * Date 2015/7/19 * Contact Information: * WeChat: 18610086859 * QQ: 1740415547 * Email: 18610086859@126.com * Tel: 18610086859 */ object Implicit_Conversions_with_Implicit_Parameters { def main(args: Array[String]) { def bigger[T](a: T, b: T)(implicit ordered: T => Ordered[T]) = if (a > b) a else b println(bigger(4,3)) println(bigger(4.2,3)) println(bigger("Spark","Hadoop")) } }
slieer/scala-tutorials
src/main/scala/com/dt/scala/implicits/Implicit_Conversions_with_Implicit_Parameters.scala
Scala
apache-2.0
494
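For contrast, a small variant (not part of the original file) that expresses the same comparison with the standard library's Ordering type class instead of an implicit conversion to Ordered[T]; behaviour for the calls shown is the same.

object Implicit_Parameters_Ordering_Variant {
  def main(args: Array[String]): Unit = {
    // The implicit parameter is now the Ordering type class rather than a conversion function.
    def bigger[T](a: T, b: T)(implicit ord: Ordering[T]) = if (ord.gt(a, b)) a else b

    println(bigger(4, 3))
    println(bigger("Spark", "Hadoop"))
  }
}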
def flatMap[A,B](f: Rand[A])(g: A => Rand[B]): Rand[B] = rng => { val (a, r1) = f(rng) g(a)(r1) // We pass the new state along } def nonNegativeLessThan(n: Int): Rand[Int] = { flatMap(nonNegativeInt) { i => val mod = i % n if (i + (n-1) - mod >= 0) unit(mod) else nonNegativeLessThan(n) } }
ud3sh/coursework
functional-programming-in-scala-textbook/answerkey/state/08.answer.scala
Scala
unlicense
316
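A self-contained sketch of how the answer above is typically wired up, assuming the book's usual state definitions (RNG, SimpleRNG, Rand, unit, nonNegativeInt), which are not part of this answer file.

object RandSketch {
  trait RNG { def nextInt: (Int, RNG) }

  case class SimpleRNG(seed: Long) extends RNG {
    def nextInt: (Int, RNG) = {
      val newSeed = (seed * 0x5DEECE66DL + 0xBL) & 0xFFFFFFFFFFFFL
      ((newSeed >>> 16).toInt, SimpleRNG(newSeed))
    }
  }

  type Rand[+A] = RNG => (A, RNG)

  def unit[A](a: A): Rand[A] = rng => (a, rng)

  def nonNegativeInt(rng: RNG): (Int, RNG) = {
    val (i, r) = rng.nextInt
    (if (i < 0) -(i + 1) else i, r)
  }

  def flatMap[A, B](f: Rand[A])(g: A => Rand[B]): Rand[B] = rng => {
    val (a, r1) = f(rng)
    g(a)(r1) // pass the new state along
  }

  def nonNegativeLessThan(n: Int): Rand[Int] =
    flatMap(nonNegativeInt) { i =>
      val mod = i % n
      if (i + (n - 1) - mod >= 0) unit(mod) else nonNegativeLessThan(n)
    }

  def main(args: Array[String]): Unit = {
    // Roll a die: always in [0, 5], and the same seed gives the same roll.
    val (roll, _) = nonNegativeLessThan(6)(SimpleRNG(42))
    println(roll)
  }
}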
package pureconfig import com.typesafe.config.{ConfigFactory, ConfigValueFactory, ConfigValueType} import shapeless.test.illTyped import pureconfig.error.WrongType import pureconfig.generic.error.NoValidCoproductOptionFound import pureconfig.generic.semiauto._ class EnumerationsSuite extends BaseSuite { sealed trait Color case object RainyBlue extends Color case object SunnyYellow extends Color behavior of "deriveEnumeration" it should "provide methods to derive readers for enumerations encoded as sealed traits" in { implicit val colorReader = deriveEnumerationReader[Color] ConfigReader[Color].from(ConfigValueFactory.fromAnyRef("rainy-blue")) shouldBe Right(RainyBlue) ConfigReader[Color].from(ConfigValueFactory.fromAnyRef("sunny-yellow")) shouldBe Right(SunnyYellow) val unknownValue = ConfigValueFactory.fromAnyRef("blue") ConfigReader[Color] .from(unknownValue) should failWith(NoValidCoproductOptionFound(unknownValue, Seq.empty), "", emptyConfigOrigin) val conf = ConfigFactory.parseString("{ type: person, name: John, surname: Doe }") ConfigReader[Color].from(conf.root()) should failWith( WrongType(ConfigValueType.OBJECT, Set(ConfigValueType.STRING)), "", stringConfigOrigin(1) ) } it should "provide methods to derive writers for enumerations encoded as sealed traits" in { implicit val colorWriter = deriveEnumerationWriter[Color] ConfigWriter[Color].to(RainyBlue) shouldEqual ConfigValueFactory.fromAnyRef("rainy-blue") ConfigWriter[Color].to(SunnyYellow) shouldEqual ConfigValueFactory.fromAnyRef("sunny-yellow") } it should "provide methods to derive full converters for enumerations encoded as sealed traits" in { implicit val colorConvert = deriveEnumerationConvert[Color] ConfigConvert[Color].from(ConfigValueFactory.fromAnyRef("rainy-blue")) shouldBe Right(RainyBlue) ConfigConvert[Color].from(ConfigValueFactory.fromAnyRef("sunny-yellow")) shouldBe Right(SunnyYellow) ConfigConvert[Color].to(RainyBlue) shouldEqual ConfigValueFactory.fromAnyRef("rainy-blue") ConfigConvert[Color].to(SunnyYellow) shouldEqual ConfigValueFactory.fromAnyRef("sunny-yellow") } it should "provide customizable methods to derive readers for enumerations encoded as sealed traits" in { implicit val colorReader = deriveEnumerationReader[Color](ConfigFieldMapping(PascalCase, SnakeCase)) ConfigReader[Color].from(ConfigValueFactory.fromAnyRef("rainy_blue")) shouldBe Right(RainyBlue) ConfigReader[Color].from(ConfigValueFactory.fromAnyRef("sunny_yellow")) shouldBe Right(SunnyYellow) } it should "provide customizable methods to derive writers for enumerations encoded as sealed traits" in { implicit val colorWriter = deriveEnumerationWriter[Color](ConfigFieldMapping(PascalCase, SnakeCase)) ConfigWriter[Color].to(RainyBlue) shouldEqual ConfigValueFactory.fromAnyRef("rainy_blue") ConfigWriter[Color].to(SunnyYellow) shouldEqual ConfigValueFactory.fromAnyRef("sunny_yellow") } it should "provide customizable methods to derive full converters for enumerations encoded as sealed traits" in { implicit val colorConvert = deriveEnumerationConvert[Color](ConfigFieldMapping(PascalCase, SnakeCase)) ConfigConvert[Color].from(ConfigValueFactory.fromAnyRef("rainy_blue")) shouldBe Right(RainyBlue) ConfigConvert[Color].from(ConfigValueFactory.fromAnyRef("sunny_yellow")) shouldBe Right(SunnyYellow) ConfigConvert[Color].to(RainyBlue) shouldEqual ConfigValueFactory.fromAnyRef("rainy_blue") ConfigConvert[Color].to(SunnyYellow) shouldEqual ConfigValueFactory.fromAnyRef("sunny_yellow") } it should "not allow deriving readers, writers and full converters for 
enumerations encoded as sealed traits whose subclasses are not all case objects" in { sealed trait Entity case class Person(name: String, surname: String) extends Entity case class Place(name: String, lat: Double, lon: Double) extends Entity illTyped("deriveEnumerationReader[Entity]", ".*could not find Lazy implicit value of type.*") illTyped("deriveEnumerationWriter[Entity]", ".*could not find Lazy implicit value of type.*") illTyped("deriveEnumerationConvert[Entity]", ".*could not find Lazy implicit value of type.*") } }
pureconfig/pureconfig
modules/generic/src/test/scala/pureconfig/EnumerationsSuite.scala
Scala
mpl-2.0
4,297
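Outside of a test, the same derivation is typically used when reading application configuration. A hedged sketch, reusing only calls that appear in the suite above (ConfigFactory, deriveEnumerationReader, ConfigReader.from); the "color" key is hypothetical.

import com.typesafe.config.ConfigFactory
import pureconfig.ConfigReader
import pureconfig.generic.semiauto.deriveEnumerationReader

object ColorFromConfigSketch {
  sealed trait Color
  case object RainyBlue extends Color
  case object SunnyYellow extends Color

  // kebab-case mapping by default, as exercised in the suite ("rainy-blue" -> RainyBlue)
  implicit val colorReader: ConfigReader[Color] = deriveEnumerationReader[Color]

  def main(args: Array[String]): Unit = {
    val conf = ConfigFactory.parseString("color = rainy-blue")
    println(ConfigReader[Color].from(conf.getValue("color"))) // Right(RainyBlue)
  }
}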
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.network.netty import java.nio.ByteBuffer import scala.collection.JavaConverters._ import scala.concurrent.{Future, Promise} import scala.reflect.ClassTag import org.apache.spark.{SecurityManager, SparkConf} import org.apache.spark.network._ import org.apache.spark.network.buffer.ManagedBuffer import org.apache.spark.network.client.{RpcResponseCallback, TransportClientBootstrap, TransportClientFactory} import org.apache.spark.network.sasl.{SaslClientBootstrap, SaslServerBootstrap} import org.apache.spark.network.server._ import org.apache.spark.network.shuffle.{BlockFetchingListener, OneForOneBlockFetcher, RetryingBlockFetcher} import org.apache.spark.network.shuffle.protocol.UploadBlock import org.apache.spark.network.util.JavaUtils import org.apache.spark.serializer.JavaSerializer import org.apache.spark.storage.{BlockId, StorageLevel} import org.apache.spark.util.Utils /** * A BlockTransferService that uses Netty to fetch a set of blocks at a time. */ private[spark] class NettyBlockTransferService( conf: SparkConf, securityManager: SecurityManager, bindAddress: String, override val hostName: String, _port: Int, numCores: Int) extends BlockTransferService { // TODO: Don't use Java serialization, use a more cross-version compatible serialization format. private val serializer = new JavaSerializer(conf) private val authEnabled = securityManager.isAuthenticationEnabled() private val transportConf = SparkTransportConf.fromSparkConf(conf, "shuffle", numCores) private[this] var transportContext: TransportContext = _ private[this] var server: TransportServer = _ private[this] var clientFactory: TransportClientFactory = _ private[this] var appId: String = _ override def init(blockDataManager: BlockDataManager): Unit = { val rpcHandler = new NettyBlockRpcServer(conf.getAppId, serializer, blockDataManager) var serverBootstrap: Option[TransportServerBootstrap] = None var clientBootstrap: Option[TransportClientBootstrap] = None if (authEnabled) { serverBootstrap = Some(new SaslServerBootstrap(transportConf, securityManager)) clientBootstrap = Some(new SaslClientBootstrap(transportConf, conf.getAppId, securityManager, securityManager.isSaslEncryptionEnabled())) } transportContext = new TransportContext(transportConf, rpcHandler) clientFactory = transportContext.createClientFactory(clientBootstrap.toSeq.asJava) server = createServer(serverBootstrap.toList) appId = conf.getAppId logInfo(s"Server created on ${hostName}:${server.getPort}") } /** Creates and binds the TransportServer, possibly trying multiple ports.
*/ private def createServer(bootstraps: List[TransportServerBootstrap]): TransportServer = { def startService(port: Int): (TransportServer, Int) = { val server = transportContext.createServer(bindAddress, port, bootstraps.asJava) (server, server.getPort) } Utils.startServiceOnPort(_port, startService, conf, getClass.getName)._1 } override def fetchBlocks( host: String, port: Int, execId: String, blockIds: Array[String], listener: BlockFetchingListener): Unit = { logTrace(s"Fetch blocks from $host:$port (executor id $execId)") try { val blockFetchStarter = new RetryingBlockFetcher.BlockFetchStarter { override def createAndStart(blockIds: Array[String], listener: BlockFetchingListener) { val client = clientFactory.createClient(host, port) new OneForOneBlockFetcher(client, appId, execId, blockIds.toArray, listener).start() } } val maxRetries = transportConf.maxIORetries() if (maxRetries > 0) { // Note this Fetcher will correctly handle maxRetries == 0; we avoid it just in case there's // a bug in this code. We should remove the if statement once we're sure of the stability. new RetryingBlockFetcher(transportConf, blockFetchStarter, blockIds, listener).start() } else { blockFetchStarter.createAndStart(blockIds, listener) } } catch { case e: Exception => logError("Exception while beginning fetchBlocks", e) blockIds.foreach(listener.onBlockFetchFailure(_, e)) } } override def port: Int = server.getPort override def uploadBlock( hostname: String, port: Int, execId: String, blockId: BlockId, blockData: ManagedBuffer, level: StorageLevel, classTag: ClassTag[_]): Future[Unit] = { val result = Promise[Unit]() val client = clientFactory.createClient(hostname, port) // StorageLevel and ClassTag are serialized as bytes using our JavaSerializer. // Everything else is encoded using our binary protocol. val metadata = JavaUtils.bufferToArray(serializer.newInstance().serialize((level, classTag))) // Convert or copy nio buffer into array in order to serialize it. val array = JavaUtils.bufferToArray(blockData.nioByteBuffer()) client.sendRpc(new UploadBlock(appId, execId, blockId.toString, metadata, array).toByteBuffer, new RpcResponseCallback { override def onSuccess(response: ByteBuffer): Unit = { logTrace(s"Successfully uploaded block $blockId") result.success((): Unit) } override def onFailure(e: Throwable): Unit = { logError(s"Error while uploading block $blockId", e) result.failure(e) } }) result.future } override def close(): Unit = { if (server != null) { server.close() } if (clientFactory != null) { clientFactory.close() } } }
sh-cho/cshSpark
network/netty/NettyBlockTransferService.scala
Scala
apache-2.0
6,466
/** * Copyright (c) 2002-2012 "Neo Technology," * Network Engine for Objects in Lund AB [http://neotechnology.com] * * This file is part of Neo4j. * * Neo4j is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. */ package org.neo4j.cypher.internal.executionplan.builders import org.neo4j.cypher.internal.pipes.ColumnFilterPipe import org.neo4j.cypher.internal.executionplan.{ExecutionPlanInProgress, PlanBuilder} import org.neo4j.cypher.internal.symbols.SymbolTable import org.neo4j.cypher.internal.commands.{AllIdentifiers, ReturnItem, ReturnColumn} class ColumnFilterBuilder extends PlanBuilder { def apply(plan: ExecutionPlanInProgress) = { val q = plan.query val p = plan.pipe val isLastPipe = q.tail.isEmpty if (!isLastPipe && q.returns == Seq(Unsolved(AllIdentifiers()))) { val resultQ = q.copy(returns = q.returns.map(_.solve)) plan.copy(query = resultQ) } else { val returnItems = getReturnItems(q.returns, p.symbols) val expressionsToExtract = returnItems.map { case ReturnItem(e, k, _) => k -> e }.toMap val newPlan = ExtractBuilder.extractIfNecessary(plan, expressionsToExtract) val filterPipe = new ColumnFilterPipe(newPlan.pipe, returnItems) val resultPipe = if (filterPipe.symbols != p.symbols) { filterPipe } else { p } val resultQ = newPlan.query.copy(returns = q.returns.map(_.solve)) plan.copy(pipe = resultPipe, query = resultQ) } } def canWorkWith(plan: ExecutionPlanInProgress) = { val q = plan.query q.extracted && !q.sort.exists(_.unsolved) && !q.slice.exists(_.unsolved) && q.returns.exists(_.unsolved) } def priority = PlanBuilder.ColumnFilter private def getReturnItems(q: Seq[QueryToken[ReturnColumn]], symbols: SymbolTable): Seq[ReturnItem] = q.map(_.token).flatMap { case x: ReturnItem => Seq(x) case x: AllIdentifiers => x.expressions(symbols).map { case (n, e) => ReturnItem(e, n) } } }
dksaputra/community
cypher/src/main/scala/org/neo4j/cypher/internal/executionplan/builders/ColumnFilterBuilder.scala
Scala
gpl-3.0
2,598
package chess import language.implicitConversions object Rank { def figureRank(figure: Figure) = figure.figureType match { case Queen => 900 case Rook => 450 case Knight | Bishop => 300 case Pawn => 100 case _ => 0 } def fieldRank(field: Field) = { def colRowRank(cr: Int) = if (cr>=5) 9-cr else cr 2*colRowRank(field.col) * colRowRank(field.row) } implicit def Game2Rank(game: Game): Rank = new Rank(game) } class Rank(game: Game) { import Rank._ import FigureMoves._ def figureDefendingOtherFiguresRank(field:Field, figure:Figure) = game.defendedDestinations(figureMoves(figure,field,true)).size/2 def checkRank(color: Color) = if (game.color == color.other && game.isKingUnderCheck) 50 else 0 def colorRank(color: Color) = (for ((field, figure) <- game.board.iterator if figure.figureColor == color; r1 = figureRank(figure); r2 = fieldRank(field); r3 = game.figureDefendingOtherFiguresRank(field, figure)) yield r1 + r2 + r3).sum + game.checkRank(color) def rank(color: Color) = game.colorRank(color)-game.colorRank(color.other) }
grzegorzbalcerek/scala-book-examples
examples/Rank.scala
Scala
mit
1,144
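To make the field heuristic above concrete, a tiny standalone sketch that restates fieldRank with plain Int coordinates (columns and rows 1..8) instead of the project's Field type; the arithmetic matches the definition in Rank.

object FieldRankSketch {
  // Re-stated from Rank.fieldRank: central squares score highest.
  private def colRowRank(cr: Int) = if (cr >= 5) 9 - cr else cr
  private def fieldRank(col: Int, row: Int) = 2 * colRowRank(col) * colRowRank(row)

  def main(args: Array[String]): Unit = {
    println(fieldRank(1, 1)) // corner a1 -> 2 * 1 * 1 = 2
    println(fieldRank(4, 5)) // central d5 -> 2 * 4 * 4 = 32
    println(fieldRank(8, 8)) // corner h8 -> 2 * 1 * 1 = 2
  }
}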
package tokens import org.scalatest.{FreeSpec, MustMatchers} class TokenGeneratorSpec extends FreeSpec with MustMatchers { "TokenGenerator" - { "generates tokens of the configured size" in new Fixture { tokenGenerator.nextTokenFor(smith).value.length mustBe tokenSize tokenGenerator.nextTokenFor(smith).value.length mustBe tokenSize } "generates different tokens each time" in new Fixture { tokenGenerator.nextTokenFor(smith) == tokenGenerator.nextTokenFor(smith) mustBe false } } class Fixture { val tokenSize = 5 val tokenGenerator = new TokenGenerator(tokenSize) val smith = "smith" } }
jawp/testinator
src/test/scala/tokens/TokenGeneratorSpec.scala
Scala
mit
642
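The spec above pins down the TokenGenerator contract (fixed-length token values, a different token on each call). A minimal hypothetical implementation that would satisfy it, not the project's actual one, could look like this; the Token case class name is assumed from the .value access in the spec.

package tokens

import scala.util.Random

case class Token(value: String)

class TokenGenerator(tokenSize: Int) {
  private val random = new Random

  // Returns a fresh alphanumeric token of the configured length on every call.
  // The user argument only mirrors the API exercised by the spec; it is unused here.
  def nextTokenFor(user: String): Token =
    Token(random.alphanumeric.take(tokenSize).mkString)
}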
/*********************************************************************** * Copyright (c) 2013-2017 Commonwealth Computer Research, Inc. * All rights reserved. This program and the accompanying materials * are made available under the terms of the Apache License, Version 2.0 * which accompanies this distribution and is available at * http://www.opensource.org/licenses/apache2.0.php. ***********************************************************************/ package org.locationtech.geomesa.utils.text import java.util.regex.Pattern import org.apache.commons.lang.StringEscapeUtils import org.parboiled.scala._ import org.parboiled.scala.rules.Rule1 /** * Base class for parboiled parsers that provides methods for string and number matching */ class BasicParser extends Parser { private val controlCharPattern = Pattern.compile("""\\p{Cntrl}""") def int: Rule1[Int] = rule { (optional("-") ~ oneOrMore("0" - "9")) ~> (_.toInt) } def long: Rule1[Long] = rule { (optional("-") ~ oneOrMore("0" - "9")) ~> (_.toLong) } def char: Rule0 = rule { "a" - "z" | "A" - "Z" | "0" - "9" | "_" } def string: Rule1[String] = rule { quotedString | singleQuotedString | unquotedString } def unquotedString: Rule1[String] = rule { oneOrMore(char) ~> { c => c } } def quotedString: Rule1[String] = rule { "\\"" ~ zeroOrMore((noneOf("""\\"""") ~? notControlChar) | escapedChar) ~> StringEscapeUtils.unescapeJava ~ "\\"" } def singleQuotedString: Rule1[String] = rule { "'" ~ zeroOrMore((noneOf("""\\'""") ~? notControlChar) | escapedChar) ~> StringEscapeUtils.unescapeJava ~ "'" } private def escapedChar: Rule0 = rule { "\\\\" ~ (anyOf("""\\/"'bfnrt""") | "u" ~ nTimes(4, "0" - "9" | "a" - "f" | "A" - "F")) } private def notControlChar(s: String): Boolean = !controlCharPattern.matcher(s).matches() }
ronq/geomesa
geomesa-utils/src/main/scala/org/locationtech/geomesa/utils/text/BasicParser.scala
Scala
apache-2.0
1,838
/* * GitAllSpark.scala - Analyze all Git commits in parallel on a cluster. */ package se_hpc import org.apache.spark.SparkContext import ammonite.ops._ import ammonite.ops.ImplicitWd._ import java.net._ import java.io._ import org.json4s.JsonDSL._ import scala.util._ object GitAllSparkScala { def main(args: Array[String]) { val config = parseCommandLine(args).getOrElse(Config()) println("Configuration:") println(config.toXML) val sc = new SparkContext() val repoURL = config.url.getOrElse("") val sourceFolder = config.src.getOrElse("") val destinationFolder = config.dst.getOrElse("") // Initial git clone is performed to srcRoot // This should be a shared folder on the Spark cluster val srcRoot = Path(new java.io.File(config.srcRoot.getOrElse("/projects/SE_HPC"))) // The git fetch/git checkout (for each commit) is performed to dstRoot // This need not be shared among all nodes in the Spark cluster val dstRoot = Path(new java.io.File(config.dstRoot.getOrElse("/projects/SE_HPC"))) // Because we analyze many projects, we require a subdirectory for each open source project to be analyzed. val sourcePath = srcRoot / sourceFolder if (exists ! srcRoot) { println("Source folder already exists (non-fatal)") } else { println("Creating non-existent root " + srcRoot.toString) mkdir ! srcRoot } // The use of Try here and elsewhere is to allow any phase to fail. // Ordinarily, failure would only occur if the srcRoot does not contain a valid clone. val localCloneTime = simpleTimer { if (config.gitClone) { System.out.println("Cloning to " + srcRoot.toString) Try { %.git("clone", repoURL)(srcRoot) } } } val hashFetchTime = simpleTimer { val commits = hashCodes(srcRoot, sourceFolder) val rdd = sc.parallelize(config.start until commits.length by config.stride, config.nodes * config.cores) val rddFetch = rdd.map { pos => doGitCheckouts(config, pos, commits(pos)) } // The cache() here is to ensure this RDD is computed before moving onto the next phase. // RDD calculations are designed to be lazy. rddFetch.cache() rddFetch } val clocTime = simpleTimer { val rdd = hashFetchTime.result // List is used here to wrap the XML result so we can perform a reduce properly. // Temporary workaround for not being able to merge Seq[Node] easily (not performance-critical here) val rddCloc = rdd.map { gcp => List(doCloc(config, gcp).toXML) } rddCloc.cache() val result = rddCloc.reduce(_ ++ _) val clocReport = <cloc_report> { result.toSeq } </cloc_report> if (config.clocReportPath.isDefined) writeClocReport(config, clocReport) rdd.count() } val report = Report( localCloneTime.time / 1e9, hashFetchTime.time / 1e9, clocTime.time / 1e9, clocTime.result ) val experiment = Experiment("git-all-spark-scala") if (config.xmlFilename.isDefined) writePerformanceReport(experiment, config, report) } /* * simpleTimer times a block of code and returns a generic TimedResult * * This demonstrates call-by-name style parameter passing in Scala. * The type of the block of code is inferred and returned as the result. */ case class TimedResult[A](time: Double, result: A) def simpleTimer[A](block: => A): TimedResult[A] = { val t0 = System.nanoTime() // This runs the block of code val result = block val t1 = System.nanoTime() TimedResult(t1 - t0, result) } /* * Git Checkout Phase * * This is Phase 1 of the computation. For each commit, doGitCheckouts() is run by mapping the * initial RDD (of commits by position) to checkout each hash and stage it in the filesystem for * CLOC (count lines of code) analysis in Phase 2. 
* * The result of this phase is an RDD of GitCheckoutPhase results, which can also be reported to an XML file * for post processing. */ case class GitCheckoutPhase(order: Int, commit: String, hostname: String, path: String, successful: Boolean, time: Double, usage: Int) { def toXML(): xml.Node = { <checkout> <order>{ order }</order> <commit>{ commit }</commit> <hostname>{ hostname }</hostname> <path>{ path }</path> <success>{ successful }</success> <time>{ time }</time> <usage>{ usage }</usage> </checkout> } } def doGitCheckouts(config: Config, id: Int, hash: String): GitCheckoutPhase = { val srcRoot = Path(new java.io.File(config.srcRoot.getOrElse("/projects/SE_HPC"))) val sourceFolder = config.src.getOrElse("") val sourcePath = srcRoot / sourceFolder val destRoot = Path(new java.io.File(config.dstRoot.getOrElse("/scratch/SE_HPC"))) val destinationFolder = config.dst.getOrElse(InetAddress.getLocalHost().getHostName()) val destinationPath = destRoot / destinationFolder if (!(exists ! destinationPath)) { mkdir ! destinationPath } mkdir ! destinationPath / hash val diskUsage = du(destinationPath) val currentPath = destinationPath / hash // TODO: Make disk usage threshold a command-line option. val checkout = config.checkout && diskUsage.percent < 90 val hashCheckoutTime = simpleTimer { val success = if (checkout) { val r1 = Try { System.out.println("git init " + currentPath.toString) %.git('init)(currentPath) true } val r2 = Try { System.out.println("git remote add upstream " + sourcePath.toString + " " + currentPath.toString) %%("git", "remote", "add", "upstream", sourcePath)(currentPath) true } val r3 = Try { System.out.println("git fetch upstream " + currentPath.toString) %%("git", "fetch", "upstream")(currentPath) true } val r4 = Try { System.out.println("git checkout " + currentPath.toString + " " + hash) %%("git", "checkout", hash)(currentPath) true } /* The code above is allowed to fail. If it does, we still want to know. */ List(r1, r2, r3, r4) map { _.getOrElse(false) } reduce (_ && _) } else { true } if (success) System.out.println("doGitCheckouts(): git succeeded in checkout of hash " + hash) else System.out.println("doGitCheckouts(): git failed in checkout of hash " + hash) success } val commitHashPath = destinationPath / hash GitCheckoutPhase(id, hash, InetAddress.getLocalHost.getHostName, commitHashPath.toString, hashCheckoutTime.result, hashCheckoutTime.time / 1e9, diskUsage.percent) } def hashCodes(rootPath: Path, args: String): Array[String] = { val source = rootPath / args val log = %%("git", "log")(source) val logString = log.toString val logArray = logString.split("\\n") val justHashCodes = logArray filter { line => line.startsWith("commit") } map { line => line.split(" ")(1) } return justHashCodes } /* * CLOC Phase - Count Lines of Code * * This is Phase 2. It assumes that the git checkouts have been staged. We'll know where the actual data * were staged by looking at the GitCheckoutPhase case class (struct) instance to inspect path. This * path will tell us the folder of the checkout. * * doCloc() runs the open source cloc tool to compute the various LOC metrics (lines, blank lines, comment lines) * on all supported languages. We then take this report and return a CountLOC case class instance, which contains * only the necessary information from the cloc tool's output. * * Our ultimate goal is to be able to support other analysis methods (including some of our own, under development). 
* So subsequent phases can add an analysis method like doCloc() and return a structure/report of information * similar to ClocPhase. */ case class ClocPhase(order: Int, commit: String, cloc: Option[CountLOC], hostname: String, gcf: GitCheckoutPhase) { def toXML(): xml.Node = { <cloc_phase> <order>{ order }</order> <commit>{ commit }</commit> <hostname>{ hostname } </hostname> <path>{ gcf.toXML } </path> <report>{ Try { cloc.get.toXML } getOrElse (<cloc/>) }</report> </cloc_phase> } } def doCloc(config: Config, gcp: GitCheckoutPhase): ClocPhase = { val clocTime = simpleTimer { if (config.cloc) { val xmlResult = Try { val output = %%(config.clocPath.get, "--xml", "--quiet", gcp.path) output.out.lines drop (1) reduce (_ + "\\n" + _) } Try { CountLOC(xmlResult.get) }.toOption } else { None } } ClocPhase(gcp.order, gcp.commit, clocTime.result, InetAddress.getLocalHost().getHostName(), gcp) } def writeClocReport(config: Config, document: xml.Node) { val pprinter = new scala.xml.PrettyPrinter(80, 2) // scalastyle:ignore val file = new File(config.clocReportPath.get) val bw = new BufferedWriter(new FileWriter(file)) println("Wrote cloc report file " + config.clocReportPath.get) bw.write(pprinter.format(document)) // scalastyle:ignore bw.close() } /* * This is for parsing the command line options. We have followed the pattern above by having the * command line option Config be usable to inspect the options set and for the purposes of reporting. */ def parseCommandLine(args: Array[String]): Option[Config] = { val parser = new scopt.OptionParser[Config]("scopt") { head("simplemap-spark-scala", "0.1.x") opt[String]("src") action { (x, c) => c.copy(src = Some(x)) } text ("src (String) is the name of the source folder (should match repo name) and not a path") opt[String]("dst") action { (x, c) => c.copy(dst = Some(x)) } text ("dst (String) is the name of the destination folder and not a path") opt[Int]("nodes") action { (x, c) => c.copy(nodes = x) } text ("nodes (int) is the number of cluster nodes") opt[Int]("cores") action { (x, c) => c.copy(cores = x) } text ("cores (int) is the number of cores on each cluster node") opt[String]("xml") action { (x, c) => c.copy(xmlFilename = Some(x)) } text (s"xml (string) is the name or path to a filename for writing the performance report") opt[String]("src-root") action { (x, c) => c.copy(srcRoot = Some(x)) } text ("src-root (String) is the base directory where the --src folder will be cloned") opt[String]("dst-root") action { (x, c) => c.copy(dstRoot = Some(x)) } text ("dst-root (String) is the base base directory where the --dst folder will be created for staging commits") opt[String]("url") action { (x, c) => c.copy(url = Some(x)) } text ("url (String) is the repo URL. This URL must work with git clone on your computer.") opt[Unit]("checkout") action { (x, c) => c.copy(checkout = true) } text ("checkout (Boolean) indicates whether we should perform checkouts (default False)") opt[Unit]("cloc") action { (x, c) => c.copy(cloc = true) } text ("cloc sets a flag to run the cloc line-counting tool") opt[String]("cloc-path") action { (x, c) => c.copy(clocPath = Some(x)) } text ("cloc-path (String) indicates the location of the cloc tool. Only used if cloc option is enabled.") opt[String]("cloc-report") action { (x, c) => c.copy(clocReportPath = Some(x)) } text ("cloc-report (String) is the path where to write the cloc report. 
Only used if cloc option is enabled.") opt[Unit]("git-clone") action { (x, c) => c.copy(gitClone = true) } text ("git-clone indicates whether the clone is to be performed by the Spark driver code") opt[Int]("start") action { (x, c) => c.copy(start = x) } text ("start (int) is the commit (by position) on master where to start (defaults to 0). Useful when you have extremely large repositories.") opt[Int]("stride") action { (x, c) => c.copy(stride = x) } text ("stride (int) is how many commits to skip (by position) on master (defaults to 1). Useful when you have extremely large repositories.") opt[String]("github") action { (x, c) => val gitPair = x.split("/") if (gitPair.length >= 2) { val org = gitPair(0) val repo = gitPair(1) c.copy(src = Some(repo), dst = Some(repo + "-commits"), url = Some(s"https://github.com/$org/$repo.git"), github = Some(x)) } else c.copy(github = Some(x)) } text ("github (String) is a user-org/repo-name; implies --url, --src, and --dst") help("help") text ("prints this usage text") } parser.parse(args, Config()) } case class Config( src: Option[String] = None, dst: Option[String] = None, cores: Int = 4, nodes: Int = 1, srcRoot: Option[String] = None, dstRoot: Option[String] = None, url: Option[String] = None, checkout: Boolean = false, cloc: Boolean = false, clocPath: Option[String] = Some("/usr/bin/cloc"), clocReportPath: Option[String] = None, start: Int = 0, stride: Int = 1, gitClone: Boolean = false, xmlFilename: Option[String] = None, github: Option[String] = None ) { def toXML(): xml.Elem = { <config> <property key="src" value={ src.getOrElse("") }/> <property key="dst" value={ dst.getOrElse("") }/> <property key="cores" value={ cores.toString }/> <property key="nodes" value={ nodes.toString }/> <property key="src-root" value={ srcRoot.getOrElse("") }/> <property key="dst-root" value={ dstRoot.getOrElse("") }/> <property key="url" value={ url.getOrElse("") }/> <property key="github" value={ github.getOrElse("") }/> <property key="checkout" value={ checkout.toString }/> <property key="cloc" value={ cloc.toString }/> <property key="clocPath" value={ clocPath.getOrElse("").toString }/> <property key="start" value={ start.toString }/> <property key="stride" value={ stride.toString }/> <property key="git-clone" value={ gitClone.toString }/> <property key="xml" value={ xmlFilename.getOrElse("") }/> </config> } } /* * Performance Report */ case class Experiment(name: String) { def toXML(): xml.Elem = <experiment id={ name }/> } case class Report(cloneTime: Double, hashCheckoutTime: Double, clocTime: Double, commits: Long) { def toXML(): xml.Node = { <report> <time id="clone-time" t={ cloneTime.toString } unit="s"/> <time id="hash-checkout-time" t={ hashCheckoutTime.toString } avg={ (hashCheckoutTime / commits).toString } unit="s"/> <time id="cloc-time" t={ clocTime.toString } avg={ (clocTime / commits).toString } unit="s"/> <commits n={ commits.toString }/> </report> } } def writePerformanceReport(exp: Experiment, config: Config, data: Report): Unit = { val results = <results> { exp.toXML }{ config.toXML }{ data.toXML } </results> val pprinter = new scala.xml.PrettyPrinter(80, 2) // scalastyle:ignore val file = new File(config.xmlFilename.get) val bw = new BufferedWriter(new FileWriter(file)) println("Wrote to XML file " + config.xmlFilename.get) bw.write(pprinter.format(results)) // scalastyle:ignore bw.close() } /* * Utility for Examining Disk Usage */ def du(path: Path): DiskUsage = { val usage = %%("df", "-Pkh", path) val lines = usage.out.lines val headings = 
lines(0).replace("Mounted on", "Mounted-on").split("\\\\s+") val fields = lines(1).split("\\\\s+") val dfMap = (headings zip fields) toMap val filesystem = dfMap.getOrElse("Filesystem", "") val size = dfMap.getOrElse("Size", "0G") val used = dfMap.getOrElse("Used", "0G") val avail = dfMap.getOrElse("Avail", "0G") val percent = dfMap.getOrElse("Use%", "0%") val mount = dfMap.getOrElse("Mounted-on", "") val percentMatcher = "\\\\d+%".r val storageMatcher = "\\\\d+(K|M|G|P)".r val sizeFound = storageMatcher.findFirstIn(size).get val usedFound = storageMatcher.findFirstIn(used).get val availFound = storageMatcher.findFirstIn(avail).get val percentFound = percentMatcher.findFirstIn(percent).get DiskUsage( filesystem, Storage(sizeFound.dropRight(1), sizeFound.last), Storage(usedFound.dropRight(1), usedFound.last), Storage(availFound.dropRight(1), availFound.last), percentFound.dropRight(1).toInt, mount ) } case class Storage(amount: String, unit: Char) case class DiskUsage(fs: String, size: Storage, used: Storage, avail: Storage, percent: Int, mount: String) }
gkthiruvathukal/git-all-spark-scala
src/main/scala/GitAllSpark.scala
Scala
agpl-3.0
17,037
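For reference, a hedged sketch of how the option parser above can be exercised directly (for example from a small driver or test); the repository name, resource counts, and output file are made-up values, and "--github org/repo" expands into --url/--src/--dst as documented in parseCommandLine.

import se_hpc.GitAllSparkScala

object GitAllSparkConfigSketch {
  def main(args: Array[String]): Unit = {
    val parsed = GitAllSparkScala.parseCommandLine(Array(
      "--github", "apache/spark", // hypothetical target repository
      "--nodes", "4", "--cores", "8",
      "--src-root", "/projects/SE_HPC", "--dst-root", "/scratch/SE_HPC",
      "--checkout", "--cloc", "--cloc-path", "/usr/bin/cloc",
      "--xml", "performance.xml"))

    // Fall back to an all-defaults Config if parsing fails, then print it as XML.
    val config = parsed.getOrElse(GitAllSparkScala.Config())
    println(config.toXML)
  }
}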
package tifmo import dcstree.Executor import dcstree.SemRole import dcstree.Relation import scala.collection.mutable package inference { import Finder._ import RulesQuick._ import RulesLight._ /** * The core of the inference engine. * * This class implements the forward chaining algorithm, provides the * infrastructure for writing rules, and implements basic axioms of * relational algebra in a way that can avoid most combinatorial explosions. */ class IEngineCore extends Executor { private[this] var predCounter = 0 def getNewPredID() = { predCounter += 1 predCounter } // lock /** * Locking mechanism for debugging. * * If locked, an error will occur if the status of the inference engine is changed * (e.g. new atomic sentences proven). */ var locked = false // W protected[this] var W = null:TermIndex protected[this] val WPool = mutable.Map.empty[Dimension, TermIndex] /** * Get the W term. */ def getW(dim: Dimension) = { if (W == null) { assert(!locked) W = new TermIndex(new Dimension(null)) W.setwflag() IEPredNonEmpty(W).apply(Debug_SimpleRuleTrace("W", getNewPredID())) IEPredSubsume(W, W).apply(Debug_SimpleRuleTrace("W", getNewPredID())) anyTermPool.foreach(_(this, W)) WPool(W.dim) = W } WPool.getOrElseUpdate(dim, { assert(!locked) val ret = new TermIndex(dim) ret.setwflag() IEPredNonEmpty(ret).apply(Debug_SimpleRuleTrace("W", getNewPredID())) IEPredSubsume(ret, ret).apply(Debug_SimpleRuleTrace("W", getNewPredID())) anyTermPool.foreach(_(this, ret)) claimCP(ret, dim.relabel(null).map(r => (W, r)), Debug_SimpleRuleTrace("W", getNewPredID())) ret }) } // new term def newTerm(dim: Dimension) = { assert(!locked) val w = getW(dim) val term = new TermIndex(w.dim) IEPredSubsume(term, term).apply(Debug_SimpleRuleTrace("tm id", getNewPredID())) anyTermPool.foreach(_(this, term)) for (x <- w.superSets) { applySubsume(term, x, Debug_SimpleRuleTrace("tm sub W", getNewPredID())) } term } // contradiction: private[this] var contraFlag = false /** * Returns whether there has been a contradiction during forward chaining. */ def hasContradiction = contraFlag def contradict() { contraFlag = true } // five hierarchy forward chaining: private[this] val quick = mutable.Queue.empty[() => Unit] private[inference] def newQuick(todo: () => Unit) { quick.enqueue(todo) } private[this] val mergePool = mutable.Set.empty[TermIndex] private[this] val light = mutable.Queue.empty[(() => Unit, Set[TermIndex])] private[inference] def newLight(todo: () => Unit, dep: Set[TermIndex]) { light.enqueue((todo, dep)) } private[this] class Sched[T <: IEPred](pred: T, f: RuleDo[T], args: Seq[RuleArg]) { private[this] val dep = args.flatMap(_.terms).toSet def sched() { if (pred.valid && dep.forall(_.valid)) f(IEngineCore.this, pred, args) } } private[this] val heavy = mutable.Queue.empty[Sched[_ <: IEPred]] private[inference] def newHeavy[T <: IEPred](pred: T, f: RuleDo[T], args: Seq[RuleArg]) { heavy.enqueue(new Sched[T](pred, f, args)) } private[this] val constructQ = mutable.Queue.empty[() => Unit] /** * Forward chaining process. 
*/ def explore() { def loop() { if (!quick.isEmpty) { quick.dequeue()() loop() } else if (!mergePool.isEmpty) { val cache = mutable.Set.empty[TermIndex] var most = 0 var tomerge = null:Set[TermIndex] for (x <- mergePool; if !cache.contains(x)) { val tmp = x.subSets intersect x.superSets cache ++= tmp if (tmp.size > most) { most = tmp.size tomerge = tmp } } mergePool --= tomerge assert(tomerge.size >= 2) val a = tomerge.find(_.isW) match { case Some(x) => x case None => { tomerge.maxBy[Int](x => x.iscps.size + x.mkcps.size + x.isins.size + x.mkins.size + x.ispis.size + x.mkpis.size) } } val eqs = tomerge - a merge(a, eqs) loop() } else if (!light.isEmpty) { val (todo, dep) = light.dequeue() if (dep.forall(_.valid)) todo() loop() } else if (!heavy.isEmpty) { heavy.dequeue().sched() loop() } else if (!constructQ.isEmpty) { constructQ.dequeue()() loop() } } loop() } // apply & claim private[inference] def applyNonEmpty(a: TermIndex, debug_trace: Debug_RuleTrace) = { val tmp = IEPredNonEmpty(a) if (tmp.apply(debug_trace)) { assert(!locked) rqPINE1(this, tmp) rqPINE2(this, tmp) rqCPNE1(this, tmp) rqCPNE2(this, tmp) rqPICP3(this, tmp) for (x <- a.neTriggers) x.fire(this, tmp) if (a.selfDisjoint) contradict() true } else { false } } /** * Claim a term to be non-empty. */ def claimNonEmpty(a: TermIndex, debug_trace: Debug_RuleTrace = Debug_SimpleRuleTrace("default", getNewPredID())) { if (applyNonEmpty(a, debug_trace)) { for (x <- a.superSets) { applyNonEmpty(x, Debug_RuleSubNE(a.holder, x.holder, getNewPredID())) } } } private[inference] def applyDisjoint(a: TermIndex, b: TermIndex, debug_trace: Debug_RuleTrace) = { val tmp = IEPredDisjoint(a, b) if (tmp.apply(debug_trace)) { assert(!locked) for (x <- a.djtTriggers) x.fire(this, tmp) if (a == b) { if (a.knownNE) contradict() } else { val tmp2 = IEPredDisjoint(b, a) val rec = tmp2.apply(Debug_SimpleRuleTrace("djt rev", getNewPredID())) assert(rec) for (x <- b.djtTriggers) x.fire(this, tmp2) } true } else { false } } /** * Claim two terms to be disjoint. */ def claimDisjoint(a: TermIndex, b: TermIndex, debug_trace: Debug_RuleTrace = Debug_SimpleRuleTrace("default", getNewPredID())) { assert(a.dim == b.dim) if (applyDisjoint(a, b, debug_trace)) { newLight(() => rlDjtSubPI1(this, a, b), Set(a, b)) } } private[inference] def applySubsume(a: TermIndex, b: TermIndex, debug_trace: Debug_RuleTrace) = { val tmp = IEPredSubsume(a, b) if (tmp.apply(debug_trace)) { assert(!locked) if (a.hasSub(b)) mergePool += a for (in <- b.mkins; if in.comp.forall(_.hasSub(a))) { newQuick(() => claimSubsume(a, in.head, Debug_RuleINdef(a.holder, in.head.holder, in.comp.map(_.holder), getNewPredID()))) } for (x <- a.subTriggers) x.fire(this, tmp) for (x <- b.superTriggers) x.fire(this, tmp) true } else { false } } /** * Claim term `a` to be a subset of term `b`. 
*/ def claimSubsume(a: TermIndex, b: TermIndex, debug_trace: Debug_RuleTrace = Debug_SimpleRuleTrace("default", getNewPredID())) { assert(a.dim == b.dim) if (applySubsume(a, b, debug_trace)) { val bsup = b.superSets newLight(() => rlDjtSubPI2(this, a, b), Set(a, b)) var task = Nil:List[() => Boolean] for (x <- a.subSets) { if (!x.hasSuper(b) || x == a) { for (y <- bsup) { task = (() => applySubsume(x, y, Debug_RuleSubSub(x.holder, a.holder, b.holder, y.holder, getNewPredID()))) :: task } } else { assert(bsup.forall(x.hasSuper(_))) } } task.foreach(_()) if (a.knownNE) { claimNonEmpty(b, Debug_RuleSubNE(a.holder, b.holder, getNewPredID())) } } } private[inference] def applyCP(h: TermIndex, t: Set[(TermIndex, SemRole)], debug_trace: Debug_RuleTrace) = { val tmp = IEPredCP(h, t) if (tmp.apply(debug_trace)) { assert(!locked) val mincps = t.minBy[Int](_._1.mkcps.size)._1.mkcps for (cp <- mincps; if cp.comp == t && cp != tmp) { newQuick(() => claimSubsume(h, cp.head, Debug_SimpleRuleTrace("CP Uniqueness", getNewPredID()))) newQuick(() => claimSubsume(cp.head, h, Debug_SimpleRuleTrace("CP Uniqueness", getNewPredID()))) } rqCPNE3(this, tmp) rqCPNE4(this, tmp) rqPICP1(this, tmp) for (x <- t.map(_._1); y <- x.mkcpTriggers) y.fire(this, tmp) for (x <- h.iscpTriggers) x.fire(this, tmp) true } else { false } } /** * Claim term `h` to be the Cartesian product of `t` */ def claimCP(h: TermIndex, t: Set[(TermIndex, SemRole)], debug_trace: Debug_RuleTrace = Debug_SimpleRuleTrace("default", getNewPredID())) { assert{ val hrs = h.dim.relabel(null) val rss = t.map(x => x._1.dim.relabel(x._2)) t.size >= 2 && hrs == rss.flatten && hrs.size == (0 /: rss)(_ + _.size) } if (applyCP(h, t, debug_trace)) { newLight(() => rlCPCP(this, h, t), t.map(_._1) + h) } } private[inference] def applyPI(h: TermIndex, t: TermIndex, r: SemRole, debug_trace: Debug_RuleTrace) = { val tmp = IEPredPI(h, t, r) if (tmp.apply(debug_trace)) { assert(!locked) for (pi <- t.mkpis; if pi.headrs == tmp.headrs && pi != tmp) { newQuick(() => claimSubsume(h, pi.head, Debug_SimpleRuleTrace("PI Uniqueness", getNewPredID()))) newQuick(() => claimSubsume(pi.head, h, Debug_SimpleRuleTrace("PI Uniqueness", getNewPredID()))) } rqPINE3(this, tmp) rqPINE4(this, tmp) rqPICP2(this, tmp) newLight(() => rlDjtSubPI3(this, h, t, r), Set(h, t)) for (x <- t.mkpiTriggers) x.fire(this, tmp) for (x <- h.ispiTriggers) x.fire(this, tmp) true } else { false } } /** * Claim term `h` to be the projection of term `t` into role `r` */ def claimPI(h: TermIndex, t: TermIndex, r: SemRole, debug_trace: Debug_RuleTrace = Debug_SimpleRuleTrace("default", getNewPredID())) { assert{ val hrs = h.dim.relabel(r) val trs = t.dim.relabel(null) hrs.subsetOf(trs) && trs != hrs } if (applyPI(h, t, r, debug_trace)) { newLight(() => rlPIPI(this, h, t, r), Set(h, t)) } } private[this] def esssub(x: Set[TermIndex], y: Set[TermIndex]) = { x.subsetOf(y.flatMap(z => z.subSets intersect z.superSets)) } private[inference] def applyIN(h: TermIndex, t: Set[TermIndex], aux: Boolean, debug_trace: Debug_RuleTrace) = { h.isins.filter(in => in.aux && esssub(t, in.comp)).foreach(_.dispose()) val tmp = IEPredIN(h, t, aux) if (tmp.apply(debug_trace)) { assert(!locked) val subs = t.minBy[Int](_.assuper.size).subSets.filter(x => t.forall(_.hasSub(x))) for (x <- subs) { newQuick(() => claimSubsume(x, h, Debug_RuleINdef(x.holder, h.holder, t.map(_.holder), getNewPredID()))) } if (!aux) { for (x <- h.isinTriggers) x.fire(this, tmp) for (x <- t; y <- x.mkinTriggers) y.fire(this, tmp) } true } else { false } } /** 
* Claim term `h` to be the intersection of terms `t` */ def claimIN(h: TermIndex, t: Set[TermIndex], aux: Boolean = false, debug_trace: Debug_RuleTrace = Debug_SimpleRuleTrace("default", getNewPredID())) { assert(t.forall(_.dim == h.dim)) assert(aux || h.dim.size >= 2) t.foreach(claimSubsume(h, _, Debug_SimpleRuleTrace("IN def", getNewPredID()))) def squeeze(t: Set[TermIndex]) = { val tmp = for (x <- t; if !t.exists(y => y.hasSuper(x) && !x.hasSuper(y))) yield { x.superSets intersect x.subSets } tmp.map(_.head) } if (aux) { val mt = squeeze(t) if (mt.size <= 1) { claimSubsume(mt.head, h, Debug_SimpleRuleTrace("IN Check", getNewPredID())) } else if (!h.isins.exists(in => esssub(in.comp, mt))) { applyIN(h, mt, true, debug_trace) } } else { applyIN(h, t, false, debug_trace) } } /** * Claim `rl(a, b)` for the user-defined binary relation `rl`. */ def claimRL(a: TermIndex, rl: Relation, b: TermIndex, debug_trace: Debug_RuleTrace = Debug_SimpleRuleTrace("default", getNewPredID())) { val tmp = IEPredRL(a, rl, b) if (tmp.apply(debug_trace)) { assert(!locked) for (x <- a.arlTriggers) x.fire(this, tmp) for (x <- b.rlbTriggers) x.fire(this, tmp) rl.execute[TermIndex](this, a, b) } } /** * Claim `tms.head = func(tms.tail, param)` for the user-defined function `func`. */ def claimFunc(func: IEFunction, tms: Seq[TermIndex], param: Any, debug_trace: Debug_RuleTrace = Debug_SimpleRuleTrace("default", getNewPredID())) { assert(tms.head.dim == func.headDim(null +: tms.tail.map(_.holder), param)) if (IEPredFunc(func, tms, param).apply(debug_trace)) { assert(!locked) if (!tms.tail.isEmpty) { val minfuncs = tms.tail.minBy[Int](_.funcs.size).funcs for (x <- minfuncs; if x.func == func && x.param == param && x.tms.tail == tms.tail) { claimSubsume(tms.head, x.tms.head, Debug_SimpleRuleTrace("Func Uniqueness", getNewPredID())) claimSubsume(x.tms.head, tms.head, Debug_SimpleRuleTrace("Func Uniqueness", getNewPredID())) } } func.applyFunc(this, tms, param) } } // merge: private[this] def merge(a: TermIndex, eqs: Set[TermIndex]) { eqs.foreach(_.holder.idx = a) for (x <- eqs) { assert(a.hasSuper(x)) assert(a.hasSub(x)) assert(a.knownNE == x.knownNE) for (y <- x.superSets; if !eqs.contains(y)) assert(a.hasSuper(y)) for (y <- x.subSets; if !eqs.contains(y)) assert(a.hasSub(y)) } for (x <- eqs) { x.kne.toList.foreach(_.dispose()) x.assub.toList.foreach(_.dispose()) x.assuper.toList.foreach(_.dispose()) } def replace(x: TermIndex) = if (eqs.contains(x)) a else x var task = Nil:List[() => Unit] for (x <- eqs) { for (djt <- x.djts) { val na = replace(djt.a) val nb = replace(djt.b) val ntr = djt.debug_trace task = (() => claimDisjoint(na, nb, ntr)) :: task } for (cp <- x.iscps) { val ncomp = cp.comp.map(y => (replace(y._1), y._2)) val ntr = cp.debug_trace task = (() => claimCP(a, ncomp, ntr)) :: task } for (cp <- x.mkcps) { val nhead = replace(cp.head) val ncomp = cp.comp.map(y => (replace(y._1), y._2)) val ntr = cp.debug_trace task = (() => claimCP(nhead, ncomp, ntr)) :: task } for (pi <- x.ispis) { val ncompt = replace(pi.compt) val ncompr = pi.compr val ntr = pi.debug_trace task = (() => claimPI(a, ncompt, ncompr, ntr)) :: task } for (pi <- x.mkpis) { val nhead = replace(pi.head) val ncompr = pi.compr val ntr = pi.debug_trace task = (() => claimPI(nhead, a, ncompr, ntr)) :: task } for (in <- x.isins) { val ncomp = in.comp.map(replace(_)) val naux = in.aux val ntr = in.debug_trace task = (() => claimIN(a, ncomp, naux, ntr)) :: task } for (in <- x.mkins) { val nhead = replace(in.head) val ncomp = in.comp.map(replace(_)) val 
naux = in.aux val ntr = in.debug_trace task = (() => claimIN(nhead, ncomp, naux, ntr)) :: task } for (fc <- x.funcs) { val nfunc = fc.func val ntms = fc.tms.map(replace(_)) val nparam = fc.param val ntr = fc.debug_trace task = (() => claimFunc(nfunc, ntms, nparam, ntr)) :: task } for (rel <- x.asarl) { val nb = replace(rel.b) val nrl = rel.rl val ntr = rel.debug_trace task = (() => claimRL(a, nrl, nb, ntr)) :: task } for (rel <- x.asrlb) { val na = replace(rel.a) val nrl = rel.rl val ntr = rel.debug_trace task = (() => claimRL(na, nrl, a, ntr)) :: task } } for (x <- eqs) { for (y <- x.djts.toList) { y.dispose() IEPredDisjoint(y.b, y.a).dispose() } x.iscps.toList.foreach(_.dispose()) x.mkcps.toList.foreach(_.dispose()) x.ispis.toList.foreach(_.dispose()) x.mkpis.toList.foreach(_.dispose()) x.isins.toList.foreach(_.dispose()) x.mkins.toList.foreach(_.dispose()) x.funcs.toList.foreach(_.dispose()) x.asarl.toList.foreach(_.dispose()) x.asrlb.toList.foreach(_.dispose()) } task.foreach(_()) for (x <- eqs) { x.disposers.toList.foreach(_()) } val toreloc = mutable.Set.empty[Guard[_ <: IEPred]] for (x <- eqs) { x.neTriggers.foreach(tg => { tg.guard.change(tg, a) toreloc += tg.guard }) x.subTriggers.foreach(tg => { tg.guard.change(tg, a) toreloc += tg.guard }) x.superTriggers.foreach(tg => { tg.guard.change(tg, a) toreloc += tg.guard }) x.djtTriggers.foreach(tg => { tg.guard.change(tg, a) toreloc += tg.guard }) x.iscpTriggers.foreach(tg => { tg.guard.change(tg, a) toreloc += tg.guard }) x.mkcpTriggers.foreach(tg => { tg.guard.change(tg, a) toreloc += tg.guard }) x.ispiTriggers.foreach(tg => { tg.guard.change(tg, a) toreloc += tg.guard }) x.mkpiTriggers.foreach(tg => { tg.guard.change(tg, a) toreloc += tg.guard }) x.isinTriggers.foreach(tg => { tg.guard.change(tg, a) toreloc += tg.guard }) x.mkinTriggers.foreach(tg => { tg.guard.change(tg, a) toreloc += tg.guard }) x.arlTriggers.foreach(tg => { tg.guard.change(tg, a) toreloc += tg.guard }) x.rlbTriggers.foreach(tg => { tg.guard.change(tg, a) toreloc += tg.guard }) } toreloc.foreach(_.locate(this)) for (x <- eqs) { assert(x.kne.isEmpty) assert(x.assub.isEmpty) assert(x.assuper.isEmpty) assert(x.djts.isEmpty) assert(x.iscps.isEmpty) assert(x.mkcps.isEmpty) assert(x.ispis.isEmpty) assert(x.mkpis.isEmpty) assert(x.isins.isEmpty) assert(x.mkins.isEmpty) assert(x.funcs.isEmpty) assert(x.asarl.isEmpty) assert(x.asrlb.isEmpty) assert(x.disposers.isEmpty) assert(x.neTriggers.isEmpty) assert(x.subTriggers.isEmpty) assert(x.superTriggers.isEmpty) assert(x.djtTriggers.isEmpty) assert(x.iscpTriggers.isEmpty) assert(x.mkcpTriggers.isEmpty) assert(x.ispiTriggers.isEmpty) assert(x.mkpiTriggers.isEmpty) assert(x.isinTriggers.isEmpty) assert(x.mkinTriggers.isEmpty) assert(x.arlTriggers.isEmpty) assert(x.rlbTriggers.isEmpty) } } // foreach: private[this] val anyTermPool = mutable.Set.empty[(IEngineCore, TermIndex) => Unit] protected def forAnyTerm(f: (IEngineCore, TermIndex) => Unit) { anyTermPool.add(f) } /** * Setup a callback function which is activated when `term` is known to be non-empty. */ def ifNotEmpty(term: TermIndex, args: Seq[RuleArg], f: RuleDo[IEPredNonEmpty]) { assert(term.valid) (new Watcher[IEPredNonEmpty]("ne", f, args)).initialize(term, this) } /** * Setup a callback function which is activated whenever `term` has a new super-set. 
*/ def foreachSuperset(term: TermIndex, args: Seq[RuleArg], f: RuleDo[IEPredSubsume]) { assert(term.valid) (new Watcher[IEPredSubsume]("sub", f, args)).initialize(term, this) } /** * Setup a callback function which is activated whenever `term` has a new subset. */ def foreachSubset(term: TermIndex, args: Seq[RuleArg], f: RuleDo[IEPredSubsume]) { assert(term.valid) (new Watcher[IEPredSubsume]("super", f, args)).initialize(term, this) } /** * Setup a callback function which is activated whenever `term` has a new disjoint. */ def foreachDisjoint(term: TermIndex, args: Seq[RuleArg], f: RuleDo[IEPredDisjoint]) { assert(term.valid) (new Watcher[IEPredDisjoint]("djt", f, args)).initialize(term, this) } /** * Setup a callback function which is activated whenever `term` is known to be a Cartesian product. */ def foreachIsCP(term: TermIndex, args: Seq[RuleArg], f: RuleDo[IEPredCP]) { assert(term.valid) (new Watcher[IEPredCP]("iscp", f, args)).initialize(term, this) } /** * Setup a callback function which is activated whenever `term` is known to be a component of some Cartesian product. */ def foreachMkCP(term: TermIndex, args: Seq[RuleArg], f: RuleDo[IEPredCP]) { assert(term.valid) (new Watcher[IEPredCP]("mkcp", f, args)).initialize(term, this) } /** * Setup a callback function which is activated whenever `term` is known to be a projection. */ def foreachIsPI(term: TermIndex, args: Seq[RuleArg], f: RuleDo[IEPredPI]) { assert(term.valid) (new Watcher[IEPredPI]("ispi", f, args)).initialize(term, this) } /** * Setup a callback function which is activated whenever some other term is known to be a projection of `term`. */ def foreachMkPI(term: TermIndex, args: Seq[RuleArg], f: RuleDo[IEPredPI]) { assert(term.valid) (new Watcher[IEPredPI]("mkpi", f, args)).initialize(term, this) } /** * Setup a callback function which is activated whenever `term` is known to be an intersection. */ def foreachIsIN(term: TermIndex, args: Seq[RuleArg], f: RuleDo[IEPredIN]) { assert(term.valid) (new Watcher[IEPredIN]("isin", f, args)).initialize(term, this) } /** * Setup a callback function which is activated whenever `term` is known to be a component of an intersection. */ def foreachMkIN(term: TermIndex, args: Seq[RuleArg], f: RuleDo[IEPredIN]) { assert(term.valid) (new Watcher[IEPredIN]("mkin", f, args)).initialize(term, this) } /** * Setup a callback function which is activated whenever a relation `rl(term, x)` is known, for some user-defined relation `rl` and some term `x`. */ def foreachARLX(term: TermIndex, args: Seq[RuleArg], f: RuleDo[IEPredRL]) { assert(term.valid) (new Watcher[IEPredRL]("arl", f, args)).initialize(term, this) } /** * Setup a callback function which is activated whenever a relation `rl(x, term)` is known, for some user-defined relation `rl` and some term `x`. */ def foreachXRLB(term: TermIndex, args: Seq[RuleArg], f: RuleDo[IEPredRL]) { assert(term.valid) (new Watcher[IEPredRL]("rlb", f, args)).initialize(term, this) } /** * Setup a callback function which is activated when the Cartesian product of `comp` is constructed. */ def forCPof(comp: Set[(TermIndex, SemRole)], args: Seq[RuleArg], f: RuleDo[IEPredCP]) { comp.map(_._1).foreach(_.valid) (new ForCPof(f, args)).initialize(comp, this) } /** * Setup a callback function which is activated when `a` is known to be subsumed by `b`. 
*/ def ifSubsume(a: TermIndex, b: TermIndex, args: Seq[RuleArg], f: RuleDo[IEPredSubsume]) { assert(a.valid) assert(b.valid) (new IfSubsume(f, args)).initialize(a, b, this) } /** * Setup a callback function which is activated when `a` is known to be disjoint to `b`. */ def ifDisjoint(a: TermIndex, b: TermIndex, args: Seq[RuleArg], f: RuleDo[IEPredDisjoint]) { assert(a.valid) assert(b.valid) (new IfDisjoint(f, args)).initialize(a, b, this) } /** * Setup a callback function which is activated when there is a relation `rl(a, b)`. */ def ifRelation(a: TermIndex, b: TermIndex, args: Seq[RuleArg], f: RuleDo[IEPredRL]) { assert(a.valid) assert(b.valid) (new IfRelation(f, args)).initialize(a, b, this) } // getter: /** * Get Cartesian product. */ def getCP(comp: Set[(Term, SemRole)]) = findCP(comp) match { case Some(x) => x case None => { val dim = new Dimension(comp.flatMap(x => x._1.dim.relabel(x._2))) val ret = newTerm(dim) claimCP(ret, comp.map(x => (x._1.index, x._2)), Debug_SimpleRuleTrace("getCP", getNewPredID())) ret.holder } } /** * Get projection. */ def getPI(compt: Term, headrs: Set[SemRole]) = findPI(compt, headrs) match { case Some(x) => x case None => { val (dim, r) = Dimension(headrs) val ret = newTerm(dim) claimPI(ret, compt.index, r, Debug_SimpleRuleTrace("getPI", getNewPredID())) ret.holder } } /** * Get intersection. */ def getIN(comp: Set[Term], aux: Boolean = false) = findIN(comp) match { case Some(x) => { if (!aux && x.dim.size >= 2) { claimIN(x.index, comp.map(_.index), false, Debug_SimpleRuleTrace("getIN", getNewPredID())) } x } case None => { val dim = comp.head.dim val ret = newTerm(dim) claimIN(ret, comp.map(_.index), aux || dim.size == 1, Debug_SimpleRuleTrace("getIN", getNewPredID())) ret.holder } } /** * Get `func(tms.tail, param)` for the user-defined function `func`. */ def getFunc(func: IEFunction, tms: Seq[Term], param: Any) = findFunc(func, tms, param) match { case Some(x) => x case None => { val dim = func.headDim(tms, param) val ret = newTerm(dim) claimFunc(func, ret +: tms.tail.map(_.index), param, Debug_SimpleRuleTrace("getFunc", getNewPredID())) ret.holder } } // constructors: def construct(finder: () => Option[Term], getter: () => Term, args: Seq[RuleArg], f: (Term, Seq[RuleArg]) => Unit) { val dep = args.flatMap(_.terms).toSet assert(dep.forall(_.valid)) finder() match { case Some(x) => f(x, args) case None => constructQ.enqueue(() => if (dep.forall(_.valid)) f(getter(), args)) } } def constructCP(comp: Set[(Term, SemRole)], args: Seq[RuleArg], f: (Term, Seq[RuleArg]) => Unit) { val finder = () => findCP(comp) val getter = () => getCP(comp) construct(finder, getter, args, f) } def constructPI(compt: Term, headrs: Set[SemRole], args: Seq[RuleArg], f: (Term, Seq[RuleArg]) => Unit) { val finder = () => findPI(compt, headrs) val getter = () => getPI(compt, headrs) construct(finder, getter, args, f) } def constructIN(comp: Set[Term], args: Seq[RuleArg], f: (Term, Seq[RuleArg]) => Unit) { val finder = () => findIN(comp) val getter = () => getIN(comp, true) construct(finder, getter, args, f) } //////////////////////////////////////////////////// forAnyTerm(rPISub) forAnyTerm(rCPSub) forAnyTerm(rPIdef) forAnyTerm(rPIWIN) forAnyTerm(rCPIN) } }
tianran/tifmo
src/tifmo/inference/IEngineCore.scala
Scala
bsd-2-clause
25,620
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package kafka.server import kafka.testkit.{KafkaClusterTestKit, TestKitNodes} import kafka.utils.TestUtils import org.apache.kafka.clients.admin.{Admin, NewTopic} import org.apache.kafka.metadata.BrokerState import org.junit.jupiter.api.{Test, Timeout} import org.junit.jupiter.api.Assertions._ import java.util import java.util.Collections @Timeout(120000) class RaftClusterTest { @Test def testCreateClusterAndClose(): Unit = { val cluster = new KafkaClusterTestKit.Builder( new TestKitNodes.Builder(). setNumKip500BrokerNodes(1). setNumControllerNodes(1).build()).build() try { cluster.format() cluster.startup() } finally { cluster.close() } } @Test def testCreateClusterAndWaitForBrokerInRunningState(): Unit = { val cluster = new KafkaClusterTestKit.Builder( new TestKitNodes.Builder(). setNumKip500BrokerNodes(3). setNumControllerNodes(3).build()).build() try { cluster.format() cluster.startup() TestUtils.waitUntilTrue(() => cluster.brokers().get(0).currentState() == BrokerState.RUNNING, "Broker never made it to RUNNING state.") TestUtils.waitUntilTrue(() => cluster.raftManagers().get(0).kafkaRaftClient.leaderAndEpoch().leaderId.isPresent, "RaftManager was not initialized.") val admin = Admin.create(cluster.clientProperties()) try { assertEquals(cluster.nodes().clusterId().toString, admin.describeCluster().clusterId().get()) } finally { admin.close() } } finally { cluster.close() } } @Test def testCreateClusterAndCreateListDeleteTopic(): Unit = { val cluster = new KafkaClusterTestKit.Builder( new TestKitNodes.Builder(). setNumKip500BrokerNodes(3). 
setNumControllerNodes(3).build()).build() try { cluster.format() cluster.startup() cluster.waitForReadyBrokers() TestUtils.waitUntilTrue(() => cluster.brokers().get(0).currentState() == BrokerState.RUNNING, "Broker never made it to RUNNING state.") TestUtils.waitUntilTrue(() => cluster.raftManagers().get(0).kafkaRaftClient.leaderAndEpoch().leaderId.isPresent, "RaftManager was not initialized.") val admin = Admin.create(cluster.clientProperties()) try { // Create a test topic val newTopic = Collections.singletonList(new NewTopic("test-topic", 1, 3.toShort)) val createTopicResult = admin.createTopics(newTopic) createTopicResult.all().get() // List created topic TestUtils.waitUntilTrue(() => { val listTopicsResult = admin.listTopics() val result = listTopicsResult.names().get().size() == newTopic.size() if (result) { newTopic forEach(topic => { assertTrue(listTopicsResult.names().get().contains(topic.name())) }) } result }, "Topics created were not listed.") // Delete topic val deleteResult = admin.deleteTopics(Collections.singletonList("test-topic")) deleteResult.all().get() // List again TestUtils.waitUntilTrue(() => { val listTopicsResult = admin.listTopics() val result = listTopicsResult.names().get().size() != newTopic.size() if (result) { newTopic forEach(topic => { assertFalse(listTopicsResult.names().get().contains(topic.name())) }) } result }, "Topic was not removed from list.") } finally { admin.close() } } finally { cluster.close() } } @Test def testCreateClusterAndCreateAndManyTopics(): Unit = { val cluster = new KafkaClusterTestKit.Builder( new TestKitNodes.Builder(). setNumKip500BrokerNodes(3). setNumControllerNodes(3).build()).build() try { cluster.format() cluster.startup() cluster.waitForReadyBrokers() TestUtils.waitUntilTrue(() => cluster.brokers().get(0).currentState() == BrokerState.RUNNING, "Broker never made it to RUNNING state.") TestUtils.waitUntilTrue(() => cluster.raftManagers().get(0).kafkaRaftClient.leaderAndEpoch().leaderId.isPresent, "RaftManager was not initialized.") val admin = Admin.create(cluster.clientProperties()) try { // Create many topics val newTopic = new util.ArrayList[NewTopic]() newTopic.add(new NewTopic("test-topic-1", 1, 3.toShort)) newTopic.add(new NewTopic("test-topic-2", 1, 3.toShort)) newTopic.add(new NewTopic("test-topic-3", 1, 3.toShort)) val createTopicResult = admin.createTopics(newTopic) createTopicResult.all().get() // List created topic TestUtils.waitUntilTrue(() => { val listTopicsResult = admin.listTopics() val result = listTopicsResult.names().get().size() == newTopic.size() if (result) { newTopic forEach(topic => { assertTrue(listTopicsResult.names().get().contains(topic.name())) }) } result }, "Topics created were not listed.") } finally { admin.close() } } finally { cluster.close() } } @Test def testCreateClusterAndCreateAndManyTopicsWithManyPartitions(): Unit = { val cluster = new KafkaClusterTestKit.Builder( new TestKitNodes.Builder(). setNumKip500BrokerNodes(3). 
setNumControllerNodes(3).build()).build() try { cluster.format() cluster.startup() cluster.waitForReadyBrokers() TestUtils.waitUntilTrue(() => cluster.brokers().get(0).currentState() == BrokerState.RUNNING, "Broker never made it to RUNNING state.") TestUtils.waitUntilTrue(() => cluster.raftManagers().get(0).kafkaRaftClient.leaderAndEpoch().leaderId.isPresent, "RaftManager was not initialized.") val admin = Admin.create(cluster.clientProperties()) try { // Create many topics val newTopic = new util.ArrayList[NewTopic]() newTopic.add(new NewTopic("test-topic-1", 3, 3.toShort)) newTopic.add(new NewTopic("test-topic-2", 3, 3.toShort)) newTopic.add(new NewTopic("test-topic-3", 3, 3.toShort)) val createTopicResult = admin.createTopics(newTopic) createTopicResult.all().get() // List created topic TestUtils.waitUntilTrue(() => { val listTopicsResult = admin.listTopics() val result = listTopicsResult.names().get().size() == newTopic.size() if (result) { newTopic forEach(topic => { assertTrue(listTopicsResult.names().get().contains(topic.name())) }) } result }, "Topics created were not listed.") } finally { admin.close() } } finally { cluster.close() } } }
Chasego/kafka
core/src/test/scala/integration/kafka/server/RaftClusterTest.scala
Scala
apache-2.0
7,730
/* * Copyright 2019 Spotify AB. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package com.spotify.scio import com.google.api.client.http.javanet.NetHttpTransport import com.google.api.client.http.{GenericUrl, HttpRequest, HttpRequestInitializer} import com.google.api.client.json.JsonObjectParser import com.google.api.client.json.jackson2.JacksonFactory import org.apache.beam.sdk.util.ReleaseInfo import org.apache.beam.sdk.{PipelineResult, PipelineRunner} import org.slf4j.LoggerFactory import scala.io.AnsiColor._ import scala.jdk.CollectionConverters._ import scala.collection.mutable import scala.util.Try private[scio] object VersionUtil { case class SemVer(major: Int, minor: Int, rev: Int, suffix: String) extends Ordered[SemVer] { def compare(that: SemVer): Int = Ordering[(Int, Int, Int, String)].compare(SemVer.unapply(this).get, SemVer.unapply(that).get) } private[this] val Timeout = 3000 private[this] val Url = "https://api.github.com/repos/spotify/scio/releases" private[this] val Pattern = """v?(\d+)\.(\d+).(\d+)(-\w+)?""".r private[this] val Logger = LoggerFactory.getLogger(this.getClass) private[this] val MessagePattern: (String, String) => String = (version, url) => s""" | $YELLOW>$BOLD Scio $version introduced some breaking changes in the API.$RESET | $YELLOW>$RESET Follow the migration guide to upgrade: $url. | $YELLOW>$RESET Scio provides automatic migration rules (See migration guide). 
""".stripMargin private[this] val NewerVersionPattern: (String, String) => String = (current, v) => s""" | $YELLOW>$BOLD A newer version of Scio is available: $current -> $v$RESET | $YELLOW>$RESET Use `-Dscio.ignoreVersionWarning=true` to disable this check.$RESET |""".stripMargin private lazy val latest: Option[String] = Try { val transport = new NetHttpTransport() val response = transport .createRequestFactory(new HttpRequestInitializer { override def initialize(request: HttpRequest): Unit = { request.setConnectTimeout(Timeout) request.setReadTimeout(Timeout) request.setParser(new JsonObjectParser(new JacksonFactory)) () } }) .buildGetRequest(new GenericUrl(Url)) .execute() .parseAs(classOf[java.util.List[java.util.Map[String, AnyRef]]]) response.asScala .filter(node => !node.get("prerelease").asInstanceOf[Boolean]) .find(node => !node.get("draft").asInstanceOf[Boolean]) .map(latestNode => latestNode.get("tag_name").asInstanceOf[String]) }.toOption.flatten private def parseVersion(version: String): SemVer = { val m = Pattern.findFirstMatchIn(version).get // higher value for no "-SNAPSHOT" val snapshot = if (m.group(4) != null) m.group(4).toUpperCase else "\uffff" SemVer(m.group(1).toInt, m.group(2).toInt, m.group(3).toInt, snapshot) } private[scio] def ignoreVersionCheck: Boolean = sys.props .get("scio.ignoreVersionWarning") .orElse(sys.env.get("SCIO_IGNORE_VERSION_WARNING")) .exists(_.trim == "true") private def messages(current: SemVer, latest: SemVer): Option[String] = (current, latest) match { case (SemVer(0, minor, _, _), SemVer(0, 7, _, _)) if minor < 7 => Some( MessagePattern("0.7", "https://spotify.github.io/scio/migrations/v0.7.0-Migration-Guide") ) case (SemVer(0, minor, _, _), SemVer(0, 8, _, _)) if minor < 8 => Some( MessagePattern("0.8", "https://spotify.github.io/scio/migrations/v0.8.0-Migration-Guide") ) case (SemVer(0, minor, _, _), SemVer(0, 9, _, _)) if minor < 9 => Some( MessagePattern("0.9", "https://spotify.github.io/scio/migrations/v0.9.0-Migration-Guide") ) case (SemVer(0, minor, _, _), SemVer(0, 10, _, _)) if minor < 10 => Some( MessagePattern("0.10", "https://spotify.github.io/scio/migrations/v0.10.0-Migration-Guide") ) case _ => None } def checkVersion( current: String, latestOverride: Option[String] = None, ignore: Boolean = ignoreVersionCheck ): Seq[String] = if (ignore) { Nil } else { val buffer = mutable.Buffer.empty[String] val v1 = parseVersion(current) if (v1.suffix == "-SNAPSHOT") { buffer.append(s"Using a SNAPSHOT version of Scio: $current") } latestOverride.orElse(latest).foreach { v => val v2 = parseVersion(v) if (v2 > v1) { buffer.append(NewerVersionPattern(current, v)) messages(v1, v2).foreach(buffer.append(_)) } } buffer.toSeq } def checkVersion(): Unit = checkVersion(BuildInfo.version).foreach(Logger.warn) def checkRunnerVersion(runner: Class[_ <: PipelineRunner[_ <: PipelineResult]]): Unit = { val name = runner.getSimpleName val version = ReleaseInfo.getReleaseInfo.getVersion require( version == BuildInfo.beamVersion, s"Mismatched version for $name, expected: ${BuildInfo.beamVersion}, actual: $version" ) } }
regadas/scio
scio-core/src/main/scala/com/spotify/scio/VersionUtil.scala
Scala
apache-2.0
5,563
package com.seanshubin.todo.persistence.domain import org.scalatest.FunSuite import scala.collection.mutable.ArrayBuffer /* test-driven-014 Dispatch logic can be generic */ class DispatcherTest extends FunSuite { test("unknown request") { //given val handlersBySubject = Map[String, RequestValueHandler]() val dispatcher = createDispatcher(handlersBySubject) val request = RequestValue(method = "foo", path = "/bar/path", body = "baz") //when val response = dispatcher.handle(request) //then assert(response === ResponseValue(404, "Not allowed to apply method foo to subject bar")) } test("handle exception") { //given val exception = new RuntimeException("the exception") val expectedExceptionString = ExceptionUtil.toString(exception) val handlerThatThrows = new HandlerThatThrows(exception) val handlersBySubject = Map("fragile" -> handlerThatThrows) val dispatcher = createDispatcher(handlersBySubject) //when val response = dispatcher.handle(RequestValue("GET", "/fragile/path")) //then assert(response === ResponseValue(500, "When trying to apply method GET to subject fragile, got the following exception:\\n" + expectedExceptionString)) } test("typical") { //given val stubResponse = ResponseValue(200, "stub") val stubHandler = new StubHandler(stubResponse) val handlersBySubject = Map("stub" -> stubHandler) val dispatcher = createDispatcher(handlersBySubject) val request = RequestValue("GET", "/stub/path") //when val response = dispatcher.handle(request) //then assert(response === stubResponse) } def createDispatcher(handlersBySubject: Map[String, RequestValueHandler]): Dispatcher = { val dispatcher = new Dispatcher(handlersBySubject) dispatcher } class StubHandler(response: ResponseValue) extends RequestValueHandler { val requestsHandled = new ArrayBuffer[RequestValue] override def handle(request: RequestValue): ResponseValue = { requestsHandled.append(request) response } } class HandlerThatThrows(exception: Exception) extends RequestValueHandler { override def handle(request: RequestValue): ResponseValue = { throw exception } } }
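// The Dispatcher implementation under test is not shown in this file. The following is a minimal
// sketch, an assumption rather than the project's actual code, of a handle method that would
// satisfy the three tests above: the subject is the first path segment, unknown subjects yield a
// 404, and handler exceptions yield a 500 carrying the rendered stack trace.
class DispatcherSketch(handlersBySubject: Map[String, RequestValueHandler]) {
  def handle(request: RequestValue): ResponseValue = {
    val subject = request.path.split("/").filter(_.nonEmpty).headOption.getOrElse("")
    handlersBySubject.get(subject) match {
      case None =>
        ResponseValue(404, s"Not allowed to apply method ${request.method} to subject $subject")
      case Some(handler) =>
        try {
          handler.handle(request)
        } catch {
          case ex: Exception =>
            ResponseValue(
              500,
              s"When trying to apply method ${request.method} to subject $subject, " +
                "got the following exception:\n" + ExceptionUtil.toString(ex))
        }
    }
  }
}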
SeanShubin/todo-persistence
domain/src/test/scala/com/seanshubin/todo/persistence/domain/DispatcherTest.scala
Scala
unlicense
2,256
/* * SPDX-License-Identifier: Apache-2.0 * * Copyright 2015-2021 Andre White. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.truthencode.ddo.model.effect /** * Reduces damage from a given source by a some amount */ /** * @author * adarr */ trait DamageReduction /** * Indicates physical as opposed to magical or other damage type */ trait Physical { self: TypeCategory => } trait Slashing extends Physical with Form trait Piercing extends Physical with Form trait Bludgeoning extends Physical with Form trait UnTyped { self: TypeCategory => } trait Alignment { self: TypeCategory => } /** * Resists or inflicts all physical types of damage (Slash / Pierce / Bludgeon) */ trait FullPhysical extends Slashing with Piercing with Bludgeoning with FullMaterial trait Good extends Alignment with AlignmentBased trait Evil extends Alignment with AlignmentBased trait Chaotic extends Alignment with AlignmentBased trait Lawful extends Alignment with AlignmentBased /** * Resists, deals or bypasses all alignments */ trait Aligned extends Good with Evil with Chaotic with Lawful /** * Pertains to the material substance used for an item which may affect the damage type dealt, such * as a Silver Longsword dealing 'Silver' Damage which can harm a Vampire. */ trait Material extends MaterialBased { self: TypeCategory => } /** * Bypasses some constructs, such as Golems */ trait Adamantine extends Material /** * Material can generally be used to bypass Aberration DR */ trait Byeshk extends Material /** * Some Outsiders such as Devils */ trait ColdIron extends Material /** * Immune to Rust Monster / Ooze damage, but may be brittle */ trait Crystal extends Material trait Mithral extends Material /** * Bypasses some undead such as Vampires */ trait Silver extends Material /** * Encompasses all material used for damage. * * Internally used as convenience trait that is more concise / useful for flagging "DR/-" */ trait FullMaterial extends Adamantine with Byeshk with ColdIron with Crystal with Mithral with Silver trait Magic extends Other trait Light extends Other trait Energy extends Health /** * Damage type suffered by Constructs */ trait Rust extends Health trait Poison extends Health /** * Infuses with Positive Energy. Heals the truly living and harms undead. */ trait Positive extends Health /** * Infuses with Negative Energy. Heals the Undead, harms the truly living with no effect on living * constructs. */ trait Negative extends Health /** * Infuses constructs / living constructs with Healing. No effect on the truly living or undead. */ trait Repair extends Health /** * Bane damage is considered untyped and thus can not be bypassed by effects such as Damage * Reduction */ trait Bane extends UnTyped with Typeless trait Sonic trait Force trait RustRepair trait Acid extends Elemental trait Fire extends Elemental trait Cold extends Elemental trait Electric extends Elemental /** * Reduces damage from Acid, Fire, Cold and Electric */ trait ElementalResistance extends Resist with Acid with Fire with Cold with Electric
adarro/ddo-calc
subprojects/common/ddo-modeling/src/main/scala/io/truthencode/ddo/model/effect/DamageReduction.scala
Scala
apache-2.0
3,650
/*********************************************************************** * Copyright (c) 2013-2015 Commonwealth Computer Research, Inc. * All rights reserved. This program and the accompanying materials * are made available under the terms of the Apache License, Version 2.0 which * accompanies this distribution and is available at * http://www.opensource.org/licenses/apache2.0.php. *************************************************************************/ package org.locationtech.geomesa.accumulo.iterators import org.locationtech.geomesa.accumulo.util.CloseableIterator import org.opengis.feature.simple.SimpleFeature /** * Simple utility that removes duplicates from the list of IDs passed through. * * @param source the original iterator that may contain duplicate ID-rows */ class DeDuplicatingIterator(source: CloseableIterator[SimpleFeature], maxCacheSize: Int = 999999) extends CloseableIterator[SimpleFeature] { private val cache = scala.collection.mutable.HashSet.empty[String] private var nextEntry = findNext() override def next(): SimpleFeature = { val next = nextEntry nextEntry = findNext() next } override def hasNext: Boolean = nextEntry != null private def findNext(): SimpleFeature = { var next: SimpleFeature = null do { next = if (source.hasNext) source.next() else null } while (next != null && (if (cache.size < maxCacheSize) !cache.add(next.getID) else cache.contains(next.getID))) next } override def close(): Unit = { cache.clear() source.close() } }
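// A simplified, self-contained illustration of the same bounded-cache de-duplication idea, using
// plain String ids instead of GeoMesa's SimpleFeature and CloseableIterator types; it is not part
// of the original file. Once the cache reaches maxCacheSize, later ids are only checked against
// the cache without being added, mirroring findNext above.
object DeDupSketch {
  def distinctIds(ids: Iterator[String], maxCacheSize: Int = 999999): Iterator[String] = {
    val cache = scala.collection.mutable.HashSet.empty[String]
    ids.filter { id =>
      if (cache.size < maxCacheSize) cache.add(id) // add returns false for ids already seen
      else !cache.contains(id)                     // cache full: pass through only unseen ids
    }
  }
  // distinctIds(Iterator("a", "b", "a", "c")).toList == List("a", "b", "c")
}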
vpipkt/geomesa
geomesa-accumulo/geomesa-accumulo-datastore/src/main/scala/org/locationtech/geomesa/accumulo/iterators/DeDuplicatingIterator.scala
Scala
apache-2.0
1,557
/////////////////////////////////////////////////////////////////////////////// // numeric.scala // // Copyright (C) 2011-2014 Ben Wing, The University of Texas at Austin // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. /////////////////////////////////////////////////////////////////////////////// package opennlp.textgrounder package util import scala.math._ import math.is_negative // import error.warning //////////////////////////////////////////////////////////////////////////// // String functions involving numbers // //////////////////////////////////////////////////////////////////////////// package object numeric { // /** // Convert a string to floating point, but don't crash on errors; // instead, output a warning. // */ // def safe_float(x: String) = { // try { // x.toDouble // } catch { // case _: Exception => { // val y = x.trim() // if (y != "") warning("Expected number, saw %s", y) // 0.0 // } // } // } // Originally based on code from: // http://stackoverflow.com/questions/1823058/how-to-print-number-with-commas-as-thousands-separators-in-python-2-x protected def imp_format_long_commas(x: Long): String = { var mx = x if (mx < 0) "-" + imp_format_long_commas(-mx) else { var result = "" while (mx >= 1000) { val r = mx % 1000 mx /= 1000 result = ",%03d%s" format (r, result) } "%s%s" format (mx, result) } } /** * Format a long, optionally adding commas to separate thousands. * * @param with_commas If true, add commas to separate thousands. * @param include_plus If true, include a + sign before positive numbers. */ def format_long(x: Long, include_plus: Boolean = false, with_commas: Boolean = false) = { val sign = if (include_plus && x >= 0) "+" else "" sign + ( if (!with_commas) x.toString else imp_format_long_commas(x) ) } /** * Format a long, adding commas to separate thousands. * * @param include_plus If true, include a + sign before positive numbers. */ def format_long_commas(x: Long, include_plus: Boolean = false) = format_long(x, include_plus = include_plus, with_commas = true) /** * Format a long integer in a "pretty" fashion. This adds commas * to separate thousands in the integral part. * * @param sigdigits Number of significant digits after decimal point * to display. * @param include_plus If true, include a + sign before positive numbers. */ def pretty_long(x: Long, include_plus: Boolean = false) = format_long_commas(x, include_plus = include_plus) /** * Format a floating-point number using %f style (i.e. avoding * scientific notation) and with a fixed number of significant digits * after the decimal point. Normally this is the same as the actual * number of digits displayed after the decimal point, but more digits * will be used if the number is excessively small (e.g. possible outputs * might be 0.33, 0.033, 0.0033, etc. for 1.0/3, 1.0/30, 1.0/300, etc.). * * @param sigdigits Number of significant digits after decimal point * to display. * @param include_plus If true, include a + sign before positive numbers. 
* @param drop_zeros If true, drop trailing zeros after decimal point. * @param with_commas If true, add commas to separate thousands * in the integral part. */ def format_double(x: Double, sigdigits: Int = 2, drop_zeros: Boolean = false, with_commas: Boolean = false, include_plus: Boolean = false): String = { if (is_negative(x)) { // Don't use x.abs because it has a bug handling -0.0 "-" + format_double(abs(x), sigdigits = sigdigits, drop_zeros = drop_zeros, with_commas = with_commas, include_plus = false) } else if (with_commas) { val sign = if (include_plus) "+" else "" val longpart = x.toLong // Use 1+ so that we don't get special treatment of values near 0 // unless we're actually near 0 val fracpart = (if (longpart != 0) 1 else 0) + abs(x - longpart) sign + imp_format_long_commas(longpart) + format_double(fracpart, sigdigits = sigdigits, drop_zeros = drop_zeros, with_commas = false, include_plus = false). drop(1) } else { var precision = sigdigits if (x != 0) { var xx = abs(x) while (xx < 0.1) { xx *= 10 precision += 1 } } val formatstr = "%%%s.%sf" format (if (include_plus) "+" else "", precision) val retval = formatstr format x if (drop_zeros) // Drop zeros after decimal point, then drop decimal point if it's last. retval.replaceAll("""\\.([0-9]*?)0+$""", """.$1"""). replaceAll("""\\.$""", "") else retval } } /** * Format a floating-point number, dropping final zeros so it takes the * minimum amount of space. * * @param sigdigits Number of significant digits after decimal point * to display. * @param include_plus If true, include a + sign before positive numbers. */ def min_format_double(x: Double, sigdigits: Int = 2, with_commas: Boolean = false, include_plus: Boolean = false) = format_double(x, sigdigits = sigdigits, with_commas = with_commas, include_plus = include_plus, drop_zeros = true) /** * Format a floating-point number, adding commas to separate thousands * in the integral part. * * @param sigdigits Number of significant digits after decimal point * to display. * @param include_plus If true, include a + sign before positive numbers. */ def format_double_commas(x: Double, sigdigits: Int = 2, drop_zeros: Boolean = false, include_plus: Boolean = false) = format_double(x, sigdigits = sigdigits, include_plus = include_plus, drop_zeros = drop_zeros, with_commas = true) /** * Format a floating-point number, dropping final zeros so it takes the * minimum amount of space and adding commas to separate thousands in the * integral part. * * @param sigdigits Number of significant digits after decimal point * to display. * @param include_plus If true, include a + sign before positive numbers. */ def min_format_double_commas(x: Double, sigdigits: Int = 2, include_plus: Boolean = false) = format_double(x, sigdigits = sigdigits, include_plus = include_plus, drop_zeros = true, with_commas = true) /** * Format a floating-point number in a "pretty" fashion. This drops * final zeros so it takes the minimum amount of space and adds commas * to separate thousands in the integral part. * * @param sigdigits Number of significant digits after decimal point * to display. * @param include_plus If true, include a + sign before positive numbers. */ def pretty_double(x: Double, sigdigits: Int = 2, include_plus: Boolean = false) = min_format_double_commas(x, sigdigits = sigdigits, include_plus = include_plus) }
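// A small usage sketch, illustrative only and not part of the original file; the expected outputs
// in the comments follow from the definitions of imp_format_long_commas and format_double above.
object NumericFormatSketch {
  import opennlp.textgrounder.util.numeric._

  def demo(): Unit = {
    println(format_long_commas(1234567L))          // 1,234,567
    println(format_long(42L, include_plus = true)) // +42
    println(pretty_double(1234.5678))              // 1,234.57  (commas, 2 significant digits, trailing zeros dropped)
    println(format_double(1.0 / 300))              // 0.0033    (precision grows for small values)
  }
}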
utcompling/textgrounder
src/main/scala/opennlp/textgrounder/util/numeric.scala
Scala
apache-2.0
7,648
package com.scalaAsm.x86 package Instructions package x87 // Description: Subtract // Category: general/arith trait FSUB extends InstructionDefinition { val mnemonic = "FSUB" } object FSUB extends OneOperand[FSUB] with FSUBImpl trait FSUBImpl extends FSUB { implicit object _0 extends OneOp[m32] { val opcode: OneOpcode = 0xD8 /+ 4 val format = RmFormat override def hasImplicitOperand = true } implicit object _1 extends OneOp[m64] { val opcode: OneOpcode = 0xDC /+ 4 val format = RmFormat override def hasImplicitOperand = true } }
bdwashbu/scala-x86-inst
src/main/scala/com/scalaAsm/x86/Instructions/x87/FSUB.scala
Scala
apache-2.0
573
package regolic package dpllt import sat.Solver.Results._ import sat.FixedIntStack import sat.FixedIntDoublePriorityQueue import sat.Vector import util.{HasLogger, Logger} /* * TODO: what should we do with multiple copy of the same literal with different id ? * Can break communication with theory solver with literals mapping */ object DPLLSolver { /* The results, unknown means timeout */ object Results { sealed trait Result case class Satisfiable(model: Array[Boolean]) extends Result case object Unsatisfiable extends Result case object Unknown extends Result } //Enumeration for the different status of the algorithm private sealed trait Status private case object Satisfiable extends Status private case object Unsatisfiable extends Status private case object Conflict extends Status private case object Unknown extends Status private case object Timeout extends Status } //TODO: nbVars should be nbLits class DPLLSolver[T <: TheoryComponent](nbVars: Int, val theory: T)(implicit val context: Context) { import theory.{Solver => TheorySolver, Literal} implicit val ev = theory.literalClassTag private val logger = context.logger private[this] implicit val tag = new Logger.Tag("DPLL(T)") import DPLLSolver._ /* * Not for the faint of heart. * If you like Scala or functional programming, you may want to skip the rest of this file and * take it as a black box. */ private[this] var nbConflicts = 0 private[this] var nbDecisions = 0 private[this] var nbPropagations = 0 private[this] var nbLearntClauseTotal = 0 private[this] var nbLearntLiteralTotal = 0 private[this] var nbRemovedClauses = 0 private[this] var nbRemovedLiteral = 0 private[this] var nbRestarts = 0 private[this] var nbSolveCalls = 0 private[this] var decisionLevel = 0 private[this] var trail: FixedIntStack = new FixedIntStack(nbVars) //store literals, but only of one unique polarity per literal, so nbVar size is enough //TODO: could it be that we need nbVars + 1 ? private[this] var qHead = 0 private[this] var theoryHead = 0 //reasons contains the clause explaining why bcp propagated a certain propositional variable //it could be null for either of three reasons: (1) not yet assigned (2) decision variable (3) theory propagation private[this] var reasons: Array[Clause] = new Array(nbVars) private[this] var theoryPropagated: Array[Boolean] = new Array(nbVars) private[this] var levels: Array[Int] = Array.fill(nbVars)(-1) //model for each literal id: -1 is unknown, 0 is false, 1 is true private[this] var model: Array[Int] = Array.fill(nbVars)(-1) private[this] var watched: Array[Vector[Clause]] = Array.fill(2*nbVars)(new Vector(20)) private[this] var incrementallyAddedClauses: List[Clause] = Nil private[this] var learntClauses: List[Clause] = Nil /* * seen can be used locally for algorithms to maintain variables that have been seen * They should maintain the invariant that seen is set to false everywhere. * History proved that locally initializing this array where needed was a killer for performance. 
*/ private[this] var seen: Array[Boolean] = Array.fill(nbVars)(false) private[this] var status: Status = Unknown private[this] var restartInterval = Settings.restartInterval private[this] var nextRestart = restartInterval private[this] val restartFactor = Settings.restartFactor private[this] var cnfFormula: CNFFormula = null private[this] var conflict: Clause = null private[this] var assumptions: Array[Int] = null private[this] var literals: Array[Literal] = new Array(2*nbVars) private[this] val conflictAnalysisStopWatch = StopWatch("backtrack.conflictanalysis") private[this] val find1UIPStopWatch = StopWatch("backtrack.conflictanalysis.find1uip") private[this] val clauseMinimizationStopWatch = StopWatch("backtrack.conflictanalysis.clauseminimization") private[this] val explanationStopwatch = StopWatch("explanation") private[this] val setTrueStopwatch = StopWatch("setTrue") private[this] val tBacktrackStopwatch = StopWatch("t-backtrack") var tSolver: theory.Solver = _ //ignore size 1 for watched literal, they are never kept in the db private class Clause(val lits: Array[Int]) { var activity: Double = 0d var locked = false def this(listLits: Set[Literal]) = this(listLits.map(lit => 2*lit.id + lit.polInt).toArray) val size = lits.size override def toString = lits.map(lit => (if(lit % 2 == 0) "" else "-") + (lit >> 1)).mkString("[", ", ", "]") } private def resetSolver(): Unit = { nbConflicts = 0 nbDecisions = 0 nbPropagations = 0 nbRemovedClauses = 0 nbRemovedLiteral = 0 nbRestarts = 0 decisionLevel = 0 trail = new FixedIntStack(nbVars) //store literals, but only one polarity at the same time, so nbVar size is enough qHead = 0 theoryHead = 0 reasons = new Array(nbVars) levels = Array.fill(nbVars)(-1) model = Array.fill(nbVars)(-1) watched = Array.fill(2*nbVars)(new Vector(20)) seen = Array.fill(nbVars)(false) status = Unknown restartInterval = Settings.restartInterval nextRestart = restartInterval literals = new Array(2*nbVars) conflictAnalysisStopWatch.reset() find1UIPStopWatch.reset() clauseMinimizationStopWatch.reset() } private def initClauses(clauses: List[Clause]): Unit = { var newClauses: List[Clause] = Nil clauses.foreach(cl => { val litsUnique = cl.lits.toSet if(litsUnique.size == 1) { val id = litsUnique.head >> 1 if(model(id) == -1) { logger.debug("Simplifying clause of size 1: " + literals(litsUnique.head)) enqueueLiteral(litsUnique.head) } else if(model(id) != (litsUnique.head & 1)) { logger.debug("Detecting conflicting clause of size 1: " + literals(litsUnique.head)) status = Unsatisfiable } } else if(!litsUnique.exists(l1 => litsUnique.count(l2 => (l2 >> 1) == (l1 >> 1)) > 1)) { val newLits = new Clause(litsUnique.toArray) newClauses ::= newLits } }) cnfFormula = new CNFFormula(newClauses, nbVars) for(clause <- newClauses) recordClause(clause) //tSolver = theory.makeSolver(cnfFormula.originalClauses.map(clause => clause.lits.map(literals(_)).toSet).toSet) } def addClause(lits: Set[Literal]) = { incrementallyAddedClauses ::= new Clause(lits) for(lit <- lits) { literals(2*lit.id + 0) = lit.pos.neg literals(2*lit.id + 1) = lit.pos } } def solve(solver: TheorySolver, assumps: Array[Literal] = Array.empty[Literal]): Results.Result = { logger.info("Solving formula") tSolver = solver nbSolveCalls += 1 if(nbSolveCalls > 1) { resetSolver() this.learntClauses :::= cnfFormula.learntClauses // save learnt clauses from previous run } initClauses(this.learntClauses ::: incrementallyAddedClauses) assumptions = assumps.map((lit: Literal) => (lit.id << 1) + lit.polInt ^ 1) // TODO correct 
literal to int conversion logger.debug("CNF formula: " + cnfFormula.originalClauses.map(clause => clause.lits.map(literals(_)).mkString("[", ", ", "]") ).mkString("{\\n\\t", "\\n\\t", "}")) logger.debug("Assumptions: " + assumptions.map(literals(_)).mkString("[", ",", "]")) logger.trace("Literals array: " + literals.mkString("{\\n\\t", "\\n\\t", "}")) search() } private[this] def search(): Results.Result = { val topLevelStopWatch = StopWatch("toplevelloop") val deduceStopWatch = StopWatch("deduce") val decideStopWatch = StopWatch("decide") val backtrackStopWatch = StopWatch("backtrack") topLevelStopWatch.time { deduceStopWatch.time { deduce() } if(status == Conflict) status = Unsatisfiable val timeout: Option[Int] = Settings.timeout var elapsedTime: Long = 0 //in ms //assertWatchedInvariant //assertTrailInvariant //MAIN LOOP var fileCounter = 0 while(status == Unknown) { val startTime = System.currentTimeMillis //assertWatchedInvariant //assertTrailInvariant decideStopWatch.time { decide() } var cont = true while(cont) { //assertWatchedInvariant //assertTrailInvariant deduceStopWatch.time { deduce() } if(status == Conflict) { logger.info("Conflict detected at level " + decisionLevel) backtrackStopWatch.time { backtrack() } } else { cont = false } } val endTime = System.currentTimeMillis elapsedTime += (endTime - startTime) timeout.foreach(timeout => if(elapsedTime > 1000*timeout) status = Timeout) } } if(Settings.stats) { println("Conflicts: " + nbConflicts) println("Decisions: " + nbDecisions) println("Propagations: " + nbPropagations + " ( " + (nbPropagations/deduceStopWatch.seconds).toInt + " / sec)") println("Restarts: " + nbRestarts) println("Learned Literals: " + nbLearntLiteralTotal + " (" + nbLearntClauseTotal + " clauses) --- " + nbLearntLiteralTotal.toDouble/nbLearntClauseTotal.toDouble + " per clause") println("Removed Literals: " + nbRemovedLiteral + "(" + nbRemovedClauses + " clauses) --- " + nbRemovedLiteral.toDouble/nbRemovedClauses.toDouble + " per clause") println("Active Literals: " + (nbLearntLiteralTotal - nbRemovedLiteral) + "(" + (nbLearntClauseTotal - nbRemovedClauses) + ") --- " + (nbLearntLiteralTotal - nbRemovedLiteral).toDouble/(nbLearntClauseTotal-nbRemovedClauses).toDouble + " per clause") println("Time spend in:\\n") println(" toplevelloop: " + topLevelStopWatch.seconds + " sec") println(" decide: " + decideStopWatch.seconds + " sec") println(" deduce: " + deduceStopWatch.seconds + " sec") println(" backtrack: " + backtrackStopWatch.seconds + " sec") println(" conflictanalysis: " + conflictAnalysisStopWatch.seconds + " sec") println(" clausemin: " + clauseMinimizationStopWatch.seconds + " sec") println(" find1uip: " + find1UIPStopWatch.seconds + " sec") println(" tSolver-setTrue: " + setTrueStopwatch.seconds + " sec") println(" tSolver-explain: " + explanationStopwatch.seconds + " sec") println(" tSolver-backtrack: " + tBacktrackStopwatch.seconds + " sec") } status match { case Unknown | Conflict => sys.error("unexpected") case Timeout => Results.Unknown case Unsatisfiable => Results.Unsatisfiable case Satisfiable => { assert(model.forall(pol => pol == 1 || pol == 0)) assert((cnfFormula.originalClauses ++ cnfFormula.learntClauses).forall(clause => clause.lits.exists(lit => isSat(lit)))) assert(model.zipWithIndex.forall{ case (pol, id) => { val lit = literals(2*id + pol) tSolver.isTrue(lit) }}) logger.info("Model: " + model.zipWithIndex.map{ case (pol, id) => literals(2*id + pol) }.mkString("[\\n\\t", ",\\n\\t", "]") ) Results.Satisfiable(model.map(pol => pol 
== 1)) } } } private[this] def conflictAnalysis: Clause = { implicit val tag = new Logger.Tag("Conflict Analysis") logger.info("Conflict analysis: " + conflict.lits.map(literals(_)).mkString("[", ", ", "]")) assert(conflict != null) assert(conflict.lits.forall(lit => isUnsat(lit))) assert(conflict.lits.exists(lit => levels(lit >> 1) == decisionLevel)) assert(seen.forall(b => !b)) //the algorithm augment the cut closest to the conflict node successively by doing //a BFS while only searching through the nodes of the current decision level //it stops when only one node of the current decision level (the UIP) remain in the cut var learntClause: List[Int] = Nil var p: Int = -1 //literal var c = 0 var trailIndex = trail.size var confl = conflict conflict = null //find 1-UIP logger.trace("Searching 1UIP...") find1UIPStopWatch.time { do { assert(confl != null) logger.trace("Current conflict reason: " + confl.lits.map(literals(_)).mkString("[", ", ", "]")) if(p != -1) assert(p == (confl.lits(0))) cnfFormula.incVSIDSClause(confl) val lits = confl.lits var i = if(p == -1) 0 else 1 while(i < lits.size) { val id = lits(i) >> 1 val lvl = levels(id) logger.trace("Considering literal [" + literals(lits(i)) + "] at level " + lvl + " with seen: " + seen(id)) if(!seen(id) && lvl > 0) { seen(id) = true if(lvl == decisionLevel) c += 1 else { logger.trace("Adding to learnt clause: " + literals(lits(i))) learntClause ::= lits(i) } } i += 1 } assert(learntClause.forall(lit => levels(lit >> 1) != decisionLevel)) do { trailIndex -= 1 p = trail(trailIndex) } while(!seen(p>>1)) assert(isSat(p)) logger.trace("current UIP: " + literals(p)) confl = reasons(p>>1) c = c - 1 seen(p>>1) = false logger.trace("current counter c: " + c) if(confl == null && theoryPropagated(p>>1)) { //conflict from theory propagation val tLit = literals(p) logger.debug("Computing theory explanation of literal: " + tLit) val expl = tSolver.explanation(tLit) assert(expl.forall(lit => tSolver.isTrue(lit))) assert(expl.forall(lit => { val realLit = literals(lit.id*2 + lit.polInt); realLit.id == lit.id && realLit.polInt == lit.polInt })) assert({ //make sure no cycle val prefixTrail: Array[Int] = trail.stack.takeWhile(_ != p) expl.forall(lit => if(!prefixTrail.contains(2*lit.id + lit.polInt)) { logger.debug(prefixTrail.map(l => "id: " + (l>>1) + ", polInt: " + (l&1) + ", lit: " + literals(l)).mkString("[", "\\n", "]")) logger.error("literal [" + lit + ", id: " + lit.id + ", pol: " + lit.polInt + "] from theory explanation is not in prefix of trail") false } else true) }) assert(expl.forall(lit => isSat(2*lit.id + lit.polInt))) confl = new Clause(p +: expl.map(l => 2*l.id + (1 - l.polInt)).toArray) } if(confl != null) { assert(confl.lits(0) == p) assert(isSat(confl.lits(0))) assert(confl.lits.tail.forall(lit => isUnsat(lit))) } assert(confl != null || c == 0) //if confl is null then we reached a UIP } while(c > 0) } logger.debug("UIP: " + literals(p)) //p is 1-UIP assert(isSat(p)) assert(levels(p>>1) == decisionLevel) assert(learntClause.forall(lit => isUnsat(lit))) var toSetUnseen: List[Int] = learntClause clauseMinimizationStopWatch.time { def getAbstractLevel(id: Int) = 1 << (levels(id) & 31) //clause minimalization var marked: Set[Int] = learntClause.map(_ >> 1).toSet val levelsInClause: Set[Int] = marked.map(levels(_)) //we can optimize the search, if we see a node of a level not in the set, then for sure there will be a decision node of the same level def litRedundant(id: Int, abstractLevel: Int): Boolean = { var stack = List(id) var 
analyzeToclear: List[Int] = Nil var res = true while(!stack.isEmpty && res) { val reasonClause = reasons(stack.head) stack = stack.tail reasonClause.lits.foreach(l => if((l>>1) != id && res) { val id = l>>1 if(!seen(id) && levels(id) > 0) { if(reasons(id) != null && (getAbstractLevel(id) & abstractLevel) != 0) { seen(id) = true stack ::= id analyzeToclear ::= id toSetUnseen ::= l } else { while(!analyzeToclear.isEmpty) { seen(analyzeToclear.head) = false; analyzeToclear = analyzeToclear.tail } res = false } } }) } res } var absLevel: Int = 0 learntClause.foreach(lit => absLevel |= getAbstractLevel(lit >> 1)) //maintain an abstract level learntClause = learntClause.filterNot(lit => { val reasonClause = reasons(lit >> 1) reasonClause != null && litRedundant(lit >> 1, absLevel) }) } toSetUnseen.foreach(lit => seen(lit>>1) = false) while(trailIndex < trail.size) { seen(trail(trailIndex) >> 1) = false trailIndex += 1 } learntClause ::= (p ^ 1) //don't forget to add p in the clause ! val res = new Clause(learntClause.toArray) logger.info("Learning clause: " + res.lits.map(literals(_)).mkString("[", ",", "]")) res } def litToVar(lit: Int): Int = lit >> 1 def litPolarity(lit: Int): Boolean = (lit & 1) == 0 def isAssigned(lit: Int): Boolean = model(lit >> 1) != -1 def isUnassigned(lit: Int): Boolean = model(lit >> 1) == -1 def isSat(lit: Int): Boolean = (model(lit >> 1) ^ (lit & 1)) == 0 def isUnsat(lit: Int): Boolean = (model(lit >> 1) ^ (lit & 1)) == 1 private class CNFFormula(var originalClauses: List[Clause], val nbVar: Int) { require(originalClauses.forall(cl => cl.lits.forall(lit => lit >= 0 && lit < 2*nbVar))) require(originalClauses.forall(cl => cl.lits.size >= 2)) require(originalClauses.forall(cl => cl.lits.forall(lit => cl.lits.count(l2 => (l2 >> 1) == (lit >> 1)) == 1))) private val nbProblemClauses: Int = originalClauses.size var nbClauses: Int = nbProblemClauses var learntClauses: List[Clause] = Nil var nbLearntClauses = 0 private var maxLearnt: Int = nbClauses / 3 private val maxLearntFactor: Double = 1.1 def augmentMaxLearnt() { maxLearnt = (maxLearnt*maxLearntFactor + 1).toInt } /* * The decay mechanism is from MiniSAT, instead of periodically scaling down * each variable, it is equivalent to just augment the value of the increment, since * the scale down will not change any order and only the relative value are important. * We use doubles and we use the upper bound of 1e100 before scaling down everything, to * avoid reaching the limits of floating points. 
*/ private val VSIDS_DECAY: Double = 0.95 private val VSIDS_CLAUSE_DECAY: Double = 0.999 private var vsidsInc: Double = 1d private val vsidsDecay: Double = 1d/VSIDS_DECAY private var vsidsClauseInc: Double = 1d private val vsidsClauseDecay: Double = 1d/VSIDS_CLAUSE_DECAY val vsidsQueue = new FixedIntDoublePriorityQueue(nbVar) initVSIDS() def initVSIDS() { originalClauses.foreach(cl => cl.lits.foreach(lit => { vsidsQueue.incScore(lit >> 1, vsidsInc) })) } def incVSIDS(id: Int) { val newScore = vsidsQueue.incScore(id, vsidsInc) if(newScore > 1e100) rescaleVSIDS() } def rescaleVSIDS() { vsidsQueue.rescaleScores(1e-100) vsidsInc *= 1e-100 } def decayVSIDS() { vsidsInc *= vsidsDecay } def incVSIDSClause(cl: Clause) { cl.activity = cl.activity + vsidsClauseInc if(cl.activity > 1e100) rescaleVSIDSClause() } def rescaleVSIDSClause() { for(cl <- learntClauses) cl.activity = cl.activity*1e-100 vsidsClauseInc *= 1e-100 } def decayVSIDSClause() { vsidsClauseInc *= vsidsClauseDecay } def learn(clause: Clause) { assert(clause.size > 1) learntClauses ::= clause nbClauses += 1 nbLearntClauses += 1 nbLearntClauseTotal += 1 nbLearntLiteralTotal += clause.lits.size for(lit <- clause.lits) incVSIDS(lit >> 1) incVSIDSClause(clause) recordClause(clause) if(nbLearntClauses > maxLearnt) reduceLearntClauses() } def reduceLearntClauses() { val sortedLearntClauses = learntClauses.sortWith((cl1, cl2) => cl1.activity < cl2.activity) val (forgotenClauses, keptClauses) = sortedLearntClauses.splitAt(maxLearnt/2) learntClauses = keptClauses for(cl <- forgotenClauses) { if(!cl.locked) { unrecordClause(cl) nbClauses -= 1 nbLearntClauses -= 1 nbRemovedClauses += 1 for(lit <- cl.lits) nbRemovedLiteral += 1 } else { learntClauses ::= cl } } } override def toString: String = (learntClauses ++ originalClauses).mkString("{\\n", "\\n", "\\n}") } private[this] def recordClause(cl: Clause) { watched(cl.lits(0)).append(cl) watched(cl.lits(1)).append(cl) } private[this] def unrecordClause(cl: Clause) { watched(cl.lits(0)).remove(cl) watched(cl.lits(1)).remove(cl) } /* * all enqueued literals are processed by the theory solver as well, * even those returned as t-consequences. This makes the overall invariants * much easier to maintain and consistant. 
*/ private[this] def enqueueLiteral(lit: Int, from: Clause = null) { logger.trace( "Enqueuing literal [" + literals(lit) + "] from clause: " + (if(from == null) "null" else from.lits.map(literals(_)).mkString("[", ", ", "]"))) val id = lit >> 1 val pol = (lit & 1) assert(model(id) == -1) model(id) = pol trail.push(lit) reasons(id) = from if(from != null) { //assert(from.lits.head == lit) //assert(from.lits.tail.forall(lit => isAssigned(lit))) //assert(from.lits.tail.forall(lit => isUnsat(lit))) //assert(from.lits.tail.forall(lit => trail.contains(lit>>1))) from.locked = true } levels(id) = decisionLevel } private[this] def decide() { if(cnfFormula.vsidsQueue.isEmpty) { logger.info("VSIDS queue is empty, model found") status = Satisfiable } else { logger.info("Determining decision literal at level %d".format(decisionLevel+1)) // handle assumptions var next = 0 // TODO next can be both a variable and a literal, which is confusing var foundNext = false while(decisionLevel < assumptions.size && !foundNext) { val p = assumptions(decisionLevel) if(isSat(p)) { // dummy decision level nbDecisions += 1 decisionLevel += 1 } else if(isUnsat(p)) { status = Unsatisfiable return } else { next = p foundNext = true // break } } if(foundNext) { nbDecisions += 1 decisionLevel += 1 enqueueLiteral(next) } // regular decision else { next = cnfFormula.vsidsQueue.deleteMax while(model(next) != -1 && !cnfFormula.vsidsQueue.isEmpty) next = cnfFormula.vsidsQueue.deleteMax if(model(next) == -1) { nbDecisions += 1 decisionLevel += 1 logger.info("Decision literal: " + literals(2*next + (nbDecisions & 1))) enqueueLiteral(2*next + (nbDecisions & 1)) } else { logger.info("no more literal to assign: model found") status = Satisfiable } } } } private[this] def backtrack() { if(decisionLevel == 0) status = Unsatisfiable else { nbConflicts += 1 cnfFormula.decayVSIDS() cnfFormula.decayVSIDSClause() val learntClause = conflictAnalysisStopWatch.time { conflictAnalysis } var lits = learntClause.lits val backtrackLevel = if(lits.size == 1) 0 else { var m = levels(lits(1) >> 1) var i = 2 while(i < lits.size) { val lvl = levels(lits(i) >> 1) if(lvl > m) { val tmp = lits(1) lits(1) = lits(i) lits(i) = tmp m = lvl } i += 1 } m } if(nbConflicts == nextRestart) { logger.info("Restarting after " + nbConflicts + " conflicts") restartInterval = (restartInterval*restartFactor).toInt nextRestart = nbConflicts + restartInterval nbRestarts += 1 backtrackTo(0) if(learntClause.size == 1) //since we do not learn the clause enqueueLiteral(learntClause.lits(0), learntClause) cnfFormula.augmentMaxLearnt() } else { assert(decisionLevel > backtrackLevel) backtrackTo(backtrackLevel) val lit = learntClause.lits(0) //assert(isUnassigned(lit)) //assert(learntClause.lits.tail.forall(isUnsat)) enqueueLiteral(lit, learntClause) //only on non restart //note that if the unitClause is of size 1, there will be an auto-reset to backtrack level 0 so this is correct as well } if(learntClause.size > 1) //we only learn if it is not unit, if it is unit, then it is processed via the unitClauses and its consequences is added at level 0 which is never forgot cnfFormula.learn(learntClause) status = Unknown } } private[this] def backtrackTo(lvl: Int): Unit = { logger.info("Backtracking to level " + lvl) while(decisionLevel > lvl && !trail.isEmpty) { //TODO: move pop inside ite body ? 
val head = trail.pop() decisionLevel = levels(head >> 1) if(decisionLevel > lvl) undo(head) else trail.push(head) } qHead = trail.size theoryHead = trail.size decisionLevel = lvl } private[this] def undo(lit: Int): Unit = { logger.trace("Undoing literal: " + literals(lit)) assert(isSat(lit)) val id = lit>>1 cnfFormula.vsidsQueue.insert(id) model(id) = -1 levels(id) = -1 val reasonClause = reasons(id) if(reasonClause != null) { reasonClause.locked = false reasons(id) = null } if(trail.size < theoryHead && !theoryPropagated(id)) { logger.debug("Theory backtrack for lit: " + literals(lit)) tSolver.backtrack(1) } theoryPropagated(id) = false } private[this] def deduce(): Unit = { while(qHead < trail.size && status != Conflict) { booleanPropagation() theoryPropagation() } } private[this] def booleanPropagation(): Unit = { while(qHead < trail.size && status != Conflict) { val forcedLit = trail(qHead) logger.trace("Processing BCP enqueued literal: " + literals(forcedLit)) //negatedLit is the literals that are made false and need updating of watchers val negatedLit = forcedLit ^ 1 assert(isAssigned(negatedLit)) qHead += 1 val ws: Vector[Clause] = watched(negatedLit) var i = 0 var j = 0 while(i < ws.size) { val clause = ws(i) val lits = clause.lits logger.trace("Considering clause: " + lits.map(literals(_)).mkString("[", ", ", "]") + " watching literal: " + literals(negatedLit)) i += 1 assert(lits(0) == negatedLit || lits(1) == negatedLit) if(lits(1) != negatedLit) { lits(0) = lits(1) lits(1) = negatedLit } assert(lits(1) == negatedLit) if(isAssigned(lits(0)) && isSat(lits(0))) { ws(j) = clause j += 1 } else { var newWatcherIndex = 2 var found = false while(!found && newWatcherIndex < lits.size) { val l = lits(newWatcherIndex) if(isUnassigned(l) || isSat(l)) found = true else newWatcherIndex += 1 } if(found) { lits(1) = lits(newWatcherIndex) lits(newWatcherIndex) = negatedLit watched(lits(1)).append(clause) } else { ws(j) = clause j += 1 if(isUnassigned(lits(0))) { nbPropagations += 1 logger.debug("Deducing literal: " + literals(lits(0))) enqueueLiteral(lits(0), clause) } else if(isUnsat(lits(0))) { logger.info("Detecting conflict during boolean propagation; unsat literal: " + literals(lits(0))) status = Conflict qHead = trail.size conflict = clause while(i < ws.size) { ws(j) = ws(i) i += 1 j += 1 } } } } } ws.shrink(i - j) } assert(qHead == trail.size) } def theoryPropagation(): Unit = { //theory propagation only propagates up to qHead, so is always behind boolean propagation (TODO: might not be necessary) while(theoryHead < qHead /*trail.size*/ && status != Conflict) { val lit = trail(theoryHead) val tLit = literals(lit) logger.trace("Processing theory head: " + tLit) theoryHead += 1 if(!theoryPropagated(lit >> 1)) { logger.info("Theory setTrue: " + tLit) val res = setTrueStopwatch.time{ tSolver.setTrue(tLit) } res match { case Left(tConsequences) => { tConsequences.foreach(l => { assert(tSolver.isTrue(l)) if(status != Conflict) { logger.debug("Theory propagation: " + l) val lInt = 2*l.id + l.polInt assert(lInt != lit) if(isUnsat(lInt)) { logger.info("Theory propagation detecting conflict with unsat literal: " + l) status = Conflict val trailArray = (for(i <- 0 until trail.size) yield trail(i) ^ 1).toArray conflict = new Clause(trailArray.filter(el => reasons(el>>1) == null && !theoryPropagated(el>>1))) } else if(isSat(lInt)) { logger.trace("Theory propagation deducing an already sat literal: " + l) } else { theoryPropagated(l.id) = true enqueueLiteral(lInt) } } }) } case Right(unsatCore) => { 
status = Conflict if(reasons(lit>>1) == null) { logger.info("Theory conflict triggered by decision literal " + tLit) val trailArray = (for(i <- 0 until trail.size) yield trail(i) ^ 1).toArray conflict = new Clause(trailArray.filter(el => reasons(el>>1) == null && !theoryPropagated(el>>1))) } else { logger.info("Theory conflict triggered by literal " + tLit) val trailArray = (for(i <- 0 until trail.size) yield trail(i) ^ 1).toArray conflict = new Clause(trailArray.filter(el => reasons(el>>1) == null && !theoryPropagated(el>>1))) } } } } } } //some debugging assertions that can be introduced in the code to check for correctness //assert that there is no unit clauses in the database def assertNoUnits(): Unit = { assert(qHead == trail.size) //we assume that all unit clauses queued have been processed for(clause <- cnfFormula.originalClauses ::: cnfFormula.learntClauses) { if(clause.lits.count(isUnassigned) == 1 && clause.lits.forall(lit => isUnassigned(lit) || isUnsat(lit))) { println("clause " + clause + " should be unit !") assert(false) } } } //assert the invariant of watched literal is correct def assertWatchedInvariant(): Unit = { for(cl <- (cnfFormula.originalClauses ::: cnfFormula.learntClauses)) { if(!watched(cl.lits(0)).contains(cl)) { println("clause " + cl + " is not correctly watched on " + cl.lits(0)) assert(false) } if(!watched(cl.lits(1)).contains(cl)) { println("clause " + cl + " is not correctly watched on " + cl.lits(1)) assert(false) } } for(v <- 0 until cnfFormula.nbVar) { val posLit = 2*v + 0 val ws1 = watched(posLit) for(i <- 0 until ws1.size) { val cl = ws1(i) assert(cl.lits(0) == posLit || cl.lits(1) == posLit) } val negLit = 2*v + 1 val ws2 = watched(negLit) for(i <- 0 until ws2.size) { val cl = ws2(i) assert(cl.lits(0) == negLit || cl.lits(1) == negLit) } } } def assertTrailInvariant(): Unit = { assert(qHead <= trail.size) val seen: Array[Boolean] = Array.fill(cnfFormula.nbVar)(false) // default value of false var lst: List[Int] = Nil var currentLevel = decisionLevel while(!trail.isEmpty) { val head = trail.pop() assert(isSat(head)) if(levels(head>>1) == currentLevel - 1) currentLevel -= 1 else { assert(levels(head>>1) == currentLevel) } lst ::= head if(reasons(head>>1) != null) { assert(isSat(reasons(head>>1).lits.head)) assert(reasons(head>>1).lits.tail.forall(lit => isUnsat(lit))) assert(reasons(head>>1).lits.tail.forall(lit => trail.contains(lit ^ 1) )) assert(reasons(head>>1).lits.forall(lit => !seen(lit >> 1) )) } seen(head>>1) = true } assert(currentLevel == 1 || currentLevel == 0) lst.foreach(i => trail.push(i)) } }
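// A small illustration, not part of the original file, of the integer literal encoding the solver
// above relies on: a literal is stored as 2 * variableId + polarityBit, so the variable id is
// recovered with >> 1, the polarity bit with & 1, and negation is a single xor with 1 (compare
// litToVar, litPolarity and the lit ^ 1 expressions in the solver).
object LiteralEncodingSketch {
  def toVar(lit: Int): Int = lit >> 1      // literals 6 and 7 both refer to variable 3
  def polarityBit(lit: Int): Int = lit & 1 // 0 and 1 distinguish the two polarities of a variable
  def negate(lit: Int): Int = lit ^ 1      // flips only the polarity bit

  // toVar(6) == 3, toVar(7) == 3, negate(6) == 7, negate(negate(6)) == 6
}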
regb/scabolic
src/main/scala/regolic/dpllt/DPLLSolver.scala
Scala
mit
33,435
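The solver above packs literals into plain integers; the following minimal standalone sketch spells out that convention (positive literal 2*v, negative literal 2*v + 1, so negation is an XOR with 1 and the variable index is a right shift), matching the `lit ^ 1`, `lit >> 1` and `2*l.id + l.polInt` expressions in the file. It is an illustration, not part of the record above.

// Illustrative sketch of the literal encoding used by the DPLL(T) solver above.
object LiteralEncoding {
  def positive(v: Int): Int = 2 * v        // even literals are the positive polarity
  def negative(v: Int): Int = 2 * v + 1    // odd literals are the negative polarity
  def negate(lit: Int): Int = lit ^ 1      // flip the polarity bit
  def variable(lit: Int): Int = lit >> 1   // drop the polarity bit

  def main(args: Array[String]): Unit = {
    val l = positive(7)
    assert(negate(l) == negative(7))
    assert(variable(negate(l)) == 7)
    println(s"lit=$l, negated=${negate(l)}, var=${variable(l)}")
  }
}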
package im.actor.server.persist.push import scala.concurrent.ExecutionContext import slick.driver.PostgresDriver.api._ import im.actor.server.models class GooglePushCredentialsTable(tag: Tag) extends Table[models.push.GooglePushCredentials](tag, "google_push_credentials") { def authId = column[Long]("auth_id", O.PrimaryKey) def projectId = column[Long]("project_id") def regId = column[String]("reg_id") def * = (authId, projectId, regId) <> (models.push.GooglePushCredentials.tupled, models.push.GooglePushCredentials.unapply) } object GooglePushCredentials { val creds = TableQuery[GooglePushCredentialsTable] def createOrUpdate(authId: Long, projectId: Long, regId: String)(implicit ec: ExecutionContext) = { for { _ ← creds.filterNot(_.authId === authId).filter(c ⇒ c.projectId === projectId && c.regId === regId).delete r ← creds.insertOrUpdate(models.push.GooglePushCredentials(authId, projectId, regId)) } yield r } def createOrUpdate(c: models.push.GooglePushCredentials) = creds.insertOrUpdate(c) def byAuthId(authId: Rep[Long]) = creds.filter(_.authId === authId) val byAuthIdC = Compiled(byAuthId _) def find(authId: Long) = byAuthIdC(authId).result.headOption def delete(authId: Long) = creds.filter(_.authId === authId).delete def deleteByToken(token: String) = creds.filter(_.regId === token).delete }
chenbk85/actor-platform
actor-server/actor-persist/src/main/scala/im/actor/server/persist/push/GooglePushCredentials.scala
Scala
mit
1,399
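A minimal usage sketch for the Slick queries above, assuming a configured PostgreSQL `Database`; the config path and the credential values are illustrative, not taken from the file.

// Hypothetical usage of the GooglePushCredentials actions against a Slick database.
import scala.concurrent.ExecutionContext.Implicits.global

import slick.driver.PostgresDriver.api._

import im.actor.server.persist.push.GooglePushCredentials

object GooglePushCredentialsUsage {
  def main(args: Array[String]): Unit = {
    val db = Database.forConfig("services.postgresql") // assumed config path

    // Upsert credentials for an auth session, then look them up again.
    val action = for {
      _     <- GooglePushCredentials.createOrUpdate(authId = 42L, projectId = 1001L, regId = "reg-token")
      found <- GooglePushCredentials.find(42L)
    } yield found

    db.run(action.transactionally).foreach(println)
  }
}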
package service

import java.io.File

import base.TestBaseDefinition
import org.apache.commons.io.FileUtils
import org.apache.poi.ss.usermodel.IndexedColors
import org.joda.time.LocalDate
import org.scalatest.WordSpec
import service.sheet._

class SheetServiceSpec extends WordSpec with TestBaseDefinition {

  "A SheetServiceSpec" should {

    "create rows" in {
      import Fraction._

      val now = LocalDate.now.toString("dd.MM.yy")
      val sheetHeader = SheetHeader("AP2\nKohls", "Medieninformatik", now)

      def col(i: Int) = List(
        Row(i.toString),
        Row("DobryninDobryninDobryninDobryninDobrynin"),
        Row("AlexAlexAlexAlexAlexAlex"),
        Row("88888888"),
        Row(now)
      )

      val rowHeader = RowHeader(
        List(
          Row("#1") -> Low,
          Row("Nachname") -> AutoFit,
          Row("Vorname") -> AutoFit,
          Row("MatNr.") -> Medium,
          Row("Datum") -> Medium
        ),
        IndexedColors.GREY_25_PERCENT,
        repeating = true
      )

      val cols = (0 until 100).map(col).toList
      val footer = SheetFooter("Generiert durch das Praktikumstool (praktikum.gm.fh-koeln.de)", true)
      val sheet = Sheet("A", sheetHeader, rowHeader, cols, footer)

      val res = SheetService.createSheet(sheet)(_ => ())
      val f = new File("foo.xls")
      FileUtils.writeByteArrayToFile(f, res.success.value.toByteArray)
    }
  }

  override protected def afterAll(): Unit = {
    super.afterAll()
    FileUtils.forceDelete(new File("foo.xls"))
  }
}

THK-ADV/lwm-reloaded
test/service/SheetServiceSpec.scala
Scala
mit
1,537
package com.boldradius.astrolabe.client.domain

import com.boldradius.astrolabe.client.d3.Layout.GraphLink

import scala.scalajs.js

/** Scala.js facade for a d3 force-layout link enriched with the host names of its two
  * endpoints, so cluster links can be labelled and filtered by host. */
trait ClusterGraphLink extends GraphLink {
  var sourceHost: String = js.native
  var targetHost: String = js.native
}

/** A host-to-host link variant used for role links, additionally carrying an index. */
trait ClusterGraphRoleLink extends ClusterGraphLink {
  var index: Int = js.native
}
boldradius/cluster-console
js/src/main/scala/com/boldradius/astrolabe/client/domain/ClusterGraphLink.scala
Scala
bsd-3-clause
338
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.table.plan.util import org.apache.flink.table.catalog.QueryOperationCatalogViewTable import com.google.common.collect.Sets import org.apache.calcite.plan.RelOptUtil import org.apache.calcite.rel.core.{TableFunctionScan, TableScan} import org.apache.calcite.rel.logical._ import org.apache.calcite.rel.{RelNode, RelShuttle, RelShuttleImpl} import org.apache.calcite.rex.{RexNode, RexShuttle, RexSubQuery} import scala.collection.JavaConversions._ class DefaultRelShuttle extends RelShuttle { override def visit(rel: RelNode): RelNode = { var change = false val newInputs = rel.getInputs.map { input => val newInput = input.accept(this) change = change || (input ne newInput) newInput } if (change) { rel.copy(rel.getTraitSet, newInputs) } else { rel } } override def visit(intersect: LogicalIntersect): RelNode = visit(intersect.asInstanceOf[RelNode]) override def visit(union: LogicalUnion): RelNode = visit(union.asInstanceOf[RelNode]) override def visit(aggregate: LogicalAggregate): RelNode = visit(aggregate.asInstanceOf[RelNode]) override def visit(minus: LogicalMinus): RelNode = visit(minus.asInstanceOf[RelNode]) override def visit(sort: LogicalSort): RelNode = visit(sort.asInstanceOf[RelNode]) override def visit(`match`: LogicalMatch): RelNode = visit(`match`.asInstanceOf[RelNode]) override def visit(exchange: LogicalExchange): RelNode = visit(exchange.asInstanceOf[RelNode]) override def visit(scan: TableScan): RelNode = visit(scan.asInstanceOf[RelNode]) override def visit(scan: TableFunctionScan): RelNode = visit(scan.asInstanceOf[RelNode]) override def visit(values: LogicalValues): RelNode = visit(values.asInstanceOf[RelNode]) override def visit(filter: LogicalFilter): RelNode = visit(filter.asInstanceOf[RelNode]) override def visit(project: LogicalProject): RelNode = visit(project.asInstanceOf[RelNode]) override def visit(join: LogicalJoin): RelNode = visit(join.asInstanceOf[RelNode]) override def visit(correlate: LogicalCorrelate): RelNode = visit(correlate.asInstanceOf[RelNode]) } /** * Convert all [[QueryOperationCatalogViewTable]]s (including tables in [[RexSubQuery]]) * to to a relational expression. */ class ExpandTableScanShuttle extends RelShuttleImpl { /** * Override this method to use `replaceInput` method instead of `copy` method * if any children change. This will not change any output of LogicalTableScan * when LogicalTableScan is replaced with RelNode tree in its RelTable. 
*/ override def visitChild(parent: RelNode, i: Int, child: RelNode): RelNode = { stack.push(parent) try { val child2 = child.accept(this) if (child2 ne child) { parent.replaceInput(i, child2) } parent } finally { stack.pop } } override def visit(filter: LogicalFilter): RelNode = { val newCondition = filter.getCondition.accept(new ExpandTableScanInSubQueryShuttle) if (newCondition ne filter.getCondition) { val newFilter = filter.copy(filter.getTraitSet, filter.getInput, newCondition) super.visit(newFilter) } else { super.visit(filter) } } override def visit(project: LogicalProject): RelNode = { val shuttle = new ExpandTableScanInSubQueryShuttle var changed = false val newProjects = project.getProjects.map { project => val newProject = project.accept(shuttle) if (newProject ne project) { changed = true } newProject } if (changed) { val newProject = project.copy( project.getTraitSet, project.getInput, newProjects, project.getRowType) super.visit(newProject) } else { super.visit(project) } } override def visit(join: LogicalJoin): RelNode = { val newCondition = join.getCondition.accept(new ExpandTableScanInSubQueryShuttle) if (newCondition ne join.getCondition) { val newJoin = join.copy( join.getTraitSet, newCondition, join.getLeft, join.getRight, join.getJoinType, join.isSemiJoinDone) super.visit(newJoin) } else { super.visit(join) } } class ExpandTableScanInSubQueryShuttle extends RexShuttle { override def visitSubQuery(subQuery: RexSubQuery): RexNode = { val newRel = subQuery.rel.accept(ExpandTableScanShuttle.this) var changed = false val newOperands = subQuery.getOperands.map { op => val newOp = op.accept(ExpandTableScanInSubQueryShuttle.this) if (op ne newOp) { changed = true } newOp } var newSubQuery = subQuery if (newRel ne newSubQuery.rel) { newSubQuery = newSubQuery.clone(newRel) } if (changed) { newSubQuery = newSubQuery.clone(newSubQuery.getType, newOperands) } newSubQuery } } /** * Converts [[LogicalTableScan]] the result [[RelNode]] tree * by calling [[QueryOperationCatalogViewTable]]#toRel */ override def visit(scan: TableScan): RelNode = { scan match { case tableScan: LogicalTableScan => val viewTable = tableScan.getTable.unwrap(classOf[QueryOperationCatalogViewTable]) if (viewTable != null) { val rel = viewTable.toRel(RelOptUtil.getContext(tableScan.getCluster), tableScan.getTable) rel.accept(this) } else { tableScan } case otherScan => otherScan } } } /** * Rewrite same rel object to different rel objects. * * <p>e.g. * {{{ * Join Join * / \\ / \\ * Filter1 Filter2 => Filter1 Filter2 * \\ / | | * Scan Scan1 Scan2 * }}} * After rewrote, Scan1 and Scan2 are different object but have same digest. */ class SameRelObjectShuttle extends DefaultRelShuttle { private val visitedNodes = Sets.newIdentityHashSet[RelNode]() override def visit(node: RelNode): RelNode = { val visited = !visitedNodes.add(node) var change = false val newInputs = node.getInputs.map { input => val newInput = input.accept(this) change = change || (input ne newInput) newInput } if (change || visited) { node.copy(node.getTraitSet, newInputs) } else { node } } }
shaoxuan-wang/flink
flink-table/flink-table-planner-blink/src/main/scala/org/apache/flink/table/plan/util/RelShuttles.scala
Scala
apache-2.0
7,201
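A minimal sketch of how the two shuttles above would typically be applied to a plan; `plan` is assumed to be an already-built Calcite `RelNode` and is not taken from the file.

// Hypothetical driver code: expand catalog-view scans, then break up shared RelNode instances.
import org.apache.calcite.rel.RelNode

import org.apache.flink.table.plan.util.{ExpandTableScanShuttle, SameRelObjectShuttle}

object RelShuttlesUsage {
  /** Expand QueryOperationCatalogViewTable scans into their underlying relational trees,
    * then rewrite the tree so every occurrence of a shared node is a distinct object. */
  def prepare(plan: RelNode): RelNode = {
    val expanded = plan.accept(new ExpandTableScanShuttle)
    expanded.accept(new SameRelObjectShuttle)
  }
}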
package viewModel import org.joda.time.DateTime import org.joda.time.format.DateTimeFormat import com.thetestpeople.trt.model._ import com.thetestpeople.trt.utils.DateUtils abstract class AbstractExecutionView(execution: AbstractExecution) { def id: Id[_] def executionTime: TimeDescription = TimeDescription(execution.executionTime) def durationOpt: Option[String] = execution.durationOpt.map(DateUtils.describeDuration) def durationSecondsOpt: Option[Double] = execution.durationOpt.map(_.getMillis / 1000.0) def passFailText: String = if (passed) "Passed" else "Failed" def passFailIcon = TickIcons.icon(execution.passed) def passed = execution.passed def failed = execution.failed def epochMillis = execution.executionTime.getMillis }
thetestpeople/trt
app/viewModel/AbstractExecutionView.scala
Scala
mit
769
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package de.fuberlin.wiwiss.silk.workspace import modules.linking.LinkingModule import modules.output.OutputModule import modules.source.SourceModule import de.fuberlin.wiwiss.silk.util.Identifier import de.fuberlin.wiwiss.silk.runtime.resource.{ResourceManager, ResourceLoader} import de.fuberlin.wiwiss.silk.workspace.modules.transform.TransformModule trait Project { /** * The name of this project */ val name : Identifier /** * Retrieves the project configuration. */ def config : ProjectConfig /** * Updates the project configuration. */ def config_=(config : ProjectConfig) /** * The source module, which encapsulates all data sources. */ def sourceModule : SourceModule /** * The linking module, which encapsulates all linking tasks. */ def linkingModule : LinkingModule /** * The transform module, which encapsulates all transform tasks. */ def transformModule: TransformModule /** * The output module, which encapsulates all outputs. */ def outputModule : OutputModule /** * For loading and writing resources. */ def resourceManager: ResourceManager }
fusepoolP3/p3-silk
silk-workspace/src/main/scala/de/fuberlin/wiwiss/silk/workspace/Project.scala
Scala
apache-2.0
1,714
package scalan.compilation.language import java.io.File import java.lang.reflect.Method import scala.collection.mutable import scala.collection.mutable.ArrayBuffer import scala.reflect.ClassTag import scalan.util.ReflectionUtil trait Adjustment { def apply[A](x: A) = Adjusted(x, Some(this)) } /** Converts a function to an Ordering object */ object MkOrdering extends Adjustment /** Converts a function A => B to (&A, &B) => void (for C++) */ object VoidInOut extends Adjustment /** Converts a enum to its index */ object EnumIndex extends Adjustment // TODO currently adjustments are simply ignored in MethodCallBridge // maybe adjustments: Seq[Adjustment] instead? case class Adjusted[+A](value: A, adjustment: Option[Adjustment]) { def adjust(adjustment: Adjustment) = copy(adjustment = Some(adjustment)) def map[B](f: A => B) = copy(value = f(value)) } object Adjusted { implicit def apply[A](x: A): Adjusted[A] = Adjusted(x, None) } trait LanguageId // TODO Should this be unified with ScalaMapping/CxxMapping? case object SCALA extends LanguageId case object CXX extends LanguageId class Mapping[LibraryT, TypeT <: TypeRep[MethodT], MethodT <: MethodRep](library: LibraryT, types: Map[String, (TypeT, Map[(String, Option[String]), MethodT])]) { private def supertypes(clazz: Class[_]): Iterable[Class[_]] = { val superclass = clazz.getSuperclass val immediateSupertypes = (if (superclass != null) Iterable(superclass) else Iterable.empty[Class[_]]) ++ clazz.getInterfaces immediateSupertypes ++ immediateSupertypes.flatMap(supertypes) } private def supertypesIncluding(clazz: Class[_]) = Iterable(clazz) ++ supertypes(clazz) def getMethod(method: Method): Option[(LibraryT, TypeT, MethodT)] = { val overloadId = ReflectionUtil.overloadId(method) val methodName = method.getName supertypesIncluding(method.getDeclaringClass). 
map(clazz => getMethod(clazz.getName, methodName, overloadId)).collectFirst { case Some(x) => x } } def getMethod(className: String, methodName: String, overloadId: Option[String]): Option[(LibraryT, TypeT, MethodT)] = { val typeMapping = types.get(className) typeMapping.flatMap(pair => pair._2.get((methodName, overloadId)).map(methodT => (library, pair._1, methodT))) } // TODO handle type arguments def getType(m: Manifest[_]): Option[(LibraryT, TypeT)] = { supertypesIncluding(m.runtimeClass).map(clazz => getType(clazz.getName)).collectFirst { case Some(x) => x } } def getType(className: String) = types.get(className).map(pair => (library, pair._1)) } trait MappingBuilder[LibraryT, TypeT <: TypeRep[MethodT], MethodT <: MethodRep] { def language: LanguageId type TypeBuilderT <: TypeBuilder[LibraryT, TypeT, MethodT, TypeBuilderT] def moduleName: String def library: LibraryT // If we make this a nested class along with everything else, TypeRep and MethodRep can't be params of MethodCallBridge def types(types: TypeT*)(implicit outer: MethodMappingDSL) = { val typeMap = types.map { sType => val fullClassName = moduleName + "$" + sType.scalanName val methodMap = sType.methods.map { sMethod => ((sMethod.scalanName, sMethod.overloadId), sMethod) }.toMap (fullClassName, (sType, methodMap)) }.toMap val mapping = new Mapping[LibraryT, TypeT, MethodT](library, typeMap) outer.addMapping(language, mapping) } } trait TypeRep[MethodT <: MethodRep] { def scalanName: String def mappedName: String def methods: Seq[MethodT] } trait MethodRep { def scalanName: String def mappedName: String def overloadId: Option[String] def isStatic: Boolean def isInternal: Boolean def receiverIndex: Adjusted[Int] def argOrder: Seq[Adjusted[Int]] } object MappingBuilder { def symbolIndex(originalArgs: Seq[Symbol], symbol: Adjusted[Symbol], name: String) = symbol.map { case 'this => -1 case a => originalArgs.indexOf(a) match { case -1 => throw new NoSuchElementException(s"$a not found in argument list $name(${originalArgs.mkString(", ")})") case index => index } } def symbolOrder(originalArgs: Seq[Symbol], reorderedArgs: Seq[Adjusted[Symbol]], name: String) = { reorderedArgs.map(argSym => symbolIndex(originalArgs, argSym, name)) } } import MappingBuilder.{symbolIndex, symbolOrder} trait TypeBuilder[LibraryT, TypeT, MethodT, TypeBuilderT] { type MethodBuilderT <: MethodBuilder[LibraryT, TypeT, MethodT, MethodBuilderT] def to(mappedName: String): TypeBuilderT def methods(methodBuilders: MethodBuilderT*): TypeT } trait MethodBuilder[LibraryT, TypeT, MethodT, MethodBuilderT] { def to(mappedName: String): MethodBuilderT def apply(): MethodT } object ScalaMapping { def MapModuleScala(moduleName: String): ScalaMappingBuilder = new ScalaMappingBuilder(moduleName, None, Nil) def MapModuleScala[A](implicit tag: ClassTag[A]): ScalaMappingBuilder = MapModuleScala(tag.runtimeClass.getName) case class ScalaLibrary(packageName: Option[String], jars: Seq[File]) // Add SBT dependency support? 
case class ScalaType(scalanName: String, mappedName: String, methods: Seq[ScalaMethod]) extends TypeRep[ScalaMethod] // TODO constructor arguments case class ScalaMethod(scalanName: String, overloadId: Option[String], mappedName: String, isStatic: Boolean, isInternal: Boolean, receiverIndex: Adjusted[Int], typeArgOrder: Seq[Adjusted[Int]], argOrder: Seq[Adjusted[Int]], implicitArgOrder: Seq[Adjusted[Int]]) extends MethodRep case class ScalaMappingBuilder(moduleName: String, packageName: Option[String], jars: Seq[File]) extends MappingBuilder[ScalaLibrary, ScalaType, ScalaMethod] { def language = SCALA def packageName(name: String): ScalaMappingBuilder = copy(packageName = Some(name)) def jars(files: File*) = copy(jars = this.jars ++ files) def library = ScalaLibrary(packageName, jars) type TypeBuilderT = MapTypeScala } case class MapTypeScala(scalanName: String, fieldSyms: Seq[Symbol], mappedName: String) extends TypeBuilder[ScalaLibrary, ScalaType, ScalaMethod, MapTypeScala] { def to(mappedName: String) = copy(mappedName = mappedName) def methods(methodBuilders: MethodBuilderT*) = ScalaType(scalanName, mappedName, methodBuilders.map(_.apply())) type MethodBuilderT = MapMethodScala } object MapTypeScala { def apply(scalanName: String, fieldSyms: Symbol*): MapTypeScala = new MapTypeScala(scalanName, fieldSyms, scalanName) def apply[A](fieldSyms: Symbol*)(implicit tag: ClassTag[A]): MapTypeScala = apply(tag.runtimeClass.getSimpleName, fieldSyms: _*) } case class MapMethodScala(scalanName: String, overloadId: Option[String], scalanArgs: Seq[Symbol], mappedName: String, isStatic: Boolean, isInternal: Boolean, receiverSym: Adjusted[Symbol], typeArgs: Seq[Adjusted[Symbol]], args: Seq[Adjusted[Symbol]], implicitArgs: Seq[Adjusted[Symbol]]) extends MethodBuilder[ScalaLibrary, ScalaType, ScalaMethod, MapMethodScala] { def to(mappedName: String) = copy(mappedName = mappedName) def internal(mappedName: String) = copy(isInternal = true, mappedName = mappedName, args = Adjusted('this) +: this.args) def onCompanion = copy(isStatic = true) def onArg(receiver: Adjusted[Symbol]) = copy(receiverSym = receiver, args = this.args.filter(_.value != receiver.value)) def typeArgs(typeArgs: Adjusted[Symbol]*): MapMethodScala = copy(typeArgs = typeArgs) def args(args: Adjusted[Symbol]*): MapMethodScala = copy(args = args) def implicitArgs(implicitArgs: Adjusted[Symbol]*): MapMethodScala = copy(implicitArgs = implicitArgs) def apply() = { val receiverIndex = symbolIndex(scalanArgs, receiverSym, scalanName) val typeArgOrder = symbolOrder(scalanArgs, typeArgs, scalanName) val argOrder = symbolOrder(scalanArgs, args, scalanName) val implicitArgOrder = symbolOrder(scalanArgs, implicitArgs, scalanName) ScalaMethod(scalanName, overloadId, mappedName, isStatic, isInternal, receiverIndex, typeArgOrder, argOrder, implicitArgOrder) } } object MapMethodScala { def apply(scalanName: String, argSyms: Symbol*): MapMethodScala = new MapMethodScala(scalanName, None, argSyms, scalanName, false, false, 'this, Nil, argSyms.map(Adjusted(_)), Nil) def apply(scalanName: String, overloadId: String, argSyms: Symbol*): MapMethodScala = new MapMethodScala(scalanName, Some(overloadId), argSyms, scalanName, false, false, 'this, Nil, argSyms.map(Adjusted(_)), Nil) } } object CxxMapping { def MapModuleCxx(moduleName: String): CxxMappingBuilder = new CxxMappingBuilder(moduleName, None, None) def MapModuleCxx[A](implicit tag: ClassTag[A]): CxxMappingBuilder = MapModuleCxx(tag.runtimeClass.getName) // Distinguish <> and "" headers in the future? 
For now, always use "", since it works for both. case class CxxLibrary(headerName: Option[String], namespace: Option[String]) case class CxxType(scalanName: String, mappedName: String, templateArgOrder: Seq[Adjusted[Int]], methods: Seq[CxxMethod]) extends TypeRep[CxxMethod] case class CxxMethod(scalanName: String, overloadId: Option[String], mappedName: String, isStatic: Boolean, isInternal: Boolean, receiverIndex: Adjusted[Int], templateArgOrder: Seq[Adjusted[Int]], argOrder: Seq[Adjusted[Int]]) extends MethodRep case class CxxMappingBuilder(moduleName: String, headerName: Option[String], namespace: Option[String]) extends MappingBuilder[CxxLibrary, CxxType, CxxMethod] { def language = CXX def withHeader(name: String): CxxMappingBuilder = copy(headerName = Some(name)) def withNamespace(namespace: String): CxxMappingBuilder = copy(namespace = Some(namespace)) def library = CxxLibrary(headerName, namespace) type TypeBuilderT = MapTypeCxx } case class MapTypeCxx(scalanName: String, fieldSyms: Seq[Symbol], mappedName: String, templateArgs: Seq[Adjusted[Symbol]]) extends TypeBuilder[CxxLibrary, CxxType, CxxMethod, MapTypeCxx] { def to(mappedName: String) = copy(mappedName = mappedName) def methods(methodBuilders: MethodBuilderT*) = CxxType(scalanName, mappedName, Nil /* TODO use templateArgs */, methodBuilders.map(_.apply())) type MethodBuilderT = MapMethodCxx } object MapTypeCxx { def apply(scalanName: String, fieldSyms: Symbol*): MapTypeCxx = new MapTypeCxx(scalanName, fieldSyms, scalanName, Nil) def apply[A](fieldSyms: Symbol*)(implicit tag: ClassTag[A]): MapTypeCxx = apply(tag.runtimeClass.getSimpleName, fieldSyms: _*) } case class MapMethodCxx(scalanName: String, overloadId: Option[String], scalanArgs: Seq[Symbol], mappedName: String, isStatic: Boolean, isInternal: Boolean, receiverSym: Adjusted[Symbol], templateArgs: Seq[Adjusted[Symbol]], args: Seq[Adjusted[Symbol]]) extends MethodBuilder[CxxLibrary, CxxType, CxxMethod, MapMethodCxx] { def to(mappedName: String) = copy(mappedName = mappedName) def static = copy(isStatic = true) def internal(mappedName: String) = copy(isInternal = true, mappedName = mappedName, args = Adjusted('this) +: this.args) def templateArgs(templateArgs: Adjusted[Symbol]*): MapMethodCxx = copy(templateArgs = templateArgs) def args(args: Adjusted[Symbol]*): MapMethodCxx = copy(args = args) def apply() = { val receiverIndex = symbolIndex(scalanArgs, receiverSym, scalanName) val templateArgOrder = symbolOrder(scalanArgs, templateArgs, scalanName) val argOrder = symbolOrder(scalanArgs, args, scalanName) CxxMethod(scalanName, overloadId, mappedName, isStatic, isInternal, receiverIndex, templateArgOrder, argOrder) } } object MapMethodCxx { def apply(scalanName: String, argSyms: Symbol*): MapMethodCxx = new MapMethodCxx(scalanName, None, argSyms, scalanName, false, false, 'this, Nil, argSyms.map(Adjusted(_))) def apply(scalanName: String, overloadId: String, argSyms: Symbol*): MapMethodCxx = new MapMethodCxx(scalanName, Some(overloadId), argSyms, scalanName, false, false, 'this, Nil, argSyms.map(Adjusted(_))) } } trait MethodMappingDSL { implicit def mm: MethodMappingDSL = this type AMapping = Mapping[LibraryT, TypeT, MethodT] forSome { type LibraryT type TypeT <: TypeRep[MethodT] type MethodT <: MethodRep } val mappingDSLs: mutable.Map[LanguageId, ArrayBuffer[AMapping]] = mutable.Map.empty def addMapping(language: LanguageId, mapping: AMapping) = mappingDSLs.getOrElseUpdate(language, ArrayBuffer.empty) += mapping }
PCMNN/scalan-ce
core/src/main/scala/scalan/compilation/language/MethodMappingDSL.scala
Scala
apache-2.0
12,546
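A minimal sketch of the Scala mapping DSL above; the module, type, method and package names are hypothetical, only the DSL calls themselves come from the file.

// Hypothetical mapping definition: map a made-up Scalan module onto an external Scala library.
import scalan.compilation.language._
import scalan.compilation.language.ScalaMapping._

object ExampleMappings extends MethodMappingDSL {
  MapModuleScala("com.example.CollectionsModule")
    .packageName("com.example.runtime")
    .types(
      MapTypeScala("MyArray", 'values).to("scala.Array")
        .methods(
          MapMethodScala("length"),             // same name on both sides
          MapMethodScala("mapBy", 'f).to("map") // renamed, single argument 'f
        )
    )
}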
package spark.broadcast import java.io._ import java.net._ import java.util.{Comparator, PriorityQueue, Random, UUID} import scala.collection.mutable.{Map, Set} import scala.math import spark._ class ChainedBroadcast[T](@transient var value_ : T, isLocal: Boolean) extends Broadcast[T] with Logging with Serializable { def value = value_ ChainedBroadcast.synchronized { ChainedBroadcast.values.put(uuid, value_) } @transient var arrayOfBlocks: Array[BroadcastBlock] = null @transient var totalBytes = -1 @transient var totalBlocks = -1 @transient var hasBlocks = 0 // CHANGED: BlockSize in the Broadcast object is expected to change over time @transient var blockSize = Broadcast.BlockSize @transient var listenPortLock = new Object @transient var guidePortLock = new Object @transient var totalBlocksLock = new Object @transient var hasBlocksLock = new Object @transient var pqOfSources = new PriorityQueue[SourceInfo] @transient var serveMR: ServeMultipleRequests = null @transient var guideMR: GuideMultipleRequests = null @transient var hostAddress = Utils.localIpAddress @transient var listenPort = -1 @transient var guidePort = -1 @transient var hasCopyInHDFS = false @transient var stopBroadcast = false // Must call this after all the variables have been created/initialized if (!isLocal) { sendBroadcast } def sendBroadcast(): Unit = { logInfo("Local host address: " + hostAddress) // Store a persistent copy in HDFS // TODO: Turned OFF for now // val out = new ObjectOutputStream(DfsBroadcast.openFileForWriting(uuid)) // out.writeObject(value_) // out.close() // TODO: Fix this at some point hasCopyInHDFS = true // Create a variableInfo object and store it in valueInfos var variableInfo = Broadcast.blockifyObject(value_) guideMR = new GuideMultipleRequests guideMR.setDaemon(true) guideMR.start() logInfo("GuideMultipleRequests started...") serveMR = new ServeMultipleRequests serveMR.setDaemon(true) serveMR.start() logInfo("ServeMultipleRequests started...") // Prepare the value being broadcasted // TODO: Refactoring and clean-up required here arrayOfBlocks = variableInfo.arrayOfBlocks totalBytes = variableInfo.totalBytes totalBlocks = variableInfo.totalBlocks hasBlocks = variableInfo.totalBlocks while (listenPort == -1) { listenPortLock.synchronized { listenPortLock.wait } } pqOfSources = new PriorityQueue[SourceInfo] val masterSource = SourceInfo(hostAddress, listenPort, totalBlocks, totalBytes, blockSize) pqOfSources.add(masterSource) // Register with the Tracker while (guidePort == -1) { guidePortLock.synchronized { guidePortLock.wait } } ChainedBroadcast.registerValue(uuid, guidePort) } private def readObject(in: ObjectInputStream): Unit = { in.defaultReadObject ChainedBroadcast.synchronized { val cachedVal = ChainedBroadcast.values.get(uuid) if (cachedVal != null) { value_ = cachedVal.asInstanceOf[T] } else { // Initializing everything because Master will only send null/0 values initializeSlaveVariables logInfo("Local host address: " + hostAddress) serveMR = new ServeMultipleRequests serveMR.setDaemon(true) serveMR.start() logInfo("ServeMultipleRequests started...") val start = System.nanoTime val receptionSucceeded = receiveBroadcast(uuid) // If does not succeed, then get from HDFS copy if (receptionSucceeded) { value_ = Broadcast.unBlockifyObject[T](arrayOfBlocks, totalBytes, totalBlocks) ChainedBroadcast.values.put(uuid, value_) } else { val fileIn = new ObjectInputStream(DfsBroadcast.openFileForReading(uuid)) value_ = fileIn.readObject.asInstanceOf[T] ChainedBroadcast.values.put(uuid, value_) 
fileIn.close() } val time =(System.nanoTime - start) / 1e9 logInfo("Reading Broadcasted variable " + uuid + " took " + time + " s") } } } private def initializeSlaveVariables: Unit = { arrayOfBlocks = null totalBytes = -1 totalBlocks = -1 hasBlocks = 0 blockSize = -1 listenPortLock = new Object totalBlocksLock = new Object hasBlocksLock = new Object serveMR = null hostAddress = Utils.localIpAddress listenPort = -1 stopBroadcast = false } def getMasterListenPort(variableUUID: UUID): Int = { var clientSocketToTracker: Socket = null var oosTracker: ObjectOutputStream = null var oisTracker: ObjectInputStream = null var masterListenPort: Int = SourceInfo.TxOverGoToHDFS var retriesLeft = Broadcast.MaxRetryCount do { try { // Connect to the tracker to find out the guide clientSocketToTracker = new Socket(Broadcast.MasterHostAddress, Broadcast.MasterTrackerPort) oosTracker = new ObjectOutputStream(clientSocketToTracker.getOutputStream) oosTracker.flush() oisTracker = new ObjectInputStream(clientSocketToTracker.getInputStream) // Send UUID and receive masterListenPort oosTracker.writeObject(uuid) oosTracker.flush() masterListenPort = oisTracker.readObject.asInstanceOf[Int] } catch { case e: Exception => { logInfo("getMasterListenPort had a " + e) } } finally { if (oisTracker != null) { oisTracker.close() } if (oosTracker != null) { oosTracker.close() } if (clientSocketToTracker != null) { clientSocketToTracker.close() } } retriesLeft -= 1 Thread.sleep(ChainedBroadcast.ranGen.nextInt( Broadcast.MaxKnockInterval - Broadcast.MinKnockInterval) + Broadcast.MinKnockInterval) } while (retriesLeft > 0 && masterListenPort == SourceInfo.TxNotStartedRetry) logInfo("Got this guidePort from Tracker: " + masterListenPort) return masterListenPort } def receiveBroadcast(variableUUID: UUID): Boolean = { val masterListenPort = getMasterListenPort(variableUUID) if (masterListenPort == SourceInfo.TxOverGoToHDFS || masterListenPort == SourceInfo.TxNotStartedRetry) { // TODO: SourceInfo.TxNotStartedRetry is not really in use because we go // to HDFS anyway when receiveBroadcast returns false return false } // Wait until hostAddress and listenPort are created by the // ServeMultipleRequests thread while (listenPort == -1) { listenPortLock.synchronized { listenPortLock.wait } } var clientSocketToMaster: Socket = null var oosMaster: ObjectOutputStream = null var oisMaster: ObjectInputStream = null // Connect and receive broadcast from the specified source, retrying the // specified number of times in case of failures var retriesLeft = Broadcast.MaxRetryCount do { // Connect to Master and send this worker's Information clientSocketToMaster = new Socket(Broadcast.MasterHostAddress, masterListenPort) // TODO: Guiding object connection is reusable oosMaster = new ObjectOutputStream(clientSocketToMaster.getOutputStream) oosMaster.flush() oisMaster = new ObjectInputStream(clientSocketToMaster.getInputStream) logInfo("Connected to Master's guiding object") // Send local source information oosMaster.writeObject(SourceInfo(hostAddress, listenPort)) oosMaster.flush() // Receive source information from Master var sourceInfo = oisMaster.readObject.asInstanceOf[SourceInfo] totalBlocks = sourceInfo.totalBlocks arrayOfBlocks = new Array[BroadcastBlock](totalBlocks) totalBlocksLock.synchronized { totalBlocksLock.notifyAll } totalBytes = sourceInfo.totalBytes logInfo("Received SourceInfo from Master:" + sourceInfo + " My Port: " + listenPort) val start = System.nanoTime val receptionSucceeded = receiveSingleTransmission(sourceInfo) val 
time =(System.nanoTime - start) / 1e9 // Updating some statistics in sourceInfo. Master will be using them later if (!receptionSucceeded) { sourceInfo.receptionFailed = true } // Send back statistics to the Master oosMaster.writeObject(sourceInfo) if (oisMaster != null) { oisMaster.close() } if (oosMaster != null) { oosMaster.close() } if (clientSocketToMaster != null) { clientSocketToMaster.close() } retriesLeft -= 1 } while (retriesLeft > 0 && hasBlocks < totalBlocks) return(hasBlocks == totalBlocks) } // Tries to receive broadcast from the source and returns Boolean status. // This might be called multiple times to retry a defined number of times. private def receiveSingleTransmission(sourceInfo: SourceInfo): Boolean = { var clientSocketToSource: Socket = null var oosSource: ObjectOutputStream = null var oisSource: ObjectInputStream = null var receptionSucceeded = false try { // Connect to the source to get the object itself clientSocketToSource = new Socket(sourceInfo.hostAddress, sourceInfo.listenPort) oosSource = new ObjectOutputStream(clientSocketToSource.getOutputStream) oosSource.flush() oisSource = new ObjectInputStream(clientSocketToSource.getInputStream) logInfo("Inside receiveSingleTransmission") logInfo("totalBlocks: "+ totalBlocks + " " + "hasBlocks: " + hasBlocks) // Send the range oosSource.writeObject((hasBlocks, totalBlocks)) oosSource.flush() for (i <- hasBlocks until totalBlocks) { val recvStartTime = System.currentTimeMillis val bcBlock = oisSource.readObject.asInstanceOf[BroadcastBlock] val receptionTime =(System.currentTimeMillis - recvStartTime) logInfo("Received block: " + bcBlock.blockID + " from " + sourceInfo + " in " + receptionTime + " millis.") arrayOfBlocks(hasBlocks) = bcBlock hasBlocks += 1 // Set to true if at least one block is received receptionSucceeded = true hasBlocksLock.synchronized { hasBlocksLock.notifyAll } } } catch { case e: Exception => { logInfo("receiveSingleTransmission had a " + e) } } finally { if (oisSource != null) { oisSource.close() } if (oosSource != null) { oosSource.close() } if (clientSocketToSource != null) { clientSocketToSource.close() } } return receptionSucceeded } class GuideMultipleRequests extends Thread with Logging { // Keep track of sources that have completed reception private var setOfCompletedSources = Set[SourceInfo]() override def run: Unit = { var threadPool = Utils.newDaemonCachedThreadPool() var serverSocket: ServerSocket = null serverSocket = new ServerSocket(0) guidePort = serverSocket.getLocalPort logInfo("GuideMultipleRequests => " + serverSocket + " " + guidePort) guidePortLock.synchronized { guidePortLock.notifyAll } try { // Don't stop until there is a copy in HDFS while (!stopBroadcast || !hasCopyInHDFS) { var clientSocket: Socket = null try { serverSocket.setSoTimeout(Broadcast.ServerSocketTimeout) clientSocket = serverSocket.accept } catch { case e: Exception => { logInfo("GuideMultipleRequests Timeout.") // Stop broadcast if at least one worker has connected and // everyone connected so far are done. 
Comparing with // pqOfSources.size - 1, because it includes the Guide itself if (pqOfSources.size > 1 && setOfCompletedSources.size == pqOfSources.size - 1) { stopBroadcast = true } } } if (clientSocket != null) { logInfo("Guide: Accepted new client connection: " + clientSocket) try { threadPool.execute(new GuideSingleRequest(clientSocket)) } catch { // In failure, close the socket here; else, the thread will close it case ioe: IOException => clientSocket.close() } } } logInfo("Sending stopBroadcast notifications...") sendStopBroadcastNotifications ChainedBroadcast.unregisterValue(uuid) } finally { if (serverSocket != null) { logInfo("GuideMultipleRequests now stopping...") serverSocket.close() } } // Shutdown the thread pool threadPool.shutdown() } private def sendStopBroadcastNotifications: Unit = { pqOfSources.synchronized { var pqIter = pqOfSources.iterator while (pqIter.hasNext) { var sourceInfo = pqIter.next var guideSocketToSource: Socket = null var gosSource: ObjectOutputStream = null var gisSource: ObjectInputStream = null try { // Connect to the source guideSocketToSource = new Socket(sourceInfo.hostAddress, sourceInfo.listenPort) gosSource = new ObjectOutputStream(guideSocketToSource.getOutputStream) gosSource.flush() gisSource = new ObjectInputStream(guideSocketToSource.getInputStream) // Send stopBroadcast signal. Range = SourceInfo.StopBroadcast*2 gosSource.writeObject((SourceInfo.StopBroadcast, SourceInfo.StopBroadcast)) gosSource.flush() } catch { case e: Exception => { logInfo("sendStopBroadcastNotifications had a " + e) } } finally { if (gisSource != null) { gisSource.close() } if (gosSource != null) { gosSource.close() } if (guideSocketToSource != null) { guideSocketToSource.close() } } } } } class GuideSingleRequest(val clientSocket: Socket) extends Thread with Logging { private val oos = new ObjectOutputStream(clientSocket.getOutputStream) oos.flush() private val ois = new ObjectInputStream(clientSocket.getInputStream) private var selectedSourceInfo: SourceInfo = null private var thisWorkerInfo:SourceInfo = null override def run: Unit = { try { logInfo("new GuideSingleRequest is running") // Connecting worker is sending in its hostAddress and listenPort it will // be listening to. Other fields are invalid(SourceInfo.UnusedParam) var sourceInfo = ois.readObject.asInstanceOf[SourceInfo] pqOfSources.synchronized { // Select a suitable source and send it back to the worker selectedSourceInfo = selectSuitableSource(sourceInfo) logInfo("Sending selectedSourceInfo: " + selectedSourceInfo) oos.writeObject(selectedSourceInfo) oos.flush() // Add this new(if it can finish) source to the PQ of sources thisWorkerInfo = SourceInfo(sourceInfo.hostAddress, sourceInfo.listenPort, totalBlocks, totalBytes, blockSize) logInfo("Adding possible new source to pqOfSources: " + thisWorkerInfo) pqOfSources.add(thisWorkerInfo) } // Wait till the whole transfer is done. Then receive and update source // statistics in pqOfSources sourceInfo = ois.readObject.asInstanceOf[SourceInfo] pqOfSources.synchronized { // This should work since SourceInfo is a case class assert(pqOfSources.contains(selectedSourceInfo)) // Remove first pqOfSources.remove(selectedSourceInfo) // TODO: Removing a source based on just one failure notification! 
// Update sourceInfo and put it back in, IF reception succeeded if (!sourceInfo.receptionFailed) { // Add thisWorkerInfo to sources that have completed reception setOfCompletedSources.synchronized { setOfCompletedSources += thisWorkerInfo } selectedSourceInfo.currentLeechers -= 1 // Put it back pqOfSources.add(selectedSourceInfo) } } } catch { // If something went wrong, e.g., the worker at the other end died etc. // then close everything up case e: Exception => { // Assuming that exception caused due to receiver worker failure. // Remove failed worker from pqOfSources and update leecherCount of // corresponding source worker pqOfSources.synchronized { if (selectedSourceInfo != null) { // Remove first pqOfSources.remove(selectedSourceInfo) // Update leecher count and put it back in selectedSourceInfo.currentLeechers -= 1 pqOfSources.add(selectedSourceInfo) } // Remove thisWorkerInfo if (pqOfSources != null) { pqOfSources.remove(thisWorkerInfo) } } } } finally { ois.close() oos.close() clientSocket.close() } } // FIXME: Caller must have a synchronized block on pqOfSources // FIXME: If a worker fails to get the broadcasted variable from a source and // comes back to Master, this function might choose the worker itself as a // source tp create a dependency cycle(this worker was put into pqOfSources // as a streming source when it first arrived). The length of this cycle can // be arbitrarily long. private def selectSuitableSource(skipSourceInfo: SourceInfo): SourceInfo = { // Select one based on the ordering strategy(e.g., least leechers etc.) // take is a blocking call removing the element from PQ var selectedSource = pqOfSources.poll assert(selectedSource != null) // Update leecher count selectedSource.currentLeechers += 1 // Add it back and then return pqOfSources.add(selectedSource) return selectedSource } } } class ServeMultipleRequests extends Thread with Logging { override def run: Unit = { var threadPool = Utils.newDaemonCachedThreadPool() var serverSocket: ServerSocket = null serverSocket = new ServerSocket(0) listenPort = serverSocket.getLocalPort logInfo("ServeMultipleRequests started with " + serverSocket) listenPortLock.synchronized { listenPortLock.notifyAll } try { while (!stopBroadcast) { var clientSocket: Socket = null try { serverSocket.setSoTimeout(Broadcast.ServerSocketTimeout) clientSocket = serverSocket.accept } catch { case e: Exception => { logInfo("ServeMultipleRequests Timeout.") } } if (clientSocket != null) { logInfo("Serve: Accepted new client connection: " + clientSocket) try { threadPool.execute(new ServeSingleRequest(clientSocket)) } catch { // In failure, close socket here; else, the thread will close it case ioe: IOException => clientSocket.close() } } } } finally { if (serverSocket != null) { logInfo("ServeMultipleRequests now stopping...") serverSocket.close() } } // Shutdown the thread pool threadPool.shutdown() } class ServeSingleRequest(val clientSocket: Socket) extends Thread with Logging { private val oos = new ObjectOutputStream(clientSocket.getOutputStream) oos.flush() private val ois = new ObjectInputStream(clientSocket.getInputStream) private var sendFrom = 0 private var sendUntil = totalBlocks override def run: Unit = { try { logInfo("new ServeSingleRequest is running") // Receive range to send var rangeToSend = ois.readObject.asInstanceOf[(Int, Int)] sendFrom = rangeToSend._1 sendUntil = rangeToSend._2 if (sendFrom == SourceInfo.StopBroadcast && sendUntil == SourceInfo.StopBroadcast) { stopBroadcast = true } else { // Carry on sendObject } } 
catch { // If something went wrong, e.g., the worker at the other end died etc. // then close everything up case e: Exception => { logInfo("ServeSingleRequest had a " + e) } } finally { logInfo("ServeSingleRequest is closing streams and sockets") ois.close() oos.close() clientSocket.close() } } private def sendObject: Unit = { // Wait till receiving the SourceInfo from Master while (totalBlocks == -1) { totalBlocksLock.synchronized { totalBlocksLock.wait } } for (i <- sendFrom until sendUntil) { while (i == hasBlocks) { hasBlocksLock.synchronized { hasBlocksLock.wait } } try { oos.writeObject(arrayOfBlocks(i)) oos.flush() } catch { case e: Exception => { logInfo("sendObject had a " + e) } } logInfo("Sent block: " + i + " to " + clientSocket) } } } } } class ChainedBroadcastFactory extends BroadcastFactory { def initialize(isMaster: Boolean) = ChainedBroadcast.initialize(isMaster) def newBroadcast[T](value_ : T, isLocal: Boolean) = new ChainedBroadcast[T](value_, isLocal) } private object ChainedBroadcast extends Logging { val values = SparkEnv.get.cache.newKeySpace() var valueToGuidePortMap = Map[UUID, Int]() // Random number generator var ranGen = new Random private var initialized = false private var isMaster_ = false private var trackMV: TrackMultipleValues = null def initialize(isMaster__ : Boolean): Unit = { synchronized { if (!initialized) { isMaster_ = isMaster__ if (isMaster) { trackMV = new TrackMultipleValues trackMV.setDaemon(true) trackMV.start() // TODO: Logging the following line makes the Spark framework ID not // getting logged, cause it calls logInfo before log4j is initialized logInfo("TrackMultipleValues started...") } // Initialize DfsBroadcast to be used for broadcast variable persistence DfsBroadcast.initialize initialized = true } } } def isMaster = isMaster_ def registerValue(uuid: UUID, guidePort: Int): Unit = { valueToGuidePortMap.synchronized { valueToGuidePortMap +=(uuid -> guidePort) logInfo("New value registered with the Tracker " + valueToGuidePortMap) } } def unregisterValue(uuid: UUID): Unit = { valueToGuidePortMap.synchronized { valueToGuidePortMap(uuid) = SourceInfo.TxOverGoToHDFS logInfo("Value unregistered from the Tracker " + valueToGuidePortMap) } } class TrackMultipleValues extends Thread with Logging { override def run: Unit = { var threadPool = Utils.newDaemonCachedThreadPool() var serverSocket: ServerSocket = null serverSocket = new ServerSocket(Broadcast.MasterTrackerPort) logInfo("TrackMultipleValues" + serverSocket) try { while (true) { var clientSocket: Socket = null try { serverSocket.setSoTimeout(Broadcast.TrackerSocketTimeout) clientSocket = serverSocket.accept } catch { case e: Exception => { logInfo("TrackMultipleValues Timeout. 
Stopping listening...") } } if (clientSocket != null) { try { threadPool.execute(new Thread { override def run: Unit = { val oos = new ObjectOutputStream(clientSocket.getOutputStream) oos.flush() val ois = new ObjectInputStream(clientSocket.getInputStream) try { val uuid = ois.readObject.asInstanceOf[UUID] var guidePort = if (valueToGuidePortMap.contains(uuid)) { valueToGuidePortMap(uuid) } else SourceInfo.TxNotStartedRetry logInfo("TrackMultipleValues: Got new request: " + clientSocket + " for " + uuid + " : " + guidePort) oos.writeObject(guidePort) } catch { case e: Exception => { logInfo("TrackMultipleValues had a " + e) } } finally { ois.close() oos.close() clientSocket.close() } } }) } catch { // In failure, close socket here; else, client thread will close case ioe: IOException => clientSocket.close() } } } } finally { serverSocket.close() } // Shutdown the thread pool threadPool.shutdown() } } }
jperla/spark-advancers
core/src/main/scala/spark/broadcast/ChainedBroadcast.scala
Scala
bsd-3-clause
25,949
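A minimal sketch of how the factory above is meant to be driven. ChainedBroadcast assumes a fully initialized Spark 0.x environment (SparkEnv, DfsBroadcast, reachable tracker ports), so this is an API illustration rather than a standalone program; the broadcast value is made up.

// Hypothetical lifecycle: initialize the factory on the master, then broadcast a value.
import spark.broadcast.ChainedBroadcastFactory

object ChainedBroadcastUsage {
  def broadcastLookupTable(): Unit = {
    val factory = new ChainedBroadcastFactory
    factory.initialize(isMaster = true)                    // starts the TrackMultipleValues tracker
    val table = Map("a" -> 1, "b" -> 2)
    val bc = factory.newBroadcast(table, isLocal = false)   // blockifies and registers the value
    println(bc.value("a"))
  }
}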
/* * Copyright 2006-2011 WorldWide Conferencing, LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package net.liftweb package util import java.lang.reflect.{Method, InvocationTargetException} import java.lang.reflect.Modifier._ import scala.reflect.Manifest import common._ object ClassHelpers extends ClassHelpers with ControlHelpers /** * ClassHelpers provide several functions to instantiate a Class object given the class name and one or more package names */ trait ClassHelpers { self: ControlHelpers => private val nameModifiers = List[String => String](StringHelpers.camelify _, n => n) /** * This operator transforms its arguments into a List * @return the list of arguments passed as varargs */ def ^ [T](i: T*): List[T] = i.toList /** * General method to in find a class according to its name, a list of possible packages, * a list of functions modifying the given name create a target name to look for * (e.g: 'name' is hello_world and the target name may be 'HelloWorld'). * * @parameter name name of the class to find * @parameter where list of package names which may contain the class * @parameter modifiers list of functions that modify the 'name' of the class (e.g., leave it alone, make it camel case, etc.) * @parameter targetType optional expected type which the retrieved class should conform to * * @return a Box, either containing the found class or an Empty can. */ def findClass[C <: AnyRef](name: String, where: List[String], modifiers: List[Function1[String, String]], targetType: Class[C]): Box[Class[C]] = (for ( place <- where.view; mod <- modifiers.view; fullName = place + "." + mod(name); ignore = List(classOf[ClassNotFoundException], classOf[ClassCastException], classOf[NoClassDefFoundError]); klass <- tryo(ignore)(Class.forName(fullName).asSubclass(targetType).asInstanceOf[Class[C]]) ) yield klass).headOption /** * General method to in find a class according to its type, its name, a list of possible * packages and a list of functions modifying the given name create a target name to look for * (e.g: 'name' is hello_world and the target name may be 'HelloWorld'). * * @parameter C type of the class to find * @parameter name name of the class to find * @parameter where list of package names which may contain the class * @parameter modifiers list of functions that modify the 'name' of the class (e.g., leave it alone, make it camel case, etc.) * * @return a Box, either containing the found class or an Empty can. */ def findType[C <: AnyRef](name: String, where: List[String], modifiers: List[String => String])(implicit m: Manifest[C]): Box[Class[C]] = findClass(name, where, modifiers, m.runtimeClass.asInstanceOf[Class[C]]) /** * General method to in find a class according to its name, a list of possible packages and a * list of functions modifying the given name create a target name to look for (e.g: 'name' is * hello_world and the target name may be 'HelloWorld'). 
* * @parameter name name of the class to find * @parameter where list of package names which may contain the class * @parameter modifiers list of functions that modify the 'name' of the class (e.g., leave it alone, make it camel case, etc.) * * @return a Box, either containing the found class or an Empty can. */ def findClass(name: String, where: List[String], modifiers: List[String => String]): Box[Class[AnyRef]] = findType[AnyRef](name, where, modifiers) /** * Find a class given its name and a list of packages, turning underscored names to * CamelCase if necessary. * * @parameter name name of the class to find * @parameter where list of package names which may contain the class * @parameter targetType optional expected type which the retrieved class should conform to * * @return a Box, either containing the found class or an Empty can. */ def findClass[C <: AnyRef](name: String, where: List[String], targetType: Class[C]): Box[Class[C]] = findClass(name, where, nameModifiers, targetType) /** * Find a class given its type, its name and a list of packages, turning underscored names to * CamelCase if necessary. * * @parameter C type of the class to find * @parameter name name of the class to find * @parameter where list of package names which may contain the class * * @return a Box, either containing the found class or an Empty can. */ def findType[C <: AnyRef](name: String, where: List[String])(implicit m: Manifest[C]): Box[Class[C]] = findType[C](name, where, nameModifiers) /** * Find a class given its name and a list of packages, turning underscored names to CamelCase if * necessary. * * @parameter name name of the class to find * @parameter where list of package names which may contain the class * * @return a Box, either containing the found class or an Empty can. */ def findClass(name: String, where: List[String]): Box[Class[AnyRef]] = findClass(name, where, nameModifiers) /** * Find a class given its type, a list of possible names and corresponding packages, turning * underscored names to CamelCase if necessary * * @parameter C type of the class to find * @parameter where list of pairs (name, package names) which may contain the class * * @return a Box, either containing the found class or an Empty can. */ def findType[C <: AnyRef](where: List[(String, List[String])])(implicit m: Manifest[C]): Box[Class[C]] = (for ( (name, packages) <- where; klass <- findType[C](name, packages) ) yield klass).headOption /** * Find a class given a list of possible names and corresponding packages, turning underscored * names to CamelCase if necessary * * @parameter where list of pairs (name, package names) which may contain the class * * @return a Box, either containing the found class or an Empty can. 
*/ def findClass(where: List[(String, List[String])]): Box[Class[AnyRef]] = findType[AnyRef](where) /** * @return true if the method is public and has no parameters */ def callableMethod_?(meth: Method) = { meth != null && meth.getParameterTypes.length == 0 && isPublic(meth.getModifiers) } /** * Is the clz an instance of (assignable from) any of the classes in the list * * @param clz the class to test * @param toMatch the list of classes to match against * * @return true if clz is assignable from any of the matching classes */ def containsClass[C](clz: Class[C], toMatch: List[Class[_]]): Boolean = if (toMatch eq null) false else toMatch.exists(_.isAssignableFrom(clz)) /** * Check that the method 'name' is callable for class 'clz' * * @param clz the class supposed to own the method * @param name name of the method to test * * @return true if the method exists on the class and is callable */ def classHasControllerMethod(clz: Class[_], name: String): Boolean = { tryo { clz match { case null => false case _ => callableMethod_?(clz.getDeclaredMethod(name)) } } openOr false } /** * Invoke a controller method (parameterless, public) on a class * * @param clz the class owning the method * @param name name of the method to invoke * * @return the result of the method invocation or throws the root exception causing an error */ def invokeControllerMethod(clz: Class[_], meth: String) = { try { clz.getMethod(meth).invoke(clz.newInstance) } catch { case c : InvocationTargetException => { def findRoot(e : Throwable) { if (e.getCause == null || e.getCause == e) throw e else findRoot(e.getCause) } findRoot(c) } } } /** * Invoke the given method for the given class, with no params. * The class is not instanciated if the method is static, otherwise the passed instance is used * * @param clz class whose method should be invoked * @param inst instance of the class who method should be invoked, if the method is not static * @param meth method to invoke * * @return a Box containing the value returned by the method */ def invokeMethod[C](clz: Class[C], inst: AnyRef, meth: String): Box[Any] = invokeMethod(clz, inst, meth, Nil.toArray) /** * Invoke the given method for the given class, with some parameters. 
* Tries the method name, then the method as a CamelCased name and the method as a camelCased name * The class is not instanciated if the method is static, otherwise the passed instance is used * * @param clz class whose method should be invoked * @param inst instance of the class who method should be invoked, if the method is not static * @param meth method to invoke * @param params parameters to pass to the method * * @return a Box containing the value returned by the method */ def invokeMethod[C](clz: Class[C], inst: AnyRef, meth: String, params: Array[AnyRef]): Box[Any] = { _invokeMethod(clz, inst, meth, params, Empty) or _invokeMethod(clz, inst, StringHelpers.camelify(meth), params, Empty) or _invokeMethod(clz, inst, StringHelpers.camelifyMethod(meth), params, Empty) } /** * Invoke the given method for the given class, with some parameters and their types * Tries the method name, then the method as a CamelCased name and the method as a camelCased name * The class is not instanciated if the method is static, otherwise the passed instance is used * * @param clz class whose method should be invoked * @param inst instance of the class who method should be invoked, if the method is not static * @param meth method to invoke * @param params parameters to pass to the method * @param ptypes list of types of the parameters * * @return a Box containing the value returned by the method */ def invokeMethod[C](clz: Class[C], inst: AnyRef, meth: String, params: Array[AnyRef], ptypes: Array[Class[_]]): Box[Any] = { _invokeMethod(clz, inst, meth, params, Full(ptypes)) or _invokeMethod(clz, inst, StringHelpers.camelify(meth), params, Full(ptypes)) or _invokeMethod(clz, inst, StringHelpers.camelifyMethod(meth), params, Full(ptypes)) } /** * Invoke the given method for the given class, with the given params. * The class is not instanciated if the method is static, otherwise the passed instance is used * * @param clz class whose method should be invoked * @param inst instance of the class who method should be invoked, if the method is not static * @param meth method to invoke * @param params parameters to pass to the method * @param ptypes list of types of the parameters * * @return a Box containing the value returned by the method */ private def _invokeMethod[C](clz: Class[C], inst: AnyRef, meth: String, params: Array[AnyRef], ptypes: Box[Array[Class[_]]]): Box[Any] = { // try to find a method matching the given parameters def possibleMethods: List[Method] = { /* * try to find a method with the same name and the same number of arguments. Doesn't check the types. * The reason is that it's hard to know for the programmer what is the class name of a given object/class, because scala * add some extra $ for ex. 
*/ def alternateMethods: List[Method] = clz.getDeclaredMethods.toList.filter( m => m.getName.equals(meth) && isPublic(m.getModifiers) && m.getParameterTypes.length == params.length) methCacheLock.read { def key = (clz.getName, meth, params.length) if (Props.productionMode && methodCache.contains(key)) { methodCache(key) } else { val ret = try { val classes: Array[Class[_]] = ptypes openOr params.map(_.getClass) List(clz.getMethod(meth, classes : _*)) } catch { case e: NullPointerException => Nil case e: NoSuchMethodException => alternateMethods } if (Props.productionMode) { methCacheLock.upgrade(methodCache(key) = ret) } ret } } } /* def findFirst[T, U](l: List[T], f: T => U, predicate: U => Boolean): Box[U] = { l match { case Nil => Empty case x :: xs => { val result = f(x) if (predicate(result)) Full(result) else findFirst(xs, f, predicate) } } } */ possibleMethods.iterator.filter(m => inst != null || isStatic(m.getModifiers)). map((m: Method) => tryo{m.invoke(inst, params : _*)}). find((x: Box[Any]) => x match { case result@Full(_) => true case Failure(_, Full(c: IllegalAccessException), _) => false case Failure(_, Full(c: IllegalArgumentException), _) => false case Failure(_, Full(c), _) => if (c.getCause != null) throw c.getCause else throw c case _ => false }) match { case Some(result@Full(_)) => result case _ => Failure("invokeMethod " + meth, Empty, Empty) } } private val methCacheLock = new ConcurrentLock private val methodCache: LRU[(String, String, Int), List[Method]] = new LRU(5000) /** * Create a new instance of a class * * @return a Full can with the instance or a Failure if the instance can't be created */ def instantiate[C](clz: Class[C]): Box[C] = tryo { clz.newInstance } /** * Create a function (the 'invoker') which will trigger any public, parameterless method * That function will throw the cause exception if the method can't be invoked * * @param clz class whose method should be invoked * @param on instance whose method must be invoked * * @return Empty if instance is null or Full(invoker) */ def createInvoker[C <: AnyRef](name: String, on: C): Box[() => Box[Any]] = { def controllerMethods(instance: C) = instance.getClass.getDeclaredMethods.filter { m => m.getName == name && isPublic(m.getModifiers) && m.getParameterTypes.isEmpty } on match { case null => Empty case instance => { controllerMethods(instance).toList match { case Nil => Empty case x :: xs => Full(() => { try { Full(x.invoke(instance)) } catch { case e : InvocationTargetException => throw e.getCause } } ) } } } } def classHierarchy(in: Class[_]): List[Class[_]] = { import scala.collection.mutable._ val ret: ListBuffer[Class[_]] = new ListBuffer var c: Class[_] = in ret += c while (c.getSuperclass != null) { val sc: Class[_] = c.getSuperclass ret += sc c = sc } ret.toList } }
lzpfmh/framework-2
core/util/src/main/scala/net/liftweb/util/ClassHelpers.scala
Scala
apache-2.0
15,496
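A minimal usage sketch for the invokeMethod and instantiate helpers in the ClassHelpers.scala record above. It assumes these helpers are importable through Lift's net.liftweb.util.Helpers object and that net.liftweb.common provides Box and Full; the Greeter class and its method name are hypothetical.

import net.liftweb.common.{Box, Full}
import net.liftweb.util.Helpers._

class Greeter {
  // Resolvable from "hello_world" after camelification
  def helloWorld: String = "hello"
}

object InvokeMethodSketch {
  def main(args: Array[String]): Unit = {
    // instantiate reflectively creates a Greeter inside a Box
    val instance: Box[Greeter] = instantiate(classOf[Greeter])
    val greeting: Box[Any] = instance.flatMap { g =>
      // "hello_world" fails as-is, then as "HelloWorld", then "helloWorld" matches
      invokeMethod(classOf[Greeter], g, "hello_world", Array[AnyRef]())
    }
    assert(greeting == Full("hello"))
  }
}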
package org.hello.foo import org.hello.bar._ import scala.util.Random import scala.io.Source class Simple(step:Int) { private val _step:Int = step def timeStep: Long = this._step * Bar.showTiming def showStep: Int = this._step } object Foo { private val Limit = 10 def main(args:Array[String]) { val iterations:Int = if (args.nonEmpty) args(0).toInt else 1 (0 until iterations).foreach( x => { val aNew = new Simple(Random.nextInt(this.Limit)) println(aNew.timeStep) } ) println("# parsing file now") val file = getClass.getResourceAsStream("/dataset.txt") println(file) Source.fromInputStream(file).getLines.foreach( x => { val aNew = new Simple(Random.nextInt(this.Limit)) println(aNew.timeStep) } ) } }
sadikovi/sbt-multi-project-example
foo/src/main/scala/Foo.scala
Scala
mit
916
import scala.language.dynamics class Foo extends scala.Dynamic { def applytDynamicNamed(name: String)(args: Any*): String = ??? } object DynamicTest { implicit class Bar(foo: Foo) { def bazApply: Int = ??? } def baz: String = new Foo().bazApply(a = "") // error }
lampepfl/dotty
tests/neg/dynamicDynamicImplicitsTest3.scala
Scala
apache-2.0
279
package com.github.otsoaunloco import java.awt.BorderLayout import javax.swing.JPanel import uk.co.caprica.vlcj.component.EmbeddedMediaPlayerComponent import uk.co.caprica.vlcj.discovery.NativeDiscovery import scala.concurrent.ExecutionContext.Implicits.global import scala.concurrent.Future import scala.swing._ import scala.util.{Failure, Success, Try} class Gui extends MainFrame { //finds vlc location new NativeDiscovery().discover() maximize() title = "WolfPlayer" preferredSize = new Dimension(600, 600) val defaultNumberOfResults = 25 private val playListDirectory = System.getProperty("user.home") + "/.WolfPlayer" private def restrictHeight(s: Component) = s.maximumSize = new Dimension(Short.MaxValue, s.preferredSize.height) private def restrictWidth(s: Component, adjust: Int = 0) = s.maximumSize = new Dimension(s.preferredSize.width + adjust, Short.MaxValue) private def restrictHeightWidth(s: Component) = s.maximumSize = new Dimension(s.preferredSize.width, s.preferredSize.height) def searchService(): Future[Unit] = Future { val n = Try(resultsNumber.text.toInt) match { case Success(i) => i case Failure(_) => defaultNumberOfResults } resultsView.listData = if (youtube.selected) new YT listVideos(searchField.text, n) else new CDA listVideos(searchField.text, n) } val searchField = new TextField(columns = 15) { listenTo(keys) reactions += { case event.KeyPressed(_, event.Key.Enter, _, _) => searchService() } } restrictHeight(searchField) private def playSong(song: VideoDetails) = { println(song.url) mediaSlider.max = song.duration title = "WolfPlayer -- " + song.title player.getMediaPlayer.playMedia(song.url) endLabel.text = song.getParsedDuration } private def playSelected(ob: ListView[VideoDetails]) = { val song = ob.selection.items.head playSong(song) } private def playPlaylist(pl: ListView[VideoDetails]) = { def aux(songs: Seq[VideoDetails]): Unit = Future { if (songs.nonEmpty) { println("playing: " + songs.head) playSong(songs.head) Thread.sleep(5000) //wait for the song to be started while (player.getMediaPlayer.getPosition < 0.99) { Thread.sleep(1000) //wait for the song to end } aux(songs.tail) } else println("playlist empty") } val start = pl.selection.anchorIndex val list = pl.listData.drop(start - 1) aux(list) } //displays the results of last search val resultsView = new ListView[VideoDetails]() { listenTo(mouse.clicks) reactions += { case m: event.MouseClicked if m.clicks == 2 => playSelected(this) } } val resultsListPane = new ScrollPane(resultsView) restrictWidth(resultsListPane) //playlist val playlistView = new ListView[VideoDetails]() { listenTo(mouse.clicks) reactions += { case m: event.MouseClicked if m.clicks == 2 => playPlaylist(this) } } val playlistViewPane = new ScrollPane(playlistView) val playListNameLabel = new Label("DefaultName") restrictWidth(playlistViewPane) val pl = new Playlist(playListDirectory) def newPlaylist(): Unit = { val input = Dialog.showInput(contents.head, "playlist name: ", initial = "default") input match { case Some(name) => pl.createPlaylist(name) playListNameLabel.text = name case None => newPlaylist() } playlistView.listData = Vector() } private def loadPlaylist() = { val playlists = pl.listPlaylists println("playlists: " + playlists) if (playlists.nonEmpty) { val selected = Dialog.showInput(contents.head, "found playlists", initial = playlists.head, entries = playlists).get println(selected) playlistView.listData = pl playlistContents selected playListNameLabel.text = selected } else { Dialog.showMessage(contents.head, "no playlists 
found", "error") } } private def savePlaylist(name: String, tracks: Seq[VideoDetails]) = pl savePlaylist(name, tracks) private def removePlaylist(name: String) = pl removePlaylist name private def removeSelected(ob: ListView[VideoDetails]) = { val selected = ob.selection.items val current = ob.listData ob.listData = current diff selected } //number of results val resultsNumber = new TextField(columns = 2) resultsNumber.text = defaultNumberOfResults.toString restrictHeightWidth(resultsNumber) //VLCJ val player = new EmbeddedMediaPlayerComponent() player.getMediaPlayer.setPlaySubItems(true) player.getMediaPlayer.setVolume(50) val jElement = new JPanel jElement.setLayout(new BorderLayout) jElement.add(player, BorderLayout.CENTER) private def maximizePlayer() = { if (searchPane.visible) { searchPane.visible = false playListPane.visible = false } else { searchPane.visible = true playListPane.visible = true } } object volumeSlider extends Slider { min = 0 max = 200 paintTicks = true listenTo(this) reactions += { case event.ValueChanged(`volumeSlider`) => player.getMediaPlayer.setVolume(volumeSlider.value) } } restrictWidth(volumeSlider) object mediaSlider extends Slider { min = 0 value = 0 listenTo(mouse.clicks) reactions += { case e: event.MouseClicked => println(e.point) player.getMediaPlayer.pause() player.getMediaPlayer.setPosition(e.point.x.toFloat / 1000) player.getMediaPlayer.pause() } } def trackPlayerProgress() = Future { println("running trackplayer") while (true) { mediaSlider.value = (player.getMediaPlayer.getPosition * 100).toInt Thread.sleep(1000) } println("tracking progress ended") } trackPlayerProgress() val startLabel = new Label("0:00") val endLabel = new Label val playerPane = new BoxPanel(Orientation.Vertical) { contents += Component.wrap(jElement) contents += new BoxPanel(Orientation.Horizontal) { contents += startLabel contents += mediaSlider contents += endLabel } contents += new BoxPanel(Orientation.Horizontal) { contents += Button("skip(-)") { player.getMediaPlayer.skip(-1000) } contents += Button("play/pause") { player.getMediaPlayer.pause() } contents += Button("stop") { player.getMediaPlayer.stop() } contents += Button("skip(+)") { player.getMediaPlayer.skip(1000) } contents += Button("maximize") { maximizePlayer() } contents += Swing.VStrut(12) contents += new Label("Volume: ") contents += volumeSlider } } //select service val youtube = new RadioButton("Youtube") val cda = new RadioButton("CDA.pl") youtube.selected = true val serviceGroup = new ButtonGroup(youtube, cda) //left side of the ui, with search and results display functionality val searchPane = new BoxPanel(Orientation.Vertical) { contents += new BoxPanel(Orientation.Horizontal) { contents += youtube contents += cda contents += Swing.VStrut(8) contents += new Label("Results:") contents += resultsNumber } contents += new BoxPanel(Orientation.Horizontal) { contents += searchField contents += Button("search") { searchService() } } contents += resultsListPane contents += new BoxPanel(Orientation.Horizontal) { contents += Button("add to playlist") { val items = resultsView.selection.items playlistView.listData = playlistView.listData ++ items } contents += Button("(Un)hide Playlist") { if (playListPane.visible) playListPane.visible = false else playListPane.visible = true } } } restrictWidth(searchPane) val playListPane = new BoxPanel(Orientation.Vertical) { contents += playListNameLabel contents += new BoxPanel(Orientation.Horizontal) { contents += Button("New...") { newPlaylist() } contents += Button("Load") 
{ loadPlaylist() } contents += Button("Save") { savePlaylist(playListNameLabel.text, playlistView.listData) } contents += Button("Remove") { removePlaylist(playListNameLabel.text) playListNameLabel.text = "default" } } contents += playlistViewPane contents += Button("Remove selected") { removeSelected(playlistView) } } //gui contents = new BoxPanel(Orientation.Horizontal) { contents += searchPane contents += playListPane contents += Swing.VStrut(2) contents += playerPane border = Swing.EmptyBorder(5, 5, 5, 5) } }
otsoaUnLoco/WolfPlayer
src/main/scala/Gui.scala
Scala
gpl-3.0
8,779
package dotty.tools.languageserver.util.actions

import dotty.tools.languageserver.util.embedded.CodeMarker
import dotty.tools.languageserver.util.{CodeRange, PositionContext}

import org.junit.Assert.{assertEquals, assertNull, assertTrue}

import scala.collection.JavaConverters._

/**
 * An action requesting the info shown when `range` is hovered.
 * This action corresponds to the `textDocument/hover` method of the Language Server Protocol.
 *
 * @param range       The range of positions that should be hovered.
 * @param expectedOpt None if no response is expected, the expected Markdown string otherwise.
 */
class CodeHover(override val range: CodeRange, expectedOpt: Option[String]) extends ActionOnRange {

  override def onMarker(marker: CodeMarker): Exec[Unit] = {
    val result = server.hover(marker.toTextDocumentPositionParams).get()
    expectedOpt match {
      case None => assertNull(result)
      case Some(expected) =>
        assertNull(result.getRange)
        val contents = result.getContents.getRight
        assertEquals("markdown", contents.getKind)
        assertEquals(expected, contents.getValue)
    }
  }

  override def show: PositionContext.PosCtx[String] = s"CodeHover(${range.show}, $expectedOpt)"
}
lampepfl/dotty
language-server/test/dotty/tools/languageserver/util/actions/CodeHover.scala
Scala
apache-2.0
1,251
package org.biosys.pubmed.models object Authors { def fromXml(doc: xml.Node):Author = { Author( lastname = (doc \\ "LastName").text, initials = (doc \\ "Initials").text, affiliation = (doc \\ "AffiliationInfo" \\ "Affiliation").map(_.text).toList ) } } case class Author(lastname:String, initials:String, affiliation:List[String])
sdor/biosys
pubmed_common/src/main/scala/org/biosys/pubmed/models/Author.scala
Scala
gpl-2.0
359
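A small, hypothetical round-trip for the Authors.fromXml parser above, using a made-up PubMed-style <Author> element written with scala-xml literal syntax; the names in the element are illustrative only.

import org.biosys.pubmed.models.Authors

object AuthorsFromXmlSketch {
  def main(args: Array[String]): Unit = {
    val node =
      <Author>
        <LastName>Curie</LastName>
        <Initials>M</Initials>
        <AffiliationInfo>
          <Affiliation>Sorbonne, Paris</Affiliation>
        </AffiliationInfo>
      </Author>
    val author = Authors.fromXml(node)
    // Expected: Author("Curie", "M", List("Sorbonne, Paris"))
    println(author)
  }
}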
/*
 * A real-time collaborative tool to develop files over the network.
 * Copyright (C) 2010 Mauro Ciancio and Leandro Gilioli
 * {maurociancio,legilioli} at gmail dot com
 *
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program. If not, see <http://www.gnu.org/licenses/>.
 */
package ar.noxit.paralleleditor.kernel.messages

import ar.noxit.paralleleditor.kernel.{DocumentSession, Session}

/**
 * Applies to messages that can be converted into remote messages
 */
sealed trait ToRemote

/**
 * Messages exchanged between the Kernel actor and the remote Client proxy actor
 */
case class TerminateKernel()

case class LoginRequest(val username: String)

case class LoginResponse(val session: Session) {
  if (session == null)
    throw new IllegalArgumentException("session cannot be null")
}

case class UsernameAlreadyExists()

case class DocumentTitleExists(val offenderTitle: String) extends ToRemote

case class DocumentDeletionTitleNotExists(val offenderTitle: String) extends ToRemote

case class DocumentNotExists(val offenderTitle: String) extends ToRemote

case class NewDocumentRequest(val session: Session, val title: String, val initialContent: String)

case class CloseDocument(val session: Session, val docTitle: String)

case class UserListRequest(val session: Session)

case class UserListResponse(val usernames: Map[String, List[String]]) extends ToRemote

/**
 * Generated when the subscription to a document succeeded.
 * May be sent both for a new document and for an existing one
 */
case class SubscriptionResponse(val docSession: DocumentSession, val initialContent: String) extends ToRemote

/**
 * Generated when a session is already subscribed to a document
 */
case class SubscriptionAlreadyExists(val offenderTitle: String) extends ToRemote

/**
 * Generated when unsubscription is requested for a document that is not subscribed to
 */
case class SubscriptionNotExists(val offenderTitle: String) extends ToRemote

case class DocumentDeletedOk(val docTitle: String) extends ToRemote

case class DocumentInUse(val docTitle: String) extends ToRemote

case class DocumentListRequest(val session: Session)

case class DocumentListResponse(val documents: List[String]) extends ToRemote

case class SubscribeToDocumentRequest(val session: Session, val title: String)

case class UnsubscribeToDocumentRequest(val session: Session, val title: String)

/**
 * Generated when a user logs in to the kernel
 */
case class NewUserLoggedIn(val username: String) extends ToRemote

/**
 * Generated when a user logs out
 */
case class UserLoggedOut(val username: String) extends ToRemote

/**
 * Generated when a user joins a document
 */
case class NewSubscriberToDocument(val username: String, val docTitle: String) extends ToRemote

/**
 * Generated when a user stops editing a document
 */
case class SubscriberLeftDocument(val username: String, val docTitle: String) extends ToRemote

/**
 * Generated when the user unsubscribes from a document
 */
case class SubscriptionCancelled(val docTitle: String) extends ToRemote

/**
 * Generated when a user sends a chat message
 */
case class ChatMessage(val session: Session, val message: String) extends ToRemote
maurociancio/parallel-editor
src/parallel-editor-kernel/src/main/scala/ar/noxit/paralleleditor/kernel/messages/ClientKernelMessages.scala
Scala
gpl-3.0
3,819
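A hypothetical sketch of how a client-side proxy might dispatch on a few of the ToRemote messages defined above; the handler class and its return strings are invented, and only message types that appear in the file are matched.

import ar.noxit.paralleleditor.kernel.messages._

class KernelMessageHandlerSketch {
  // Turns a subset of kernel notifications into display strings
  def handle(msg: ToRemote): String = msg match {
    case SubscriptionResponse(_, initialContent) => s"subscribed, ${initialContent.length} chars received"
    case SubscriptionAlreadyExists(title)        => s"already subscribed to $title"
    case DocumentListResponse(docs)              => s"${docs.size} documents available"
    case UserLoggedOut(username)                 => s"$username left"
    case other                                   => s"unhandled notification: $other"
  }
}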
package com.dominikgruber.fpinscala.chapter06 import org.scalatest._ class Exercise05Spec extends FlatSpec with Matchers { "double2" should "generate a double" in { Chapter06.double2(Simple(-10000000))._1 should be (0.3733188882470131) } }
TheDom/functional-programming-in-scala
src/test/scala/com/dominikgruber/fpinscala/chapter06/Exercise05Spec.scala
Scala
mit
250
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.openwhisk.core.connector import scala.util.Try import spray.json._ import org.apache.openwhisk.common.TransactionId import org.apache.openwhisk.core.entity._ /** Basic trait for messages that are sent on a message bus connector. */ trait Message { /** * A transaction id to attach to the message. */ val transid = TransactionId.unknown /** * Serializes message to string. Must be idempotent. */ def serialize: String /** * String representation of the message. Delegates to serialize. */ override def toString = serialize } case class ActivationMessage(override val transid: TransactionId, action: FullyQualifiedEntityName, revision: DocRevision, user: Identity, activationId: ActivationId, rootControllerIndex: ControllerInstanceId, blocking: Boolean, content: Option[JsObject], cause: Option[ActivationId] = None, traceContext: Option[Map[String, String]] = None) extends Message { override def serialize = ActivationMessage.serdes.write(this).compactPrint override def toString = { val value = (content getOrElse JsObject.empty).compactPrint s"$action?message=$value" } def causedBySequence: Boolean = cause.isDefined } object ActivationMessage extends DefaultJsonProtocol { def parse(msg: String) = Try(serdes.read(msg.parseJson)) private implicit val fqnSerdes = FullyQualifiedEntityName.serdes implicit val serdes = jsonFormat10(ActivationMessage.apply) } /** * Message that is sent from the invoker to the controller after action is completed or after slot is free again for * new actions. */ abstract class AcknowledegmentMessage(private val tid: TransactionId) extends Message { override val transid: TransactionId = tid override def serialize: String = { AcknowledegmentMessage.serdes.write(this).compactPrint } } /** * This message is sent from the invoker to the controller, after the slot of an invoker that has been used by the * current action, is free again (after log collection) */ case class CompletionMessage(override val transid: TransactionId, activationId: ActivationId, isSystemError: Boolean, invoker: InvokerInstanceId) extends AcknowledegmentMessage(transid) { override def toString = { activationId.asString } } object CompletionMessage extends DefaultJsonProtocol { def parse(msg: String): Try[CompletionMessage] = Try(serdes.read(msg.parseJson)) implicit val serdes = jsonFormat4(CompletionMessage.apply) } /** * That message will be sent from the invoker to the controller after action completion if the user wants to have * the result immediately (blocking activation). * When adding fields, the serdes of the companion object must be updated also. * The whisk activation field will have its logs stripped. 
*/ case class ResultMessage(override val transid: TransactionId, response: Either[ActivationId, WhiskActivation]) extends AcknowledegmentMessage(transid) { override def toString = { response.fold(l => l, r => r.activationId).asString } } object ResultMessage extends DefaultJsonProtocol { implicit def eitherResponse = new JsonFormat[Either[ActivationId, WhiskActivation]] { def write(either: Either[ActivationId, WhiskActivation]) = either match { case Right(a) => a.toJson case Left(b) => b.toJson } def read(value: JsValue) = value match { // per the ActivationId's serializer, it is guaranteed to be a String even if it only consists of digits case _: JsString => Left(value.convertTo[ActivationId]) case _: JsObject => Right(value.convertTo[WhiskActivation]) case _ => deserializationError("could not read ResultMessage") } } def parse(msg: String): Try[ResultMessage] = Try(serdes.read(msg.parseJson)) implicit val serdes = jsonFormat2(ResultMessage.apply) } object AcknowledegmentMessage extends DefaultJsonProtocol { def parse(msg: String): Try[AcknowledegmentMessage] = { Try(serdes.read(msg.parseJson)) } implicit val serdes = new RootJsonFormat[AcknowledegmentMessage] { override def write(obj: AcknowledegmentMessage): JsValue = { obj match { case c: CompletionMessage => c.toJson case r: ResultMessage => r.toJson } } override def read(json: JsValue): AcknowledegmentMessage = { json.asJsObject // The field invoker is only part of the CompletionMessage. If this field is part of the JSON, we try to convert // it to a CompletionMessage. Otherwise to a ResultMessage. // If both conversions fail, an error will be thrown that needs to be handled. .getFields("invoker") .headOption .map(_ => json.convertTo[CompletionMessage]) .getOrElse(json.convertTo[ResultMessage]) } } } case class PingMessage(instance: InvokerInstanceId) extends Message { override def serialize = PingMessage.serdes.write(this).compactPrint } object PingMessage extends DefaultJsonProtocol { def parse(msg: String) = Try(serdes.read(msg.parseJson)) implicit val serdes = jsonFormat(PingMessage.apply _, "name") } trait EventMessageBody extends Message { def typeName: String } object EventMessageBody extends DefaultJsonProtocol { implicit def format = new JsonFormat[EventMessageBody] { def write(eventMessageBody: EventMessageBody) = eventMessageBody match { case m: Metric => m.toJson case a: Activation => a.toJson } def read(value: JsValue) = if (value.asJsObject.fields.contains("metricName")) { value.convertTo[Metric] } else { value.convertTo[Activation] } } } case class Activation(name: String, statusCode: Int, duration: Long, waitTime: Long, initTime: Long, kind: String, conductor: Boolean, memory: Int, causedBy: Option[String]) extends EventMessageBody { val typeName = "Activation" override def serialize = toJson.compactPrint def toJson = Activation.activationFormat.write(this) } object Activation extends DefaultJsonProtocol { def parse(msg: String) = Try(activationFormat.read(msg.parseJson)) implicit val activationFormat = jsonFormat( Activation.apply _, "name", "statusCode", "duration", "waitTime", "initTime", "kind", "conductor", "memory", "causedBy") /** Constructs an "Activation" event from a WhiskActivation */ def from(a: WhiskActivation): Try[Activation] = { for { // There are no sensible defaults for these fields, so they are required. They should always be there but there is // no static analysis to proof that so we're defensive here. 
fqn <- a.annotations.getAs[String](WhiskActivation.pathAnnotation) kind <- a.annotations.getAs[String](WhiskActivation.kindAnnotation) } yield { Activation( fqn, a.response.statusCode, a.duration.getOrElse(0), a.annotations.getAs[Long](WhiskActivation.waitTimeAnnotation).getOrElse(0), a.annotations.getAs[Long](WhiskActivation.initTimeAnnotation).getOrElse(0), kind, a.annotations.getAs[Boolean](WhiskActivation.conductorAnnotation).getOrElse(false), a.annotations .getAs[ActionLimits](WhiskActivation.limitsAnnotation) .map(_.memory.megabytes) .getOrElse(0), a.annotations.getAs[String](WhiskActivation.causedByAnnotation).toOption) } } } case class Metric(metricName: String, metricValue: Long) extends EventMessageBody { val typeName = "Metric" override def serialize = toJson.compactPrint def toJson = Metric.metricFormat.write(this).asJsObject } object Metric extends DefaultJsonProtocol { def parse(msg: String) = Try(metricFormat.read(msg.parseJson)) implicit val metricFormat = jsonFormat(Metric.apply _, "metricName", "metricValue") } case class EventMessage(source: String, body: EventMessageBody, subject: Subject, namespace: String, userId: UUID, eventType: String, timestamp: Long = System.currentTimeMillis()) extends Message { override def serialize = EventMessage.format.write(this).compactPrint } object EventMessage extends DefaultJsonProtocol { implicit val format = jsonFormat(EventMessage.apply _, "source", "body", "subject", "namespace", "userId", "eventType", "timestamp") def from(a: WhiskActivation, source: String, userId: UUID): Try[EventMessage] = { Activation.from(a).map { body => EventMessage(source, body, a.subject, a.namespace.toString, userId, body.typeName) } } def parse(msg: String) = format.read(msg.parseJson) }
csantanapr/incubator-openwhisk
common/scala/src/main/scala/org/apache/openwhisk/core/connector/Message.scala
Scala
apache-2.0
10,043
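A minimal round-trip sketch for the Metric event body defined above, exercising only the serialize/parse pair shown in the file; the metric name and value are arbitrary examples, not OpenWhisk constants.

import org.apache.openwhisk.core.connector.Metric

object MetricRoundTripSketch {
  def main(args: Array[String]): Unit = {
    val metric = Metric("ConcurrentInvocations", 42L)
    // Produces a JSON string with the metricName and metricValue fields
    val wire = metric.serialize
    // parse returns a Try[Metric]; the round trip should preserve the value
    val parsed = Metric.parse(wire)
    assert(parsed.toOption.contains(metric))
  }
}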
package io.straight.ete.config import javax.sql.DataSource import java.io.{InputStream, File} /** * @author rbuckland */ abstract class SourceDataConfig(val dataSetId:String) // -------------- // JDBC type DataSources // -------------- abstract class JdbcSourceData( override val dataSetId:String, val datasource: Option[(String,DataSource)], val sqlStatements: Vector[SqlStatement] ) extends SourceDataConfig(dataSetId) case class JdbcDSPresetSourceData( override val dataSetId:String, override val datasource: Option[(String,DataSource)], override val sqlStatements: Vector[SqlStatement] ) extends JdbcSourceData(dataSetId,datasource,sqlStatements) { override def toString = getClass.getCanonicalName + s"($myName)" lazy val myName = datasource match { case None => "datasource#notset" case Some(ds) => ds._1 } } case class JndiJdbcSourceData( override val dataSetId:String, override val datasource: Option[(String,DataSource)], override val sqlStatements: Vector[SqlStatement], jndiUrl: String ) extends JdbcSourceData(dataSetId,datasource,sqlStatements) { override def toString = getClass.getCanonicalName + "(" + jndiUrl + ")" } case class SimpleJdbcSourceData( override val dataSetId:String, override val datasource: Option[(String,DataSource)], override val sqlStatements: Vector[SqlStatement], jdbcDriver: String, // eg "org.h2.Driver" jdbcUrl: String ) extends JdbcSourceData(dataSetId,datasource,sqlStatements) { override def toString = getClass.getCanonicalName + "(" + jdbcUrl + ")" } case class SimpleUserPassJdbcSourceData( override val dataSetId:String, override val datasource: Option[(String,DataSource)], override val sqlStatements: Vector[SqlStatement], jdbcDriver: String, // eg "org.h2.Driver" jdbcUrl: String, username: String, password: Option[String] ) extends JdbcSourceData(dataSetId,datasource,sqlStatements) { override def toString = getClass.getCanonicalName + "(" + jdbcUrl + ";username=" + username + ")" } case class SqlStatement(sqlString: String, sqlParameters: Vector[AnyRef] = Vector.empty) /** * An XlsSourceData config object. * * @param dataSetId the dataSetId specific for this XLS * @param file the file where we will find the XLS * @param sourceName the name of the Source * @param sheetName * @param rowRestrictor */ case class XlsSourceData( override val dataSetId:String, file: File, // the other form could be an Array[Byte] .. but need to look at POI as it // performs better when reading from File // http://poi.apache.org/spreadsheet/quick-guide.html#FileInputStream sourceName: String, sheetName: Option[String], rowRestrictor: Option[String] ) extends SourceDataConfig(dataSetId) /** * We will accept CSV data as a File, maybe later just a String * * @param dataSetId * @param csvFile * @param sourceName * @param sheetName * @param rowRestrictor */ case class CsvSourceDataConfig( override val dataSetId:String, csvFile: File, sourceName: String, sheetName: Option[String], rowRestrictor: Option[String]) extends SourceDataConfig(dataSetId)
rbuckland/ete
ete-extractor/src/main/scala/io/straight/ete/config/SourceDataConfig.scala
Scala
apache-2.0
4,151
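A brief, hypothetical construction of one of the configuration case classes above, describing a CSV-backed data set; the file path and identifiers are invented for illustration.

import java.io.File
import io.straight.ete.config.CsvSourceDataConfig

object SourceDataConfigSketch {
  def main(args: Array[String]): Unit = {
    // Describes where the CSV lives and how the resulting data set is identified
    val csv = CsvSourceDataConfig(
      dataSetId = "orders",
      csvFile = new File("/tmp/orders.csv"),
      sourceName = "orders-extract",
      sheetName = None,
      rowRestrictor = None)
    println(csv.dataSetId)
  }
}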
package domala.internal.reflect.util import domala.internal.macros.reflect.ReflectAbortException import domala.jdbc.entity.{EntityCompanion, EntityDesc} import domala.jdbc.holder.{HolderCompanion, HolderDesc} import org.seasar.doma.message.MessageResource import scala.reflect.ClassTag object ReflectionUtil { def getCompanion[T](classTag: ClassTag[T]): Any = { Class .forName(classTag.runtimeClass.getName + "$", false, classTag.runtimeClass.getClassLoader) .getField("MODULE$") .get(null) // import scala.reflect.runtime.{currentMirror => cm} // val classSymbol = cm.classSymbol(classTag.runtimeClass) // val moduleSymbol = classSymbol.companion.asModule // val moduleMirror = cm.reflectModule(moduleSymbol) // moduleMirror.instance } def getEntityDesc[T](implicit classTag: ClassTag[T]): EntityDesc[T] = { getCompanion(classTag).asInstanceOf[EntityCompanion[T]].entityDesc } def getHolderDesc[T](classTag: ClassTag[T]): HolderDesc[Any, T] = { getCompanion(classTag).asInstanceOf[HolderCompanion[Any, T]].holderDesc } def getCompanion[T](clazz: Class[T]): Any = { Class .forName(clazz.getName + "$", false, clazz.getClassLoader) .getField("MODULE$") .get(null) } def getEntityDesc[T](clazz: Class[T]): EntityDesc[T] = { getCompanion(clazz).asInstanceOf[EntityCompanion[T]].entityDesc } def getHolderDesc[T](clazz: Class[T]): HolderDesc[Any, T] = { getCompanion(clazz).asInstanceOf[HolderCompanion[Any, T]].holderDesc } def extractionClassString(str: String): String = { val r = ".*\\[(.*)\\].*".r str match { case r(x) => x case _ => str } } def extractionQuotedString(str: String): String = { val r = """.*"(.*)".*""".r str match { case r(x) => x case _ => str } } def abort(message: MessageResource, args: AnyRef*): Nothing = throw new ReflectAbortException(message, null, args: _*) }
bakenezumi/domala
core/src/main/scala/domala/internal/reflect/util/ReflectionUtil.scala
Scala
apache-2.0
1,964
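A hypothetical use of ReflectionUtil.getCompanion above: it resolves the companion object of a class at runtime via the generated MODULE$ field. The Person case class and the assertion are illustrative only and assume a top-level class.

import domala.internal.reflect.util.ReflectionUtil

case class Person(name: String)

object GetCompanionSketch {
  def main(args: Array[String]): Unit = {
    // Looks up the Person$ class and returns its singleton instance (typed as Any)
    val companion = ReflectionUtil.getCompanion(classOf[Person])
    assert(companion == Person)
  }
}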
package org.openurp.edu.eams.teach.election import org.beangle.data.model.Entity import org.beangle.commons.entity.TimeEntity import org.beangle.ems.rule.model.RuleConfig trait ElectPlan extends Entity[Long] with TimeEntity { def getName(): String def setName(name: String): Unit def getDescription(): String def setDescription(description: String): Unit def getRuleConfigs(): Set[RuleConfig] def setRuleConfigs(ruleConfigs: Set[RuleConfig]): Unit }
openurp/edu-eams-webapp
election/src/main/scala/org/openurp/edu/eams/teach/election/ElectPlan.scala
Scala
gpl-3.0
473
/* * Copyright 2014 porter <https://github.com/eikek/porter> * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package porter.app.openid.routes import spray.routing.{Directive0, PathMatchers, Directives, Route} import porter.app.client.spray.PorterDirectives import spray.http.{HttpData, MediaType, HttpEntity, ContentType} import porter.app.openid.AvatarActor.{AvatarImageResp, GetAvatarImage} trait AvatarRoute extends Directives with PorterDirectives { self: OpenIdActors => import akka.pattern.ask private val avatarPath = PathMatchers.separateOnSlashes(settings.openIdUrl.toRelative.path.dropChars(1).toString()) / "avatar" private def contentType(s: String) = ContentType(MediaType.custom(s)) private def imageSize = parameter("size".as[Int]) | provide(125) private def respondWithLastModified(time: Option[Long]): Directive0 = time.map(respondWithLastModifiedHeader).getOrElse(pass) def avatarRoute: Route = { (get & path(avatarPath / Segment)) { ident => imageSize { size => val f = (avatarRef ? GetAvatarImage(settings.defaultRealm, ident, size)).mapTo[AvatarImageResp] onSuccess(f) { resp => respondWithLastModified(resp.lastModified) { complete(HttpEntity(contentType(resp.contentType), HttpData(resp.data))) } } } } } }
eikek/porter
openid/src/main/scala/porter/app/openid/routes/AvatarRoute.scala
Scala
apache-2.0
1,850
package org.jetbrains.plugins.scala package codeInsight package intention package argument import com.intellij.codeInsight.intention.PsiElementBaseIntentionAction import com.intellij.openapi.editor.Editor import com.intellij.openapi.project.Project import com.intellij.psi.PsiElement import org.jetbrains.plugins.scala.codeInsight.ScalaCodeInsightBundle /** * Jason Zaugg */ final class AddNameToArgumentIntention extends PsiElementBaseIntentionAction { override def isAvailable(project: Project, editor: Editor, element: PsiElement): Boolean = addNameToArgumentsFix(element, onlyBoolean = false).isDefined override def invoke(project: Project, editor: Editor, element: PsiElement): Unit = { if (!element.isValid) return addNameToArgumentsFix(element, onlyBoolean = false).foreach(_.apply()) } override def getFamilyName: String = ScalaCodeInsightBundle.message("family.name.use.named.arguments") override def getText: String = ScalaCodeInsightBundle.message("use.named.arguments.for.current.and.subsequent.arguments") }
JetBrains/intellij-scala
scala/codeInsight/src/org/jetbrains/plugins/scala/codeInsight/intention/argument/AddNameToArgumentIntention.scala
Scala
apache-2.0
1,051
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.sql.execution.adaptive import org.apache.spark.SparkConf import org.apache.spark.annotation.Unstable import org.apache.spark.internal.Logging import org.apache.spark.sql.execution.SparkPlan import org.apache.spark.sql.internal.SQLConf import org.apache.spark.util.Utils /** * An interface to represent the cost of a plan. * * @note This class is subject to be changed and/or moved in the near future. */ @Unstable trait Cost extends Ordered[Cost] /** * An interface to evaluate the cost of a physical plan. * * @note This class is subject to be changed and/or moved in the near future. */ @Unstable trait CostEvaluator { def evaluateCost(plan: SparkPlan): Cost } object CostEvaluator extends Logging { /** * Instantiates a [[CostEvaluator]] using the given className. */ def instantiate(className: String, conf: SparkConf): CostEvaluator = { logDebug(s"Creating CostEvaluator $className") val evaluators = Utils.loadExtensions(classOf[CostEvaluator], Seq(className), conf) require(evaluators.nonEmpty, "A valid AQE cost evaluator must be specified by config " + s"${SQLConf.ADAPTIVE_CUSTOM_COST_EVALUATOR_CLASS.key}, but $className resulted in zero " + "valid evaluator.") evaluators.head } }
ueshin/apache-spark
sql/core/src/main/scala/org/apache/spark/sql/execution/adaptive/costing.scala
Scala
apache-2.0
2,075
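A hedged sketch of a custom AQE cost evaluator built against the Cost and CostEvaluator traits defined above. The shuffle-counting heuristic and class names are invented for illustration and are not Spark's built-in evaluator; such a class could be registered through the ADAPTIVE_CUSTOM_COST_EVALUATOR_CLASS config mentioned in the file.

import org.apache.spark.sql.execution.SparkPlan
import org.apache.spark.sql.execution.adaptive.{Cost, CostEvaluator}
import org.apache.spark.sql.execution.exchange.ShuffleExchangeLike

// A scalar cost; Cost extends Ordered[Cost], so compare must be provided
case class SimpleCost(value: Long) extends Cost {
  override def compare(that: Cost): Int = that match {
    case SimpleCost(other) => java.lang.Long.compare(value, other)
    case _ => throw new IllegalArgumentException("Cannot compare different cost types")
  }
}

// Counts shuffle exchanges in the physical plan; fewer shuffles means a cheaper plan
class ShuffleCountCostEvaluator extends CostEvaluator {
  override def evaluateCost(plan: SparkPlan): Cost =
    SimpleCost(plan.collect { case s: ShuffleExchangeLike => s }.size.toLong)
}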
package scala.meta.internal.semanticdb.scalac import scala.{meta => m} trait ParseOps { self: SemanticdbOps => implicit class XtensionCompilationUnitSource(unit: g.CompilationUnit) { def toSource: m.Source = { val dialect = m.Dialect.standards.getOrElse(language, sys.error(s"unsupported dialect $language")) dialect(unit.source.toInput).parse[m.Source].get } } }
olafurpg/scalameta
semanticdb/scalac/library/src/main/scala/scala/meta/internal/semanticdb/scalac/ParseOps.scala
Scala
bsd-3-clause
399
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.streaming.scheduler import java.util.concurrent.{CountDownLatch, TimeUnit} import scala.collection.mutable.HashMap import scala.concurrent.ExecutionContext import scala.util.{Failure, Success} import org.apache.spark._ import org.apache.spark.internal.Logging import org.apache.spark.rdd.RDD import org.apache.spark.rpc._ import org.apache.spark.scheduler.{ExecutorCacheTaskLocation, TaskLocation} import org.apache.spark.streaming.{StreamingContext, Time} import org.apache.spark.streaming.receiver._ import org.apache.spark.streaming.util.WriteAheadLogUtils import org.apache.spark.util.{SerializableConfiguration, ThreadUtils, Utils} /** Enumeration to identify current state of a Receiver */ private[streaming] object ReceiverState extends Enumeration { type ReceiverState = Value val INACTIVE, SCHEDULED, ACTIVE = Value } /** * Messages used by the NetworkReceiver and the ReceiverTracker to communicate * with each other. */ private[streaming] sealed trait ReceiverTrackerMessage private[streaming] case class RegisterReceiver( streamId: Int, typ: String, host: String, executorId: String, receiverEndpoint: RpcEndpointRef ) extends ReceiverTrackerMessage private[streaming] case class AddBlock(receivedBlockInfo: ReceivedBlockInfo) extends ReceiverTrackerMessage private[streaming] case class ReportError(streamId: Int, message: String, error: String) private[streaming] case class DeregisterReceiver(streamId: Int, msg: String, error: String) extends ReceiverTrackerMessage /** * Messages used by the driver and ReceiverTrackerEndpoint to communicate locally. */ private[streaming] sealed trait ReceiverTrackerLocalMessage /** * This message will trigger ReceiverTrackerEndpoint to restart a Spark job for the receiver. */ private[streaming] case class RestartReceiver(receiver: Receiver[_]) extends ReceiverTrackerLocalMessage /** * This message is sent to ReceiverTrackerEndpoint when we start to launch Spark jobs for receivers * at the first time. */ private[streaming] case class StartAllReceivers(receiver: Seq[Receiver[_]]) extends ReceiverTrackerLocalMessage /** * This message will trigger ReceiverTrackerEndpoint to send stop signals to all registered * receivers. */ private[streaming] case object StopAllReceivers extends ReceiverTrackerLocalMessage /** * A message used by ReceiverTracker to ask all receiver's ids still stored in * ReceiverTrackerEndpoint. 
*/ private[streaming] case object AllReceiverIds extends ReceiverTrackerLocalMessage private[streaming] case class UpdateReceiverRateLimit(streamUID: Int, newRate: Long) extends ReceiverTrackerLocalMessage private[streaming] case object GetAllReceiverInfo extends ReceiverTrackerLocalMessage /** * This class manages the execution of the receivers of ReceiverInputDStreams. Instance of * this class must be created after all input streams have been added and StreamingContext.start() * has been called because it needs the final set of input streams at the time of instantiation. * * @param skipReceiverLaunch Do not launch the receiver. This is useful for testing. */ private[streaming] class ReceiverTracker(ssc: StreamingContext, skipReceiverLaunch: Boolean = false) extends Logging { private val receiverInputStreams = ssc.graph.getReceiverInputStreams() private val receiverInputStreamIds = receiverInputStreams.map { _.id } private val receivedBlockTracker = new ReceivedBlockTracker( ssc.sparkContext.conf, ssc.sparkContext.hadoopConfiguration, receiverInputStreamIds, ssc.scheduler.clock, ssc.isCheckpointPresent, Option(ssc.checkpointDir) ) private val listenerBus = ssc.scheduler.listenerBus /** Enumeration to identify current state of the ReceiverTracker */ object TrackerState extends Enumeration { type TrackerState = Value val Initialized, Started, Stopping, Stopped = Value } import TrackerState._ /** State of the tracker. Protected by "trackerStateLock" */ @volatile private var trackerState = Initialized // endpoint is created when generator starts. // This not being null means the tracker has been started and not stopped private var endpoint: RpcEndpointRef = null private val schedulingPolicy = new ReceiverSchedulingPolicy() // Track the active receiver job number. When a receiver job exits ultimately, countDown will // be called. private val receiverJobExitLatch = new CountDownLatch(receiverInputStreams.length) /** * Track all receivers' information. The key is the receiver id, the value is the receiver info. * It's only accessed in ReceiverTrackerEndpoint. */ private val receiverTrackingInfos = new HashMap[Int, ReceiverTrackingInfo] /** * Store all preferred locations for all receivers. We need this information to schedule * receivers. It's only accessed in ReceiverTrackerEndpoint. */ private val receiverPreferredLocations = new HashMap[Int, Option[String]] /** Start the endpoint and receiver execution thread. */ def start(): Unit = synchronized { if (isTrackerStarted) { throw new SparkException("ReceiverTracker already started") } if (!receiverInputStreams.isEmpty) { endpoint = ssc.env.rpcEnv.setupEndpoint( "ReceiverTracker", new ReceiverTrackerEndpoint(ssc.env.rpcEnv)) if (!skipReceiverLaunch) launchReceivers() logInfo("ReceiverTracker started") trackerState = Started } } /** Stop the receiver execution thread. */ def stop(graceful: Boolean): Unit = synchronized { val isStarted: Boolean = isTrackerStarted trackerState = Stopping if (isStarted) { if (!skipReceiverLaunch) { // First, stop the receivers. Send the stop signal to all the receivers endpoint.askSync[Boolean](StopAllReceivers) // Wait for the Spark job that runs the receivers to be over // That is, for the receivers to quit gracefully. 
receiverJobExitLatch.await(10, TimeUnit.SECONDS) if (graceful) { logInfo("Waiting for receiver job to terminate gracefully") receiverJobExitLatch.await() logInfo("Waited for receiver job to terminate gracefully") } // Check if all the receivers have been deregistered or not val receivers = endpoint.askSync[Seq[Int]](AllReceiverIds) if (receivers.nonEmpty) { logWarning("Not all of the receivers have deregistered, " + receivers) } else { logInfo("All of the receivers have deregistered successfully") } } // Finally, stop the endpoint ssc.env.rpcEnv.stop(endpoint) endpoint = null } // `ReceivedBlockTracker` is open when this instance is created. We should // close this even if this `ReceiverTracker` is not started. receivedBlockTracker.stop() logInfo("ReceiverTracker stopped") trackerState = Stopped } /** Allocate all unallocated blocks to the given batch. */ def allocateBlocksToBatch(batchTime: Time): Unit = { if (receiverInputStreams.nonEmpty) { receivedBlockTracker.allocateBlocksToBatch(batchTime) } } /** Get the blocks for the given batch and all input streams. */ def getBlocksOfBatch(batchTime: Time): Map[Int, Seq[ReceivedBlockInfo]] = { receivedBlockTracker.getBlocksOfBatch(batchTime) } /** Get the blocks allocated to the given batch and stream. */ def getBlocksOfBatchAndStream(batchTime: Time, streamId: Int): Seq[ReceivedBlockInfo] = { receivedBlockTracker.getBlocksOfBatchAndStream(batchTime, streamId) } /** * Clean up the data and metadata of blocks and batches that are strictly * older than the threshold time. Note that this does not */ def cleanupOldBlocksAndBatches(cleanupThreshTime: Time): Unit = { // Clean up old block and batch metadata receivedBlockTracker.cleanupOldBatches(cleanupThreshTime, waitForCompletion = false) // Signal the receivers to delete old block data if (WriteAheadLogUtils.enableReceiverLog(ssc.conf)) { logInfo(s"Cleanup old received batch data: $cleanupThreshTime") synchronized { if (isTrackerStarted) { endpoint.send(CleanupOldBlocks(cleanupThreshTime)) } } } } /** * Get the executors allocated to each receiver. * @return a map containing receiver ids to optional executor ids. */ def allocatedExecutors(): Map[Int, Option[String]] = synchronized { if (isTrackerStarted) { endpoint.askSync[Map[Int, ReceiverTrackingInfo]](GetAllReceiverInfo).mapValues { _.runningExecutor.map { _.executorId } } } else { Map.empty } } def numReceivers(): Int = receiverInputStreams.length /** Register a receiver */ private def registerReceiver( streamId: Int, typ: String, host: String, executorId: String, receiverEndpoint: RpcEndpointRef, senderAddress: RpcAddress ): Boolean = { if (!receiverInputStreamIds.contains(streamId)) { throw new SparkException("Register received for unexpected id " + streamId) } if (isTrackerStopping || isTrackerStopped) { return false } val scheduledLocations = receiverTrackingInfos(streamId).scheduledLocations val acceptableExecutors = if (scheduledLocations.nonEmpty) { // This receiver is registering and it's scheduled by // ReceiverSchedulingPolicy.scheduleReceivers. So use "scheduledLocations" to check it. scheduledLocations.get } else { // This receiver is scheduled by "ReceiverSchedulingPolicy.rescheduleReceiver", so calling // "ReceiverSchedulingPolicy.rescheduleReceiver" again to check it. 
scheduleReceiver(streamId) } def isAcceptable: Boolean = acceptableExecutors.exists { case loc: ExecutorCacheTaskLocation => loc.executorId == executorId case loc: TaskLocation => loc.host == host } if (!isAcceptable) { // Refuse it since it's scheduled to a wrong executor false } else { val name = s"${typ}-${streamId}" val receiverTrackingInfo = ReceiverTrackingInfo( streamId, ReceiverState.ACTIVE, scheduledLocations = None, runningExecutor = Some(ExecutorCacheTaskLocation(host, executorId)), name = Some(name), endpoint = Some(receiverEndpoint)) receiverTrackingInfos.put(streamId, receiverTrackingInfo) listenerBus.post(StreamingListenerReceiverStarted(receiverTrackingInfo.toReceiverInfo)) logInfo("Registered receiver for stream " + streamId + " from " + senderAddress) true } } /** Deregister a receiver */ private def deregisterReceiver(streamId: Int, message: String, error: String): Unit = { val lastErrorTime = if (error == null || error == "") -1 else ssc.scheduler.clock.getTimeMillis() val errorInfo = ReceiverErrorInfo( lastErrorMessage = message, lastError = error, lastErrorTime = lastErrorTime) val newReceiverTrackingInfo = receiverTrackingInfos.get(streamId) match { case Some(oldInfo) => oldInfo.copy(state = ReceiverState.INACTIVE, errorInfo = Some(errorInfo)) case None => logWarning("No prior receiver info") ReceiverTrackingInfo( streamId, ReceiverState.INACTIVE, None, None, None, None, Some(errorInfo)) } receiverTrackingInfos(streamId) = newReceiverTrackingInfo listenerBus.post(StreamingListenerReceiverStopped(newReceiverTrackingInfo.toReceiverInfo)) val messageWithError = if (error != null && !error.isEmpty) { s"$message - $error" } else { s"$message" } logError(s"Deregistered receiver for stream $streamId: $messageWithError") } /** Update a receiver's maximum ingestion rate */ def sendRateUpdate(streamUID: Int, newRate: Long): Unit = synchronized { if (isTrackerStarted) { endpoint.send(UpdateReceiverRateLimit(streamUID, newRate)) } } /** Add new blocks for the given stream */ private def addBlock(receivedBlockInfo: ReceivedBlockInfo): Boolean = { receivedBlockTracker.addBlock(receivedBlockInfo) } /** Report error sent by a receiver */ private def reportError(streamId: Int, message: String, error: String): Unit = { val newReceiverTrackingInfo = receiverTrackingInfos.get(streamId) match { case Some(oldInfo) => val errorInfo = ReceiverErrorInfo(lastErrorMessage = message, lastError = error, lastErrorTime = oldInfo.errorInfo.map(_.lastErrorTime).getOrElse(-1L)) oldInfo.copy(errorInfo = Some(errorInfo)) case None => logWarning("No prior receiver info") val errorInfo = ReceiverErrorInfo(lastErrorMessage = message, lastError = error, lastErrorTime = ssc.scheduler.clock.getTimeMillis()) ReceiverTrackingInfo( streamId, ReceiverState.INACTIVE, None, None, None, None, Some(errorInfo)) } receiverTrackingInfos(streamId) = newReceiverTrackingInfo listenerBus.post(StreamingListenerReceiverError(newReceiverTrackingInfo.toReceiverInfo)) val messageWithError = if (error != null && !error.isEmpty) { s"$message - $error" } else { s"$message" } logWarning(s"Error reported by receiver for stream $streamId: $messageWithError") } private def scheduleReceiver(receiverId: Int): Seq[TaskLocation] = { val preferredLocation = receiverPreferredLocations.getOrElse(receiverId, None) val scheduledLocations = schedulingPolicy.rescheduleReceiver( receiverId, preferredLocation, receiverTrackingInfos, getExecutors) updateReceiverScheduledExecutors(receiverId, scheduledLocations) scheduledLocations } private def 
updateReceiverScheduledExecutors( receiverId: Int, scheduledLocations: Seq[TaskLocation]): Unit = { val newReceiverTrackingInfo = receiverTrackingInfos.get(receiverId) match { case Some(oldInfo) => oldInfo.copy(state = ReceiverState.SCHEDULED, scheduledLocations = Some(scheduledLocations)) case None => ReceiverTrackingInfo( receiverId, ReceiverState.SCHEDULED, Some(scheduledLocations), runningExecutor = None) } receiverTrackingInfos.put(receiverId, newReceiverTrackingInfo) } /** Check if any blocks are left to be processed */ def hasUnallocatedBlocks: Boolean = { receivedBlockTracker.hasUnallocatedReceivedBlocks } /** * Get the list of executors excluding driver */ private def getExecutors: Seq[ExecutorCacheTaskLocation] = { if (ssc.sc.isLocal) { val blockManagerId = ssc.sparkContext.env.blockManager.blockManagerId Seq(ExecutorCacheTaskLocation(blockManagerId.host, blockManagerId.executorId)) } else { ssc.sparkContext.env.blockManager.master.getMemoryStatus.filter { case (blockManagerId, _) => blockManagerId.executorId != SparkContext.DRIVER_IDENTIFIER // Ignore the driver location }.map { case (blockManagerId, _) => ExecutorCacheTaskLocation(blockManagerId.host, blockManagerId.executorId) }.toSeq } } /** * Run the dummy Spark job to ensure that all slaves have registered. This avoids all the * receivers to be scheduled on the same node. * * TODO Should poll the executor number and wait for executors according to * "spark.scheduler.minRegisteredResourcesRatio" and * "spark.scheduler.maxRegisteredResourcesWaitingTime" rather than running a dummy job. */ private def runDummySparkJob(): Unit = { if (!ssc.sparkContext.isLocal) { ssc.sparkContext.makeRDD(1 to 50, 50).map(x => (x, 1)).reduceByKey(_ + _, 20).collect() } assert(getExecutors.nonEmpty) } /** * Get the receivers from the ReceiverInputDStreams, distributes them to the * worker nodes as a parallel collection, and runs them. */ private def launchReceivers(): Unit = { val receivers = receiverInputStreams.map { nis => val rcvr = nis.getReceiver() rcvr.setReceiverId(nis.id) rcvr } runDummySparkJob() logInfo("Starting " + receivers.length + " receivers") endpoint.send(StartAllReceivers(receivers)) } /** Check if tracker has been marked for starting */ private def isTrackerStarted: Boolean = trackerState == Started /** Check if tracker has been marked for stopping */ private def isTrackerStopping: Boolean = trackerState == Stopping /** Check if tracker has been marked for stopped */ private def isTrackerStopped: Boolean = trackerState == Stopped /** RpcEndpoint to receive messages from the receivers. 
*/ private class ReceiverTrackerEndpoint(override val rpcEnv: RpcEnv) extends ThreadSafeRpcEndpoint { private val walBatchingThreadPool = ExecutionContext.fromExecutorService( ThreadUtils.newDaemonCachedThreadPool("wal-batching-thread-pool")) @volatile private var active: Boolean = true override def receive: PartialFunction[Any, Unit] = { // Local messages case StartAllReceivers(receivers) => val scheduledLocations = schedulingPolicy.scheduleReceivers(receivers, getExecutors) for (receiver <- receivers) { val executors = scheduledLocations(receiver.streamId) updateReceiverScheduledExecutors(receiver.streamId, executors) receiverPreferredLocations(receiver.streamId) = receiver.preferredLocation startReceiver(receiver, executors) } case RestartReceiver(receiver) => // Old scheduled executors minus the ones that are not active any more val oldScheduledExecutors = getStoredScheduledExecutors(receiver.streamId) val scheduledLocations = if (oldScheduledExecutors.nonEmpty) { // Try global scheduling again oldScheduledExecutors } else { val oldReceiverInfo = receiverTrackingInfos(receiver.streamId) // Clear "scheduledLocations" to indicate we are going to do local scheduling val newReceiverInfo = oldReceiverInfo.copy( state = ReceiverState.INACTIVE, scheduledLocations = None) receiverTrackingInfos(receiver.streamId) = newReceiverInfo schedulingPolicy.rescheduleReceiver( receiver.streamId, receiver.preferredLocation, receiverTrackingInfos, getExecutors) } // Assume there is one receiver restarting at one time, so we don't need to update // receiverTrackingInfos startReceiver(receiver, scheduledLocations) case c: CleanupOldBlocks => receiverTrackingInfos.values.flatMap(_.endpoint).foreach(_.send(c)) case UpdateReceiverRateLimit(streamUID, newRate) => for (info <- receiverTrackingInfos.get(streamUID); eP <- info.endpoint) { eP.send(UpdateRateLimit(newRate)) } // Remote messages case ReportError(streamId, message, error) => reportError(streamId, message, error) } override def receiveAndReply(context: RpcCallContext): PartialFunction[Any, Unit] = { // Remote messages case RegisterReceiver(streamId, typ, host, executorId, receiverEndpoint) => val successful = registerReceiver(streamId, typ, host, executorId, receiverEndpoint, context.senderAddress) context.reply(successful) case AddBlock(receivedBlockInfo) => if (WriteAheadLogUtils.isBatchingEnabled(ssc.conf, isDriver = true)) { walBatchingThreadPool.execute(() => Utils.tryLogNonFatalError { if (active) { context.reply(addBlock(receivedBlockInfo)) } else { context.sendFailure( new IllegalStateException("ReceiverTracker RpcEndpoint already shut down.")) } }) } else { context.reply(addBlock(receivedBlockInfo)) } case DeregisterReceiver(streamId, message, error) => deregisterReceiver(streamId, message, error) context.reply(true) // Local messages case AllReceiverIds => context.reply(receiverTrackingInfos.filter(_._2.state != ReceiverState.INACTIVE).keys.toSeq) case GetAllReceiverInfo => context.reply(receiverTrackingInfos.toMap) case StopAllReceivers => assert(isTrackerStopping || isTrackerStopped) stopReceivers() context.reply(true) } /** * Return the stored scheduled executors that are still alive. 
*/ private def getStoredScheduledExecutors(receiverId: Int): Seq[TaskLocation] = { if (receiverTrackingInfos.contains(receiverId)) { val scheduledLocations = receiverTrackingInfos(receiverId).scheduledLocations if (scheduledLocations.nonEmpty) { val executors = getExecutors.toSet // Only return the alive executors scheduledLocations.get.filter { case loc: ExecutorCacheTaskLocation => executors(loc) case loc: TaskLocation => true } } else { Nil } } else { Nil } } /** * Start a receiver along with its scheduled executors */ private def startReceiver( receiver: Receiver[_], scheduledLocations: Seq[TaskLocation]): Unit = { def shouldStartReceiver: Boolean = { // It's okay to start when trackerState is Initialized or Started !(isTrackerStopping || isTrackerStopped) } val receiverId = receiver.streamId if (!shouldStartReceiver) { onReceiverJobFinish(receiverId) return } val checkpointDirOption = Option(ssc.checkpointDir) val serializableHadoopConf = new SerializableConfiguration(ssc.sparkContext.hadoopConfiguration) // Function to start the receiver on the worker node val startReceiverFunc: Iterator[Receiver[_]] => Unit = (iterator: Iterator[Receiver[_]]) => { if (!iterator.hasNext) { throw new SparkException( "Could not start receiver as object not found.") } if (TaskContext.get().attemptNumber() == 0) { val receiver = iterator.next() assert(iterator.hasNext == false) val supervisor = new ReceiverSupervisorImpl( receiver, SparkEnv.get, serializableHadoopConf.value, checkpointDirOption) supervisor.start() supervisor.awaitTermination() } else { // It's restarted by TaskScheduler, but we want to reschedule it again. So exit it. } } // Create the RDD using the scheduledLocations to run the receiver in a Spark job val receiverRDD: RDD[Receiver[_]] = if (scheduledLocations.isEmpty) { ssc.sc.makeRDD(Seq(receiver), 1) } else { val preferredLocations = scheduledLocations.map(_.toString).distinct ssc.sc.makeRDD(Seq(receiver -> preferredLocations)) } receiverRDD.setName(s"Receiver $receiverId") ssc.sparkContext.setJobDescription(s"Streaming job running receiver $receiverId") ssc.sparkContext.setCallSite(Option(ssc.getStartSite()).getOrElse(Utils.getCallSite())) val future = ssc.sparkContext.submitJob[Receiver[_], Unit, Unit]( receiverRDD, startReceiverFunc, Seq(0), (_, _) => Unit, ()) // We will keep restarting the receiver job until ReceiverTracker is stopped future.onComplete { case Success(_) => if (!shouldStartReceiver) { onReceiverJobFinish(receiverId) } else { logInfo(s"Restarting Receiver $receiverId") self.send(RestartReceiver(receiver)) } case Failure(e) => if (!shouldStartReceiver) { onReceiverJobFinish(receiverId) } else { logError("Receiver has been stopped. Try to restart it.", e) logInfo(s"Restarting Receiver $receiverId") self.send(RestartReceiver(receiver)) } }(ThreadUtils.sameThread) logInfo(s"Receiver ${receiver.streamId} started") } override def onStop(): Unit = { active = false walBatchingThreadPool.shutdown() } /** * Call when a receiver is terminated. It means we won't restart its Spark job. */ private def onReceiverJobFinish(receiverId: Int): Unit = { receiverJobExitLatch.countDown() receiverTrackingInfos.remove(receiverId).foreach { receiverTrackingInfo => if (receiverTrackingInfo.state == ReceiverState.ACTIVE) { logWarning(s"Receiver $receiverId exited but didn't deregister") } } } /** Send stop signal to the receivers. 
*/ private def stopReceivers(): Unit = { receiverTrackingInfos.values.flatMap(_.endpoint).foreach { _.send(StopReceiver) } logInfo("Sent stop signal to all " + receiverTrackingInfos.size + " receivers") } } }
rezasafi/spark
streaming/src/main/scala/org/apache/spark/streaming/scheduler/ReceiverTracker.scala
Scala
apache-2.0
25,772
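// Hedged sketch (not Spark code): the alive-executor filtering performed by
// getStoredScheduledExecutors above, restated with plain case classes so it
// runs standalone. ExecLoc and HostLoc are hypothetical stand-ins for
// ExecutorCacheTaskLocation and a host-only TaskLocation.
object ScheduledExecutorFilterSketch {
  sealed trait Loc
  final case class ExecLoc(host: String, executorId: String) extends Loc
  final case class HostLoc(host: String) extends Loc

  // Keep executor-pinned locations only while that executor is still alive;
  // host-level preferences are always kept, mirroring the pattern match above.
  def aliveOnly(scheduled: Seq[Loc], aliveExecutors: Set[ExecLoc]): Seq[Loc] =
    scheduled.filter {
      case e: ExecLoc => aliveExecutors(e)
      case _: HostLoc => true
    }

  def main(args: Array[String]): Unit = {
    val alive = Set(ExecLoc("host1", "1"))
    val scheduled = Seq(ExecLoc("host1", "1"), ExecLoc("host2", "7"), HostLoc("host3"))
    // Only the live executor and the plain host preference survive.
    assert(aliveOnly(scheduled, alive) == Seq(ExecLoc("host1", "1"), HostLoc("host3")))
    println(aliveOnly(scheduled, alive))
  }
}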
package ch.epfl.yinyang package transformers import ch.epfl.yinyang._ import ch.epfl.yinyang.transformers._ import scala.reflect.macros.blackbox.Context import language.experimental.macros import scala.collection.mutable import scala.collection.mutable.ArrayBuffer /** * Converts captured variables to holes, which will be passed to the generated * code at runtime as arguments to the apply method. Exposes all holes in the * holeTable, which maps from holeIds to symbolIds. * * Features covered are: * - identifiers -> `hole[T](classTag[T], holeId)` * - fields (TODO) * - no parameter methods (TODO) * - no parameter functions (TODO) */ trait HoleTransformation extends MacroModule with TransformationUtils { def holeMethod: String import c.universe._ /** SymbolIds indexed by holeIds. */ val holeTable = new ArrayBuffer[Int] object HoleTransformer { def apply(toHoles: List[Symbol] = Nil, className: String)(tree: Tree) = { val t = new HoleTransformer(toHoles map symbolId).transform(tree) log("holeTransformed (transforming " + toHoles + "): " + code(t), 2) log("holeTable (holeId -> symbolId): " + holeTable, 2) t } } /** * Transforms all identifiers with symbolIds in `toHoles` to * `hole[T](classTag[T], holeId)` and builds the holeTable mapping from * holeIds to symbolIds. */ class HoleTransformer(toHoles: List[Int]) extends Transformer { override def transform(tree: Tree): Tree = tree match { case i @ Ident(s) if toHoles contains symbolId(i.symbol) => { val index = { val sId = symbolId(i.symbol) if (holeTable.contains(sId)) holeTable.indexOf(sId) else { holeTable += symbolId(i.symbol) holeTable.size - 1 } } Apply( Select(This(typeNames.EMPTY), TermName(holeMethod)), List( TypeApply( Select(This(typeNames.EMPTY), TermName("runtimeType")), List(TypeTree(i.tpe.widen))), Literal(Constant(index)))) } case _ => super.transform(tree) } } }
vjovanov/scala-yinyang
components/core/src/transformers/HoleTransformation.scala
Scala
bsd-3-clause
2,133
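// Hedged sketch (not part of yinyang): the holeId allocation used by
// HoleTransformer above, isolated from the macro universe. Symbol ids are
// plain Ints here; in the real transformer they come from symbolId(i.symbol).
object HoleTableSketch {
  import scala.collection.mutable.ArrayBuffer

  // holeId (index in the buffer) -> symbolId (value stored there)
  val holeTable = new ArrayBuffer[Int]

  // Return the holeId for a symbol, appending it on first sight and
  // reusing the existing index on every later occurrence.
  def holeIdFor(symbolId: Int): Int =
    if (holeTable.contains(symbolId)) holeTable.indexOf(symbolId)
    else { holeTable += symbolId; holeTable.size - 1 }

  def main(args: Array[String]): Unit = {
    assert(holeIdFor(42) == 0) // first captured symbol gets hole 0
    assert(holeIdFor(7) == 1)  // second distinct symbol gets hole 1
    assert(holeIdFor(42) == 0) // repeated symbol reuses its hole
    println("holeTable: " + holeTable)
  }
}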
/* * Licensed to Intel Corporation under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * Intel Corporation licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intel.analytics.bigdl.dataset import java.io.File import java.nio.file.Paths import java.util.concurrent.{Callable, Executors} import com.intel.analytics.bigdl.dataset.image._ import com.intel.analytics.bigdl.tensor.Tensor import com.intel.analytics.bigdl.utils.{Engine, RandomGenerator} import org.apache.spark.SparkContext import org.scalatest.{BeforeAndAfter, FlatSpec, Matchers} @com.intel.analytics.bigdl.tags.Serial class DataSetSpec extends FlatSpec with Matchers with BeforeAndAfter { var sc: SparkContext = null before { val nodeNumber = 1 val coreNumber = 1 Engine.init(nodeNumber, coreNumber, true) sc = new SparkContext("local[1]", "DataSetSpec") } after { if (sc != null) { sc.stop() } } private def processPath(path: String): String = { if (path.contains(":")) { path.substring(1) } else { path } } "mnist data source" should "load image correct" in { val resource = getClass().getClassLoader().getResource("mnist") val dataSet = DataSet.array(com.intel.analytics.bigdl.models.lenet.Utils.load( Paths.get(processPath(resource.getPath()) + File.separator, "t10k-images.idx3-ubyte"), Paths.get(processPath(resource.getPath()) + File.separator, "t10k-labels.idx1-ubyte") )) dataSet.size() should be(10000) var iter = dataSet.data(train = false) iter.map(_.label).min should be(1.0f) iter = dataSet.data(train = false) iter.map(_.label).max should be(10.0f) } "mnist rdd data source" should "load image correct" in { val resource = getClass().getClassLoader().getResource("mnist") val dataSet = DataSet.array(com.intel.analytics.bigdl.models.lenet.Utils.load( Paths.get(processPath(resource.getPath()) + File.separator, "t10k-images.idx3-ubyte"), Paths.get(processPath(resource.getPath()) + File.separator, "t10k-labels.idx1-ubyte") ), sc) dataSet.size() should be(10000) var rdd = dataSet.data(train = false) rdd.map(_.label).min should be(1.0f) rdd = dataSet.data(train = false) rdd.map(_.label).max should be(10.0f) } "cifar data source" should "load image correct" in { val resource = getClass().getClassLoader().getResource("cifar") val dataSet = DataSet.ImageFolder.images(Paths.get(processPath(resource.getPath())), BGRImage.NO_SCALE) dataSet.size() should be(7) val labelMap = LocalImageFiles.readLabels(Paths.get(processPath(resource.getPath()))) labelMap("airplane") should be(1) labelMap("deer") should be(2) val iter = dataSet.toLocal().data(train = false) val img1 = iter.next() img1.label() should be(1f) img1.content(2) should be(234 / 255f) img1.content(1) should be(125 / 255f) img1.content(0) should be(59 / 255f) img1.content((22 + 4 * 32) * 3 + 2) should be(253 / 255f) img1.content((22 + 4 * 32) * 3 + 1) should be(148 / 255f) img1.content((22 + 4 * 32) * 3) should be(31 / 255f) val img2 = iter.next() img2.label() should be(1f) val img3 
= iter.next() img3.label() should be(2f) val img4 = iter.next() img4.label() should be(2f) img4.content((9 + 8 * 32) * 3 + 2) should be(40 / 255f) img4.content((9 + 8 * 32) * 3 + 1) should be(51 / 255f) img4.content((9 + 8 * 32) * 3) should be(37 / 255f) val img5 = iter.next() img5.label() should be(2f) val img6 = iter.next() img6.label() should be(2f) val img7 = iter.next() img7.label() should be(1f) } "cifar rdd data source" should "load image correct" in { val resource = getClass().getClassLoader().getResource("cifar") val dataSet = DataSet.ImageFolder.images(Paths.get(processPath(resource.getPath())), sc, BGRImage.NO_SCALE) dataSet.size() should be(7) val labelMap = LocalImageFiles.readLabels(Paths.get(processPath(resource.getPath()))) labelMap("airplane") should be(1) labelMap("deer") should be(2) val rdd = dataSet.toDistributed().data(train = false) rdd.filter(_.label() == 1f).count() should be(3) rdd.filter(_.label() == 2f).count() should be(4) val images = rdd.map(_.clone()) .filter(_.label() == 1f) .collect() .sortWith(_.content(0) < _.content(0)) val img1 = images(1) img1.label() should be(1f) img1.content(2) should be(234 / 255f) img1.content(1) should be(125 / 255f) img1.content(0) should be(59 / 255f) img1.content((22 + 4 * 32) * 3 + 2) should be(253 / 255f) img1.content((22 + 4 * 32) * 3 + 1) should be(148 / 255f) img1.content((22 + 4 * 32) * 3) should be(31 / 255f) val images2 = rdd.map(_.clone()) .filter(_.label() == 2) .collect() .sortWith(_.content(0) < _.content(0)) val img4 = images2(0) img4.label() should be(2f) img4.content((9 + 8 * 32) * 3 + 2) should be(40 / 255f) img4.content((9 + 8 * 32) * 3 + 1) should be(51 / 255f) img4.content((9 + 8 * 32) * 3) should be(37 / 255f) } "imagenet data source" should "load image correct" in { val resource = getClass().getClassLoader().getResource("imagenet") val dataSet = DataSet.ImageFolder.paths(Paths.get(processPath(resource.getPath()))) dataSet.size() should be(11) val labelMap = LocalImageFiles.readLabels(Paths.get(processPath(resource.getPath()))) labelMap("n02110063") should be(1) labelMap("n04370456") should be(2) labelMap("n15075141") should be(3) labelMap("n99999999") should be(4) val pathToImage = LocalImgReader(BGRImage.NO_SCALE) val imageDataSet = dataSet -> pathToImage val images = imageDataSet.toLocal().data(train = false) .map(_.clone()) .toArray .sortWith(_.content(0) < _.content(0)) val labels = images.map(_.label()) labels.mkString(",") should be("2.0,3.0,1.0,4.0,1.0,1.0,4.0,3.0,4.0,3.0,2.0") images(6).content((100 + 100 * 213) * 3 + 2) should be(35 / 255f) images(6).content((100 + 100 * 213) * 3 + 1) should be(30 / 255f) images(6).content((100 + 100 * 213) * 3) should be(36 / 255f) val path1 = java.io.File.createTempFile("UnitTest", "datasource1.jpg").getAbsolutePath images(6).save(path1) println(s"save test image to $path1") images(8).content((100 + 100 * 556) * 3 + 2) should be(24 / 255f) images(8).content((100 + 100 * 556) * 3 + 1) should be(24 / 255f) images(8).content((100 + 100 * 556) * 3) should be(24 / 255f) val path2 = java.io.File.createTempFile("UnitTest", "datasource2.jpg").getAbsolutePath images(8).save(path2) println(s"save test image to $path2") } "imagenet sequence data source" should "load image correct" in { val resource = getClass().getClassLoader().getResource("imagenet") val tmpFile = java.io.File.createTempFile("UnitTest", System.nanoTime().toString) require(tmpFile.delete()) require(tmpFile.mkdir()) // Convert the test imagenet files to seq files val files = 
(DataSet.ImageFolder.paths(Paths.get(processPath(resource.getPath()))) -> LocalImgReaderWithName(BGRImage.NO_SCALE) -> BGRImgToLocalSeqFile(2, Paths.get(tmpFile.getAbsolutePath(), "imagenet")) ).toLocal().data(train = false).map(s => { println(s); s }).toArray files.length should be(6) val imageIter = (DataSet.SeqFileFolder.paths(Paths.get(tmpFile.getAbsolutePath()), 11) -> LocalSeqFileToBytes() -> BytesToBGRImg()).toLocal().data(train = false) val img = imageIter.next() img.label() should be(4f) img.content((100 + 100 * 213) * 3 + 2) should be(35 / 255f) img.content((100 + 100 * 213) * 3 + 1) should be(30 / 255f) img.content((100 + 100 * 213) * 3) should be(36 / 255f) imageIter.next() img.label() should be(4f) img.content((100 + 100 * 556) * 3 + 2) should be(24 / 255f) img.content((100 + 100 * 556) * 3 + 1) should be(24 / 255f) img.content((100 + 100 * 556) * 3) should be(24 / 255f) imageIter.next() imageIter.next() imageIter.next() imageIter.next() imageIter.next() imageIter.next() imageIter.next() imageIter.next() imageIter.next() imageIter.hasNext should be(false) } "ImageNet data source" should "load image correct with parallel process" in { val resource = getClass().getClassLoader().getResource("imagenet") val labelMap = LocalImageFiles.readLabels(Paths.get(processPath(resource.getPath()))) val dataSet = DataSet.ImageFolder.images(Paths.get(processPath(resource.getPath())), BGRImage.NO_SCALE) val iter = dataSet.toLocal().data(train = false) val parallel = 10 val syncPool = Executors.newFixedThreadPool(parallel) val tasks = (0 until parallel).map(pid => { syncPool.submit(new Callable[Int] { override def call(): Int = { var cc = 0 while (iter.hasNext) { val img = iter.next() if (null != img) { cc += img.label().toInt } Thread.sleep(1) } cc } }) }) val count = tasks.map(_.get()).reduce(_ + _) count should be (28) syncPool.shutdown() } "image preprocess" should "be same with torch result" in { Engine.setNodeNumber(None) val resourceImageNet = getClass().getClassLoader().getResource("imagenet") def test(imgFolder: String, imgFileName: String, tensorFile: String): Unit = { val img1Path = Paths.get(processPath(resourceImageNet.getPath()), imgFolder, imgFileName) val iter = (DataSet.array(Array(LocalLabeledImagePath(1.0f, img1Path))) -> LocalImgReader() -> BGRImgCropper(224, 224) -> HFlip() -> BGRImgNormalizer((0.4, 0.5, 0.6), (0.1, 0.2, 0.3)) -> BGRImgToBatch(1) ).toLocal().data(train = false) val image1 = iter.next().data val resourceTorch = getClass().getClassLoader().getResource("torch") val tensor1Path = Paths.get(processPath(resourceTorch.getPath()), tensorFile) val tensor1 = com.intel.analytics.bigdl.utils.File.loadTorch[Tensor[Float]]( tensor1Path.toString).addSingletonDimension() image1.size() should be(tensor1.size()) image1.map(tensor1, (a, b) => { a should be(b +- 0.0001f) b }) } RandomGenerator.RNG.setSeed(100) test("n02110063", "n02110063_11239.JPEG", "n02110063_11239.t7") RandomGenerator.RNG.setSeed(100) test("n04370456", "n04370456_5753.JPEG", "n04370456_5753.t7") RandomGenerator.RNG.setSeed(100) test("n15075141", "n15075141_38508.JPEG", "n15075141_38508.t7") RandomGenerator.RNG.setSeed(100) test("n99999999", "n03000134_4970.JPEG", "n03000134_4970.t7") } "RDD from DataSet" should "give different position every time" in { val data = (1 to 4).toArray val trainRDD = DataSet.rdd(sc.parallelize(data, 1).mapPartitions(_ => { RandomGenerator.RNG.setSeed(100) (1 to 100).iterator })).data(train = true) trainRDD.mapPartitions(iter => { Iterator.single(iter.next()) }).collect()(0) 
should be(22) trainRDD.mapPartitions(iter => { Iterator.single(iter.next()) }).collect()(0) should be(41) trainRDD.mapPartitions(iter => { Iterator.single(iter.next()) }).collect()(0) should be(62) } }
SeaOfOcean/BigDL
dl/src/test/scala/com/intel/analytics/bigdl/dataset/DataSetSpec.scala
Scala
apache-2.0
12,030
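// Hedged sketch (not the BigDL API): the `->` chaining used throughout the
// spec above, restated as a minimal Transformer trait over iterators so the
// composition pattern is visible on its own. The two stages are toy
// stand-ins for e.g. LocalImgReader and BGRImgNormalizer.
object TransformerChainSketch {
  trait Transformer[A, B] { self =>
    def apply(in: Iterator[A]): Iterator[B]
    // Compose two stages; data flows left to right, as in the specs above.
    def ->[C](next: Transformer[B, C]): Transformer[A, C] =
      new Transformer[A, C] {
        def apply(in: Iterator[A]): Iterator[C] = next(self(in))
      }
  }

  val parse = new Transformer[String, Int] {
    def apply(in: Iterator[String]) = in.map(_.trim.toInt)
  }
  val scale = new Transformer[Int, Double] {
    def apply(in: Iterator[Int]) = in.map(_ / 255.0)
  }

  def main(args: Array[String]): Unit = {
    val pipeline = parse -> scale
    println(pipeline(Iterator(" 59", "125", "234")).toList) // List(0.231..., 0.490..., 0.917...)
  }
}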
package ChatShare import common._ import akka.actor.{Actor, ActorRef, ActorSelection} import akka.event.Logging import akka.pattern.ask import akka.util.Timeout import scala.concurrent.duration._ import scala.concurrent.{Future, ExecutionContext} import scala.collection.mutable.Map class ClientHandler extends Actor { val log = Logging(context.system, this) val datetime = new DateTime() implicit val ec = ExecutionContext.Implicits.global implicit lazy val timeout = Timeout(200 seconds) //Client Map (key: username, value: clientId) var clients = Map("" -> "") def receive = { case CONNECT(clientId: String, username: String, userHandler: ActorRef) => if(clients contains username){ log.info("<CLIENTHANDLER>Client: " + clientId + " already connected as " + username + ".") }else{ log.info("<CLIENTHANDLER>Client: " + clientId + " connnected as " + username + ".") } clients += (username -> clientId) val parent = sender val future = userHandler ? LOGIN(username) future.onSuccess{ case LOGGED(username: String) => log.info(datetime.getDateTime + "<CLIENTHANDLER>LOGGED SUCCESS FOR " + username) val response = username + " now connected to ChatShare." parent ! CONNECTED(response) } future.onFailure{ case e: Exception => log.info(datetime.getDateTime + "<CLIENTHANDLER>Exception on CONNECT") val response = "SERVER ERROR ON LOGIN" parent ! CONNECTED(response) } case DISCONNECT(clientId: String, username: String, userHandler: ActorRef) => if(clients contains clientId){ log.info("<CLIENTHANDLER>Client: " + clientId + " is disconnecting.") }else{ log.info("<CLIENTHANDLER>Client: " + clientId + " is not connected.") } clients = clients - username sender ! DISCONNECTED(true) case GETCLIENT(username: String) => log.info(datetime.getDateTime + "<CLIENTHANDLER>GETCLIENT: " + username) var client = "" if(clients contains username){ client = clients getOrElse (username, "") } sender ! CLIENT(client) case GETUSERNAME(clientId: String) => log.info(datetime.getDateTime + "<CLIENTHANDLER>GETUSERNAME: " + clientId) var username = "" var users = clients.map(_.swap) if(users contains clientId){ username = users getOrElse (clientId, "") } sender ! USERNAME(username) case FORWARDFOLLOW(clientId: String, userToFollow: String, userHandler: ActorRef) => log.info(datetime.getDateTime + "<CLIENTHANDLER>FORWARDFOLLOW: " + userToFollow + " by " + clientId) var username = "" var users = clients.map(_.swap) if(users contains clientId){ username = users getOrElse (clientId, "") } val parent = sender val future = userHandler ? FOLLOW(username, userToFollow) future.onSuccess{ case FOLLOWERS(followersList: List[String]) => log.info(datetime.getDateTime + "<CLIENTHANDLER>Success on FORWARDFOLLOW") parent ! FOLLOWERS(followersList) } future.onFailure{ case e: Exception => log.info(datetime.getDateTime + "<CLIENTHANDLER>Exception on FORWARDFOLLOW") parent ! FOLLOWERS(List[String]()) } case FORWARDSTOPFOLLOW(clientId: String, userToFollow: String, userHandler: ActorRef) => log.info(datetime.getDateTime + "<CLIENTHANDLER>FORWARDSTOPFOLLOW: " + userToFollow + " by " + clientId) var username = "" var users = clients.map(_.swap) if(users contains clientId){ username = users getOrElse (clientId, "") } val parent = sender val future = userHandler ? STOPFOLLOW(username, userToFollow) future.onSuccess{ case FOLLOWERS(followersList: List[String]) => log.info(datetime.getDateTime + "<CLIENTHANDLER>Success on FORWARDSTOPFOLLOW") parent ! 
FOLLOWERS(followersList) } future.onFailure{ case e: Exception => log.info(datetime.getDateTime + "<CLIENTHANDLER>Exception on FORWARDSTOPFOLLOW") parent ! FOLLOWERS(List[String]()) } case GETFOLLOWERSWITHCLIENTS(userHandler: ActorRef, username: String) => log.info(datetime.getDateTime + "<CLIENTHANDLER>GETFOLLOWERSWITHCLIENTS: " + username) val parent = sender val future = userHandler ? GETFOLLOWERS(username) future.onSuccess{ case FOLLOWERS(followersList: List[String]) => log.info(datetime.getDateTime + "<CLIENTHANDLER>Success on GETFOLLOWERSWITHCLIENTS") var clientList = List[String]() for(follower <- followersList){ var client = "" if(clients contains follower){ client = clients getOrElse (follower, "") } clientList = client :: clientList } parent ! FOLLOWERSWITHCLIENTS(clientList) } future.onFailure{ case e: Exception => log.info(datetime.getDateTime + "<CLIENTHANDLER>Exception on GETFOLLOWERSWITHCLIENTS") parent ! FOLLOWERSWITHCLIENTS(List[String]()) } } }
highlanderkev/ChatShare
ChatShare/chatshare/src/main/scala/clienthandler.scala
Scala
mit
5,225
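// Hedged sketch (not part of ChatShare): the "snapshot sender() before an
// asynchronous callback" pattern that ClientHandler relies on with
// `val parent = sender`. LookupActor and its messages are made up for
// illustration; pipeTo is shown as an equivalent of replying with
// `parent ! ...` inside onSuccess/onFailure.
import akka.actor.Actor
import akka.pattern.pipe
import scala.concurrent.Future

object AskReplySketch {
  case class Lookup(key: String)
  case class Found(value: String)

  class LookupActor extends Actor {
    import context.dispatcher

    def receive = {
      case Lookup(key) =>
        // sender() is only stable while receive is running, so capture it now;
        // calling sender() inside the Future callback could reply to the wrong actor.
        val parent = sender()
        Future(Found(key.toUpperCase)).pipeTo(parent)
    }
  }
}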
/* * (c) Copyright 2016 Hewlett Packard Enterprise Development LP * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package cogx.platform.cpumemory.readerwriter import cogx.cogmath.algebra.complex.Complex /** The interface for reading a complex field on the CPU. * * @author Greg Snider */ trait ComplexFieldReader extends FieldReader { /** Read and return the single value in a 0D complex field. */ def read(): Complex /** Read and return the value at (`col`) in a 1D complex field. */ def read(col: Int): Complex /** Read and return the value at (`row`, `col`) in a 2D complex field. */ def read(row: Int, col: Int): Complex /** Read and return the value at (`layer`, `row`, `col`) in a 3D complex field. */ def read(layer: Int, row: Int, col: Int): Complex }
hpe-cct/cct-core
src/main/scala/cogx/platform/cpumemory/readerwriter/ComplexFieldReader.scala
Scala
apache-2.0
1,302
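// Hedged sketch (not an implementation of the cct-core trait above, since the
// other members of FieldReader are not shown here): a row-major, array-backed
// 2D complex field with the same read(row, col) shape, to illustrate how such
// a reader is typically indexed. Complex is a tiny local stand-in.
object ComplexFieldSketch {
  final case class Complex(re: Float, im: Float)

  // Dense 2D field stored row-major; (row, col) maps to row * columns + col.
  final class DenseComplexField2D(rows: Int, columns: Int) {
    private val data = Array.fill(rows * columns)(Complex(0f, 0f))
    def write(row: Int, col: Int, value: Complex): Unit = data(row * columns + col) = value
    def read(row: Int, col: Int): Complex = data(row * columns + col)
  }

  def main(args: Array[String]): Unit = {
    val field = new DenseComplexField2D(rows = 2, columns = 3)
    field.write(1, 2, Complex(0.5f, -1f))
    assert(field.read(1, 2) == Complex(0.5f, -1f))
    println(field.read(1, 2))
  }
}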
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.graphx.impl import scala.reflect.ClassTag import org.apache.spark.graphx._ import org.apache.spark.graphx.util.collection.PrimitiveKeyOpenHashMap /** * A collection of edges stored in 3 large columnar arrays (src, dst, attribute). The arrays are * clustered by src. * * @param srcIds the source vertex id of each edge * @param dstIds the destination vertex id of each edge * @param data the attribute associated with each edge * @param index a clustered index on source vertex id * @tparam ED the edge attribute type. */ private[graphx] class EdgePartition[@specialized(Char, Int, Boolean, Byte, Long, Float, Double) ED: ClassTag]( @transient val srcIds: Array[VertexId], @transient val dstIds: Array[VertexId], @transient val data: Array[ED], @transient val index: PrimitiveKeyOpenHashMap[VertexId, Int]) extends Serializable { /** * Reverse all the edges in this partition. * * @return a new edge partition with all edges reversed. */ def reverse: EdgePartition[ED] = { val builder = new EdgePartitionBuilder(size) for (e <- iterator) { builder.add(e.dstId, e.srcId, e.attr) } builder.toEdgePartition } /** * Construct a new edge partition by applying the function f to all * edges in this partition. * * Be careful not to keep references to the objects passed to `f`. * To improve GC performance the same object is re-used for each call. * * @param f a function from an edge to a new attribute * @tparam ED2 the type of the new attribute * @return a new edge partition with the result of the function `f` * applied to each edge */ def map[ED2: ClassTag](f: Edge[ED] => ED2): EdgePartition[ED2] = { val newData = new Array[ED2](data.size) val edge = new Edge[ED]() val size = data.size var i = 0 while (i < size) { edge.srcId = srcIds(i) edge.dstId = dstIds(i) edge.attr = data(i) newData(i) = f(edge) i += 1 } new EdgePartition(srcIds, dstIds, newData, index) } /** * Construct a new edge partition by using the edge attributes * contained in the iterator. * * @note The input iterator should return edge attributes in the * order of the edges returned by `EdgePartition.iterator` and * should return attributes equal to the number of edges. * * @param iter an iterator for the new attribute values * @tparam ED2 the type of the new attribute * @return a new edge partition with the attribute values replaced */ def map[ED2: ClassTag](iter: Iterator[ED2]): EdgePartition[ED2] = { // Faster than iter.toArray, because the expected size is known. val newData = new Array[ED2](data.size) var i = 0 while (iter.hasNext) { newData(i) = iter.next() i += 1 } assert(newData.size == i) new EdgePartition(srcIds, dstIds, newData, index) } /** * Apply the function f to all edges in this partition. 
* * @param f an external state mutating user defined function. */ def foreach(f: Edge[ED] => Unit) { iterator.foreach(f) } /** * Merge all the edges with the same src and dest id into a single * edge using the `merge` function * * @param merge a commutative associative merge operation * @return a new edge partition without duplicate edges */ def groupEdges(merge: (ED, ED) => ED): EdgePartition[ED] = { val builder = new EdgePartitionBuilder[ED] var currSrcId: VertexId = null.asInstanceOf[VertexId] var currDstId: VertexId = null.asInstanceOf[VertexId] var currAttr: ED = null.asInstanceOf[ED] var i = 0 while (i < size) { if (i > 0 && currSrcId == srcIds(i) && currDstId == dstIds(i)) { currAttr = merge(currAttr, data(i)) } else { if (i > 0) { builder.add(currSrcId, currDstId, currAttr) } currSrcId = srcIds(i) currDstId = dstIds(i) currAttr = data(i) } i += 1 } if (size > 0) { builder.add(currSrcId, currDstId, currAttr) } builder.toEdgePartition } /** * Apply `f` to all edges present in both `this` and `other` and return a new EdgePartition * containing the resulting edges. * * If there are multiple edges with the same src and dst in `this`, `f` will be invoked once for * each edge, but each time it may be invoked on any corresponding edge in `other`. * * If there are multiple edges with the same src and dst in `other`, `f` will only be invoked * once. */ def innerJoin[ED2: ClassTag, ED3: ClassTag] (other: EdgePartition[ED2]) (f: (VertexId, VertexId, ED, ED2) => ED3): EdgePartition[ED3] = { val builder = new EdgePartitionBuilder[ED3] var i = 0 var j = 0 // For i = index of each edge in `this`... while (i < size && j < other.size) { val srcId = this.srcIds(i) val dstId = this.dstIds(i) // ... forward j to the index of the corresponding edge in `other`, and... while (j < other.size && other.srcIds(j) < srcId) { j += 1 } if (j < other.size && other.srcIds(j) == srcId) { while (j < other.size && other.srcIds(j) == srcId && other.dstIds(j) < dstId) { j += 1 } if (j < other.size && other.srcIds(j) == srcId && other.dstIds(j) == dstId) { // ... run `f` on the matching edge builder.add(srcId, dstId, f(srcId, dstId, this.data(i), other.data(j))) } } i += 1 } builder.toEdgePartition } /** * The number of edges in this partition * * @return size of the partition */ def size: Int = srcIds.size /** The number of unique source vertices in the partition. */ def indexSize: Int = index.size /** * Get an iterator over the edges in this partition. * * Be careful not to keep references to the objects from this iterator. * To improve GC performance the same object is re-used in `next()`. * * @return an iterator over edges in the partition */ def iterator = new Iterator[Edge[ED]] { private[this] val edge = new Edge[ED] private[this] var pos = 0 override def hasNext: Boolean = pos < EdgePartition.this.size override def next(): Edge[ED] = { edge.srcId = srcIds(pos) edge.dstId = dstIds(pos) edge.attr = data(pos) pos += 1 edge } } /** * Get an iterator over the edges in this partition whose source vertex ids match srcIdPred. The * iterator is generated using an index scan, so it is efficient at skipping edges that don't * match srcIdPred. */ def indexIterator(srcIdPred: VertexId => Boolean): Iterator[Edge[ED]] = index.iterator.filter(kv => srcIdPred(kv._1)).flatMap(Function.tupled(clusterIterator)) /** * Get an iterator over the cluster of edges in this partition with source vertex id `srcId`. The * cluster must start at position `index`. * * Be careful not to keep references to the objects from this iterator. 
To improve GC performance * the same object is re-used in `next()`. */ private def clusterIterator(srcId: VertexId, index: Int) = new Iterator[Edge[ED]] { private[this] val edge = new Edge[ED] private[this] var pos = index override def hasNext: Boolean = { pos >= 0 && pos < EdgePartition.this.size && srcIds(pos) == srcId } override def next(): Edge[ED] = { assert(srcIds(pos) == srcId) edge.srcId = srcIds(pos) edge.dstId = dstIds(pos) edge.attr = data(pos) pos += 1 edge } } }
zhangjunfang/eclipse-dir
spark/graphx/src/main/scala/org/apache/spark/graphx/impl/EdgePartition.scala
Scala
bsd-2-clause
8,353
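// Hedged sketch (not the GraphX API): the sorted two-pointer merge that
// innerJoin above performs, restated over plain (srcId, dstId, attr) tuples.
// Both inputs are assumed clustered by (src, dst), as EdgePartition is.
object EdgeInnerJoinSketch {
  import scala.collection.mutable.ArrayBuffer

  type Edge[A] = (Long, Long, A) // (srcId, dstId, attr)

  def innerJoin[A, B, C](left: Seq[Edge[A]], right: Seq[Edge[B]])
                        (f: (Long, Long, A, B) => C): Seq[Edge[C]] = {
    val out = ArrayBuffer.empty[Edge[C]]
    var i = 0
    var j = 0
    while (i < left.length && j < right.length) {
      val (src, dst, a) = left(i)
      // Advance j to the first right edge with this src ...
      while (j < right.length && right(j)._1 < src) j += 1
      if (j < right.length && right(j)._1 == src) {
        // ... then, within that src cluster, to the matching dst.
        while (j < right.length && right(j)._1 == src && right(j)._2 < dst) j += 1
        if (j < right.length && right(j)._1 == src && right(j)._2 == dst)
          out += ((src, dst, f(src, dst, a, right(j)._3)))
      }
      i += 1
    }
    out.toSeq
  }

  def main(args: Array[String]): Unit = {
    val l = Seq((1L, 2L, "x"), (1L, 3L, "y"), (2L, 5L, "z"))
    val r = Seq((1L, 3L, 10), (2L, 5L, 20), (3L, 1L, 30))
    innerJoin(l, r)((_, _, s, n) => s + ":" + n).foreach(println) // (1,3,y:10), (2,5,z:20)
  }
}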
package org.openurp.edu.eams.base.web.action import org.beangle.data.jpa.dao.OqlBuilder import org.beangle.data.model.Entity import org.openurp.edu.eams.base.School import org.openurp.edu.eams.base.code.ministry.Institution import org.openurp.edu.eams.web.action.BaseAction class SchoolAction extends BaseAction { protected def getEntityName(): String = classOf[School].getName protected def indexSetting() { } protected def editSetting(entity: Entity[_]) { val builder = OqlBuilder.from(classOf[Institution], "institution") .where("institution.effectiveAt <= :now and (institution.invalidAt is null or institution.invalidAt >= :now)", new java.util.Date()) if (entity.isPersisted) { builder.where("not exists(from " + classOf[School].getName + " school where school.institution = institution and institution <> :institution)", entity.asInstanceOf[School].getInstitution) } else { builder.where("not exists(from " + classOf[School].getName + " school where school.institution = institution)") } builder.orderBy("institution.code") put("institutions", entityDao.search(builder)) } }
openurp/edu-eams-webapp
web/src/main/scala/org/openurp/edu/eams/base/web/action/SchoolAction.scala
Scala
gpl-3.0
1,157
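// Hedged sketch (not the Beangle OqlBuilder API): the conditional where-clause
// accumulation that editSetting above performs, shown with a toy immutable
// builder so the branch on entity.isPersisted is easy to see in isolation.
object ConditionalQuerySketch {
  final case class Query(from: String, wheres: List[String], order: Option[String]) {
    def where(clause: String): Query = copy(wheres = wheres :+ clause)
    def orderBy(o: String): Query = copy(order = Some(o))
    def render: String =
      s"from $from where ${wheres.mkString(" and ")}" + order.map(" order by " + _).getOrElse("")
  }

  def institutionQuery(persisted: Boolean): Query = {
    val base = Query("Institution institution", Nil, None)
      .where("institution.effectiveAt <= :now and (institution.invalidAt is null or institution.invalidAt >= :now)")
    val scoped =
      if (persisted)
        base.where("not exists(from School school where school.institution = institution and institution <> :institution)")
      else
        base.where("not exists(from School school where school.institution = institution)")
    scoped.orderBy("institution.code")
  }

  def main(args: Array[String]): Unit = {
    println(institutionQuery(persisted = false).render)
    println(institutionQuery(persisted = true).render)
  }
}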