repo_name | path | text
---|---|---|
amitkumarj441/DynaML | src/main/scala-2.11/io/github/mandar2812/dynaml/DynaMLSSH.scala | /*
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
* */
package io.github.mandar2812.dynaml
import java.io.{InputStream, OutputStream, PrintStream}
import ammonite.ops.Path
import ammonite.runtime.Storage
import ammonite.sshd.{SshServer, SshServerConfig}
import ammonite.sshd.util.Environment
import ammonite.util.{Bind, Colors}
/**
* An SSH server which serves an Ammonite REPL as its shell channel.
* To start listening for incoming connections call the
* [[start()]] method. You can [[stop()]] the server at any moment;
* stopping will also close all running sessions.
* @param sshConfig configuration of the ssh server,
* such as user credentials or the port to listen on
* @param predef predef code that will be installed on REPL instances served by this server
* @param replArgs arguments to pass to the Ammonite REPL on initialization of the session;
* an argument named "session" containing the SSHD session will be added
* @param classLoader classloader for Ammonite to use
*/
class DynaMLSSH(
sshConfig: SshServerConfig,
predef: String = "",
defaultPredef: Boolean = true,
wd: Path = ammonite.ops.pwd,
replArgs: Seq[Bind[_]] = Nil,
classLoader: ClassLoader = DynaMLSSH.getClass.getClassLoader) {
private lazy val sshd = SshServer(
sshConfig,
shellServer = DynaMLSSH.runRepl(
sshConfig.ammoniteHome,
predef,
defaultPredef,
wd,
replArgs,
classLoader
)
)
def port = sshd.getPort
def start(): Unit = sshd.start()
def stop(): Unit = sshd.stop()
def stopImmediately(): Unit = sshd.stop(true)
}
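// A minimal usage sketch; the SshServerConfig field names shown here are
// assumptions (they come from the ammonite-sshd library, not this file):
//
//   val server = new DynaMLSSH(
//     SshServerConfig(address = "localhost", port = 22222)
//   )
//   server.start()
//   // ... accept ssh sessions, each served by a DynaML REPL ...
//   server.stop()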
object DynaMLSSH {
// Actually runs a REPL inside an ssh session, serving a remote user's shell.
private def runRepl(
homePath: Path,
predefCode: String,
defaultPredef: Boolean,
wd: Path,
replArgs: Seq[Bind[_]],
replServerClassLoader: ClassLoader)(
in: InputStream, out: OutputStream): Unit = {
// since the sshd server has its own customised environment,
// where things like System.out will output to the
// server's console, we need to prepare an individual environment
// to serve this particular user's session
Environment.withEnvironment(Environment(replServerClassLoader, in, out)) {
try {
DynaML(
predefCode = predefCode,
predefFile = None,
defaultPredef = defaultPredef,
storageBackend = new Storage.Folder(homePath),
wd = wd,
inputStream = in,
outputStream = out,
errorStream = out,
verboseOutput = false,
remoteLogging = false,
colors = Colors.Default
).run(replArgs:_*)
} catch {
case any: Throwable =>
val sshClientOutput = new PrintStream(out)
sshClientOutput.println("What a terrible failure, DynaML just blew up!")
any.printStackTrace(sshClientOutput)
}
}
}
}
|
amitkumarj441/DynaML | dynaml-pipes/src/main/scala-2.11/io/github/mandar2812/dynaml/pipes/Encoder.scala | /*
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
* */
package io.github.mandar2812.dynaml.pipes
/**
* A deterministic and reversible encoding
* from a domain to a range. Mathematically equivalent
* to a bijective function.
* @tparam S The domain type
* @tparam D The output type
*
* @author mandar2812 date 23/10/2016.
*
* */
trait Encoder[S, D] extends DataPipe[S, D] {
/**
* Represents the decoding operation.
* */
val i: DataPipe[D, S]
/**
* Represents the composition of two
* encoders, resulting in a third encoder.
* Schematically represented as:
*
* [[S]] -> [[D]] :: [[D]] -> [[Further]] ==
* [[S]] -> [[Further]]
*
* */
def >[Further](that: Encoder[D, Further]): Encoder[S, Further] = {
val fPipe1 = DataPipe(this.run _)
val rPipe1 = this.i
val fPipe2 = DataPipe(that.run _)
val rPipe2 = that.i
val fPipe = fPipe1 > fPipe2
val rPipe = rPipe2 > rPipe1
Encoder(fPipe, rPipe)
}
}
object Encoder {
/**
* Create an encoder on the fly by supplying the encode and decode function
* */
def apply[S, D](forwardEnc: (S) => D, reverseEnc: (D) => S): Encoder[S, D] =
new Encoder[S, D] {
val i = DataPipe(reverseEnc)
override def run(data: S): D = forwardEnc(data)
}
def apply[S, D](forwardPipe: DataPipe[S, D], reversePipe: DataPipe[D, S]) =
new Encoder[S, D] {
val i = reversePipe
override def run(data: S) = forwardPipe(data)
}
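// A usage sketch: build two encoders with the factories above and compose
// them with `>`; decoding chains the inverses in reverse order.
//
//   val intToStr   = Encoder((i: Int) => i.toString, (s: String) => s.toInt)
//   val strToChars = Encoder((s: String) => s.toList, (l: List[Char]) => l.mkString)
//   val intToChars = intToStr > strToChars
//   intToChars(42)               // List('4', '2')
//   intToChars.i(List('4', '2')) // 42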
} |
amitkumarj441/DynaML | dynaml-core/src/main/scala-2.11/io/github/mandar2812/dynaml/models/statespace/POMP.scala | package io.github.mandar2812.dynaml.models.statespace
import java.io.Serializable
import breeze.linalg.DenseVector
import breeze.numerics._
import breeze.stats.distributions._
import scala.language.implicitConversions
object POMP {
type Eta = Vector[Double]
type Gamma = Double
type Observation = Double
type Time = Double
type TimeIncrement = Double
type LogLikelihood = Double
implicit def bool2obs(b: Boolean): Observation = if (b) 1.0 else 0.0
implicit def obs2bool(o: Observation): Boolean = if (o == 0.0) false else true
def studentTModel(stepFun: (SdeParameter) => (State, TimeIncrement) => Rand[State], df: Int): Parameters => StateSpaceModel = p => new StateSpaceModel {
def observation = x => {
new Rand[Observation] {
def draw = p match {
case LeafParameter(_,v,_) => v match {
case Some(scale) => StudentsT(df).draw*scale + x.head
}
}
}
}
def f(s: State, t: Time) = s.head
def x0 = p match {
case LeafParameter(stateParam, _, _) =>
stateParam match {
case GaussianParameter(m0, c0) =>
MultivariateGaussian(m0, sqrt(c0)) map (LeafState(_))
}
}
def stepFunction = (x, dt) => p match {
case LeafParameter(_,_,sdeparam @unchecked) => stepFun(sdeparam)(x, dt)
}
def dataLikelihood = (s, y) =>
p match {
case LeafParameter(_,v,_ @unchecked) => v match {
// include the log-Jacobian of the scaling: log f(y) = log f_X((y - mu)/scale) - log(scale)
case Some(scale @unchecked) => StudentsT(df).logPdf((y - s.head) / scale) - log(scale)
}
}
}
def SeasonalModel(
period: Int,
harmonics: Int,
stepFun: (SdeParameter) => (State, TimeIncrement) => Rand[State]): Parameters => StateSpaceModel = p => new StateSpaceModel {
def observation = x => {
new Rand[Observation] {
def draw = p match {
case LeafParameter(_,v,_) => v match {
case Some(noisesd) => Gaussian(x.head, noisesd).draw
}
}
}
}
def buildF(harmonics: Int, t: Time): DenseVector[Double] = {
val frequency = 2 * math.Pi / period
DenseVector(((1 to harmonics) flatMap (a =>
Array(cos(frequency * a * t), sin(frequency * a * t)))).toArray)
}
def f(s: State, t: Time) = s match {
case LeafState(x @unchecked) => buildF(harmonics, t) dot DenseVector(x.toArray)
}
def x0 = p match {
case LeafParameter(stateParam, _, _) =>
stateParam match {
case GaussianParameter(m0, c0) =>
MultivariateGaussian(m0, sqrt(c0)) map (LeafState(_))
}
}
def stepFunction = (x, dt) => p match {
case LeafParameter(_,_,sdeparam @unchecked) => stepFun(sdeparam)(x, dt)
}
def dataLikelihood = (s, y) =>
p match {
case LeafParameter(_,v,_ @unchecked) => v match {
case Some(noisesd @unchecked) => Gaussian(s.head, noisesd).logPdf(y)
}
}
}
def LinearModel(stepFun: (SdeParameter) => (State, TimeIncrement) => Rand[State]): Parameters => StateSpaceModel = p => new StateSpaceModel {
def observation = x => new Rand[Observation] {
def draw = {
p match {
case LeafParameter(_,v,_ @unchecked) =>
v.map(Gaussian(x.head, _).draw).get
}
}
}
def f(s: State, t: Time) = s.head
def x0 = p match {
case LeafParameter(stateParam, _, _ @unchecked) =>
stateParam match {
case GaussianParameter(m0, c0 @unchecked) =>
MultivariateGaussian(m0, sqrt(c0)) map (LeafState(_))
}
}
def stepFunction = (x, dt) => p match {
case LeafParameter(_,_,sdeparam @unchecked) => stepFun(sdeparam)(x, dt)
}
def dataLikelihood = (s, y) => p match {
case LeafParameter(_,v,_) => v match {
case Some(noisesd @unchecked) => Gaussian(s.head, noisesd).logPdf(y)
}
}
}
def PoissonModel(stepFun: (SdeParameter) => (State, TimeIncrement) => Rand[State]): Parameters => StateSpaceModel = p => new StateSpaceModel with Serializable {
def observation = lambda => new Rand[Observation] { def draw = Poisson(lambda.head).draw }
override def link(x: Double) = Vector(exp(x))
def f(s: State, t: Time) = s.head
def x0 = p match {
case LeafParameter(stateParam, _, _ @unchecked) =>
stateParam match {
case GaussianParameter(m0, c0) =>
MultivariateGaussian(m0, sqrt(c0)) map (LeafState(_))
}
}
def stepFunction = (x, dt) => p match {
case LeafParameter(_,_,sdeparam @unchecked) => stepFun(sdeparam)(x, dt)
}
def dataLikelihood = (s, y) =>
(Poisson(s.head).logProbabilityOf(y.toInt))
}
def BernoulliModel(stepFun: (SdeParameter) => (State, TimeIncrement) => Rand[State]): Parameters => StateSpaceModel = params => new StateSpaceModel {
def observation = p => new Rand[Observation] {
def draw = {
val bern = new Bernoulli(p.head)
bern.draw
}
}
override def link(x: Gamma) =
if (x > 6) {
Vector(1.0)
} else if (x < -6) {
Vector(0.0)
} else {
Vector(1.0/(1 + exp(-x)))
}
def f(s: State, t: Time) = s.head
def x0 = params match {
case LeafParameter(stateParam, _, _ @unchecked) =>
stateParam match {
case GaussianParameter(m0, c0) =>
MultivariateGaussian(m0, sqrt(c0)) map (LeafState(_))
}
}
def stepFunction = (x, dt) => params match {
case LeafParameter(_,_,sdeparam @unchecked) => stepFun(sdeparam)(x, dt)
}
/**
* log(0) is undefined, so we just return a very small number for the log-likelihood when the relevant probability is zero
*/
def dataLikelihood = {
(p, y) =>
if (y) {
if (p.head == 0.0) -1e99 else log(p.head)
} else {
if ((1 - p.head) == 0.0) -1e99 else log(1-p.head)
}
}
}
/**
* The log-Gaussian Cox process is used to model time-to-event data with
* a log-Gaussian time-varying hazard rate
*/
def LogGaussianCox(
stepFun: (SdeParameter) => (State, TimeIncrement) => Rand[State]): Parameters => StateSpaceModel = p => new StateSpaceModel {
def observation = ???
def f(s: State, t: Time) = s.head
def x0 = p match {
case LeafParameter(stateParam, _, _ @unchecked) =>
stateParam match {
case GaussianParameter(m0, c0) =>
MultivariateGaussian(m0, sqrt(c0)) map (LeafState(_))
}
}
def stepFunction = (x, dt) => p match {
case LeafParameter(_,_,sdeparam @unchecked) => stepFun(sdeparam)(x, dt)
}
/**
* The data likelihood requires two parameters, the hazard and cumulative hazard
*/
def dataLikelihood = (s, y) => s.head - s(1)
}
}
|
amitkumarj441/DynaML | dynaml-core/src/main/scala-2.11/io/github/mandar2812/dynaml/kernels/PolynomialKernel.scala | package io.github.mandar2812.dynaml.kernels
import breeze.linalg.{DenseMatrix, DenseVector}
/**
* Standard polynomial SVM kernel
* of the form K(x,y) = (x<sup>T</sup> . y + offset)<sup>degree</sup>
*/
class PolynomialKernel(
private var degree: Int = 2,
private var offset: Double = 1.0)
extends SVMKernel[DenseMatrix[Double]]
with LocalSVMKernel[DenseVector[Double]]
with Serializable {
override val hyper_parameters = List("degree", "offset")
state = Map("degree" -> degree, "offset" -> offset)
def setdegree(d: Int): Unit = {
this.degree = d
state += ("degree" -> d.toDouble)
}
def setoffset(o: Double): Unit = {
this.offset = o
state += ("offset" -> o)
}
override def evaluateAt(config: Map[String, Double])(
x: DenseVector[Double],
y: DenseVector[Double]): Double =
math.pow((x.t * y) + config("offset"), config("degree").toInt)
override def setHyperParameters(h: Map[String, Double]) = {
super.setHyperParameters(h)
if(h contains "offset")
state += ("offset" -> math.abs(h("offset")))
this
}
}
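// A usage sketch (assuming `state`, set in the constructor above, is publicly
// readable): a cubic kernel with unit offset, so K(x, y) = (x.t * y + 1.0)^3;
// for the vectors below this is 4.5^3 = 91.125.
//
//   val k = new PolynomialKernel(degree = 3, offset = 1.0)
//   k.evaluateAt(k.state)(DenseVector(1.0, 2.0), DenseVector(0.5, 1.5)) // 91.125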
|
amitkumarj441/DynaML | dynaml-repl/src/main/scala-2.11/io/github/mandar2812/dynaml/repl/DynaMLInterpreter.scala | package io.github.mandar2812.dynaml.repl
import java.io.{OutputStream, PrintStream}
import java.util.regex.Pattern
import ammonite.interp.{Interpreter, Preprocessor}
import ammonite.ops.{Path, exists, ls}
import ammonite.runtime.{Evaluator, Frame, Storage}
import ammonite.util.Util.{CodeSource, VersionedWrapperId, newLine}
import ammonite.util._
import org.apache.commons.io.output.ByteArrayOutputStream
import scala.annotation.tailrec
class DynaMLInterpreter(
override val printer: Printer, storage: Storage,
basePredefs: Seq[PredefInfo], customPredefs: Seq[PredefInfo],
// Allows you to set up additional "bridges" between the REPL
// world and the outside world, by passing in the full name
// of the `APIHolder` object that will hold the bridge and
// the object that will be placed there. Needs to be passed
// in as a callback rather than run manually later, as these
// bridges need to be in place *before* the predef starts
// running, so you can use them in the predef to e.g. configure
// the REPL before it starts
extraBridges: Seq[(String, String, AnyRef)], wd: Path,
colors: Ref[Colors], verboseOutput: Boolean = true,
getFrame: () => Frame, override val createFrame: () => Frame,
replCodeWrapper: Preprocessor.CodeWrapper,
scriptCodeWrapper: Preprocessor.CodeWrapper) extends Interpreter(
printer, storage, basePredefs, customPredefs,
extraBridges, wd, colors, verboseOutput, getFrame, createFrame,
replCodeWrapper, scriptCodeWrapper) {
override val eval: DynaMLEvaluator = DynaMLEvaluator(headFrame)
private var scriptImportCallback: Imports => Unit = handleImports
def processCellBlocks(blocks: Seq[BlockData],
splittedScript: => Res[IndexedSeq[(String, Seq[String])]],
startingImports: Imports,
codeSource: CodeSource,
evaluate: (Preprocessor.Output, Name) => Res[(Evaluated, Tag)],
autoImport: Boolean,
extraCode: String): Res[ScriptOutput.Metadata] = synchronized{
// we store the old value, because we will reassign this in the loop
val outerScriptImportCallback = scriptImportCallback
/**
* Iterate over the blocks of a script keeping track of imports.
*
* We keep track of *both* the `scriptImports` as well as the `lastImports`
* because we want to be able to make use of any import generated in the
* script within its blocks, but at the end we only want to expose the
* imports generated by the last block to whoever loaded the script
*
* @param blocks the compilation block of the script, separated by `@`s.
* Each one is a tuple containing the leading whitespace and
* a sequence of statements in that block
*
* @param scriptImports the set of imports that apply to the current
* compilation block, excluding that of the last
* block that was processed since that is held
* separately in `lastImports` and treated
* specially
*
* @param lastImports the imports created by the last block that was processed;
* only the imports created by that block are held here
*
* @param wrapperIndex a counter providing the index of the current block, so
* e.g. if `Foo.sc` has multiple blocks they can be named
* `Foo_1` `Foo_2` etc.
*
* @param perBlockMetadata an accumulator for the processed metadata of each block
* that is fed in
*/
@tailrec def loop(blocks: Seq[BlockData],
scriptImports: Imports,
lastImports: Imports,
wrapperIndex: Int,
perBlockMetadata: List[ScriptOutput.BlockMetadata])
: Res[ScriptOutput.Metadata] = {
if (blocks.isEmpty) {
// No more blocks
// if we have imports to pass to the upper layer we do that
if (autoImport) outerScriptImportCallback(lastImports)
Res.Success(ScriptOutput.Metadata(perBlockMetadata))
} else {
// imports from scripts loaded from this script block will end up in this buffer
var nestedScriptImports = Imports()
scriptImportCallback = { imports =>
nestedScriptImports = nestedScriptImports ++ imports
}
// pretty printing results is disabled for scripts
val indexedWrapperName = Interpreter.indexWrapperName(codeSource.wrapperName, wrapperIndex)
def compileRunBlock(leadingSpaces: String, hookInfo: ImportHookInfo) = {
val printSuffix = if (wrapperIndex == 1) "" else " #" + wrapperIndex
printer.info("Compiling " + codeSource.printablePath + printSuffix)
for{
processed <- compilerManager.preprocess(codeSource.fileName).transform(
hookInfo.stmts,
"",
leadingSpaces,
codeSource.pkgName,
indexedWrapperName,
scriptImports ++ hookInfo.imports,
_ => "scala.Iterator[String]()",
extraCode = extraCode,
skipEmpty = false,
codeWrapper = scriptCodeWrapper
)
(ev, tag) <- evaluate(processed, indexedWrapperName)
} yield ScriptOutput.BlockMetadata(
VersionedWrapperId(ev.wrapper.map(_.encoded).mkString("."), tag),
leadingSpaces,
hookInfo,
ev.imports
)
}
val cachedLoaded = for{
(classFiles, blockMetadata) <- blocks.head
// We don't care about the results of resolving the import hooks;
// Assuming they still *can* be resolved, the `envHash` check will
// ensure we re-compile this block if the contents of any import hook
// change
if resolveImportHooks(
blockMetadata.hookInfo.trees,
blockMetadata.hookInfo.stmts,
codeSource,
scriptCodeWrapper.wrapperPath
).isInstanceOf[Res.Success[_]]
} yield {
val envHash = Interpreter.cacheTag(evalClassloader.classpathHash)
if (envHash != blockMetadata.id.tag.env) {
compileRunBlock(blockMetadata.leadingSpaces, blockMetadata.hookInfo)
} else{
compilerManager.addToClasspath(classFiles)
val cls = eval.loadClass(blockMetadata.id.wrapperPath, classFiles)
val evaluated =
try cls.map(eval.evalMain(_, evalClassloader))
catch Evaluator.userCodeExceptionHandler
evaluated.map(_ => blockMetadata)
}
}
val res = cachedLoaded.getOrElse{
for{
allSplittedChunks <- splittedScript
(leadingSpaces, stmts) = allSplittedChunks(wrapperIndex - 1)
(hookStmts, importTrees) = parseImportHooks(codeSource, stmts)
hookInfo <- resolveImportHooks(
importTrees, hookStmts, codeSource, scriptCodeWrapper.wrapperPath
)
res <- compileRunBlock(leadingSpaces, hookInfo)
} yield res
}
res match{
case Res.Success(blockMetadata) =>
val last =
blockMetadata.hookInfo.imports ++
blockMetadata.finalImports ++
nestedScriptImports
loop(
blocks.tail,
scriptImports ++ last,
last,
wrapperIndex + 1,
blockMetadata :: perBlockMetadata
)
case r: Res.Exit => r
case r: Res.Failure => r
case r: Res.Exception => r
case Res.Skip =>
loop(blocks.tail, scriptImports, lastImports, wrapperIndex + 1, perBlockMetadata)
}
}
}
// wrapperIndex starts off as 1, so that consecutive wrappers can be named
// Wrapper, Wrapper2, Wrapper3, Wrapper4, ...
try {
for(res <- loop(blocks, startingImports, Imports(), wrapperIndex = 1, List()))
// We build up `blockInfo` backwards, since it's a `List`, so reverse it
// before giving it to the outside world
yield ScriptOutput.Metadata(res.blockInfo.reverse)
} finally scriptImportCallback = outerScriptImportCallback
}
def evaluateCell(processed: Preprocessor.Output,
printer: Printer,
fileName: String,
indexedWrapperName: Name,
silent: Boolean = false,
incrementLine: () => Unit,
outputStream: ByteArrayOutputStream): Res[(Evaluated, Tag)] = synchronized{
for{
_ <- Catching{ case e: ThreadDeath => Evaluator.interrupted(e) }
output <- compilerManager.compileClass(
processed,
printer,
fileName
)
_ = incrementLine()
res <- eval.processLine(
output.classFiles,
output.imports,
output.usedEarlierDefinitions.getOrElse(Nil),
printer,
indexedWrapperName,
replCodeWrapper.wrapperPath,
silent,
evalClassloader
)
} yield (res, Tag("", ""))
}
}
object DynaMLInterpreter {
def mtimeIfExists(p: Path) = if (exists(p)) p.mtime.toMillis else 0L
/**
* Recursively mtimes things, with the sole purpose of providing a number
* that will change if that file changes or that folder's contents change
*
* Ensure we include the file paths within a folder as part of the folder
* signature, as file moves often do not update the mtime but we want to
* trigger a "something changed" event anyway
*/
def pathSignature(p: Path) =
if (!exists(p)) 0L
else try {
if (p.isDir) ls.rec(p).map(x => x.hashCode + mtimeIfExists(x)).sum
else p.mtime.toMillis
} catch { case e: java.nio.file.NoSuchFileException =>
0L
}
val SheBang = "#!"
val SheBangEndPattern = Pattern.compile(s"""((?m)^!#.*)$newLine""")
/**
* This gives our cache tags for compile caching. The cache tags are a hash
* of classpath, previous commands (in-same-script), and the block-code.
* Previous commands are hashed in the wrapper names, which are contained
* in imports, so we don't need to pass them explicitly.
*/
def cacheTag(classpathHash: Array[Byte]): String = {
val bytes = Util.md5Hash(Iterator(
classpathHash
))
bytes.map("%02x".format(_)).mkString
}
def skipSheBangLine(code: String) = {
val newLineLength = newLine.length
/**
* the skipMultipleLines function is necessary to support the parsing of
* multiple shebang lines. The NixOS nix-shell normally uses 2+ shebang lines.
*/
def skipMultipleLines(ind: Int = 0): Int = {
val index = code.indexOf('\n', ind)
if (code.substring(index + 1).startsWith(SheBang))
skipMultipleLines(ind + index + 1)
else index - (newLineLength - 1)
}
if (code.startsWith(SheBang)) {
val matcher = SheBangEndPattern matcher code
val shebangEnd = if (matcher.find) matcher.end else skipMultipleLines()
val numberOfStrippedLines = newLine.r.findAllMatchIn( code.substring(0, shebangEnd) ).length
(newLine * numberOfStrippedLines) + code.substring(shebangEnd)
} else
code
}
def indexWrapperName(wrapperName: Name, wrapperIndex: Int): Name = {
Name(wrapperName.raw + (if (wrapperIndex == 1) "" else "_" + wrapperIndex))
}
def initPrinters(colors0: Colors,
output: OutputStream,
error: OutputStream,
verboseOutput: Boolean) = {
val colors = Ref[Colors](colors0)
val printStream = new PrintStream(output, true)
val errorPrintStream = new PrintStream(error, true)
def printlnWithColor(stream: PrintStream, color: fansi.Attrs, s: String) = {
stream.println(color(s).render)
}
val printer = Printer(
printStream,
errorPrintStream,
printStream,
printlnWithColor(errorPrintStream, colors().warning(), _),
printlnWithColor(errorPrintStream, colors().error(), _),
s => if (verboseOutput) printlnWithColor(errorPrintStream, colors().info(), s)
)
(colors, printer)
}
}
|
amitkumarj441/DynaML | dynaml-core/src/main/scala-2.11/io/github/mandar2812/dynaml/models/statespace/Parameters.scala | package io.github.mandar2812.dynaml.models.statespace
import breeze.linalg.{DenseMatrix, DenseVector, diag}
import breeze.numerics.exp
import breeze.stats.distributions.{Gaussian, Rand}
sealed trait Parameters {
override def toString = Parameters.flatten(this).mkString(", ")
def |+|(that: Parameters): Parameters =
Parameters.combine(this, that)
def length: Int = Parameters.length(this)
def isEmpty: Boolean = Parameters.isEmpty(this)
def perturb(delta: Double): Rand[Parameters] =
Parameters.perturb(delta)(this)
def perturbIndep(delta: Vector[Double]): Rand[Parameters] =
Parameters.perturbIndep(delta)(this)
}
case class LeafParameter(initParams: StateParameter, scale: Option[Double], sdeParam: SdeParameter) extends Parameters
case class BranchParameter(left: Parameters, right: Parameters) extends Parameters
object LeafParameter {
def apply(): LeafParameter = {
LeafParameter(EmptyParameter, None, EmptyStepParameter)
}
}
object Parameters {
/**
* A method to combine parameters
*/
def combine(lp: Parameters, rp: Parameters): Parameters =
if (lp.isEmpty) {
rp
} else if (rp.isEmpty) {
lp
} else {
BranchParameter(lp, rp)
}
/**
* A zero element, which represents an empty parameter and forms a left and right identity
*/
def zero: Parameters = LeafParameter()
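// e.g. combine(zero, p) == p and combine(p, zero) == p, since combine drops
// an empty side, making `zero` the identity element for `|+|`.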
/**
* Checks to see if a parameter is empty
*/
def isEmpty(p: Parameters): Boolean = p match {
case LeafParameter(p, v, s) => (p, v, s) match {
case (EmptyParameter, None, EmptyStepParameter) => true
case _ => false
}
case BranchParameter(lp, rp) => isEmpty(lp) && isEmpty(rp)
}
/**
* Perturbs parameters independently according to a single delta value, i.e. all parameters are perturbed on the same scale
*/
def perturb(delta: Double): Parameters => Rand[Parameters] = p => p match {
case LeafParameter(initParams, v, sdeParams) =>
for {
init <- initParams.perturb(delta)
sde <- sdeParams.perturb(delta)
} yield LeafParameter(init, v map (x => x * exp(Gaussian(0, delta).draw)), sde)
case BranchParameter(lp, rp) =>
for {
l <- perturb(delta)(lp)
r <- perturb(delta)(rp)
} yield BranchParameter(l, r)
}
/**
* Perturb parameters allowing for a different scale for each parameter
*/
def perturbIndep(delta: Vector[Double]): Parameters => Rand[Parameters] = p => p match {
case LeafParameter(initParams, v, sdeParams) =>
v match {
case Some(x) =>
for {
init <- initParams.perturbIndep(delta.take(initParams.length))
sde <- sdeParams.perturbIndep(delta.drop(initParams.length + 1))
} yield LeafParameter(init, v map (x => x * exp(Gaussian(0, delta(initParams.length)).draw)), sde)
case None =>
for {
init <- initParams.perturbIndep(delta.take(initParams.length))
sde <- sdeParams.perturbIndep(delta.drop(initParams.length))
} yield LeafParameter(init, None, sde)
}
case BranchParameter(lp, rp) =>
for {
l <- perturbIndep(delta.take(lp.length))(lp)
r <- perturbIndep(delta.drop(lp.length))(rp)
} yield BranchParameter(l, r)
}
/**
* Flattens parameters into a Vector of parameters
* useful for printing
*/
def flatten(p: Parameters): Vector[Double] = p match {
case LeafParameter(init, noise, sde) => noise match {
case Some(v) => init.flatten ++ Vector(v) ++ sde.flatten
case None => init.flatten ++ sde.flatten
}
case BranchParameter(lp, rp) => flatten(lp) ++ flatten(rp)
}
def length(p: Parameters): Int = {
flatten(p).length
}
def getSdeParameterNames(p: SdeParameter): IndexedSeq[String] = p match {
case BrownianParameter(m, s) =>
val paramSize = (0 to (m.size - 1))
(paramSize map (i => s"mu$i")) ++ (paramSize map (i => s"sigma$i"))
case OrnsteinParameter(t, a, s) =>
val paramSize = (0 to (t.size - 1))
(paramSize map (i => s"theta$i")) ++ (paramSize map (i => s"alpha$i")) ++ (paramSize map (i => s"sigma$i"))
case StepConstantParameter(a) => IndexedSeq("a")
}
def getInitParamNames(p: StateParameter): IndexedSeq[String] = p match {
case GaussianParameter(m, s) =>
val paramSize = (0 to (m.size -1))
(paramSize map (i => s"m0$i")) ++ (paramSize map (i => s"C0$i"))
}
/**
* Get parameter names using case class names
* useful for header rows in CSV files etc
* @param p the parameters to get the names of
* @return an indexed sequence of strings
*/
def getParameterNames(p: Parameters): IndexedSeq[String] = p match {
case LeafParameter(init, noise, sde) => noise match {
case Some(v) =>
getInitParamNames(init) ++ Vector("noiseSd") ++ getSdeParameterNames(sde)
case None => getInitParamNames(init) ++ getSdeParameterNames(sde)
}
case BranchParameter(lp, rp) => getParameterNames(lp) ++ getParameterNames(rp)
}
/**
* Transforms a parameter tree to a map of strings to doubles
* @param p a parameter tree
* @return a map of parameter names -> parameter value
*/
def paramsToMap(p: Parameters): Map[String, Double] = {
(getParameterNames(p), flatten(p)).zipped.map{ case (k, v) => (k -> v) }.toMap
}
}
sealed trait StateParameter {
def length: Int = StateParameter.length(this)
def flatten: Vector[Double] = StateParameter.flatten(this)
def perturb(delta: Double): Rand[StateParameter]
def perturbIndep(delta: Vector[Double]): Rand[StateParameter]
}
case class GaussianParameter(m0: DenseVector[Double], c0: DenseMatrix[Double]) extends StateParameter {
def perturb(delta: Double): Rand[GaussianParameter] = {
new Rand[GaussianParameter] {
def draw = {
GaussianParameter(
m0 map (Gaussian(_, delta).draw),
diag(diag(c0) map (x => x * exp(Gaussian(0, delta).draw))))
}
}
}
def perturbIndep(delta: Vector[Double]): Rand[GaussianParameter] = {
new Rand[GaussianParameter] {
def draw = {
GaussianParameter(
DenseVector(m0.data.zip(delta.take(m0.length)) map { case (x, d) => Gaussian(x, d).draw }),
diag(DenseVector(diag(c0).toArray.zip(delta.drop(m0.length)) map { case (x, d) => x * exp(Gaussian(0, d).draw) })))
}
}
}
}
case object EmptyParameter extends StateParameter {
def perturb(delta: Double): Rand[StateParameter] = ???
def perturbIndep(delta: Vector[Double]): Rand[StateParameter] = ???
}
object GaussianParameter {
def apply(m0: Double, c0: Double): GaussianParameter = {
new GaussianParameter(DenseVector(m0), DenseMatrix(c0))
}
}
object StateParameter {
def flatten(p: StateParameter): Vector[Double] = p match {
case GaussianParameter(m, s) => (m.data ++ diag(s).toArray).toVector
}
def length(p: StateParameter): Int = flatten(p).length
}
sealed trait SdeParameter {
def length: Int = SdeParameter.length(this)
def flatten: Vector[Double] = SdeParameter.flatten(this)
def perturb(delta: Double): Rand[SdeParameter]
def perturbIndep(delta: Vector[Double]): Rand[SdeParameter]
}
object SdeParameter {
def flatten(p: SdeParameter): Vector[Double] = p match {
case BrownianParameter(m, s) => m.data.toVector ++ diag(s).data.toVector
case OrnsteinParameter(theta, alpha, sigma) => theta.data.toVector ++ alpha.data.toVector ++ sigma.data.toVector
case StepConstantParameter(a) => a.data.toVector
}
def length(p: SdeParameter): Int = flatten(p).length
}
case class BrownianParameter(mu: DenseVector[Double], sigma: DenseMatrix[Double]) extends SdeParameter {
def perturb(delta: Double): Rand[BrownianParameter] = {
new Rand[BrownianParameter] {
def draw = {
BrownianParameter(
mu map (Gaussian(_, delta).draw),
diag(diag(sigma) map (x => x * exp(Gaussian(0, delta).draw))))
}
}
}
def perturbIndep(delta: Vector[Double]): Rand[BrownianParameter] = {
new Rand[BrownianParameter] {
def draw = {
BrownianParameter(
DenseVector(mu.data.zip(delta.take(mu.length)) map { case (x, d) => Gaussian(x, d).draw }),
diag(DenseVector(diag(sigma).toArray.zip(delta.drop(mu.length)) map { case (x, d) => x * exp(Gaussian(0, d).draw) } ))
)
}
}
}
}
object BrownianParameter {
def apply(mu: Double, sigma: Double): BrownianParameter = {
new BrownianParameter(DenseVector(mu), DenseMatrix(sigma))
}
def apply(mu: Array[Double], sigma: Array[Double]): BrownianParameter = {
new BrownianParameter(DenseVector(mu), diag(DenseVector(sigma)))
}
}
case class OrnsteinParameter(theta: DenseVector[Double], alpha: DenseVector[Double], sigma: DenseVector[Double]) extends SdeParameter {
def perturb(delta: Double): Rand[OrnsteinParameter] = {
new Rand[OrnsteinParameter] {
def draw = {
// alpha and sigma are non-negative, so propose on the log-scale
OrnsteinParameter(
theta map (Gaussian(_, delta).draw),
alpha map (x => x * exp(Gaussian(0, delta).draw)),
sigma map (x => x * exp(Gaussian(0, delta).draw)))
}
}
}
def perturbIndep(delta: Vector[Double]): Rand[OrnsteinParameter] = {
new Rand[OrnsteinParameter] {
def draw = {
// alpha and sigma are non-negative, so propose on the log-scale
OrnsteinParameter(
theta.data.zip(delta.take(theta.length)) map { case (t, d) => Gaussian(t, d).draw },
alpha.data.zip(delta.drop(theta.length).take(alpha.length)) map { case (x, d) => x * exp(Gaussian(0, d).draw) },
sigma.data.zip(delta.drop(theta.length + alpha.length)) map { case (x, d) => x * exp(Gaussian(0, d).draw) })
}
}
}
}
object OrnsteinParameter {
def apply(theta: Double, alpha: Double, sigma: Double): OrnsteinParameter = {
new OrnsteinParameter(DenseVector(theta), DenseVector(alpha), DenseVector(sigma))
}
def apply(theta: Array[Double], alpha: Array[Double], sigma: Array[Double]): OrnsteinParameter = {
new OrnsteinParameter(DenseVector(theta), DenseVector(alpha), DenseVector(sigma))
}
}
case class StepConstantParameter(a: DenseVector[Double]) extends SdeParameter {
def perturb(delta: Double): Rand[StepConstantParameter] = {
new Rand[StepConstantParameter] {
def draw = StepConstantParameter(
a map (Gaussian(_, delta).draw))
}
}
def perturbIndep(delta: Vector[Double]): Rand[StepConstantParameter] = {
new Rand[StepConstantParameter] {
def draw = StepConstantParameter(DenseVector(a.data.zip(delta) map { case (x, d) => Gaussian(x, d).draw }))
}
}
}
object StepConstantParameter {
def apply(a: Double): StepConstantParameter = {
new StepConstantParameter(DenseVector(a))
}
}
case object EmptyStepParameter extends SdeParameter {
def perturb(delta: Double): Rand[SdeParameter] = ???
def perturbIndep(delta: Vector[Double]): Rand[SdeParameter] = ???
}
|
amitkumarj441/DynaML | dynaml-core/src/main/scala-2.11/io/github/mandar2812/dynaml/analysis/PartitionedVectorField.scala | package io.github.mandar2812.dynaml.analysis
import io.github.mandar2812.dynaml.algebra.PartitionedVector
import io.github.mandar2812.dynaml.algebra.PartitionedMatrixOps._
import spire.algebra.{Eq, Field, InnerProductSpace}
import io.github.mandar2812.dynaml.analysis.implicits._
/**
* Created by mandar on 19/10/2016.
*/
class PartitionedVectorField(
num_dim: Long,
num_elements_per_block: Int) extends
Field[PartitionedVector] with
InnerProductSpace[PartitionedVector, Double] {
override def div(x: PartitionedVector, y: PartitionedVector): PartitionedVector = x /:/ y
override def quot(a: PartitionedVector, b: PartitionedVector): PartitionedVector = div(a, b) - mod(a, b)
override def mod(a: PartitionedVector, b: PartitionedVector): PartitionedVector = a %:% b
override def gcd(a: PartitionedVector, b: PartitionedVector)(implicit ev: Eq[PartitionedVector]) = a %:% b
override def lcm(a: PartitionedVector, b: PartitionedVector)(implicit ev: Eq[PartitionedVector]) = a %:% b
override def one: PartitionedVector = PartitionedVector.ones(num_dim, num_elements_per_block)
override def negate(x: PartitionedVector): PartitionedVector = x *:* -1.0
override def zero: PartitionedVector = PartitionedVector.zeros(num_dim, num_elements_per_block)
override def plus(x: PartitionedVector, y: PartitionedVector): PartitionedVector = x + y
override def times(x: PartitionedVector, y: PartitionedVector): PartitionedVector = x *:* y
override def dot(v: PartitionedVector, w: PartitionedVector) = v dot w
override implicit def scalar = Field[Double]
override def timesl(r: Double, v: PartitionedVector) = v*r
}
object PartitionedVectorField {
def apply(n: Long, nE: Int) = new PartitionedVectorField(n, nE)
}
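// A usage sketch: a field instance over partitioned vectors of 1000 elements
// stored in blocks of 100 (the sizes here are illustrative only).
//
//   implicit val pvField: PartitionedVectorField = PartitionedVectorField(1000L, 100)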
abstract class InnerProductPV extends InnerProductSpace[PartitionedVector, Double] {
override def dot(v: PartitionedVector, w: PartitionedVector) = v dot w
override implicit def scalar = Field[Double]
override def timesl(r: Double, v: PartitionedVector) = v*r
override def negate(x: PartitionedVector) = x*(-1d)
override def plus(x: PartitionedVector, y: PartitionedVector) = x+y
}
object InnerProductPV {
def apply(num_dim: Long, num_elements_per_block: Int) = new InnerProductPV {
override def zero = PartitionedVector.zeros(num_dim, num_elements_per_block)
}
def apply(zeroElem: PartitionedVector): InnerProductPV = new InnerProductPV {
override def zero = zeroElem
}
} |
amitkumarj441/DynaML | dynaml-core/src/main/scala-2.11/io/github/mandar2812/dynaml/tensorflow/utils/TensorBasis.scala | /*
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
* */
package io.github.mandar2812.dynaml.tensorflow.utils
import io.github.mandar2812.dynaml.pipes.DataPipe
import org.apache.spark.annotation.Experimental
import org.platanios.tensorflow.api.Tensor
/**
* A basis function expansion yielding a TF tensor.
* */
@Experimental
case class TensorBasis[-I](f: (I) => Tensor) extends DataPipe[I, Tensor] {
self =>
override def run(data: I): Tensor = f(data)
def >(other: DataPipe[Tensor, Tensor]): TensorBasis[I] = TensorBasis((x: I) => other.run(self.f(x)))
}
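// A usage sketch (assuming tensorflow_scala's varargs `Tensor(...)`
// constructor): a quadratic feature map yielding a TF tensor.
//
//   val quadBasis = TensorBasis((x: Double) => Tensor(1.0, x, x * x))
//   quadBasis(2.0) // a tensor holding [1.0, 2.0, 4.0]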
|
amitkumarj441/DynaML | dynaml-core/src/main/scala-2.11/io/github/mandar2812/dynaml/DynaMLPipe.scala | /*
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
* */
package io.github.mandar2812.dynaml
import scala.collection.mutable.{MutableList => ML}
import breeze.linalg.DenseVector
import breeze.numerics.sqrt
import breeze.stats.distributions.ContinuousDistr
import io.github.mandar2812.dynaml.evaluation.RegressionMetrics
import io.github.mandar2812.dynaml.models.ParameterizedLearner
import io.github.mandar2812.dynaml.models.gp.AbstractGPRegressionModel
import io.github.mandar2812.dynaml.models.sgp.ESGPModel
import io.github.mandar2812.dynaml.optimization._
import io.github.mandar2812.dynaml.pipes._
import io.github.mandar2812.dynaml.probability.{ContinuousDistrRV, ContinuousRVWithDistr}
import io.github.mandar2812.dynaml.utils._
import io.github.mandar2812.dynaml.wavelets.{GroupedHaarWaveletFilter, HaarWaveletFilter, InvGroupedHaarWaveletFilter, InverseHaarWaveletFilter}
import org.apache.log4j.Logger
import org.apache.spark.rdd.RDD
import org.renjin.script.RenjinScriptEngine
import org.renjin.sexp._
import scalaxy.streams.optimize
import scala.reflect.ClassTag
import scala.util.Random
/**
* @author mandar2812 date 3/2/16.
*
* A library of sorts for common data processing
* pipes.
*/
object DynaMLPipe {
val logger = Logger.getLogger(this.getClass)
/**
* A trivial identity data pipe
* */
def identityPipe[T] = DataPipe(identity[T] _)
/**
* Data pipe which takes a file name/path as a
* [[String]] and returns a [[Stream]] of [[String]].
* */
val fileToStream = DataPipe(utils.textFileToStream _)
/**
* Read a csv text file and store it in a R data frame.
* @param df The name of the data frame variable
* @param sep Separation character in the csv file
* @return A [[DataPipe]] instance which takes as input a file name
* and returns a renjin [[ListVector]] instance, storing the data frame
* in the variable named by df.
* */
def csvToRDF(df: String, sep: Char)(implicit renjin: RenjinScriptEngine): DataPipe[String, ListVector] =
DataPipe((file: String) => renjin
.eval(df+""" <- read.csv(""""+file+"""", sep = '"""+sep+"""')""")
.asInstanceOf[ListVector])
/**
* Create a linear model from a R data frame.
* @param modelName The name of the variable to store model
* @param y The name of the target variable
* @param xs A list of names denoting input variables
* @return A [[DataPipe]] which takes as input data frame variable name
* and returns a [[ListVector]] containing linear model attributes.
* Also stores the model in the variable given by modelName in the ongoing
* R session.
* */
def rdfToGLM(modelName: String, y: String, xs: Array[String])(implicit renjin: RenjinScriptEngine)
: DataPipe[String, ListVector] = DataPipe((df: String) => renjin
.eval(modelName+" <- lm("+y+" ~ "+xs.mkString(" + ")+", "+df+")")
.asInstanceOf[ListVector])
/**
* Writes a [[Stream]] of [[String]] to
* a file.
*
* Usage: DynaMLPipe.streamToFile("abc.csv")
* */
val streamToFile = (fileName: String) => DataPipe(utils.writeToFile(fileName) _)
/**
* Writes a [[Stream]] of [[AnyVal]] to
* a file.
*
* Usage: DynaMLPipe.valuesToFile("abc.csv")
* */
val valuesToFile = (fileName: String) => DataPipe((stream: Stream[Seq[AnyVal]]) =>
utils.writeToFile(fileName)(stream.map(s => s.mkString(","))))
/**
* Drop the first element of a [[Stream]] of [[String]]
* */
val dropHead = DataPipe((s: Stream[String]) => s.tail)
/**
* Data pipe to replace all occurrences of a regular expression or string in a [[Stream]]
* of [[String]] with a specified replacement string.
* */
val replace = (original: String, newString: String) =>
StreamDataPipe((s: String) => utils.replace(original)(newString)(s))
/**
* Data pipe to replace all white spaces in a [[Stream]]
* of [[String]] with the comma character.
* */
val replaceWhiteSpaces = replace("\\s+", ",")
/**
* Trim white spaces from each line in a [[Stream]]
* of [[String]]
* */
val trimLines = StreamDataPipe((s: String) => s.trim())
val splitLine = StreamDataPipe((s: String) => s.split(","))
/**
* Generate a numeric range by dividing an interval into bins.
* */
val numeric_range: MetaPipe21[Double, Double, Int, Seq[Double]] = MetaPipe21(
(lower: Double, upper: Double) => (bins: Int) =>
Seq.tabulate[Double](bins+1)(i =>
if(i == 0) lower
else if(i == bins) upper
else lower + i*(upper-lower)/bins
)
)
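// A worked example of the pipe above: dividing [0, 1] into 4 bins yields
// the 5 bin edges:
//
//   numeric_range(0.0, 1.0)(4) // Seq(0.0, 0.25, 0.5, 0.75, 1.0)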
/**
* This pipe assumes its input to be of the form
* "YYYY,Day,Hour,Value"
*
* It takes as input a function (TFunc) which
* converts a [[Tuple3]] into a single "timestamp" like value.
*
* The pipe processes its data source line by line
* and outputs a [[Tuple2]] in the following format.
*
* (Timestamp,Value)
*
* Usage: DynaMLPipe.extractTimeSeries(TFunc)
* */
val extractTimeSeries = (Tfunc: (Double, Double, Double) => Double) =>
DataPipe((lines: Stream[String]) => lines.map{line =>
val splits = line.split(",")
val timestamp = Tfunc(splits(0).toDouble, splits(1).toDouble, splits(2).toDouble)
(timestamp, splits(3).toDouble)
})
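// A usage sketch; `toHours` and "data.csv" are hypothetical:
//
//   val toHours = (year: Double, day: Double, hour: Double) => (day - 1.0) * 24.0 + hour
//   val pipe = fileToStream > extractTimeSeries(toHours)
//   // pipe("data.csv"): Stream[(Double, Double)] of (timestamp, value) pairs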
/**
* This pipe is similar to [[DynaMLPipe.extractTimeSeries]],
* with one key difference: it returns a [[Tuple2]] like
* (Timestamp, FeatureVector), where FeatureVector is
* a Vector of values.
* */
val extractTimeSeriesVec = (Tfunc: (Double, Double, Double) => Double) =>
DataPipe((lines: Stream[String]) => lines.map{line =>
val splits = line.split(",")
val timestamp = Tfunc(splits(0).toDouble, splits(1).toDouble, splits(2).toDouble)
val feat = DenseVector(splits.slice(3, splits.length).map(_.toDouble))
(timestamp, feat)
})
/**
* In order to generate features for auto-regressive models,
* one needs to construct sliding windows in time. This function
* takes two parameters
*
* deltaT: the auto-regressive order
* timelag: the time lag after which the windowing is conducted.
*
* E.g
*
* Let deltaT = 2 and timelag = 1
*
* This pipe will take stream data of the form
* (t, Value_t)
*
* and output a stream which looks like
*
* (Vector(Value_t-3, Value_t-2), Value_t)
*
* */
val deltaOperation = (deltaT: Int, timelag: Int) =>
DataPipe((lines: Stream[(Double, Double)]) =>
lines.toList.sliding(deltaT+timelag+1).map((history) => {
val features = DenseVector(history.take(deltaT).map(_._2).toArray)
(features, history.last._2)
}).toStream)
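// A usage sketch: with deltaT = 2 and timelag = 0, every window of three
// consecutive points yields (DenseVector(v_{t-2}, v_{t-1}), v_t).
//
//   val series: Stream[(Double, Double)] =
//     Stream.tabulate(10)(t => (t.toDouble, math.sin(t.toDouble)))
//   val windowed = deltaOperation(2, 0)(series)
//   // windowed.head == (DenseVector(sin(0), sin(1)), sin(2))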
/**
* The vector version of [[DynaMLPipe.deltaOperation]]
* */
val deltaOperationVec = (deltaT: Int) =>
DataPipe((lines: Stream[(Double, DenseVector[Double])]) =>
lines.toList.sliding(deltaT+1).map((history) => {
val hist = history.take(history.length - 1).map(_._2)
val featuresAcc: ML[Double] = ML()
(0 until hist.head.length).foreach((dimension) => {
//for each dimension/regressor take points t to t-order
featuresAcc ++= hist.map(vec => vec(dimension))
})
val features = DenseVector(featuresAcc.toArray)
(features, history.last._2(0))
}).toStream)
/**
* The vector ARX version of [[DynaMLPipe.deltaOperation]]
* */
val deltaOperationARX = (deltaT: List[Int]) =>
DataPipe((lines: Stream[(Double, DenseVector[Double])]) =>
lines.toList.sliding(deltaT.max+1).map((history) => {
val hist = history.take(history.length - 1).map(_._2)
val featuresAcc: ML[Double] = ML()
(0 until hist.head.length).foreach((dimension) => {
//for each dimension/regressor take points t to t-order
featuresAcc ++= hist.takeRight(deltaT(dimension))
.map(vec => vec(dimension))
})
val features = DenseVector(featuresAcc.toArray)
(features, history.last._2(0))
}).toStream)
/**
* From a [[Stream]] of [[String]] remove all records
* which contain missing values, this pipe should be applied
* after the application of [[DynaMLPipe.extractTrainingFeatures]].
* */
val removeMissingLines = StreamDataPipe((line: String) => !line.contains("<NA>"))
/**
* Take each line which is a comma separated string and extract
* all but the last element into a feature vector and leave the last
* element as the "target" value.
*
* This pipe outputs data in a [[Stream]] of [[Tuple2]] in the following form
*
* (Vector(features), value)
* */
val splitFeaturesAndTargets = StreamDataPipe((line: String) => {
val split = line.split(",")
(DenseVector(split.tail.map(_.toDouble)), split.head.toDouble)
})
/**
* Perform gaussian normalization on a data stream which
* is a [[Tuple2]] of the form.
*
* (Stream(training data), Stream(test data))
* */
@deprecated("*Standardization pipes are deprecated as of v1.4,"+
" use pipes that output io.github.mandar2812.dynaml.pipes.Scaler objects instead")
val trainTestGaussianStandardization: DataPipe[(Stream[(DenseVector[Double], Double)],
Stream[(DenseVector[Double], Double)]),
((Stream[(DenseVector[Double], Double)], Stream[(DenseVector[Double], Double)]),
(DenseVector[Double], DenseVector[Double]))] =
DataPipe((trainTest: (Stream[(DenseVector[Double], Double)],
Stream[(DenseVector[Double], Double)])) => {
val (mean, variance) = utils.getStats(trainTest._1.map(tup =>
DenseVector(tup._1.toArray ++ Array(tup._2))).toList)
val stdDev: DenseVector[Double] = sqrt(variance)
val normalizationFunc = (point: (DenseVector[Double], Double)) => {
val extendedpoint = DenseVector(point._1.toArray ++ Array(point._2))
val normPoint = (extendedpoint - mean) :/ stdDev
val length = normPoint.length
(normPoint(0 until length-1), normPoint(-1))
}
((trainTest._1.map(normalizationFunc),
trainTest._2.map(normalizationFunc)), (mean, stdDev))
})
/**
* Perform gaussian normalization on a data stream which
* is a [[Tuple2]] of the form.
*
* (Stream(training data), Stream(test data))
* */
@deprecated("*Standardization pipes are deprecated as of v1.4,"+
" use pipes that output io.github.mandar2812.dynaml.pipes.Scaler objects instead")
val featuresGaussianStandardization: DataPipe[(Stream[(DenseVector[Double], Double)],
Stream[(DenseVector[Double], Double)]),
((Stream[(DenseVector[Double], Double)], Stream[(DenseVector[Double], Double)]),
(DenseVector[Double], DenseVector[Double]))] =
DataPipe((trainTest: (Stream[(DenseVector[Double], Double)],
Stream[(DenseVector[Double], Double)])) => {
val (mean, variance) = utils.getStats(trainTest._1.map(tup =>
tup._1).toList)
val stdDev: DenseVector[Double] = sqrt(variance)
val normalizationFunc = (point: (DenseVector[Double], Double)) => {
val normPoint = (point._1 - mean) :/ stdDev
(normPoint, point._2)
}
((trainTest._1.map(normalizationFunc),
trainTest._2.map(normalizationFunc)), (mean, stdDev))
})
/**
* Perform gaussian normalization on a data stream which
* is a [[Tuple2]] of the form.
*
* (Stream(training data), Stream(test data))
* */
@deprecated("*Standardization pipes are deprecated as of v1.4,"+
" use pipes that output io.github.mandar2812.dynaml.pipes.Scaler objects instead")
val trainTestGaussianStandardizationMO: DataPipe[
(Stream[(DenseVector[Double], DenseVector[Double])], Stream[(DenseVector[Double], DenseVector[Double])]),
((Stream[(DenseVector[Double], DenseVector[Double])], Stream[(DenseVector[Double], DenseVector[Double])]),
(DenseVector[Double], DenseVector[Double]))] =
DataPipe((trainTest: (Stream[(DenseVector[Double], DenseVector[Double])],
Stream[(DenseVector[Double], DenseVector[Double])])) => {
val (mean, variance) = utils.getStats(trainTest._1.map(tup =>
DenseVector(tup._1.toArray ++ tup._2.toArray)).toList)
val stdDev: DenseVector[Double] = sqrt(variance)
val normalizationFunc = (point: (DenseVector[Double], DenseVector[Double])) => {
val extendedpoint = DenseVector(point._1.toArray ++ point._2.toArray)
val normPoint = (extendedpoint - mean) :/ stdDev
val length = point._1.length
val outlength = point._2.length
(normPoint(0 until length),
normPoint(length until length+outlength))
}
((trainTest._1.map(normalizationFunc),
trainTest._2.map(normalizationFunc)), (mean, stdDev))
})
/**
* Returns a pipe which takes a data set and calculates the mean and standard deviation of each dimension.
* @param standardize Set to true if one wants the standardized data and false if one
* wants the original data along with the [[GaussianScaler]] instances.
* */
def calculateGaussianScales(standardize: Boolean = true): DataPipe[
Stream[(DenseVector[Double], DenseVector[Double])],
(Stream[(DenseVector[Double], DenseVector[Double])], (GaussianScaler, GaussianScaler))] =
DataPipe((data: Stream[(DenseVector[Double], DenseVector[Double])]) => {
val (num_features, num_targets) = (data.head._1.length, data.head._2.length)
val (mean, variance) = utils.getStats(data.map(tup =>
DenseVector(tup._1.toArray ++ tup._2.toArray)).toList)
val stdDev: DenseVector[Double] = sqrt(variance)
val featuresScaler = GaussianScaler(mean(0 until num_features), stdDev(0 until num_features))
val targetsScaler = GaussianScaler(
mean(num_features until num_features + num_targets),
stdDev(num_features until num_features + num_targets))
val result = if(standardize) (featuresScaler * targetsScaler)(data) else data
(result, (featuresScaler, targetsScaler))
})
/**
* Returns a pipe which takes a data set and mean centers it.
* @param standardize Set to true if one wants the standardized data and false if one
* wants the original data along with the [[MeanScaler]] instances.
* */
def calculateMeanScales(standardize: Boolean = true): DataPipe[
Stream[(DenseVector[Double], DenseVector[Double])],
(Stream[(DenseVector[Double], DenseVector[Double])], (MeanScaler, MeanScaler))] =
DataPipe((data: Stream[(DenseVector[Double], DenseVector[Double])]) => {
val (num_features, num_targets) = (data.head._1.length, data.head._2.length)
val (mean, _) = utils.getStats(data.map(tup =>
DenseVector(tup._1.toArray ++ tup._2.toArray)).toList)
val featuresScaler = MeanScaler(mean(0 until num_features))
val targetsScaler = MeanScaler(mean(num_features until num_features + num_targets))
val result = if(standardize) (featuresScaler * targetsScaler)(data) else data
(result, (featuresScaler, targetsScaler))
})
/**
* Multivariate version of [[calculateGaussianScales]]
* @param standardize Set to true if one wants the standardized data and false if one
* wants the original data along with the [[MVGaussianScaler]] instances.
* */
def calculateMVGaussianScales(standardize: Boolean = true): DataPipe[
Stream[(DenseVector[Double], DenseVector[Double])],
(Stream[(DenseVector[Double], DenseVector[Double])], (MVGaussianScaler, MVGaussianScaler))] =
DataPipe((data: Stream[(DenseVector[Double], DenseVector[Double])]) => {
val (num_features, num_targets) = (data.head._1.length, data.head._2.length)
val (m, sigma) = utils.getStatsMult(data.map(tup =>
DenseVector(tup._1.toArray ++ tup._2.toArray)).toList)
val featuresScaler = MVGaussianScaler(
m(0 until num_features),
sigma(0 until num_features, 0 until num_features))
val targetsScaler = MVGaussianScaler(
m(num_features until num_features + num_targets),
sigma(num_features until num_features + num_targets, num_features until num_features + num_targets))
val result = if(standardize) (featuresScaler * targetsScaler)(data) else data
(result, (featuresScaler, targetsScaler))
})
/**
* Returns a pipe which performs PCA on data features and gaussian scaling on data targets
* @param standardize Set to true if one wants the standardized data and false if one
* wants the original data along with the [[MVGaussianScaler]] instances.
* */
def calculatePCAScales(standardize: Boolean = true): DataPipe[
Stream[(DenseVector[Double], DenseVector[Double])],
(Stream[(DenseVector[Double], DenseVector[Double])], (PCAScaler, MVGaussianScaler))] =
DataPipe((data: Stream[(DenseVector[Double], DenseVector[Double])]) => {
val (num_features, num_targets) = (data.head._1.length, data.head._2.length)
val (m, sigma) = utils.getStatsMult(data.map(tup =>
DenseVector(tup._1.toArray ++ tup._2.toArray)).toList)
val featuresScaler = PCAScaler(
m(0 until num_features),
sigma(0 until num_features, 0 until num_features))
val targetsScaler = MVGaussianScaler(
m(num_features until num_features + num_targets),
sigma(num_features until num_features + num_targets, num_features until num_features + num_targets))
val result = if(standardize) (featuresScaler * targetsScaler)(data) else data
(result, (featuresScaler, targetsScaler))
})
/**
* Returns a pipe which performs PCA on data features.
* @param standardize Set to true if one wants the standardized data and false if one
* wants the original data along with the [[PCAScaler]] instance.
* */
def calculatePCAScalesFeatures(standardize: Boolean = true): DataPipe[
Stream[DenseVector[Double]],
(Stream[DenseVector[Double]], PCAScaler)] =
DataPipe((data: Stream[DenseVector[Double]]) => {
val (m, sigma) = utils.getStatsMult(data.toList)
val featuresScaler = PCAScaler(m, sigma)
val result = if(standardize) featuresScaler(data) else data
(result, featuresScaler)
})
/**
* Returns a pipe which takes a data set and calculates the minimum and maximum of each dimension.
* @param standardize Set to true if one wants the standardized data and false if one
* wants the original data along with the [[MinMaxScaler]] instances.
* */
def calculateMinMaxScales(standardize: Boolean = true): DataPipe[
Stream[(DenseVector[Double], DenseVector[Double])],
(Stream[(DenseVector[Double], DenseVector[Double])], (MinMaxScaler, MinMaxScaler))] =
DataPipe((data: Stream[(DenseVector[Double], DenseVector[Double])]) => {
val (num_features, num_targets) = (data.head._1.length, data.head._2.length)
val (min, max) = utils.getMinMax(data.map(tup =>
DenseVector(tup._1.toArray ++ tup._2.toArray)).toList)
val featuresScaler = MinMaxScaler(min(0 until num_features), max(0 until num_features))
val targetsScaler = MinMaxScaler(
min(num_features until num_features + num_targets),
max(num_features until num_features + num_targets))
val result = if(standardize) (featuresScaler * targetsScaler)(data) else data
(result, (featuresScaler, targetsScaler))
})
/**
* A helper method which takes a scaled data set and applies its scales to
* a test set.
* */
private[dynaml] def scaleTestPipe[I, R <: ReversibleScaler[I]] = DataPipe(
(couple: ((
Stream[(I, I)], (R, R)),
Stream[(I, I)])
) => (couple._1._1, (couple._1._2._1*couple._1._2._2)(couple._2), couple._1._2)
)
/**
* Scale a data set which is stored as a [[Stream]],
* return the scaled data as well as a [[GaussianScaler]] instance
* which can be used to reverse the scaled values to the original
* data.
*
* */
val gaussianScaling: DataPipe[
Stream[(DenseVector[Double], DenseVector[Double])],
(Stream[(DenseVector[Double], DenseVector[Double])], (GaussianScaler, GaussianScaler))] =
calculateGaussianScales()
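// A usage sketch (`data` is a hypothetical stream of feature/target pairs;
// assuming ReversibleScaler exposes its inverse as `i`, like Encoder above):
//
//   val (scaled, (fScaler, tScaler)) = gaussianScaling(data)
//   val restored = scaled.map(p => (fScaler.i(p._1), tScaler.i(p._2)))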
/**
* Scale a data set which is stored as a [[Stream]],
* return the scaled data as well as a [[MVGaussianScaler]] instance
* which can be used to reverse the scaled values to the original
* data.
* */
val multivariateGaussianScaling: DataPipe[
Stream[(DenseVector[Double], DenseVector[Double])],
(Stream[(DenseVector[Double], DenseVector[Double])], (MVGaussianScaler, MVGaussianScaler))] =
calculateMVGaussianScales()
/**
* Perform gaussian normalization on a data stream which
* is a [[Tuple2]] of the form.
*
* (Stream(training data), Stream(test data))
* */
val gaussianScalingTrainTest: DataPipe[
(Stream[(DenseVector[Double], DenseVector[Double])], Stream[(DenseVector[Double], DenseVector[Double])]),
(Stream[(DenseVector[Double], DenseVector[Double])], Stream[(DenseVector[Double], DenseVector[Double])],
(GaussianScaler, GaussianScaler))] =
(calculateGaussianScales()*identityPipe[Stream[(DenseVector[Double], DenseVector[Double])]]) >
scaleTestPipe[DenseVector[Double], GaussianScaler]
/**
    * Perform multivariate gaussian normalization on a data stream which
    * is a [[Tuple2]] of the form:
    *
    * (Stream(training data), Stream(test data))
* */
val multivariateGaussianScalingTrainTest =
(calculateMVGaussianScales()*identityPipe[Stream[(DenseVector[Double], DenseVector[Double])]]) >
scaleTestPipe[DenseVector[Double], MVGaussianScaler]
/**
* Transform a data set by performing PCA on its patterns.
* */
val pcaFeatureScaling = calculatePCAScalesFeatures()
/**
* Transform a data set consisting of features and targets.
* Perform PCA scaling of features and gaussian scaling of targets.
* */
val pcaScaling = calculatePCAScales()
/**
* Scale a data set which is stored as a [[Stream]],
* return the scaled data as well as a [[MinMaxScaler]] instance
* which can be used to reverse the scaled values to the original
* data.
*
* */
val minMaxScaling: DataPipe[
Stream[(DenseVector[Double], DenseVector[Double])],
(Stream[(DenseVector[Double], DenseVector[Double])], (MinMaxScaler, MinMaxScaler))] =
calculateMinMaxScales()
/**
* Perform [0,1] scaling on a data stream which
    * is a [[Tuple2]] of the form:
*
* (Stream(training data), Stream(test data))
* */
val minMaxScalingTrainTest =
(calculateMinMaxScales()*identityPipe[Stream[(DenseVector[Double], DenseVector[Double])]]) >
scaleTestPipe[DenseVector[Double], MinMaxScaler]
/**
* Extract a subset of the data into a [[Tuple2]] which
* can be used as a training, test combo for model learning and evaluation.
*
* Usage: DynaMLPipe.splitTrainingTest(num_training, num_test)
* */
def splitTrainingTest[P](num_training: Int, num_test: Int) =
DataPipe((data: (Stream[P], Stream[P])) => {
(data._1.take(num_training), data._2.takeRight(num_test))
})
/**
* Extract a subset of columns from a [[Stream]] of comma separated [[String]]
* also replace any missing value strings with the empty string.
*
* Usage: DynaMLPipe.extractTrainingFeatures(List(1,2,3), Map(1 -> "N.A.", 2 -> "NA", 3 -> "na"))
* */
val extractTrainingFeatures =
(columns: List[Int], m: Map[Int, String]) => DataPipe((l: Stream[String]) =>
utils.extractColumns(l, ",", columns, m))
/**
* Returns a pipeline which performs a bagging based sub-sampling
* of a stream of [[T]].
*
* @param proportion The sampling proportion between 0 and 1
* @param nBags The number of bags to generate.
* */
def baggingStream[T](proportion: Double, nBags: Int) = {
require(proportion > 0.0 && proportion <= 1.0 && nBags > 0,
"Sampling proprotion must be between 0 and 1; "+
"Number of bags must be positive")
DataPipe((data: Stream[T]) =>{
val sizeOfBag: Int = (data.length*proportion).toInt
(1 to nBags).map(_ =>
Stream.tabulate[T](sizeOfBag)(_ => data(Random.nextInt(data.length)))
).toStream
})
}
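  // Usage sketch (the sample below is hypothetical): draw 10 bootstrap
  // bags, each containing half the original number of points, sampled
  // with replacement.
  //
  //   val samples: Stream[Double] = ...
  //   val bags: Stream[Stream[Double]] = baggingStream[Double](0.5, 10)(samples)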
/**
* Returns a pipeline which performs a bagging based sub-sampling
* of an Apache Spark [[RDD]] of [[T]].
*
* @param proportion The sampling proportion between 0 and 1
* @param nBags The number of bags to generate.
* */
def baggingRDD[T](proportion: Double, nBags: Int) = {
require(proportion > 0.0 && proportion <= 1.0 && nBags > 0,
"Sampling proprotion must be between 0 and 1; "+
"Number of bags must be positive")
DataPipe((data: RDD[T]) => (1 to nBags).map(_ => data.sample(withReplacement = true, proportion)))
}
/**
* Takes a base pipe and creates a parallel pipe by duplicating it.
*
* @param pipe The base data pipe
* @return a [[io.github.mandar2812.dynaml.pipes.ParallelPipe]] object.
* */
def duplicate[Source, Destination](pipe: DataPipe[Source, Destination]) =
DataPipe(pipe, pipe)
/**
* Constructs a data pipe which performs discrete Haar wavelet transform
* on a (breeze) vector signal.
* */
val haarWaveletFilter = (order: Int) => HaarWaveletFilter(order)
/**
* Constructs a data pipe which performs inverse discrete Haar wavelet transform
* on a (breeze) vector signal.
* */
val invHaarWaveletFilter = (order: Int) => InverseHaarWaveletFilter(order)
val groupedHaarWaveletFilter = (orders: Array[Int]) => GroupedHaarWaveletFilter(orders)
val invGroupedHaarWaveletFilter = (orders: Array[Int]) => InvGroupedHaarWaveletFilter(orders)
  /**
    * Returns an [[Encoder]] which replicates an input
    * instance n times and collapses the replicates back
    * to a single instance.
    * */
  def genericReplicationEncoder[I](n: Int)(implicit tag: ClassTag[I]): Encoder[I, Array[I]] =
Encoder[I, Array[I]](
(v: I) => {
Array.fill[I](n)(v)
},
(vs: Array[I]) => {
vs.head
})
/**
* Creates an [[Encoder]] which can split
* [[DenseVector]] instances into uniform splits and
* put them back together.
* */
val breezeDVSplitEncoder = (n: Int) => Encoder((v: DenseVector[Double]) => {
optimize {
Array.tabulate(v.length/n)(i => v(i*n until math.min((i+1)*n, v.length)))
}
}, (vs: Array[DenseVector[Double]]) => {
optimize {
DenseVector(vs.map(_.toArray).reduceLeft((a,b) => a++b))
}
})
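  // Round-trip sketch for the split encoder, assuming the vector length is a
  // multiple of the block size n (the values here are hypothetical, and `i`
  // denotes the decoding half of the [[Encoder]]):
  //
  //   val enc = breezeDVSplitEncoder(2)
  //   val blocks = enc(DenseVector(1.0, 2.0, 3.0, 4.0))
  //   // blocks = Array(DenseVector(1.0, 2.0), DenseVector(3.0, 4.0))
  //   val whole = enc.i(blocks) // DenseVector(1.0, 2.0, 3.0, 4.0)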
/**
* Creates an [[Encoder]] which replicates a
* [[DenseVector]] instance n times.
* */
val breezeDVReplicationEncoder = (n: Int) => genericReplicationEncoder[DenseVector[Double]](n)
def trainParametricModel[
G, T, Q, R, S, M <: ParameterizedLearner[G, T, Q, R, S]
](regParameter: Double, step: Double = 0.05,
maxIt: Int = 50, mini: Double = 1.0) = DataPipe((model: M) => {
model.setLearningRate(step)
.setMaxIterations(maxIt)
.setBatchFraction(mini)
.setRegParam(regParameter)
.learn()
model
})
def modelTuning[M <: GloballyOptWithGrad](
startingState: Map[String, Double],
globalOpt: String = "GS",
grid: Int = 3, step: Double = 0.02) =
DataPipe((model: M) => {
val gs = globalOpt match {
case "GS" => new GridSearch[M](model)
.setGridSize(grid)
.setStepSize(step)
.setLogScale(false)
case "ML" => new GradBasedGlobalOptimizer[M](model)
case "CSA" => new CoupledSimulatedAnnealing(model)
.setGridSize(grid)
.setStepSize(step)
.setLogScale(false)
.setVariant(AbstractCSA.MwVC)
}
gs.optimize(startingState, Map("tolerance" -> "0.0001",
"step" -> step.toString,
"maxIterations" -> grid.toString))
})
def gpTuning[T, I:ClassTag](
startingState: Map[String, Double],
globalOpt: String = "GS",
grid: Int = 3, step: Double = 0.02,
maxIt: Int = 20, policy: String = "GS",
prior: Map[String, ContinuousRVWithDistr[Double, ContinuousDistr[Double]]] = Map()) =
DataPipe((model: AbstractGPRegressionModel[T, I]) => {
val gs = globalOpt match {
case "GS" => new GridSearch(model)
.setGridSize(grid)
.setStepSize(step)
.setLogScale(false)
.setPrior(prior)
.setNumSamples(prior.size*grid)
case "ML" => new GradBasedGlobalOptimizer(model)
case "CSA" => new CoupledSimulatedAnnealing(model)
.setGridSize(grid)
.setStepSize(step)
.setLogScale(false)
.setMaxIterations(maxIt)
.setVariant(AbstractCSA.MwVC)
.setPrior(prior)
.setNumSamples(prior.size*grid)
case "GPC" => new ProbGPCommMachine(model)
.setPolicy(policy)
.setGridSize(grid)
.setStepSize(step)
.setMaxIterations(maxIt)
.setPrior(prior)
.setNumSamples(prior.size*grid)
}
gs.optimize(
startingState,
Map(
"tolerance" -> "0.0001",
"step" -> step.toString,
"maxIterations" -> grid.toString,
"persist" -> "true"))
})
def sgpTuning[T, I:ClassTag](
startingState: Map[String, Double], globalOpt: String = "GS",
grid: Int = 3, step: Double = 0.02, maxIt: Int = 20,
prior: Map[String, ContinuousRVWithDistr[Double, ContinuousDistr[Double]]] = Map()) =
DataPipe((model: ESGPModel[T, I]) => {
val gs = globalOpt match {
case "GS" => new GridSearch(model)
.setGridSize(grid)
.setStepSize(step)
.setLogScale(false)
.setPrior(prior)
.setNumSamples(prior.size*grid)
case "CSA" => new CoupledSimulatedAnnealing(model)
.setGridSize(grid)
.setStepSize(step)
.setLogScale(false)
.setMaxIterations(maxIt)
.setVariant(AbstractCSA.MwVC)
.setPrior(prior)
.setNumSamples(prior.size*grid)
}
gs.optimize(
startingState,
Map(
"tolerance" -> "0.0001",
"step" -> step.toString,
"maxIterations" -> grid.toString,
"persist" -> "true"))
})
def GPRegressionTest[T <: AbstractGPRegressionModel[
Seq[(DenseVector[Double], Double)],
DenseVector[Double]]](model:T) =
DataPipe(
(trainTest: (Stream[(DenseVector[Double], Double)],
(DenseVector[Double], DenseVector[Double]))) => {
val res = model.test(trainTest._1)
val scoresAndLabelsPipe =
DataPipe(
(res: Seq[(DenseVector[Double], Double, Double, Double, Double)]) =>
res.map(i => (i._3, i._2)).toList) > DataPipe((list: List[(Double, Double)]) =>
list.map{l => (l._1*trainTest._2._2(-1) + trainTest._2._1(-1),
l._2*trainTest._2._2(-1) + trainTest._2._1(-1))})
val scoresAndLabels = scoresAndLabelsPipe.run(res)
val metrics = new RegressionMetrics(scoresAndLabels,
scoresAndLabels.length)
metrics.print()
metrics.generatePlots()
})
}
|
amitkumarj441/DynaML | dynaml-pipes/src/test/scala-2.11/io/github/mandar2812/dynaml/pipes/DataPipesSpec.scala | package io.github.mandar2812.dynaml.pipes
import org.scalatest.{FlatSpec, Matchers}
/**
* Created by mandar on 13/7/16.
*/
class DataPipesSpec extends FlatSpec with Matchers {
"A Data Pipe" should "have type consistency" in {
val pipe = DataPipe((x: Int) => x+1)
val num = pipe(1)
assert(num == 2)
assert(num match {case _: Int => true})
}
"DataPipes" should "be consistent with respect to composition" in {
val pipe1 = DataPipe((x: Int) => x+1)
val pipe2 = DataPipe((y: Int) => y/2.0)
val p = pipe1 > pipe2
val num = p(0)
assert(num == 0.5)
assert(num match {case _: Double => true})
}
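  "DataPipes" should "compose associatively with the > operator" in {
    //A small additional check: chaining three pipes should behave
    //like applying the underlying functions in sequence, regardless
    //of how the chain is grouped.
    val inc = DataPipe((x: Int) => x + 1)
    val double = DataPipe((x: Int) => x * 2)
    val render = DataPipe((x: Int) => x.toString)

    val p = (inc > double) > render
    val q = inc > (double > render)

    assert(p(1) == "4")
    assert(q(1) == p(1))
  }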
}
|
amitkumarj441/DynaML | dynaml-core/src/main/scala-2.11/io/github/mandar2812/dynaml/utils/PCAScaler.scala | package io.github.mandar2812.dynaml.utils
import breeze.linalg.eig.Eig
import breeze.linalg.{DenseMatrix, DenseVector, eig}
import io.github.mandar2812.dynaml.pipes.{ReversibleScaler, Scaler}
/**
* Transforms data by projecting
* on the principal components (eigen-vectors)
* of the sample covariance matrix.
*
* @param center The empirical mean of the data features
* @param covmat The empirical covariance matrix of the data features
* @author mandar2812 date 30/05/2017.
* */
case class PCAScaler(
center: DenseVector[Double],
covmat: DenseMatrix[Double]) extends
ReversibleScaler[DenseVector[Double]] {
lazy val Eig(eigenvalues, _, eigenvectors) = eig(covmat)
override val i = Scaler((data: DenseVector[Double]) => (eigenvectors*data)+center)
override def run(data: DenseVector[Double]) = eigenvectors.t*(data-center)
}
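// A minimal usage sketch (the data below is hypothetical); the sample mean
// and covariance can be estimated e.g. via utils.getStatsMult:
//
//   val (mean, cov) = utils.getStatsMult(data.toList)
//   val pca = PCAScaler(mean, cov)
//   val projected = data.map(pca(_)) // rotate onto the principal components
//   val recovered = projected.map(pca.i(_)) // reverse the projection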
|
amitkumarj441/DynaML | dynaml-core/src/main/scala-2.11/io/github/mandar2812/dynaml/kernels/KernelOps.scala | package io.github.mandar2812.dynaml.kernels
import breeze.generic.UFunc
/**
* @author mandar2812 date: 26/10/2016.
*/
@deprecated
trait KernelOps[+This] extends Any {
def repr: This
/** Alias for :+(b) for all b. */
final def +[TT >: This, B, That](b: B)(implicit op: KernelOpAdd.Impl2[TT, B, That]) = op(repr, b)
final def *[TT >: This, B, That](b: B)(implicit op: KernelOpMult.Impl2[TT, B, That]) = op(repr, b)
final def :*[TT >: This, B, That](b: B)(implicit op: KernelOuterMult.Impl2[TT, B, That]) = op(repr, b)
final def :+[TT >: This, B, That](b: B)(implicit op: KernelOuterAdd.Impl2[TT, B, That]) = op(repr, b)
}
object KernelOps extends UFunc {
class Ops[Index] extends Serializable {
implicit object addLocalScKernels extends KernelOpAdd.Impl2[
LocalScalarKernel[Index],
LocalScalarKernel[Index],
CompositeCovariance[Index]] {
override def apply(
firstKern: LocalScalarKernel[Index],
otherKernel: LocalScalarKernel[Index]): CompositeCovariance[Index] =
new CompositeCovariance[Index] {
val (fID, sID) = (firstKern.toString.split("\\.").last, otherKernel.toString.split("\\.").last)
override val hyper_parameters =
firstKern.hyper_parameters.map(h => fID+"/"+h) ++
otherKernel.hyper_parameters.map(h => sID+"/"+h)
override def evaluateAt(config: Map[String, Double])(x: Index, y: Index) = firstKern.evaluateAt(config)(x,y) + otherKernel.evaluateAt(config)(x,y)
state = firstKern.state.map(h => (fID+"/"+h._1, h._2)) ++ otherKernel.state.map(h => (sID+"/"+h._1, h._2))
blocked_hyper_parameters =
firstKern.blocked_hyper_parameters.map(h => fID+"/"+h) ++
otherKernel.blocked_hyper_parameters.map(h => sID+"/"+h)
override def setHyperParameters(h: Map[String, Double]): this.type = {
firstKern.setHyperParameters(h.filter(_._1.contains(fID))
.map(kv => (kv._1.split("/").tail.mkString("/"), kv._2)))
otherKernel.setHyperParameters(h.filter(_._1.contains(sID))
.map(kv => (kv._1.split("/").tail.mkString("/"), kv._2)))
this
}
override def gradientAt(config: Map[String, Double])(x: Index, y: Index): Map[String, Double] =
firstKern.gradientAt(config)(x, y) ++ otherKernel.gradientAt(config)(x,y)
override def buildKernelMatrix[S <: Seq[Index]](mappedData: S, length: Int) =
SVMKernel.buildSVMKernelMatrix[S, Index](mappedData, length, this.evaluate)
override def buildCrossKernelMatrix[S <: Seq[Index]](dataset1: S, dataset2: S) =
SVMKernel.crossKernelMatrix(dataset1, dataset2, this.evaluate)
}
}
implicit object multLocalScKernels extends KernelOpMult.Impl2[
LocalScalarKernel[Index],
LocalScalarKernel[Index],
CompositeCovariance[Index]] {
override def apply(firstKern: LocalScalarKernel[Index],
otherKernel: LocalScalarKernel[Index]): CompositeCovariance[Index] =
new CompositeCovariance[Index] {
val (fID, sID) = (firstKern.toString.split("\\.").last, otherKernel.toString.split("\\.").last)
override val hyper_parameters =
firstKern.hyper_parameters.map(h => fID+"/"+h) ++
otherKernel.hyper_parameters.map(h => sID+"/"+h)
override def evaluateAt(config: Map[String, Double])(x: Index, y: Index) = firstKern.evaluateAt(config)(x,y) * otherKernel.evaluateAt(config)(x,y)
state = firstKern.state.map(h => (fID+"/"+h._1, h._2)) ++ otherKernel.state.map(h => (sID+"/"+h._1, h._2))
blocked_hyper_parameters =
firstKern.blocked_hyper_parameters.map(h => fID+"/"+h) ++
otherKernel.blocked_hyper_parameters.map(h => sID+"/"+h)
override def setHyperParameters(h: Map[String, Double]): this.type = {
firstKern.setHyperParameters(h.filter(_._1.contains(fID))
.map(kv => (kv._1.split("/").tail.mkString("/"), kv._2)))
otherKernel.setHyperParameters(h.filter(_._1.contains(sID))
.map(kv => (kv._1.split("/").tail.mkString("/"), kv._2)))
this
}
override def gradientAt(config: Map[String, Double])(x: Index, y: Index): Map[String, Double] =
firstKern.gradientAt(config)(x, y).map((couple) => (couple._1, couple._2*otherKernel.evaluateAt(config)(x,y))) ++
otherKernel.gradientAt(config)(x,y).map((couple) => (couple._1, couple._2*firstKern.evaluateAt(config)(x,y)))
override def buildKernelMatrix[S <: Seq[Index]](mappedData: S, length: Int) =
SVMKernel.buildSVMKernelMatrix[S, Index](mappedData, length, this.evaluate)
override def buildCrossKernelMatrix[S <: Seq[Index]](dataset1: S, dataset2: S) =
SVMKernel.crossKernelMatrix(dataset1, dataset2, this.evaluate)
}
}
}
class PairOps[Index, Index1] extends Serializable {
implicit object tensorMultLocalScKernels
extends KernelOuterMult.Impl2[
LocalScalarKernel[Index],
LocalScalarKernel[Index1],
CompositeCovariance[(Index, Index1)]] {
override def apply(firstkernel: LocalScalarKernel[Index],
otherKernel: LocalScalarKernel[Index1]): CompositeCovariance[(Index, Index1)] =
new CompositeCovariance[(Index, Index1)] {
override val hyper_parameters: List[String] = firstkernel.hyper_parameters ++ otherKernel.hyper_parameters
state = firstkernel.state ++ otherKernel.state
blocked_hyper_parameters = otherKernel.blocked_hyper_parameters ++ firstkernel.blocked_hyper_parameters
override def setHyperParameters(h: Map[String, Double]): this.type = {
firstkernel.setHyperParameters(h)
otherKernel.setHyperParameters(h)
super.setHyperParameters(h)
}
override def gradientAt(config: Map[String, Double])(x: (Index, Index1), y: (Index, Index1)): Map[String, Double] =
firstkernel.gradientAt(config)(x._1, y._1).mapValues(v => v*otherKernel.evaluateAt(config)(x._2, y._2)) ++
otherKernel.gradientAt(config)(x._2, y._2).mapValues(v => v*firstkernel.evaluateAt(config)(x._1, y._1))
override def buildKernelMatrix[S <: Seq[(Index, Index1)]](mappedData: S, length: Int) =
SVMKernel.buildSVMKernelMatrix(mappedData, length, this.evaluate)
override def buildCrossKernelMatrix[S <: Seq[(Index, Index1)]](dataset1: S, dataset2: S) =
SVMKernel.crossKernelMatrix(dataset1, dataset2, this.evaluate)
override def evaluateAt(config: Map[String, Double])(x: (Index, Index1), y: (Index, Index1)): Double =
firstkernel.evaluateAt(config)(x._1, y._1)*otherKernel.evaluateAt(config)(x._2, y._2)
}
}
implicit object tensorAddLocalScKernels
    extends KernelOuterAdd.Impl2[
LocalScalarKernel[Index],
LocalScalarKernel[Index1],
CompositeCovariance[(Index, Index1)]] {
override def apply(firstkernel: LocalScalarKernel[Index],
otherKernel: LocalScalarKernel[Index1]): CompositeCovariance[(Index, Index1)] =
new CompositeCovariance[(Index, Index1)] {
override val hyper_parameters: List[String] = firstkernel.hyper_parameters ++ otherKernel.hyper_parameters
state = firstkernel.state ++ otherKernel.state
blocked_hyper_parameters = otherKernel.blocked_hyper_parameters ++ firstkernel.blocked_hyper_parameters
override def setHyperParameters(h: Map[String, Double]): this.type = {
firstkernel.setHyperParameters(h)
otherKernel.setHyperParameters(h)
super.setHyperParameters(h)
}
override def gradientAt(config: Map[String, Double])(x: (Index, Index1), y: (Index, Index1)): Map[String, Double] =
firstkernel.gradientAt(config)(x._1, y._1) ++ otherKernel.gradientAt(config)(x._2,y._2)
override def buildKernelMatrix[S <: Seq[(Index, Index1)]](mappedData: S, length: Int) =
SVMKernel.buildSVMKernelMatrix(mappedData, length, this.evaluate)
override def buildCrossKernelMatrix[S <: Seq[(Index, Index1)]](dataset1: S, dataset2: S) =
SVMKernel.crossKernelMatrix(dataset1, dataset2, this.evaluate)
override def evaluateAt(config: Map[String, Double])(x: (Index, Index1), y: (Index, Index1)): Double =
firstkernel.evaluateAt(config)(x._1, y._1)+otherKernel.evaluateAt(config)(x._2, y._2)
}
}
implicit object tensorAddPartLocalScKernels
    extends KernelOuterAdd.Impl2[
LocalSVMKernel[Index],
LocalSVMKernel[Index],
CompositeCovariance[(Index, Index)]] {
override def apply(firstkernel: LocalSVMKernel[Index],
otherKernel: LocalSVMKernel[Index]): CompositeCovariance[(Index, Index)] =
new CompositeCovariance[(Index, Index)] {
override val hyper_parameters: List[String] = firstkernel.hyper_parameters ++ otherKernel.hyper_parameters
state = firstkernel.state ++ otherKernel.state
blocked_hyper_parameters = otherKernel.blocked_hyper_parameters ++ firstkernel.blocked_hyper_parameters
override def setHyperParameters(h: Map[String, Double]): this.type = {
firstkernel.setHyperParameters(h)
otherKernel.setHyperParameters(h)
super.setHyperParameters(h)
}
override def gradientAt(config: Map[String, Double])(x: (Index, Index), y: (Index, Index)): Map[String, Double] =
firstkernel.gradientAt(config)(x._1, y._1) ++ otherKernel.gradientAt(config)(x._2,y._2)
override def buildKernelMatrix[S <: Seq[(Index, Index)]](mappedData: S, length: Int) =
SVMKernel.buildSVMKernelMatrix(mappedData, length, this.evaluate)
override def buildCrossKernelMatrix[S <: Seq[(Index, Index)]](dataset1: S, dataset2: S) =
SVMKernel.crossKernelMatrix(dataset1, dataset2, this.evaluate)
override def evaluateAt(config: Map[String, Double])(x: (Index, Index), y: (Index, Index)): Double =
firstkernel.evaluateAt(config)(x._1, y._1)+otherKernel.evaluateAt(config)(x._2, y._2)
}
}
}
} |
amitkumarj441/DynaML | dynaml-examples/src/main/scala-2.11/io/github/mandar2812/dynaml/examples/TestSUSY.scala | package io.github.mandar2812.dynaml.examples
import java.io.File
import breeze.linalg.{DenseMatrix, DenseVector}
import com.github.tototoshi.csv.CSVWriter
import io.github.mandar2812.dynaml.kernels.{SVMKernel, RBFKernel}
import io.github.mandar2812.dynaml.models.KernelizedModel
import io.github.mandar2812.dynaml.models.svm.{LSSVMSparkModel, KernelSparkModel}
import org.apache.spark.mllib.regression.LabeledPoint
import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}
/**
* @author mandar2812 on 1/7/15.
*/
object TestSUSY {
def main(args: Array[String]) = {
val prot = args(0).toInt
val kern = args(1)
val go = args(2)
val grid = args(3).toInt
val step = args(4).toDouble
val dataRoot = args(5)
val ex = args(6).toInt
val cores = args(7).toInt
val ans = TestSUSY(cores, prot, kern, go,
grid, step, false, 1.0, dataRoot,
ex)
}
def apply(nCores: Int = 4, prototypes: Int = 1, kernel: String,
globalOptMethod: String = "gs", grid: Int = 7,
step: Double = 0.45, logscale: Boolean = false,
frac: Double, dataRoot: String, executors: Int = 1,
local: Boolean = false, paraFactor: Int = 2): DenseVector[Double] = {
val trainFile = dataRoot+"susy.csv"
val testFile = dataRoot+"susytest.csv"
val config = Map(
"file" -> trainFile,
"delim" -> ",",
"head" -> "false",
"task" -> "classification",
"parallelism" -> nCores.toString,
"executors" -> executors.toString,
"factor" -> paraFactor.toString
)
val configtest = Map("file" -> testFile,
"delim" -> ",",
"head" -> "false")
val conf = new SparkConf().setAppName("SUSY")
if(local) {
conf.setMaster("local["+nCores.toString+"]")
}
conf.registerKryoClasses(Array(classOf[LSSVMSparkModel], classOf[KernelSparkModel],
classOf[KernelizedModel[RDD[(Long, LabeledPoint)], RDD[LabeledPoint],
DenseVector[Double], DenseVector[Double], Double, Int, Int]],
classOf[SVMKernel[DenseMatrix[Double]]], classOf[RBFKernel],
classOf[DenseVector[Double]],
classOf[DenseMatrix[Double]]))
val sc = new SparkContext(conf)
val model = LSSVMSparkModel(config, sc)
val nProt = if (kernel == "Linear") {
model.npoints.toInt
} else {
if(prototypes > 0)
prototypes
else
math.sqrt(model.npoints.toDouble).toInt
}
model.setBatchFraction(frac)
val (optModel, optConfig) = KernelizedModel.getOptimizedModel[RDD[(Long, LabeledPoint)],
RDD[LabeledPoint], model.type](model, globalOptMethod,
kernel, nProt, grid, step, logscale)
optModel.setMaxIterations(2).learn()
val met = optModel.evaluate(configtest)
met.print()
println("Optimal Configuration: "+optConfig)
val scale = if(logscale) "log" else "linear"
val perf = met.kpi()
val row = Seq(kernel, prototypes.toString, globalOptMethod,
grid.toString, step.toString, scale,
perf(0), perf(1), perf(2), optConfig.toString)
val writer = CSVWriter.open(new File(dataRoot+"resultsSUSY.csv"), append = true)
writer.writeRow(row)
writer.close()
optModel.unpersist
perf
}
}
|
amitkumarj441/DynaML | dynaml-core/src/main/scala-2.11/io/github/mandar2812/dynaml/modelpipe/GPRegressionPipe.scala | <filename>dynaml-core/src/main/scala-2.11/io/github/mandar2812/dynaml/modelpipe/GPRegressionPipe.scala
/*
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
* */
package io.github.mandar2812.dynaml.modelpipe
import breeze.linalg._
import io.github.mandar2812.dynaml.DynaMLPipe._
import io.github.mandar2812.dynaml.kernels.LocalScalarKernel
import io.github.mandar2812.dynaml.probability.MultGaussianRV
import io.github.mandar2812.dynaml.models.gp.{AbstractGPRegressionModel, GPBasisFuncRegressionModel}
import io.github.mandar2812.dynaml.pipes.{DataPipe, DataPipe2}
import scala.reflect.ClassTag
/**
* <h3>GP Pipes</h3>
*
* A data pipe which can spawn a Gaussian Process regression model
* from a provided training data set.
*
* @tparam IndexSet Type of features of each data pattern
* @tparam Source Input data type
* @param pre A function which converts the input data
* into a scala [[Seq]] of [[IndexSet]]
* and [[Double]] pairs.
* @param cov The covariance function of the resultant GP,
* as an instance of [[LocalScalarKernel]] defined on
* the [[IndexSet]] type.
* @param n The measurement noise of the output signal/data, also
  *          as an instance of [[LocalScalarKernel]]
* @param order Size of the auto-regressive time lag of the output signal
* that is used to create the training data. Ignore if not working
* with GP-NAR or GP-NARX models.
* @param ex Size of the auto-regressive time lag of the exogenous inputs
* that is used to create the training data. Ignore if not working
* with GP-NARX models.
* @param meanFunc The trend function of the resulting GP model, as an instance
* of [[DataPipe]].
* @author mandar2812 on 15/6/16.
* */
class GPRegressionPipe[Source, IndexSet: ClassTag](
pre: (Source) => Seq[(IndexSet, Double)],
cov: LocalScalarKernel[IndexSet],
n: LocalScalarKernel[IndexSet],
order: Int = 0, ex: Int = 0,
meanFunc: DataPipe[IndexSet, Double] = DataPipe((_: IndexSet) => 0.0))
extends ModelPipe[
Source, Seq[(IndexSet, Double)], IndexSet, Double,
AbstractGPRegressionModel[Seq[(IndexSet, Double)], IndexSet]] {
override val preProcess: (Source) => Seq[(IndexSet, Double)] = pre
implicit val transform = identityPipe[Seq[(IndexSet, Double)]]
override def run(data: Source): AbstractGPRegressionModel[Seq[(IndexSet, Double)], IndexSet] =
AbstractGPRegressionModel(cov, n, meanFunc)(preProcess(data), 0)
}
/**
* <h3>GP Basis Function Pipe</h3>
*
* A data pipe which can spawn a Gaussian Process Basis Function
* regression model from a provided training data set.
*
* @tparam IndexSet Type of features of each data pattern
* @tparam Source Input data type
* @param pre A function which converts the input data
* into a scala [[Seq]] of [[IndexSet]]
* and [[Double]] pairs.
* @param cov The covariance function of the resultant GP,
* as an instance of [[LocalScalarKernel]] defined on
* the [[IndexSet]] type.
* @param n The measurement noise of the output signal/data, also
  *          as an instance of [[LocalScalarKernel]]
* @param order Size of the auto-regressive time lag of the output signal
* that is used to create the training data. Ignore if not working
* with GP-NAR or GP-NARX models.
* @param ex Size of the auto-regressive time lag of the exogenous inputs
* that is used to create the training data. Ignore if not working
* with GP-NARX models.
*
* @param basisFunc A basis function representation for the input features,
* represented as a [[DataPipe]].
*
* @param basis_param_prior A Gaussian prior on the basis function trend coefficients.
*
* @author mandar2812 date 2017/08/09
* */
class GPBasisFuncRegressionPipe[Source, IndexSet: ClassTag](
pre: (Source) => Seq[(IndexSet, Double)],
cov: LocalScalarKernel[IndexSet],
n: LocalScalarKernel[IndexSet],
basisFunc: DataPipe[IndexSet, DenseVector[Double]],
basis_param_prior: MultGaussianRV)
extends ModelPipe[
Source, Seq[(IndexSet, Double)], IndexSet, Double,
GPBasisFuncRegressionModel[Seq[(IndexSet, Double)], IndexSet]] {
override val preProcess: (Source) => Seq[(IndexSet, Double)] = pre
implicit val transform = identityPipe[Seq[(IndexSet, Double)]]
override def run(data: Source): GPBasisFuncRegressionModel[Seq[(IndexSet, Double)], IndexSet] =
AbstractGPRegressionModel(cov, n, basisFunc, basis_param_prior)(preProcess(data), 0)
}
object GPRegressionPipe {
/**
* Convenience method for creating [[GPRegressionPipe]] instances
* */
def apply[Source, IndexSet: ClassTag](
pre: (Source) => Seq[(IndexSet, Double)],
cov: LocalScalarKernel[IndexSet], n: LocalScalarKernel[IndexSet],
order: Int = 0, ex: Int = 0,
meanFunc: DataPipe[IndexSet, Double] = DataPipe((_: IndexSet) => 0.0)) =
new GPRegressionPipe[Source, IndexSet](pre, cov, n, order, ex, meanFunc)
}
/**
* <h3>GP Pipes: Alternate</h3>
* A [[DataPipe2]] which takes a data set,
* a trend and outputs a GP Regression model.
*
* @tparam IndexSet Type of features of each data pattern
* @param cov The covariance function of the resultant GP,
* as an instance of [[LocalScalarKernel]] defined on
* the [[IndexSet]] type.
* @param n The measurement noise of the output signal/data, also
  *          as an instance of [[LocalScalarKernel]]
*
* */
class GPRegressionPipe2[IndexSet: ClassTag](
cov: LocalScalarKernel[IndexSet],
n: LocalScalarKernel[IndexSet]) extends DataPipe2[
Seq[(IndexSet, Double)], DataPipe[IndexSet, Double],
AbstractGPRegressionModel[Seq[(IndexSet, Double)], IndexSet]] {
implicit val transform = identityPipe[Seq[(IndexSet, Double)]]
override def run(data: Seq[(IndexSet, Double)], trend: DataPipe[IndexSet, Double]) =
AbstractGPRegressionModel(cov, n, trend)(data, data.length)
}
object GPRegressionPipe2 {
/**
* Convenience method for creating [[GPRegressionPipe2]] instances
* */
def apply[IndexSet: ClassTag](
cov: LocalScalarKernel[IndexSet],
n: LocalScalarKernel[IndexSet]): GPRegressionPipe2[IndexSet] =
new GPRegressionPipe2(cov, n)
}
|
amitkumarj441/DynaML | dynaml-core/src/main/scala-2.11/io/github/mandar2812/dynaml/evaluation/MetricsTF.scala | /*
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
* */
package io.github.mandar2812.dynaml.evaluation
import org.platanios.tensorflow.api.{---, ::, Tensor}
/**
* Top level class for metrics computed on Tensorflow objects.
*
* @param preds Predictions
*
* @param targets The actual output values.
* */
abstract class MetricsTF(val names: Seq[String], val preds: Tensor, val targets: Tensor) {
protected val scoresAndLabels: (Tensor, Tensor) = (preds, targets)
protected var name = "Target"
lazy val results: Tensor = run()
def _target_quantity: String = name
def target_quantity_(n: String): Unit = {
name = n
}
def print(): Unit = {
println("\nModel Performance: "+name)
println("============================")
println()
names.zipWithIndex.foreach(n => {
val value: Tensor = results(n._2, ---)
val metric = n._1
println(metric+": "+value.summarize(maxEntries = value.size.toInt, flattened = true))
println()
})
}
def generatePlots(): Unit = {}
/**
* Has the actual computational logic of producing
* the metrics which are to be calculated.
*
* Implement this method in sub-classes.
* */
protected def run(): Tensor
}
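// A sketch of a possible concrete subclass (illustrative only, not an
// actual DynaML metric class); it reports the element-wise squared error
// using tensor ops:
//
//   class L2MetricsTF(preds: Tensor, targets: Tensor)
//     extends MetricsTF(Seq("sq_error"), preds, targets) {
//     override protected def run(): Tensor = {
//       val err = preds.subtract(targets)
//       err.multiply(err)
//     }
//   }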
|
amitkumarj441/DynaML | dynaml-core/src/main/scala-2.11/io/github/mandar2812/dynaml/modelpipe/ModelPipe.scala | package io.github.mandar2812.dynaml.modelpipe
import io.github.mandar2812.dynaml.models.Model
import io.github.mandar2812.dynaml.DynaMLPipe._
import io.github.mandar2812.dynaml.pipes.{DataPipe, ReversibleScaler}
/**
* Top level trait for Pipes returning ML models.
*/
trait ModelPipe[-Source, T, Q, R, +M <: Model[T, Q, R]]
extends DataPipe[Source, M] {
val preProcess: (Source) => T
override def run(data: Source): M
}
/**
  * A pipeline which encapsulates the [[Model.predict()]] functionality of a DynaML model.
*
* @tparam T The training data type accepted by the encapsulated model
* @tparam P The type of unprocessed input to the pipe
* @tparam Q The type of input features the model accepts
* @tparam R The type of output returned by [[Model.predict()]]
* @tparam S The type of the processed output.
*
* @param m The underlying model
* @param pre Pre-processing [[DataPipe]]
* @param po Post-processing [[DataPipe]]
*
* */
class ModelPredictionPipe[T, -P, Q, R, +S, M <: Model[T, Q, R]](
pre: DataPipe[P, Q], m: M, po: DataPipe[R, S])
extends DataPipe[P, S] {
val preprocess: DataPipe[P, Q] = pre
val baseModel: M = m
val postprocess: DataPipe[R, S] = po
protected val netFlow: DataPipe[P, S] =
preprocess > DataPipe((x: Q) => baseModel.predict(x)) > postprocess
override def run(data: P) = netFlow(data)
}
object ModelPredictionPipe {
/**
* Create a [[ModelPredictionPipe]] instance given
* a pre-processing flow, a DynaML [[Model]] and a post-processing flow
* respectively.
* */
def apply[T, P, Q, R, S, M <: Model[T, Q, R]](
pre: DataPipe[P, Q], m: M, po: DataPipe[R, S]) =
new ModelPredictionPipe[T, P, Q, R, S, M](pre, m, po)
/**
* Create a [[ModelPredictionPipe]] instance
* (having no pre or post processing steps)
* given a DynaML [[Model]]
*
* */
def apply[T, Q, R, M <: Model[T, Q, R]](m: M) =
new ModelPredictionPipe[T, Q, Q, R, R, M](identityPipe[Q], m, identityPipe[R])
/**
* Create a [[ModelPredictionPipe]] instance
* given scaling relationships for features and outputs,
* along with a DynaML [[Model]]
*
* */
def apply[T, Q, R, M <: Model[T, Q, R]](
featuresSc: ReversibleScaler[Q], outputSc: ReversibleScaler[R], m: M) =
new ModelPredictionPipe[T, Q, Q, R, R, M](featuresSc, m, outputSc.i)
}
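// Usage sketch (the scalers and model below are hypothetical):
//
//   val predictor = ModelPredictionPipe(featureScaler, model, targetScaler)
//   //feeds raw features through featureScaler, the model,
//   //and the inverse of targetScaler
//   val prediction = predictor(rawFeatures)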
|
amitkumarj441/DynaML | project/Dependencies.scala | import sbt._
object Dependencies {
val scala = "2.11.8"
val platform: String = {
// Determine platform name using code similar to javacpp
// com.googlecode.javacpp.Loader.java line 60-84
val jvmName = System.getProperty("java.vm.name").toLowerCase
var osName = System.getProperty("os.name").toLowerCase
var osArch = System.getProperty("os.arch").toLowerCase
if (jvmName.startsWith("dalvik") && osName.startsWith("linux")) {
osName = "android"
} else if (jvmName.startsWith("robovm") && osName.startsWith("darwin")) {
osName = "ios"
osArch = "arm"
} else if (osName.startsWith("mac os x")) {
osName = "macosx"
} else {
val spaceIndex = osName.indexOf(' ')
if (spaceIndex > 0) {
osName = osName.substring(0, spaceIndex)
}
}
if (osArch.equals("i386") || osArch.equals("i486") || osArch.equals("i586") || osArch.equals("i686")) {
osArch = "x86"
} else if (osArch.equals("amd64") || osArch.equals("x86-64") || osArch.equals("x64")) {
osArch = "x86_64"
} else if (osArch.startsWith("arm")) {
osArch = "arm"
}
val platformName = osName + "-" + osArch
println("platform: " + platformName)
platformName
}
val tfscala_version = "0.2.4"
  //Set to true if building with Nvidia GPU support.
val gpuFlag: Boolean = false
  //Set to false if using a self-compiled tensorflow library
val packagedTFFlag: Boolean = true
val tensorflow_classifier: String = {
val platform_splits = platform.split("-")
val (os, arch) = (platform_splits.head, platform_splits.last)
val tf_c =
if (os.contains("macosx")) "darwin-cpu-"+arch
else if(os.contains("linux")) {
if(gpuFlag) "linux-gpu-"+arch else "linux-cpu-"+arch
} else ""
println("Tensorflow-Scala Classifier: "+tf_c)
tf_c
}
val baseDependencies = Seq(
"org.scala-lang" % "scala-compiler" % scala % "compile",
"org.scala-lang" % "scala-library" % scala % "compile",
"org.scala-lang" % "scala-reflect" % scala % "compile",
"com.typesafe" % "config" % "1.2.1" % "compile",
"junit" % "junit" % "4.11",
"com.github.tototoshi" % "scala-csv_2.11" % "1.1.2" % "compile",
"org.scala-lang" % "jline" % "2.11.0-M3" % "compile",
"org.scalaforge" % "scalax" % "0.1" % "compile",
"org.scala-lang" % "scala-pickling_2.11" % "0.9.1" % "compile",
"org.scalaz" %% "scalaz-core" % "7.2.0",
"org.scalactic" % "scalactic_2.11" % "2.2.6",
"org.scala-graph" %% "graph-core" % "1.11.3",
"org.scalatest" % "scalatest_2.11" % "3.0.1" % "test",
"com.typesafe.akka" % "akka-actor_2.11" % "2.5.3",
"com.typesafe.akka" % "akka-stream_2.11" % "2.5.3",
"com.typesafe.akka" % "akka-testkit_2.11" % "2.5.3",
"com.typesafe.akka" % "akka-http_2.11" % "10.0.9",
"com.typesafe.akka" % "akka-http-spray-json_2.11" % "10.0.9",
"com.typesafe.akka" % "akka-http-testkit_2.11" % "10.0.9",
"com.github.scopt" % "scopt_2.11" % "3.5.0",
"com.nativelibs4java" % "scalaxy-streams_2.11" % "0.3.4" % "provided",
"org.scalameta" % "scalameta_2.11" % "2.0.1",
"javax.ws.rs" % "javax.ws.rs-api" % "2.0-m10"
)
val apacheSparkDependency = Seq(
"javax.servlet" % "javax.servlet-api" % "3.1.0" % "test",
"org.apache.spark" % "spark-core_2.11" % "2.2.0" % "compile",
"org.apache.spark" % "spark-mllib_2.11" % "2.2.0" % "compile")
.map(_.exclude("org.slf4j", "slf4j-log4j12"))
.map(_.exclude("org.scalanlp", "breeze_2.11"))
.map(_.exclude("javax.ws.rs" , "javax.ws.rs-api"))
val loggingDependency = Seq("log4j" % "log4j" % "1.2.17" % "compile")
val linearAlgebraDependencies = Seq(
"org.typelevel" % "spire_2.11" % "0.14.1",
"org.scalanlp" % "breeze_2.11" % "0.13.2" % "compile",
"org.scalanlp" % "breeze-natives_2.11" % "0.13.2" % "compile",
"org.la4j" % "la4j" % "0.6.0" % "compile",
"com.github.vagmcs" % "optimus_2.11" % "2.0.0")
.map(_.exclude("org.spire-math", "spire_2.11"))
val chartsDependencies = Seq(
"com.github.wookietreiber" % "scala-chart_2.11" % "0.4.2" % "compile",
"org.jzy3d" % "jzy3d-api" % "1.0.2" % "compile",
"com.quantifind" % "wisp_2.11" % "0.0.4" % "compile",
"co.theasi" % "plotly_2.11" % "0.1",
("org.vegas-viz" % "vegas_2.11" % "0.3.11").exclude("io.circe", "circe-parser")
)
val replDependency = Seq(
"com.lihaoyi" % "ammonite-repl_2.11.8" % "1.1.0",
"com.lihaoyi" % "ammonite-sshd_2.11.8" % "1.1.0"
)
val commons_io = Seq("commons-io" % "commons-io" % "2.6")
val openMLDependency = Seq("org.openml" % "apiconnector" % "1.0.11")
val tinkerpopDependency = Seq(
"com.tinkerpop.gremlin" % "gremlin-java" % "2.6.0" % "compile",
"com.tinkerpop" % "frames" % "2.5.0" % "compile"
)
val rejinDependency = Seq(
"org.renjin" % "renjin-script-engine" % "0.9.2643"
)
val rPackages = Seq(
"org.renjin.cran" % "plyr" % "1.8.4-b82",
"org.renjin.cran" % "abc" % "2.1-b294",
"org.renjin.cran" % "ggplot2" % "2.2.1-b112"
)
val notebookInterfaceDependency = Seq(
"org.apache.zeppelin" % "zeppelin-interpreter" % "0.7.3" % "provided",
"org.apache.zeppelin" % "zeppelin-server" % "0.7.3",
"org.apache.zeppelin" % "zeppelin-web" % "0.7.3")
.map(_.exclude("org.slf4j", "slf4j-log4j12"))
.map(_.exclude("javax.ws.rs" , "javax.ws.rs-api"))
val imageDependencies = Seq(
"com.sksamuel.scrimage" % "scrimage-core_2.11" % "2.1.8",
"com.sksamuel.scrimage" % "scrimage-io-extra_2.11" % "2.1.8",
"com.sksamuel.scrimage" % "scrimage-filters_2.11" % "2.1.8"
)
val dataFormatDependencies = Seq(
"info.folone" % "poi-scala_2.11" % "0.18",
"com.diffplug.matsim" % "matfilerw" % "3.0.0"
)
val tensorflowDependency = Seq(
"org.platanios" % "tensorflow_2.11" % tfscala_version classifier tensorflow_classifier,
"org.platanios" % "tensorflow-data_2.11" % tfscala_version
).map(_.exclude("org.typelevel", "spire_2.11"))
}
|
amitkumarj441/DynaML | dynaml-core/src/main/scala-2.11/io/github/mandar2812/dynaml/models/svm/LSSVMSparkModel.scala | package io.github.mandar2812.dynaml.models.svm
import breeze.linalg.{DenseMatrix, DenseVector}
import breeze.numerics.sqrt
import com.github.tototoshi.csv.CSVReader
import com.tinkerpop.frames.FramedGraphFactory
import io.github.mandar2812.dynaml.utils.MinMaxAccumulator
import org.apache.spark.{Accumulator, SparkContext}
import org.apache.spark.mllib.linalg.Vectors
import org.apache.spark.mllib.regression.LabeledPoint
import org.apache.spark.rdd.RDD
import io.github.mandar2812.dynaml.evaluation.{Metrics, MetricsSpark}
import io.github.mandar2812.dynaml.optimization._
import org.apache.log4j.Logger
import org.apache.spark.mllib.linalg.Vector
import scala.util.Random
/**
* Implementation of the Least Squares SVM
* using Apache Spark RDDs
*/
class LSSVMSparkModel(data: RDD[LabeledPoint], task: String)
extends KernelSparkModel(data, task) with Serializable {
override protected val optimizer = LSSVMSparkModel.getOptimizer(task)
override protected var params: DenseVector[Double] = DenseVector.ones(featuredims+1)
private var featureMatricesCache: (DenseMatrix[Double], DenseVector[Double]) = (null, null)
def dimensions = featuredims
/**
* Predict the value of the
* target variable given a
* point.
*
**/
override def predict(point: DenseVector[Double]): Double =
LSSVMSparkModel.predictBDV(params)(task)(point)
/**
* Learn the parameters
* of the model which
* are in a node of the
* graph.
*
**/
override def learn(): Unit = {
val featureMapb = g.context.broadcast(featureMap)
val meanb = g.context.broadcast(DenseVector(colStats.mean.toArray))
val varianceb = g.context.broadcast(DenseVector(colStats.variance.toArray))
val trainingData = this.g.map(point => {
val vec = DenseVector(point._2.features.toArray)
val ans = vec - meanb.value
ans :/= sqrt(varianceb.value)
new LabeledPoint(
point._2.label,
Vectors.dense(DenseVector.vertcat(
featureMapb.value(ans),
DenseVector(1.0))
.toArray)
)
})
params = this.optimizer.optimize(nPoints, trainingData, params)
}
override def clearParameters: Unit = {
params = DenseVector.ones[Double](featuredims+1)
}
override def setRegParam(l: Double): this.type = {
this.optimizer.setRegParam(l)
this
}
override def getRegParam = this.optimizer.getRegParam
override def evaluate(config: Map[String, String]) = {
val sc = g.context
val (file, delim, head, _) = LSSVMModel.readConfig(config)
val csv = sc.textFile(file).map(line => line split delim)
.map(_.map(_.toDouble)).map(vector => LabeledPoint(vector(vector.length-1),
Vectors.dense(vector.slice(0, vector.length-1))))
val test_data = head match {
case true =>
csv.mapPartitionsWithIndex { (idx, iter) => if (idx == 0) iter.drop(1) else iter }
case false =>
csv
}
val paramsb = sc.broadcast(params)
val minmaxacc = sc.accumulator(DenseVector(Double.MaxValue, Double.MinValue),
"Min Max Score acc")(MinMaxAccumulator)
val featureMapbroadcast = sc.broadcast(featureMap)
val meanb = g.context.broadcast(DenseVector(colStats.mean.toArray))
val varianceb = g.context.broadcast(DenseVector(colStats.variance.toArray))
val results = test_data.map(point => {
val vec = DenseVector(point.features.toArray)
val ans = vec - meanb.value
ans :/= sqrt(varianceb.value)
val sco: Double = paramsb.value dot
DenseVector.vertcat(featureMapbroadcast.value(ans), DenseVector(1.0))
minmaxacc += DenseVector(sco, sco)
(sco, point.label)
})
val minmax = minmaxacc.value
MetricsSpark(task)(results, results.count(), (minmax(0), minmax(1)))
}
def GetStatistics(): Unit = {
println("Feature Statistics: \n")
println("Mean: "+this.colStats.mean)
println("Variance: \n"+this.colStats.variance)
}
def unpersist: Unit = {
this.processed_g.unpersist()
this.g.context.stop()
}
override def evaluateFold(params: DenseVector[Double])
(test_data_set: RDD[LabeledPoint])
(task: String): Metrics[Double] = {
val sc = test_data_set.context
var index: Accumulator[Long] = sc.accumulator(1)
val paramsb = sc.broadcast(params)
val minmaxacc = sc.accumulator(DenseVector(Double.MaxValue, Double.MinValue),
"Min Max Score acc")(MinMaxAccumulator)
val scoresAndLabels = test_data_set.map((e) => {
index += 1
val sco: Double = paramsb.value dot DenseVector(e.features.toArray)
minmaxacc += DenseVector(sco, sco)
(sco, e.label)
})
val minmax = minmaxacc.value
MetricsSpark(task)(scoresAndLabels, index.value, (minmax(0), minmax(1)))
}
override def crossvalidate(folds: Int, reg: Double,
optionalStateFlag: Boolean = false): (Double, Double, Double) = {
//Create the folds as lists of integers
//which index the data points
/*this.optimizer.setRegParam(reg).setNumIterations(2)
.setStepSize(0.001).setMiniBatchFraction(1.0)*/
val shuffle = Random.shuffle((1L to this.npoints).toList)
if(!optionalStateFlag || featureMatricesCache == (null, null)) {
featureMatricesCache = LSSVMSparkModel.getFeatureMatrix(npoints, processed_g.map(_._2),
this.initParams(), 1.0, reg)
val smoother:DenseMatrix[Double] = DenseMatrix.eye[Double](effectivedims)/reg
smoother(-1,-1) = 0.0
featureMatricesCache._1 :+= smoother
} else {
val smoother_old:DenseMatrix[Double] = DenseMatrix.eye[Double](effectivedims)/current_state("RegParam")
smoother_old(-1,-1) = 0.0
val smoother_new:DenseMatrix[Double] = DenseMatrix.eye[Double](effectivedims)/reg
smoother_new(-1,-1) = 0.0
featureMatricesCache._1 :-= smoother_old
featureMatricesCache._1 :+= smoother_new
}
val avg_metrics: DenseVector[Double] = (1 to folds).map{a =>
//For the ath fold
//partition the data
      //ceil((a-1)*npoints/folds) to ceil(a*npoints/folds)
//as test and the rest as training
val test = shuffle.slice((a-1)*this.nPoints.toInt/folds, a*this.nPoints.toInt/folds)
val test_data = processed_g.filter((keyValue) =>
test.contains(keyValue._1)).map(_._2).cache()
val (a_folda, b_folda) = LSSVMSparkModel.getFeatureMatrix(npoints,
test_data,
this.initParams(),
1.0, reg)
val featureMatrix_a = featureMatricesCache._1 - a_folda
val bias = featureMatricesCache._2 - b_folda
val tempparams = ConjugateGradientSpark.runCG(featureMatrix_a,
bias, this.initParams(), 0.001, 35)
val metrics = this.evaluateFold(tempparams)(test_data)(this.task)
val res: DenseVector[Double] = metrics.kpi() / folds.toDouble
res
}.reduce(_+_)
this.processed_g.unpersist(blocking = true)
(avg_metrics(0),
avg_metrics(1),
avg_metrics(2))
}
}
object LSSVMSparkModel {
def scaleAttributes(mean: Vector,
variance: Vector)(x: Vector)
: Vector = {
val ans = DenseVector(x.toArray) - DenseVector(mean.toArray)
ans :/= sqrt(DenseVector(variance.toArray))
Vectors.dense(ans.toArray)
}
def apply(implicit config: Map[String, String], sc: SparkContext): LSSVMSparkModel = {
val (file, delim, head, task) = LSSVMModel.readConfig(config)
val minPartitions = if(config.contains("parallelism") &&
config.contains("executors") && config.contains("factor"))
config("factor").toInt * config("parallelism").toInt * config("executors").toInt
else 2
val csv = sc.textFile(file, minPartitions).map(line => line split delim)
.map(_.map(_.toDouble)).map(vector => {
val label = vector(vector.length-1)
vector(vector.length-1) = 1.0
LabeledPoint(label, Vectors.dense(vector.slice(0, vector.length - 1)))
})
val data = head match {
case true =>
csv.mapPartitionsWithIndex { (idx, iter) => if (idx == 0) iter.drop(1) else iter }
case false =>
csv
}
new LSSVMSparkModel(data, task)
}
/**
* Returns an indexed [[RDD]] from a non indexed [[RDD]] of [[T]]
*
* @param data : An [[RDD]] of [[T]]
*
* @return An (Int, T) Key-Value RDD indexed
* from 0 to data.count() - 1
* */
def indexedRDD[T](data: RDD[T]): RDD[(Long, T)] =
data.zipWithIndex().map((p) => (p._2, p._1))
def predict(params: DenseVector[Double])
(_task: String)
(point: LabeledPoint): (Double, Double) = {
val margin: Double = predictSparkVector(params)(point.features)(_task)
val loss = point.label - margin
(margin, math.pow(loss, 2))
}
def scoreSparkVector(params: DenseVector[Double])
(point: Vector): Double = {
params dot DenseVector.vertcat(
DenseVector(point.toArray),
DenseVector(1.0))
}
def predictBDV(params: DenseVector[Double])
(_task: String)
(point: DenseVector[Double]): Double = {
val margin: Double = params dot DenseVector.vertcat(
point,
DenseVector(1.0))
_task match {
case "classification" => math.tanh(margin)
case "regression" => margin
}
}
def predictSparkVector(params: DenseVector[Double])
(point: Vector)
(implicit _task: String): Double =
predictBDV(params)(_task)(DenseVector(point.toArray))
/**
* Factory method to create the appropriate
* optimization object required for the LSSVM
* model
* */
def getOptimizer(task: String): ConjugateGradientSpark = new ConjugateGradientSpark
def getFeatureMatrix(nPoints: Long,
ParamOutEdges: RDD[LabeledPoint],
initialP: DenseVector[Double],
frac: Double, regParam: Double) = {
val dims = initialP.length
//Cast as problem of form A.w = b
//A = Phi^T . Phi + I_dims*regParam
//b = Phi^T . Y
val (a,b): (DenseMatrix[Double], DenseVector[Double]) =
ParamOutEdges.filter((_) => Random.nextDouble() <= frac)
.mapPartitions((edges) => {
Seq(edges.map((edge) => {
val phi = DenseVector(edge.features.toArray)
val label = edge.label
val phiY: DenseVector[Double] = phi * label
(phi*phi.t, phiY)
}).reduce((couple1, couple2) => {
(couple1._1+couple2._1, couple1._2+couple2._2)
})).toIterator
}).reduce((couple1, couple2) => {
(couple1._1+couple2._1, couple1._2+couple2._2)
})
(a,b)
}
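  // The pair (a, b) computed above defines the linear system a.w = b,
  // which is solved elsewhere (see LSSVMSparkModel.crossvalidate), e.g.
  //
  //   val w = ConjugateGradientSpark.runCG(a, b, initialP, 0.001, 35)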
}
object LSSVMModel {
val manager: FramedGraphFactory = new FramedGraphFactory
val logger = Logger.getLogger(this.getClass)
/**
* Factory function to rescale attributes
* given a vector of means and the Cholesky
* factorization of the inverse variance matrix
*
* */
def scaleAttributes(mean: DenseVector[Double],
sigmaInverse: DenseMatrix[Double])(x: DenseVector[Double])
: DenseVector[Double] = sigmaInverse * (x - mean)
/**
* Factory method to create the appropriate
* optimization object required for the Gaussian
* model
* */
def getOptimizer(task: String): ConjugateGradient = new ConjugateGradient
def readCSV(reader: CSVReader, head: Boolean):
(Iterable[(DenseVector[Double], Double)], Int) = {
val stream = reader.toStream().toIterable
val dim = stream.head.length
def lines = if(head) {
stream.drop(1)
} else {
stream
}
(lines.map{parseLine}, dim)
}
def parseLine = {line : List[String] =>
//Parse line and extract features
val yv = line.apply(line.length - 1).toDouble
val xv: DenseVector[Double] =
DenseVector(line.slice(0, line.length - 1).map{x => x.toDouble}.toArray)
(xv, yv)
}
def readConfig(config: Map[String, String]): (String, Char, Boolean, String) = {
assert(config.isDefinedAt("file"), "File name must be Defined!")
val file: String = config("file")
val delim: Char = if(config.isDefinedAt("delim")) {
config("delim").toCharArray()(0)
} else {
','
}
val head: Boolean = if(config.isDefinedAt("head")) {
config("head") match {
case "true" => true
case "True" => true
case "false" => false
case "False" => false
}
} else {
true
}
val task: String = if(config.isDefinedAt("task")) config("task") else ""
(file, delim, head, task)
}
}
|
amitkumarj441/DynaML | dynaml-core/src/main/scala-2.11/io/github/mandar2812/dynaml/tensorflow/utils/GaussianScalerTF.scala | /*
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
* */
package io.github.mandar2812.dynaml.tensorflow.utils
import org.platanios.tensorflow.api._
import _root_.io.github.mandar2812.dynaml.pipes._
/**
* Scales attributes of a vector pattern using the sample mean and variance of
* each dimension. This assumes that there is no covariance between the data
* dimensions.
*
* @param mean Sample mean of the data
* @param sigma Sample variance of each data dimension
* @author mandar2812 date: 07/03/2018.
*
* */
case class GaussianScalerTF(mean: Tensor, sigma: Tensor) extends TFScaler {
override val i: Scaler[Tensor] = Scaler((xc: Tensor) => xc.multiply(sigma).add(mean))
override def run(data: Tensor): Tensor = data.subtract(mean).divide(sigma)
def apply(indexers: Indexer*): GaussianScalerTF = this.copy(mean(indexers:_*), sigma(indexers:_*))
}
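// Usage sketch (mean, sigma and data are hypothetical tensors of
// compatible shapes):
//
//   val scaler = GaussianScalerTF(mean, sigma)
//   val standardized = scaler(data) // (data - mean) / sigma
//   val recovered = scaler.i(standardized) // undo the scaling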
case class GaussianScalerTO(mean: Output, sigma: Output) extends TOScaler {
override val i: Scaler[Output] = Scaler((xc: Output) => xc.multiply(sigma).add(mean))
override def run(data: Output): Output = data.subtract(mean).divide(sigma)
def apply(indexers: Indexer*): GaussianScalerTO = this.copy(mean(indexers:_*), sigma(indexers:_*))
}
|
amitkumarj441/DynaML | dynaml-examples/src/main/scala-2.11/io/github/mandar2812/dynaml/examples/TestNNWineQuality.scala | /*
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
* */
package io.github.mandar2812.dynaml.examples
import breeze.linalg.{DenseVector => BDV}
import io.github.mandar2812.dynaml.DynaMLPipe
import io.github.mandar2812.dynaml.evaluation.BinaryClassificationMetrics
import io.github.mandar2812.dynaml.graph.FFNeuralGraph
import io.github.mandar2812.dynaml.kernels.LocalSVMKernel
import io.github.mandar2812.dynaml.modelpipe.GLMPipe
import io.github.mandar2812.dynaml.models.lm.{GeneralizedLinearModel, LogisticGLM, ProbitGLM}
import io.github.mandar2812.dynaml.models.neuralnets.FeedForwardNetwork
import io.github.mandar2812.dynaml.models.svm.DLSSVM
import io.github.mandar2812.dynaml.pipes._
/**
* Created by mandar on 11/1/16.
*/
object TestNNWineQuality {
def apply (hidden: Int = 2, nCounts:List[Int] = List(), acts:List[String],
training: Int = 100, test: Int = 1000,
columns: List[Int] = List(11,0,1,2,3,4,5,6,7,8,9,10),
stepSize: Double = 0.01, maxIt: Int = 30, mini: Double = 1.0,
alpha: Double = 0.5, regularization: Double = 0.5,
wineType: String = "red"): Unit = {
//Load wine quality data into a stream
//Extract the time and Dst values
//separate data into training and test
//pipe training data to model and then generate test predictions
//create RegressionMetrics instance and produce plots
val modelTrainTest =
(trainTest: ((Stream[(BDV[Double], Double)],
Stream[(BDV[Double], Double)]),
(BDV[Double], BDV[Double]))) => {
val gr = FFNeuralGraph(trainTest._1._1.head._1.length, 1, hidden,
acts, nCounts)
val transform = DataPipe(
(d: Stream[(BDV[Double], Double)]) =>
d.map(el => (el._1, BDV(el._2)))
)
val model = new FeedForwardNetwork[
Stream[(BDV[Double], Double)]
](trainTest._1._1, gr)(transform)
model.setLearningRate(stepSize)
.setMaxIterations(maxIt)
.setBatchFraction(mini)
.setMomentum(alpha)
.setRegParam(regularization)
.learn()
val res = model.test(trainTest._1._2)
val scoresAndLabelsPipe =
DataPipe(
(res: Seq[(BDV[Double], BDV[Double])]) =>
res.map(i => (i._1(0), i._2(0))).toList)
val scoresAndLabels = scoresAndLabelsPipe.run(res)
        val flag = acts.last == "logsig"
val metrics = new BinaryClassificationMetrics(
scoresAndLabels,
scoresAndLabels.length,
logisticFlag = flag)
metrics.setName(wineType+" wine quality")
metrics.print()
metrics.generatePlots()
}
    val processLabelsinPatterns = acts.last match {
      case "tansig" | "linear" => StreamDataPipe((pattern: (BDV[Double], Double)) =>
        if(pattern._2 <= 6.0) (pattern._1, -1.0) else (pattern._1, 1.0))
      case "logsig" => StreamDataPipe((pattern: (BDV[Double], Double)) =>
        if(pattern._2 <= 6.0) (pattern._1, 0.0) else (pattern._1, 1.0))
    }
val preProcessPipe = DynaMLPipe.fileToStream >
DynaMLPipe.dropHead >
DynaMLPipe.replace(";", ",") >
DynaMLPipe.extractTrainingFeatures(columns, Map()) >
DynaMLPipe.splitFeaturesAndTargets >
processLabelsinPatterns
val trainTestPipe = DataPipe(preProcessPipe, preProcessPipe) >
DynaMLPipe.splitTrainingTest(training, test) >
DynaMLPipe.featuresGaussianStandardization >
DataPipe(modelTrainTest)
val dataFile = dataDir+"winequality-" + wineType + ".csv"
trainTestPipe(dataFile, dataFile)
}
}
object TestLogisticWineQuality {
def apply(training: Int = 100, test: Int = 1000,
columns: List[Int] = List(11, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10),
stepSize: Double = 0.01, maxIt: Int = 30, mini: Double = 1.0,
regularization: Double = 0.5, wineType: String = "red",
modelType: String = "logistic"): Unit = {
//Load wine quality data into a stream
//Extract the time and Dst values
//separate data into training and test
//pipe training data to model and then generate test predictions
//create RegressionMetrics instance and produce plots
val modelpipe = new GLMPipe[
Stream[(BDV[Double], Double)],
((Stream[(BDV[Double], Double)], Stream[(BDV[Double], Double)]),
(BDV[Double], BDV[Double]))
]((tt: ((Stream[(BDV[Double], Double)], Stream[(BDV[Double], Double)]),
(BDV[Double], BDV[Double]))) => tt._1._1,
task = "classification", modelType = modelType) >
DynaMLPipe.trainParametricModel[
Stream[(BDV[Double], Double)],
BDV[Double], BDV[Double], Double,
Stream[(BDV[Double], Double)],
GeneralizedLinearModel[Stream[(BDV[Double], Double)]]
](regularization, stepSize, maxIt, mini)
val testPipe = DataPipe(
(modelAndData: (GeneralizedLinearModel[Stream[(BDV[Double], Double)]],
Stream[(BDV[Double], Double)])) => {
val pipe1 = StreamDataPipe((couple: (BDV[Double], Double)) => {
(modelAndData._1.predict(couple._1), couple._2)
})
val scoresAndLabelsPipe = pipe1
val scoresAndLabels = scoresAndLabelsPipe.run(modelAndData._2).toList
val metrics = new BinaryClassificationMetrics(
scoresAndLabels,
scoresAndLabels.length,
logisticFlag = true)
metrics.setName(wineType + " wine quality")
metrics.print()
metrics.generatePlots()
})
val preProcessPipe = DynaMLPipe.fileToStream >
DynaMLPipe.dropHead >
DynaMLPipe.replace(";", ",") >
DynaMLPipe.extractTrainingFeatures(columns, Map()) >
DynaMLPipe.splitFeaturesAndTargets >
StreamDataPipe((pattern: (BDV[Double], Double)) =>
if (pattern._2 <= 6.0) (pattern._1, 0.0) else (pattern._1, 1.0))
val trainTestPipe = DataPipe(preProcessPipe, preProcessPipe) >
DynaMLPipe.splitTrainingTest(training, test) >
DynaMLPipe.featuresGaussianStandardization >
BifurcationPipe(modelpipe,
DataPipe((tt: (
(Stream[(BDV[Double], Double)], Stream[(BDV[Double], Double)]),
(BDV[Double], BDV[Double]))) => tt._1._2)) >
testPipe
val dataFile = dataDir+"winequality-" + wineType + ".csv"
trainTestPipe(dataFile, dataFile)
}
}
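// Usage sketch: a hypothetical invocation (the argument values below are
// illustrative assumptions, not taken from the original sources), training a
// logistic GLM classifier on the white wine data:
//
// TestLogisticWineQuality(training = 800, test = 800, stepSize = 0.2,
// maxIt = 120, wineType = "white", modelType = "logistic")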
object TestLSSVMWineQuality {
def apply (kernel: LocalSVMKernel[BDV[Double]],
training: Int = 100, test: Int = 1000,
columns: List[Int] = List(11,0,1,2,3,4,5,6,7,8,9,10),
regularization: Double = 0.5, wineType: String = "red"): Unit = {
//Load wine quality data into a stream
//Extract the features and binarize the quality labels
//separate data into training and test
//pipe training data to model and then generate test predictions
//create a BinaryClassificationMetrics instance and produce plots
val modelTrainTest =
(trainTest: ((Stream[(BDV[Double], Double)],
Stream[(BDV[Double], Double)]),
(BDV[Double], BDV[Double]))) => {
val model = new DLSSVM(trainTest._1._1, training, kernel, "classification")
model.setRegParam(regularization).learn()
val pipe1 = StreamDataPipe((couple: (BDV[Double], Double)) => {
(model.predict(couple._1), couple._2)
})
val scoresAndLabelsPipe = pipe1
val scoresAndLabels = scoresAndLabelsPipe.run(trainTest._1._2).toList
val metrics = new BinaryClassificationMetrics(
scoresAndLabels,
scoresAndLabels.length,
logisticFlag = true)
metrics.setName(wineType+" wine quality")
metrics.print()
metrics.generatePlots()
}
val preProcessPipe = DynaMLPipe.fileToStream >
DynaMLPipe.dropHead >
DynaMLPipe.replace(";", ",") >
DynaMLPipe.extractTrainingFeatures(columns, Map()) >
DynaMLPipe.splitFeaturesAndTargets >
StreamDataPipe((pattern:(BDV[Double], Double)) =>
if(pattern._2 <= 6.0) (pattern._1, -1.0) else (pattern._1, 1.0))
val trainTestPipe = DataPipe(preProcessPipe, preProcessPipe) >
DynaMLPipe.splitTrainingTest(training, test) >
DynaMLPipe.featuresGaussianStandardization >
DataPipe(modelTrainTest)
val dataFile = dataDir+"winequality-" + wineType + ".csv"
trainTestPipe(dataFile, dataFile)
}
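// Usage sketch: a hypothetical invocation (the kernel choice and bandwidth are
// illustrative assumptions; RBFKernel is DynaML's radial basis function kernel):
//
// TestLSSVMWineQuality(new RBFKernel(1.5), training = 500, test = 1000,
// regularization = 1.2, wineType = "red")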
} |
amitkumarj441/DynaML | dynaml-core/src/main/scala-2.11/io/github/mandar2812/dynaml/models/statespace/ParticleFilter.scala | <filename>dynaml-core/src/main/scala-2.11/io/github/mandar2812/dynaml/models/statespace/ParticleFilter.scala
package io.github.mandar2812.dynaml.models.statespace
import POMP._
import DataTypes._
import State._
import SimData._
import akka.stream.scaladsl.Source
import scala.language.higherKinds
import breeze.linalg.DenseVector
import breeze.numerics.exp
import breeze.stats.distributions.{Multinomial, Rand, Uniform}
import breeze.stats.distributions.Rand._
import io.github.mandar2812.dynaml.models.statespace.ParticleFilter._
/**
* Representation of the state of the particle filter. At each step the previous observation time, t0, and
* the particle cloud, particles, are required to propagate the filter forward.
* The mean state and intervals are recorded at each step, so they can be output immediately without
* having to recompute them from the particle cloud afterwards.
*/
case class PfState(
t: Time,
observation: Option[Observation],
particles: Vector[State],
weights: Vector[LogLikelihood],
ll: LogLikelihood) {
override def toString = observation match {
case Some(y) => s"$t, $y, ${weightedMean(particles, weights).flatten.mkString(", ")}"
case None => s"$t, ${weightedMean(particles, weights).flatten.mkString(", ")}"
}
}
trait ParticleFilter {
val unparamMod: Parameters => StateSpaceModel
val t0: Time
def advanceState(x: Vector[State], dt: TimeIncrement, t: Time)(p: Parameters): Vector[(State, Eta)]
def calculateWeights(x: Eta, y: Observation)(p: Parameters): LogLikelihood
def resample: Resample[State]
/**
* Perform one step of a particle filter
* @param y a single timestamped observation
* @param s the state of the particle filter at the time of the last observation
* @return the state of the particle filter after observation y
*/
def stepFilter(y: Data, s: PfState)(p: Parameters): PfState = {
val dt = y.t - s.t // calculate time between observations
val (w, x1) =
(for {
(x1, eta) <- advanceState(s.particles, dt, y.t)(p)
w = calculateWeights(eta, y.observation)(p)
} yield (w, x1)).unzip
val max = w.max // log sum exp
val w1 = w map { a => exp(a - max) }
val x = resample(x1, w1)
val ll = s.ll + max + math.log(breeze.stats.mean(w1))
PfState(y.t, Some(y.observation), x, w1, ll)
}
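/**
* Worked sketch of the log-sum-exp trick used above (illustrative numbers,
* not from the original sources): given raw log-weights
* val w = Vector(-1001.0, -1000.0)
* val max = w.max // -1000.0
* val w1 = w map { a => exp(a - max) } // Vector(exp(-1.0), 1.0), both finite
* val inc = max + math.log(breeze.stats.mean(w1))
* whereas log(mean(w map exp)) would underflow to -Infinity.
*/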
def llFilter(data: Vector[Data])(particles: Int)(p: Parameters): LogLikelihood = {
val mod = unparamMod(p)
val initState: PfState = PfState(t0, None, Vector.fill(particles)(mod.x0.draw), Vector.fill(particles)(1.0), 0.0)
data.foldLeft(initState)((s, y) => stepFilter(y, s)(p)).ll
}
/**
* Run a filter over a vector of data and return a vector of PfState
*/
def accFilter(data: Vector[Data])(particles: Int)(p: Parameters): Vector[PfState] = {
val mod = unparamMod(p)
val initState: PfState = PfState(t0, None, Vector.fill(particles)(mod.x0.draw), Vector.fill(particles)(1.0), 0.0)
val x = data.
foldLeft(Vector(initState))(
(acc, y) => stepFilter(y, acc.head)(p) +: acc)
x.reverse.tail
}
/**
* Run a filter over a stream of data
*/
def filter(data: Source[Data, Any])(particles: Int)(p: Parameters): Source[PfState, Any] = {
val mod = unparamMod(p)
val initState: PfState = PfState(t0, None, Vector.fill(particles)(mod.x0.draw), Vector.fill(particles)(1.0), 0.0)
data.
scan(initState)((s, y) => stepFilter(y, s)(p))
}
def parFilter(data: Source[Data, Any])(particles: Int)(p: Parameters): Source[PfState, Any] = {
val mod = unparamMod(p)
val initState: PfState = PfState(t0, None, Vector.fill(particles)(mod.x0.draw), Vector.fill(particles)(1.0), 0.0)
data.
scan(initState)((s, y) => stepFilter(y, s)(p))
}
}
object ParticleFilter {
type Resample[A] = (Vector[A], Vector[LogLikelihood]) => Vector[A]
/**
* Return the lag-1 differences of a sequence of times
* @param x an iterable of times
* @return an iterable of differenced time increments
*/
def diff(x: Iterable[Time]): Iterable[TimeIncrement] = {
(x.tail zip x) map { a => a._1 - a._2 }
}
/**
* Sample n integer indices with replacement, according to their associated probabilities
* @param n the number of samples to draw
* @param prob a vector of (unnormalised) probabilities, one per integer index
* @return a vector containing the sampled indices
*/
def sample(n: Int, prob: DenseVector[Double]): Vector[Int] = {
Multinomial(prob).sample(n).toVector
}
/**
* Given a vector of doubles, returns a normalised vector with probabilities summing to one
* @param prob a vector of unnormalised probabilities
* @return a vector of normalised probabilities
*/
def normalise(prob: Vector[Double]): Vector[Double] = {
prob map (_/prob.sum)
}
def cumsum(x: Vector[Double]): Vector[Double] = {
val sums = x.foldLeft(Vector(0.0))((acc: Vector[Double], num: Double) => (acc.head + num) +: acc)
sums.reverse.tail
}
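// Example (a quick sketch): normalise and cumsum compose into an empirical
// CDF over particle weights, e.g.
// cumsum(normalise(Vector(1.0, 2.0, 1.0))) == Vector(0.25, 0.75, 1.0)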
/**
* Multinomial Resampling, sample from a categorical distribution with probabilities
* equal to the particle weights
*/
def multinomialResampling[A](particles: Vector[A], weights: Vector[LogLikelihood]): Vector[A] = {
val indices = sample(particles.size, DenseVector(weights.toArray))
indices map { particles(_) }
}
/**
* Prints an ASCII histogram of a vector of integers
*/
def hist(x: Vector[Int]): Unit = {
val h = x.
groupBy(identity).
toVector.map{ case (n, l) => (n, l.length) }.
sortBy(_._1)
h foreach { case (n, count) => println(s"$n: ${Vector.fill(count)("#").mkString("")}") }
}
/**
* Return the value x such that F(x) = p, where F is the empirical cumulative distribution function over
* the particles
*/
def invecdf[A](ecdf: Vector[(A, LogLikelihood)], p: Double): A = {
ecdf.
filter{ case (_, w) => w > p }.
map{ case (x, _) => x }.head
}
/**
* Stratified resampling
* Sample n ORDERED uniform random numbers (one for each particle) using a linear transformation of a U(0,1) RV
*/
def stratifiedResampling[A](particles: Vector[A], weights: Vector[LogLikelihood]): Vector[A] = {
// generate n uniform random numbers
val n = weights.length
val u = (1 to n).map(k => (k - 1 + Uniform(0,1).draw) / n).toVector
val ecdf = particles.zip(cumsum(normalise(weights)))
u map (invecdf(ecdf, _))
}
/**
* Systematic Resampling
* Sample n ORDERED numbers (one for each particle), reusing the same U(0,1) variable
*/
def systematicResampling[A](particles: Vector[A], weights: Vector[LogLikelihood]): Vector[A] = {
val n = weights.length
val u = Uniform(0,1).draw
val k = (1 to n).map(a => (a - 1 + u) / n).toVector
val ecdf = particles.zip(cumsum(normalise(weights)))
k map (invecdf(ecdf, _))
}
/**
* Residual Resampling
* Select particles in proportion to their weights: particle xi is deterministically replicated ki = floor(n * wi) times,
* then the remaining m = n - sum(ki) particles are resampled according to the residual weights n * wi - ki
* using another resampling technique (multinomial here)
*/
def residualResampling[A](particles: Vector[A], weights: Vector[LogLikelihood]): Vector[A] = {
val n = weights.length
val normalisedWeights = normalise(weights)
val ki = normalisedWeights.
map (w => math.floor(w * n).toInt)
val indices = ki.zipWithIndex.
map { case (n, i) => Vector.fill(n)(i) }.
flatten
val x = indices map { particles(_) }
val m = n - indices.length
val residualWeights = normalisedWeights.zip(ki) map { case (w, k) => n * w - k }
val i = sample(m, DenseVector(residualWeights.toArray))
x ++ (i map { particles(_) })
}
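// Comparative sketch of the resampling schemes on toy data (LeafState comes
// from this package, DenseVector from breeze; the values are illustrative only):
//
// val particles = Vector.tabulate(4)(i => LeafState(DenseVector(i.toDouble)))
// val weights = Vector(0.1, 0.2, 0.3, 0.4)
// multinomialResampling(particles, weights) // i.i.d. categorical draws, highest variance
// stratifiedResampling(particles, weights) // one ordered uniform per stratum
// systematicResampling(particles, weights) // a single shared uniform, cheapest
// residualResampling(particles, weights) // deterministic replication + residual draws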
/**
* map2 implementation for Rand
*/
def map2[A,B,C](ma: Rand[A], mb: Rand[B])(f: (A, B) => C): Rand[C] = {
for {
a <- ma
b <- mb
} yield f(a, b)
}
/**
* Traverse method for Rand and Vector
*/
def traverse[A,B](l: Vector[A])(f: A => Rand[B]): Rand[Vector[B]] = {
l.foldRight(always(Vector[B]()))((a, mlb) => map2(f(a), mlb)(_ +: _))
}
/**
* Sequence, Traverse with the identity
*/
def sequence[A](l: Vector[Rand[A]]): Rand[Vector[A]] = {
traverse(l)(a => a)
}
}
case class Filter(model: Parameters => StateSpaceModel, resamplingScheme: Resample[State], t0: Time) extends ParticleFilter {
val unparamMod = model
def advanceState(x: Vector[State], dt: TimeIncrement, t: Time)(p: Parameters): Vector[(State, Eta)] = {
val mod = unparamMod(p)
val x1 = x map (p => mod.stepFunction(p, dt).draw)
val eta = x1 map ((s: State) => mod.link(mod.f(s, t)))
x1.zip(eta)
}
def calculateWeights(x: Eta, y: Observation)(p: Parameters): LogLikelihood = {
val mod = unparamMod(p)
mod.dataLikelihood(x, y)
}
def resample: Resample[State] = resamplingScheme
}
/**
* In order to calculate Eta in the LGCP model, we need to merge the advance state and transform state functions
*/
case class FilterLgcp(model: Parameters => StateSpaceModel, resamplingScheme: Resample[State], precision: Int, t0: Time) extends ParticleFilter {
val unparamMod = model
def calcWeight(x: State, dt: TimeIncrement, t: Time)(p: Parameters): (State, Eta) = {
val mod = unparamMod(p)
val x1 = simSdeStream(x, t - dt, dt, precision, mod.stepFunction)
val transformedState = x1 map (a => mod.f(a.state, a.time))
(x1.last.state, Vector(transformedState.last, transformedState.map(x => exp(x) * dt).sum))
}
def advanceState(x: Vector[State], dt: TimeIncrement, t: Time)(p: Parameters): Vector[(State, Eta)] = {
x map(calcWeight(_, dt, t)(p))
}
def calculateWeights(x: Eta, y: Observation)(p: Parameters): LogLikelihood = {
val mod = unparamMod(p)
mod.dataLikelihood(x, y)
}
def resample: Resample[State] = resamplingScheme
}
|
amitkumarj441/DynaML | dynaml-core/src/main/scala-2.11/io/github/mandar2812/dynaml/models/statespace/State.scala | package io.github.mandar2812.dynaml.models.statespace
import breeze.linalg.DenseVector
sealed trait State {
import State._
def map(f: DenseVector[Double] => DenseVector[Double]): State = State.map(this)(f)
def flatten: Vector[Double] = State.flatten(this)
def head: Double = State.head(this)
def |+|(that: State): State = {
combine(this, that)
}
def isEmpty: Boolean = State.isEmpty(this)
override def toString: String = this.flatten.mkString(", ")
}
case class LeafState(data: DenseVector[Double]) extends State with Serializable
case class BranchState(left: State, right: State) extends State with Serializable
object LeafState {
def apply(a: Double): LeafState = {
new LeafState(DenseVector(a))
}
}
object State {
def combine(state1: State, state2: State): State = {
if (state1.isEmpty) {
state2
} else if (state2.isEmpty) {
state1
} else {
BranchState(state1, state2)
}
}
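// Example (a quick sketch): |+| delegates to combine, so the empty state acts
// as an identity element:
// LeafState(DenseVector(1.0)) |+| zero // == LeafState(DenseVector(1.0))
// LeafState(DenseVector(1.0)) |+| LeafState(DenseVector(2.0)) // == BranchState(left, right)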
def zero: State = {
LeafState(DenseVector[Double]())
}
def head(s: State): Double = s match {
case LeafState(x) => x(0)
case BranchState(l, _) => head(l)
}
/**
* Determines if a state is empty, i.e. contains no data in any of its leaves
*/
def isEmpty(state: State): Boolean = state match {
case LeafState(x) => x.length == 0
case BranchState(lp, rp) => isEmpty(lp) && isEmpty(rp)
}
def toList(s: State): List[DenseVector[Double]] = s match {
case BranchState(l, r) => toList(l) ::: toList(r)
case LeafState(x) => List(x)
}
/**
* Get the node element at position n from the left, indexed from 0
* @param n the node position from the left
*/
def getState(s: State, n: Int): LeafState = {
val l = toList(s)
LeafState(l(n))
}
/**
* Maps all the values contained inside of all leaf nodes
* @param s a given tree of states
* @param f a function from a vector to a vector, usually defined using map, eg. x => x map (_ + 1)
* @return the state in the same structure only changed by the provided f
*/
def map(s: State)(f: DenseVector[Double] => DenseVector[Double]): State = s match {
case LeafState(x) => LeafState(f(x))
case BranchState(l, r) => BranchState(map(l)(f), map(r)(f))
}
/**
* Calculate the weighted mean of a list of States
* @param x a list of States
* @param w their associated weights
* @return the weighted mean
*/
def weightedMean(x: Vector[State], w: Vector[Double]): State = {
val normalisedWeights = w map (_ / w.sum)
val st = x.zip(normalisedWeights) map {
case (s, weight) =>
def loop(s: State, w: Double): State =
s match {
case LeafState(state) => LeafState(state map (_ * w))
case BranchState(ls, rs) => BranchState(loop(ls, w), loop(rs, w))
}
loop(s, weight)
}
st.reduceLeft((a: State, b: State) => addStates(a,b))
}
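// Worked sketch (illustrative numbers): for leaf states carrying 2.0 and 4.0
// with weights (1.0, 3.0), the normalised weights are (0.25, 0.75), so
// weightedMean(Vector(LeafState(DenseVector(2.0)), LeafState(DenseVector(4.0))), Vector(1.0, 3.0))
// reduces to LeafState(DenseVector(2.0 * 0.25 + 4.0 * 0.75)) == LeafState(DenseVector(3.5)).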
/**
* Add two states with the same structure, used in weighted mean
*/
def addStates(
s1: State,
s2: State): State =
(s1, s2) match {
case (x: LeafState, y: LeafState) if x.isEmpty => y
case (x: LeafState, y: LeafState) if y.isEmpty => x
case (LeafState(x), LeafState(x1)) => LeafState(x + x1)
case (BranchState(l, r), BranchState(l1, r1)) => BranchState(addStates(l, l1), addStates(r, r1))
}
def flatten(s: State): Vector[Double] =
s match {
case LeafState(x) => x.data.toVector
case BranchState(ls, rs) => flatten(ls) ++ flatten(rs)
}
}
|
amitkumarj441/DynaML | dynaml-core/src/main/scala-2.11/io/github/mandar2812/dynaml/kernels/MLPKernel.scala | <reponame>amitkumarj441/DynaML
/*
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
* */
package io.github.mandar2812.dynaml.kernels
import breeze.linalg.{DenseMatrix, DenseVector}
/**
* @author mandar2812 date 13/09/16.
*
* Implementation of the Multi-Layer Perceptron (MLP) kernel
*/
class MLPKernel(w: Double, b: Double) extends SVMKernel[DenseMatrix[Double]]
with LocalSVMKernel[DenseVector[Double]]
with Serializable{
override val hyper_parameters = List("w", "b")
state = Map("w" -> w, "b" -> b)
def setw(d: Double): Unit = {
state += ("w" -> d.toDouble)
}
def setoffset(o: Double): Unit = {
state += ("b" -> o)
}
override def evaluateAt(config: Map[String, Double])(
x: DenseVector[Double],
y: DenseVector[Double]): Double =
math.asin(
(config("w")*(x.t*y) + config("b"))/
(math.sqrt(config("w")*(x.t*x) + config("b") + 1) * math.sqrt(config("w")*(y.t*y) + config("b") + 1))
)
override def gradientAt(config: Map[String, Double])(x: DenseVector[Double], y: DenseVector[Double]) = {
val (wxy, wxx, wyy) = (
config("w")*(x.t*y) + config("b"),
math.sqrt(config("w")*(x.t*x) + config("b") + 1),
math.sqrt(config("w")*(y.t*y) + config("b") + 1))
val (numerator, denominator) = (wxy, wxx*wyy)
val z = numerator/denominator
val alpha = 1.0/(1.0 - z*z)
Map(
"w" ->
alpha*((denominator*(x.t*y) - numerator*0.5*(wyy*(x.t*x)/wxx + wxx*(y.t*y)/wyy))/(denominator*denominator)),
"b" ->
alpha*((denominator - numerator*0.5*(wyy/wxx + wxx/wyy))/(denominator*denominator))
)
}
}
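// Usage sketch (hypothetical values; direct evaluation at the kernel's current
// hyper-parameter state is assumed to be exposed via the public `state` map):
//
// val mlp = new MLPKernel(w = 1.5, b = 0.5)
// val (x, y) = (DenseVector(1.0, 2.0), DenseVector(0.5, -1.0))
// mlp.evaluateAt(mlp.state)(x, y) // arcsine similarity, in [-Pi/2, Pi/2]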
class MLP1dKernel(w: Double, b: Double) extends LocalSVMKernel[Double]
with Serializable {
override val hyper_parameters = List("w", "b")
state = Map("w" -> w, "b" -> b)
def setw(d: Double): Unit = {
state += ("w" -> d.toDouble)
}
def setoffset(o: Double): Unit = {
state += ("b" -> o)
}
override def evaluateAt(config: Map[String, Double])(
x: Double,
y: Double): Double =
math.asin(
(config("w")*(x*y) + config("b"))/
(math.sqrt(config("w")*(x*x) + config("b") + 1) * math.sqrt(config("w")*(y*y) + config("b") + 1))
)
override def gradientAt(config: Map[String, Double])(x: Double, y: Double) = {
val (wxy, wxx, wyy) = (
config("w")*(x*y) + config("b"),
math.sqrt(config("w")*(x*x) + config("b") + 1),
math.sqrt(config("w")*(y*y) + config("b") + 1))
val (numerator, denominator) = (wxy, wxx*wyy)
val z = numerator/denominator
val alpha = 1.0/(1.0 - z*z)
Map(
"w" ->
alpha*((denominator*(x*y) - numerator*0.5*(wyy*(x*x)/wxx + wxx*(y*y)/wyy))/(denominator*denominator)),
"b" ->
alpha*((denominator - numerator*0.5*(wyy/wxx + wxx/wyy))/(denominator*denominator))
)
}
}
|
amitkumarj441/DynaML | dynaml-core/src/main/scala-2.11/io/github/mandar2812/dynaml/kernels/DiracKernel.scala | <gh_stars>0
/*
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
* */
package io.github.mandar2812.dynaml.kernels
import breeze.linalg.{DenseMatrix, DenseVector, norm}
/**
* Dirac kernel is equivalent to the
* classical Dirac delta function scaled by
* a hyper-parameter called the noise level.
*
* K(x,y) = noise*DiracDelta(x,y)
*/
class DiracKernel(private var noiseLevel: Double = 1.0)
extends SVMKernel[DenseMatrix[Double]]
with LocalSVMKernel[DenseVector[Double]]
with Serializable {
override val hyper_parameters = List("noiseLevel")
state = Map("noiseLevel" -> noiseLevel)
def setNoiseLevel(d: Double): Unit = {
this.state += ("noiseLevel" -> d)
this.noiseLevel = d
}
override def evaluateAt(
config: Map[String, Double])(
x: DenseVector[Double],
y: DenseVector[Double]): Double =
if (norm(x-y, 2) == 0) math.abs(config("noiseLevel"))*1.0 else 0.0
override def gradientAt(
config: Map[String, Double])(
x: DenseVector[Double],
y: DenseVector[Double]): Map[String, Double] =
Map("noiseLevel" -> 1.0*evaluateAt(config)(x,y)/math.abs(config("noiseLevel")))
override def buildKernelMatrix[S <: Seq[DenseVector[Double]]](mappedData: S,
length: Int)
: KernelMatrix[DenseMatrix[Double]] =
new SVMKernelMatrix(DenseMatrix.eye[Double](length)*state("noiseLevel"), length)
}
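// Usage sketch (illustrative values): the Dirac kernel yields a purely diagonal
// covariance, so its Gram matrix is just a scaled identity:
//
// val noise = new DiracKernel(noiseLevel = 0.5)
// noise.buildKernelMatrix(data, data.length) // wraps 0.5 * I, for some Seq `data`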
class MAKernel(private var noiseLevel: Double = 1.0)
extends LocalSVMKernel[Double]
with Serializable {
override val hyper_parameters = List("noiseLevel")
state = Map("noiseLevel" -> noiseLevel)
def setNoiseLevel(d: Double): Unit = {
this.state += ("noiseLevel" -> d)
this.noiseLevel = d
}
override def evaluateAt(
config: Map[String, Double])(
x: Double,
y: Double): Double =
if (x-y == 0.0) math.abs(config("noiseLevel"))*1.0 else 0.0
override def gradientAt(
config: Map[String, Double])(
x: Double, y: Double): Map[String, Double] =
Map("noiseLevel" -> 1.0*evaluateAt(config)(x,y)/math.abs(config("noiseLevel")))
override def buildKernelMatrix[S <: Seq[Double]](mappedData: S,
length: Int)
: KernelMatrix[DenseMatrix[Double]] =
new SVMKernelMatrix(DenseMatrix.eye[Double](length)*state("noiseLevel"), length)
}
class CoRegDiracKernel extends LocalSVMKernel[Int] {
override val hyper_parameters: List[String] = List()
override def gradientAt(config: Map[String, Double])(x: Int, y: Int): Map[String, Double] = Map()
override def evaluateAt(config: Map[String, Double])(x: Int, y: Int): Double =
if(x == y) 1.0 else 0.0
}
|
amitkumarj441/DynaML | dynaml-core/src/main/scala-2.11/io/github/mandar2812/dynaml/tensorflow/implicits/package.scala | <filename>dynaml-core/src/main/scala-2.11/io/github/mandar2812/dynaml/tensorflow/implicits/package.scala
/*
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
* */
package io.github.mandar2812.dynaml.tensorflow
import io.github.mandar2812.dynaml.pipes._
import org.platanios.tensorflow.api._
import _root_.io.github.mandar2812.dynaml.DynaMLPipe
import org.platanios.tensorflow.api.implicits.helpers.OutputToTensor
import org.platanios.tensorflow.api.learn.estimators.Estimator.SupportedInferInput
import org.platanios.tensorflow.api.ops.Function
import org.platanios.tensorflow.api.ops.io.data.{Data, Dataset}
package object implicits {
implicit def singleOutputInferInput[T, O, D, S, I](
implicit
evOToT: OutputToTensor.Aux[O, T],
ev: Data.Aux[T, O, D, S],
evFunctionInput: Function.ArgType[O]): SupportedInferInput[O, I, T, O, D, S, I] =
new SupportedInferInput[O, I, T, O, D, S, I] {
override def toDataset(value: O): Dataset[T, O, D, S] = tf.data.OutputDataset[T, O, D, S](value)
override def convertFetched(iterator: Iterator[(T, I)]): I = iterator.next()._2
}
implicit val tensorSplit: MetaPipe12[Tensor, Int, Int, Tensor] = MetaPipe12(
(workingData: Tensor) => (index_start: Int, index_end: Int) => workingData(index_start::index_end + 1, ---)
)
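// Usage sketch (hypothetical tensor; the shape is an illustrative assumption):
// the split pipes select a closed index range along the leading axis, keeping
// all trailing axes via `---`.
//
// val t = Tensor.zeros(INT32, Shape(10, 3))
// tensorSplit(t)(2, 5) // rows 2 to 5 inclusive, shape (4, 3)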
implicit val tensorTup2Split: MetaPipe12[(Tensor, Tensor), Int, Int, (Tensor, Tensor)] = MetaPipe12(
(workingData: (Tensor, Tensor)) => (index_start: Int, index_end: Int) => (
workingData._1(index_start::index_end + 1, ---),
workingData._2(index_start::index_end + 1, ---)
)
)
implicit val outputSplit: MetaPipe12[Output, Int, Int, Output] = MetaPipe12(
(workingData: Output) => (index_start: Int, index_end: Int) => workingData(index_start::index_end + 1, ---)
)
implicit val concatTensorSplits: DataPipe[Iterable[Tensor], Tensor] =
DataPipe((ds: Iterable[Tensor]) => tfi.concatenate(ds.toSeq, axis = 0))
implicit val concatTensorTup2Splits: DataPipe[Iterable[(Tensor, Tensor)], (Tensor, Tensor)] = DataPipe(
(ds: Iterable[(Tensor, Tensor)]) => {
val separated_splits = ds.unzip
(
tfi.concatenate(separated_splits._1.toSeq, axis = 0),
tfi.concatenate(separated_splits._2.toSeq, axis = 0)
)
})
implicit val concatOutputTup2Splits: DataPipe[Iterable[(Output, Output)], (Output, Output)] = DataPipe(
(ds: Iterable[(Output, Output)]) => {
val separated_splits = ds.unzip
(
tf.concatenate(separated_splits._1.toSeq).toOutput,
tf.concatenate(separated_splits._2.toSeq).toOutput
)
})
implicit val convOutputToOutput: DataPipe[Output, Output] = DynaMLPipe.identityPipe[Output]
implicit val convTensorToOutput: DataPipe[Tensor, Output] = DataPipe((t: Tensor) => t.toOutput)
implicit val convOutputTensorToOutputTup: DataPipe[(Output, Tensor), (Output, Output)] =
convOutputToOutput * convTensorToOutput
implicit val convTensorOutputToOutputTup: DataPipe[(Tensor, Output), (Output, Output)] =
convTensorToOutput * convOutputToOutput
implicit val convTensorTupToOutputTup: DataPipe[(Tensor, Tensor), (Output, Output)] =
convTensorToOutput * convTensorToOutput
implicit val convOutputTupToOutputTup: DataPipe[(Output, Output), (Output, Output)] =
convOutputToOutput * convOutputToOutput
}
|
iykeawauma/awadevops | sbt-example/build.sbt | lazy val root = (project in file(".")).
settings(
name := "SBT_Example",
version := "1.1-SNAPSHOT",
scalaVersion := "2.11.4"
)
resolvers += "Artifactory" at "http://localhost:8081/artifactory/jcenter"
publishTo := Some("Artifactory Realm" at "http://localhost:8081/artifactory/libs-snapshot-local;build.timestamp=" + new java.util.Date().getTime)
credentials += Credentials(new File("credentials.properties"))
libraryDependencies += "org.apache.derby" % "derby" % "10.4.1.3"
|
iykeawauma/awadevops | sbt-example/src/main/scala/Example.scala | <filename>sbt-example/src/main/scala/Example.scala
object Example {
def main(args: Array[String]) = println("Sample Scala Project")
} |
ihji/dotty | tests/pos/objXfun.scala | <gh_stars>0
object Foo extends (Int => Int) { // OK
def apply(x: Int) = x
}
enum class E(x: Int) // used to generate Int => new E(x) as the parent of object E --> crash
object E {
case C(x: Int) extends E(x)
}
|
ihji/dotty | sbt-bridge/test/xsbt/TestDriver.scala | package xsbt
import dotty.tools.dotc._
import core.Contexts._
class TestDriver extends Driver {
protected def newCompiler(implicit ctx: Context): Compiler = new Compiler
override protected def sourcesRequired = false
def getCompiler(args: Array[String], rootCtx: Context) = {
val (fileNames, ctx) = setup(args, rootCtx)
(newCompiler(ctx), ctx)
}
}
|
ihji/dotty | tests/neg/phantom-assume-1.scala | object Boo extends Phantom {
type BooAny = this.Any
def assume1: BooAny = assume() // error: method assume in trait Phantom does not take parameters
}
|
ihji/dotty | tests/run/phantom-decls-5.scala | /* Run this test with
* `run tests/run/xyz.scala -Xprint-diff-del -Xprint:arrayConstructors,phantomTermErasure,phantomTypeErasure,erasure`
* to see the diffs after PhantomRefErasure, PhantomDeclErasure and Erasure.
*/
object Test {
import Boo._
def main(args: Array[String]): Unit = {
new Boo5[Blinky](boo[Pinky])
new Boo5[Pinky](boo[Pinky])
}
class Boo5[P <: Blinky](p5: P) {
println("Boo5")
}
}
object Boo extends Phantom {
type Blinky <: this.Any
type Inky <: Blinky
type Pinky <: Inky
def boo[B <: Blinky]: B = assume
}
|
ihji/dotty | tests/run/phantom-5.scala | <reponame>ihji/dotty<filename>tests/run/phantom-5.scala
/* Run this test with
* `run tests/run/xyz.scala -Xprint-diff-del -Xprint:arrayConstructors,phantomTermErasure,phantomTypeErasure,erasure`
* to see the diffs after PhantomRefErasure, PhantomDeclErasure and Erasure.
*/
object Test {
import Boo._
def main(args: Array[String]): Unit = {
fun5(boo[Blinky])(15)(boo[Pinky])(16)
fun5(boo[Inky])(17)(boo[Pinky])(18)
fun5(boo[Pinky])(19)(boo[Casper])(20)
}
def fun5(top: Blinky)(n: Int)(bottom: Clyde)(n2: Int): Unit = {
println("fun5")
}
}
object Boo extends Phantom {
type Blinky <: this.Any
type Inky <: Blinky
type Pinky <: Inky
type Clyde >: Pinky <: Inky
type Casper = Pinky
def boo[B <: Blinky]: B = assume
}
|
ihji/dotty | tests/neg/equality.scala | object equality {
case class Str(str: String)
case class Num(x: Int)
case class Other(x: Int)
trait Option[+T]
case class Some[+T](x: T) extends Option[T]
case object None extends Option[Nothing]
implicit def eqStr: Eq[Str, Str] = Eq
implicit def eqNum: Eq[Num, Num] = Eq
implicit def eqOption[T, U](implicit e: Eq[T, U]): Eq[Option[T], Option[U]] = Eq
case class PString(a: String) extends Proxy {
def self = a
}
/*
implicit def eqString: Eq[String, String] = Eq
implicit def eqInt: Eq[Int, Int] = Eq
implicit def eqNumber: Eq[Number, Number] = Eq
implicit def eqIntNumber: Eq[Int, Number] = Eq
implicit def eqNumberInt: Eq[Number, Int] = Eq
*/
def main(args: Array[String]): Unit = {
Some(Other(3)) == None
val x = Str("abc")
x == x
val n = Num(2)
val m = Num(3)
n == m
val so: Object = "abc"
so == "abc"
"abc" == so
Other(1) == Other(2)
Some(x) == None
Some(x) == Some(Str(""))
val z: Option[Str] = Some(Str("abc"))
z == Some(x)
z == None
Some(x) == z
None == z
None == Some(Other(3))
Other(3) == null
Str("x") == null
null == Other(3)
null == Str("x")
null == null
1 == true // error
null == true // OK by eqProxy
true == null // error
null == 1 // OK by eqProxy or eqNumInt
1 == null // OK by eqIntNum
class Fruit
implicit def eqFruit: Eq[Fruit, Fruit] = Eq
class Apple extends Fruit
class Pear extends Fruit
val a = new Apple
val p = new Pear
val f: Fruit = a
a == p
p == a
f == p
p == f
Some(new Apple) == Some(new Pear)
val i = 3
val bi = BigInt(i)
i == i
bi == bi
i == bi
bi == i
val ps = PString("hello")
ps == "world"
n match {
case None => // error
}
x == Other(1) // error
Other(2) == x // error
Other(1) == z // error
z == Other(1) // error
n == None // error
Some(new Apple) == Some(Str("xx")) // error
x == n // error
n == x // error
z == Some(n) // error
z == n // error
Some(n) == z // error
Some(n) == Some(Other(3)) // error
Some(Other(3)) == Some(n) // error
n == z // error
"abc" == 1 // error
1 == "abc" // error
"abc" == bi // error
bi == "abc" // error
"world" == ps // error
val s1 = Set(1, 2, 3)
val s2 = Set()
Nil == s1 // error
s1 == Nil // error
Nil == s2 // error
s2 == Nil // error
import collection.parallel._
val p1 = ParSeq(1, 2, 3)
val p2 = ParSeq()
Nil == p1 // OK
p1 == Nil // OK
Nil == p2 // OK
p2 == Nil // Ok
}
}
|
ihji/dotty | compiler/src/dotty/tools/dotc/transform/ByNameClosures.scala | package dotty.tools.dotc
package transform
import TreeTransforms._
import core._
import Symbols._
import SymDenotations._
import Contexts._
import Types._
import Flags._
import Decorators._
import DenotTransformers.IdentityDenotTransformer
import core.StdNames.nme
/** This phase translates arguments to call-by-name parameters, using the rules
*
* x ==> x if x is a => parameter
* e.apply() ==> <cbn-arg>(e) if e is pure
* e ==> <cbn-arg>(() => e) for all other arguments
*
* where
*
* <cbn-arg>: [T](() => T): T
*
* is a synthetic method defined in Definitions. Erasure will later strip the <cbn-arg> wrappers.
*/
class ByNameClosures extends TransformByNameApply with IdentityDenotTransformer { thisTransformer =>
import ast.tpd._
override def phaseName: String = "byNameClosures"
override def mkByNameClosure(arg: Tree, argType: Type)(implicit ctx: Context): Tree = {
val meth = ctx.newSymbol(
ctx.owner, nme.ANON_FUN, Synthetic | Method, MethodType(Nil, Nil, argType))
Closure(meth, _ => arg.changeOwnerAfter(ctx.owner, meth, thisTransformer))
}
}
|
ihji/dotty | tests/idempotency/BootstrapChecker.scala |
object Test {
def main(args: Array[String]): Unit =
IdempotencyCheck.checkIdempotency("../out/dotty")
}
|
ihji/dotty | compiler/src/dotty/tools/dotc/classpath/ClassPathFactory.scala | /*
* Copyright (c) 2014 Contributor. All rights reserved.
*/
package dotty.tools.dotc.classpath
import dotty.tools.io.{AbstractFile, VirtualDirectory}
import dotty.tools.io.Path.string2path
import dotty.tools.dotc.config.Settings
import FileUtils.AbstractFileOps
import dotty.tools.io.ClassPath
import dotty.tools.dotc.core.Contexts.Context
/**
* Provides factory methods for classpath. When creating classpath instances for a given path,
* it uses the proper type of classpath depending on the type of the files containing the sources or classes.
*/
class ClassPathFactory {
/**
* Create a new classpath based on the abstract file.
*/
def newClassPath(file: AbstractFile)(implicit ctx: Context): ClassPath = ClassPathFactory.newClassPath(file)
/**
* Creators for sub classpaths which preserve this context.
*/
def sourcesInPath(path: String)(implicit ctx: Context): List[ClassPath] =
for {
file <- expandPath(path, expandStar = false)
dir <- Option(AbstractFile getDirectory file)
} yield createSourcePath(dir)
def expandPath(path: String, expandStar: Boolean = true): List[String] = dotty.tools.io.ClassPath.expandPath(path, expandStar)
def expandDir(extdir: String): List[String] = dotty.tools.io.ClassPath.expandDir(extdir)
def contentsOfDirsInPath(path: String)(implicit ctx: Context): List[ClassPath] =
for {
dir <- expandPath(path, expandStar = false)
name <- expandDir(dir)
entry <- Option(AbstractFile.getDirectory(name))
} yield newClassPath(entry)
def classesInExpandedPath(path: String)(implicit ctx: Context): IndexedSeq[ClassPath] =
classesInPathImpl(path, expand = true).toIndexedSeq
def classesInPath(path: String)(implicit ctx: Context) = classesInPathImpl(path, expand = false)
def classesInManifest(useManifestClassPath: Boolean)(implicit ctx: Context) =
if (useManifestClassPath) dotty.tools.io.ClassPath.manifests.map(url => newClassPath(AbstractFile getResources url))
else Nil
// Internal
protected def classesInPathImpl(path: String, expand: Boolean)(implicit ctx: Context) =
for {
file <- expandPath(path, expand)
dir <- {
def asImage = if (file.endsWith(".jimage")) Some(AbstractFile.getFile(file)) else None
Option(AbstractFile.getDirectory(file)).orElse(asImage)
}
} yield newClassPath(dir)
private def createSourcePath(file: AbstractFile)(implicit ctx: Context): ClassPath =
if (file.isJarOrZip)
ZipAndJarSourcePathFactory.create(file)
else if (file.isDirectory)
new DirectorySourcePath(file.file)
else
sys.error(s"Unsupported sourcepath element: $file")
}
object ClassPathFactory {
def newClassPath(file: AbstractFile)(implicit ctx: Context): ClassPath = file match {
case vd: VirtualDirectory => VirtualDirectoryClassPath(vd)
case _ =>
if (file.isJarOrZip)
ZipAndJarClassPathFactory.create(file)
else if (file.isDirectory)
new DirectoryClassPath(file.file)
else
sys.error(s"Unsupported classpath element: $file")
}
}
|
ihji/dotty | bot/src/dotty/tools/bot/util/HttpClientAux.scala | package dotty.tools.bot.util
import org.http4s._
import scalaz.concurrent.Task
import org.http4s.headers.{ Accept, Authorization }
object HttpClientAux {
def uriFromString(url: String): Task[Uri] =
Uri.fromString(url).fold(Task.fail, Task.now)
implicit class RicherTask(val task: Task[Request]) extends AnyVal {
def withOauth2(token: String): Task[Request] =
task.map(_.putHeaders(new Authorization(new OAuth2BearerToken(token))))
def withAuth(user: String, pass: String): Task[Request] =
task.map(_.putHeaders(new Authorization(BasicCredentials(user, pass))))
}
private[this] lazy val previewAcceptHeader =
Accept.parse("application/vnd.github.black-cat-preview+json")
.getOrElse(throw new Exception("Couldn't initialize accept header"))
@inline private def request(method: Method, endpoint: Uri): Request =
Request(uri = endpoint, method = method)
.putHeaders(previewAcceptHeader)
def get(endpoint: String): Task[Request] =
uriFromString(endpoint).map(request(Method.GET, _))
def post(endpoint: String): Task[Request] =
uriFromString(endpoint).map(request(Method.POST, _))
def delete(endpoint: String): Task[Request] =
uriFromString(endpoint).map(request(Method.DELETE, _))
}
|
ihji/dotty | tests/neg/phantom-class-type-members.scala | import Boo._
object Test {
def main(args: Array[String]): Unit = {
val a = new Bar()
foo(a.asInstanceOf[Foo{type T = BooNothing}].y) // error
a match {
case a: Foo{type T = BooNothing} => a.y // error
}
val b = new Baz
b.asInstanceOf[Foo{type T = BooAny}].z(any) // error
b match {
case b: Foo{type T = BooAny} => a.z(any) // error
}
}
def foo(x: BooNothing) = println("foo")
}
abstract class Foo {
type T <: BooAny
def y: T
def z(z: T): Unit
}
class Bar extends Foo {
type T = BooAny
def y: T = any
def z(z: T) = ()
}
class Baz extends Foo {
type T = BooNothing
def y: T = nothing
def z(z: T) = ()
}
object Boo extends Phantom {
type BooAny = this.Any
type BooNothing = this.Nothing
def any: BooAny = assume
def nothing: BooNothing = assume
}
|
ihji/dotty | tests/run/i2738.scala | object Test {
def main(args: Array[String]): Unit = {
foo(1)
foo("a")
baz(2)
baz("b")
}
def foo[X <: Int](x: X) = {
def bar = printlnThisMethodName()
printlnThisMethodName()
bar
}
def foo(x: String) = {
def bar = printlnThisMethodName()
printlnThisMethodName()
bar
}
def baz[X <: Int](x: X) = {
object qux {
override def toString() = {
printlnThisMethodsClassName()
"a"
}
}
printlnThisMethodName()
qux.toString()
}
def baz(x: String) = {
object qux {
override def toString() = {
printlnThisMethodsClassName()
"b"
}
}
printlnThisMethodName()
qux.toString()
}
def printlnThisMethodName() =
println(Thread.currentThread().getStackTrace()(2).getMethodName)
def printlnThisMethodsClassName() =
println(Thread.currentThread().getStackTrace()(2).getClassName)
}
|
ihji/dotty | compiler/src/dotty/tools/dotc/reporting/Reporter.scala | <gh_stars>0
package dotty.tools
package dotc
package reporting
import core.Contexts._
import util.{SourcePosition, NoSourcePosition}
import core.Decorators.PhaseListDecorator
import collection.mutable
import config.Printers
import java.lang.System.currentTimeMillis
import core.Mode
import dotty.tools.dotc.core.Symbols.Symbol
import diagnostic.messages._
import diagnostic._
import Message._
object Reporter {
/** Convert a SimpleReporter into a real Reporter */
def fromSimpleReporter(simple: interfaces.SimpleReporter): Reporter =
new Reporter with UniqueMessagePositions with HideNonSensicalMessages {
override def doReport(m: MessageContainer)(implicit ctx: Context): Unit = m match {
case m: ConditionalWarning if !m.enablingOption.value =>
case _ =>
simple.report(m)
}
}
}
import Reporter._
trait Reporting { this: Context =>
/** For sending messages that are printed only if -verbose is set */
def inform(msg: => String, pos: SourcePosition = NoSourcePosition): Unit =
if (this.settings.verbose.value) this.echo(msg, pos)
def echo(msg: => String, pos: SourcePosition = NoSourcePosition): Unit =
reporter.report(new Info(msg, pos))
def reportWarning(warning: Warning): Unit =
if (!this.settings.silentWarnings.value) {
if (this.settings.XfatalWarnings.value) reporter.report(warning.toError)
else reporter.report(warning)
}
def deprecationWarning(msg: => Message, pos: SourcePosition = NoSourcePosition): Unit =
reportWarning(new DeprecationWarning(msg, pos))
def migrationWarning(msg: => Message, pos: SourcePosition = NoSourcePosition): Unit =
reportWarning(new MigrationWarning(msg, pos))
def uncheckedWarning(msg: => Message, pos: SourcePosition = NoSourcePosition): Unit =
reportWarning(new UncheckedWarning(msg, pos))
def featureWarning(msg: => Message, pos: SourcePosition = NoSourcePosition): Unit =
reportWarning(new FeatureWarning(msg, pos))
def featureWarning(feature: String, featureDescription: String, isScala2Feature: Boolean,
featureUseSite: Symbol, required: Boolean, pos: SourcePosition): Unit = {
val req = if (required) "needs to" else "should"
val prefix = if (isScala2Feature) "scala." else "dotty."
val fqname = prefix + "language." + feature
val explain = {
if (reporter.isReportedFeatureUseSite(featureUseSite)) ""
else {
reporter.reportNewFeatureUseSite(featureUseSite)
s"""
|This can be achieved by adding the import clause 'import $fqname'
|or by setting the compiler option -language:$feature.
|See the Scala docs for value $fqname for a discussion
|why the feature $req be explicitly enabled.""".stripMargin
}
}
val msg = s"$featureDescription $req be enabled\nby making the implicit value $fqname visible.$explain"
if (required) error(msg, pos)
else reportWarning(new FeatureWarning(msg, pos))
}
def warning(msg: => Message, pos: SourcePosition = NoSourcePosition): Unit =
reportWarning(new Warning(msg, pos))
def strictWarning(msg: => Message, pos: SourcePosition = NoSourcePosition): Unit =
if (this.settings.strict.value) error(msg, pos)
else reportWarning(new ExtendMessage(() => msg)(_ + "\n(This would be an error under strict mode)").warning(pos))
def error(msg: => Message, pos: SourcePosition = NoSourcePosition): Unit =
reporter.report(new Error(msg, pos))
def errorOrMigrationWarning(msg: => Message, pos: SourcePosition = NoSourcePosition): Unit =
if (ctx.scala2Mode) migrationWarning(msg, pos) else error(msg, pos)
def restrictionError(msg: => Message, pos: SourcePosition = NoSourcePosition): Unit =
reporter.report {
new ExtendMessage(() => msg)(m => s"Implementation restriction: $m").error(pos)
}
def incompleteInputError(msg: => Message, pos: SourcePosition = NoSourcePosition)(implicit ctx: Context): Unit =
reporter.incomplete(new Error(msg, pos))(ctx)
/** Log msg if settings.log contains the current phase.
* See [[config.CompilerCommand#explainAdvanced]] for the exact meaning of
* "contains" here.
*/
def log(msg: => String, pos: SourcePosition = NoSourcePosition): Unit =
if (this.settings.log.value.containsPhase(phase))
echo(s"[log ${ctx.phasesStack.reverse.mkString(" -> ")}] $msg", pos)
def debuglog(msg: => String): Unit =
if (ctx.debug) log(msg)
def informTime(msg: => String, start: Long): Unit = {
def elapsed = s" in ${currentTimeMillis - start}ms"
informProgress(msg + elapsed)
}
def informProgress(msg: => String) =
inform("[" + msg + "]")
def trace[T](msg: => String)(value: T) = {
log(msg + " " + value)
value
}
def debugwarn(msg: => String, pos: SourcePosition = NoSourcePosition): Unit =
if (this.settings.debug.value) warning(msg, pos)
@inline
def debugTraceIndented[TD](question: => String, printer: Printers.Printer = Printers.default, show: Boolean = false)(op: => TD): TD =
conditionalTraceIndented(this.settings.debugTrace.value, question, printer, show)(op)
@inline
def conditionalTraceIndented[TC](cond: Boolean, question: => String, printer: Printers.Printer = Printers.default, show: Boolean = false)(op: => TC): TC =
if (cond) traceIndented[TC](question, printer, show)(op)
else op
@inline
def traceIndented[T](question: => String, printer: Printers.Printer = Printers.default, show: Boolean = false)(op: => T): T =
if (printer eq config.Printers.noPrinter) op
else doTraceIndented[T](question, printer, show)(op)
private def doTraceIndented[T](question: => String, printer: Printers.Printer = Printers.default, show: Boolean = false)(op: => T): T = {
def resStr(res: Any): String = res match {
case res: printing.Showable if show => res.show
case _ => String.valueOf(res)
}
// Avoid evaluating question multiple times, since each evaluation
// may cause some extra logging output.
lazy val q: String = question
doTraceIndented[T](s"==> $q?", (res: Any) => s"<== $q = ${resStr(res)}")(op)
}
def doTraceIndented[T](leading: => String, trailing: Any => String)(op: => T): T =
if (ctx.mode.is(Mode.Printing)) op
else {
var finalized = false
var logctx = this
while (logctx.reporter.isInstanceOf[StoreReporter]) logctx = logctx.outer
def finalize(result: Any, note: String) =
if (!finalized) {
base.indent -= 1
logctx.log(s"${base.indentTab * base.indent}${trailing(result)}$note")
finalized = true
}
try {
logctx.log(s"${base.indentTab * base.indent}$leading")
base.indent += 1
val res = op
finalize(res, "")
res
} catch {
case ex: Throwable =>
finalize("<missing>", s" (with exception $ex)")
throw ex
}
}
}
/**
* This interface provides methods to issue information, warning and
* error messages.
*/
abstract class Reporter extends interfaces.ReporterResult {
/** Report a diagnostic */
def doReport(m: MessageContainer)(implicit ctx: Context): Unit
/** Whether very long lines can be truncated. This exists so important
* debugging information (like printing the classpath) is not rendered
* invisible due to the max message length.
*/
private var _truncationOK: Boolean = true
def truncationOK = _truncationOK
def withoutTruncating[T](body: => T): T = {
val saved = _truncationOK
_truncationOK = false
try body
finally _truncationOK = saved
}
type ErrorHandler = MessageContainer => Context => Unit
private var incompleteHandler: ErrorHandler = d => c => report(d)(c)
def withIncompleteHandler[T](handler: ErrorHandler)(op: => T): T = {
val saved = incompleteHandler
incompleteHandler = handler
try op
finally incompleteHandler = saved
}
var errorCount = 0
var warningCount = 0
def hasErrors = errorCount > 0
def hasWarnings = warningCount > 0
private var errors: List[Error] = Nil
def allErrors = errors
/** Have errors been reported by this reporter, or in the
* case where this is a StoreReporter, by an outer reporter?
*/
def errorsReported = hasErrors
private[this] var reportedFeaturesUseSites = Set[Symbol]()
def isReportedFeatureUseSite(featureTrait: Symbol): Boolean = reportedFeaturesUseSites.contains(featureTrait)
def reportNewFeatureUseSite(featureTrait: Symbol): Unit = reportedFeaturesUseSites += featureTrait
val unreportedWarnings = new mutable.HashMap[String, Int] {
override def default(key: String) = 0
}
def report(m: MessageContainer)(implicit ctx: Context): Unit =
if (!isHidden(m)) {
doReport(m)(ctx.addMode(Mode.Printing))
m match {
case m: ConditionalWarning if !m.enablingOption.value => unreportedWarnings(m.enablingOption.name) += 1
case m: Warning => warningCount += 1
case m: Error =>
errors = m :: errors
errorCount += 1
case m: Info => // nothing to do here
// match error if m is something else
}
}
def incomplete(m: MessageContainer)(implicit ctx: Context): Unit =
incompleteHandler(m)(ctx)
/** Summary of warnings and errors */
def summary: String = {
val b = new mutable.ListBuffer[String]
if (warningCount > 0)
b += countString(warningCount, "warning") + " found"
if (errorCount > 0)
b += countString(errorCount, "error") + " found"
for ((settingName, count) <- unreportedWarnings)
b += s"there were $count ${settingName.tail} warning(s); re-run with $settingName for details"
b.mkString("\n")
}
/** Print the summary of warnings and errors */
def printSummary(implicit ctx: Context): Unit = {
val s = summary
if (s != "") ctx.echo(s)
}
/** Returns a string meaning "n elements". */
protected def countString(n: Int, elements: String): String = n match {
case 0 => "no " + elements + "s"
case 1 => "one " + elements
case 2 => "two " + elements + "s"
case 3 => "three " + elements + "s"
case 4 => "four " + elements + "s"
case _ => n + " " + elements + "s"
}
/** Should this diagnostic not be reported at all? */
def isHidden(m: MessageContainer)(implicit ctx: Context): Boolean =
ctx.mode.is(Mode.Printing)
/** Does this reporter contain not yet reported errors or warnings? */
def hasPending: Boolean = false
/** If this reporter buffers messages, remove and return all buffered messages. */
def removeBufferedMessages(implicit ctx: Context): List[MessageContainer] = Nil
/** Issue all error messages in this reporter to next outer one, or make sure they are written. */
def flush()(implicit ctx: Context): Unit =
removeBufferedMessages.foreach(ctx.reporter.report)
}
|
ihji/dotty | compiler/src/dotty/tools/dotc/transform/localopt/RemoveUnnecessaryNullChecks.scala | package dotty.tools.dotc
package transform.localopt
import core.Constants.{Constant, NullTag}
import core.Contexts.Context
import core.Symbols._
import core.Types._
import core.Flags._
import ast.Trees._
import scala.collection.mutable
/** Eliminates null checks based on the following observations:
*
* - (this) cannot be null
* - (new C) cannot be null
* - a literal is either null itself or non-null
* - it falls back to `tpe.isNotNull`, which will eventually be true for non-nullable types.
* - in (a.call; a == null), the first call throws a NPE if a is null; the test can be removed.
*
* @author DarkDimius, Jvican, OlivierBlanvillain
*/
class RemoveUnnecessaryNullChecks extends Optimisation {
import ast.tpd._
val initializedVals = mutable.HashSet[Symbol]()
val checkGood = mutable.HashMap[Symbol, Set[Symbol]]()
def clear(): Unit = {
initializedVals.clear()
checkGood.clear()
}
def isGood(t: Symbol)(implicit ctx: Context): Boolean = {
t.exists && initializedVals.contains(t) && {
var changed = true
var set = Set(t)
while (changed) {
val oldSet = set
set = set ++ set.flatMap(x => checkGood.getOrElse(x, Nil))
changed = set != oldSet
}
!set.exists(x => !initializedVals.contains(x))
}
}
def visitor(implicit ctx: Context): Tree => Unit = {
case vd: ValDef =>
val rhs = vd.rhs
if (!vd.symbol.is(Mutable) && !rhs.isEmpty) {
def checkNonNull(t: Tree, target: Symbol): Boolean = t match {
case Block(_ , expr) =>
checkNonNull(expr, target)
case If(_, thenp, elsep) =>
checkNonNull(thenp, target) && checkNonNull(elsep, target)
case _: New | _: This => true
case t: Apply if t.symbol.isPrimaryConstructor => true
case t: Literal => t.const.value != null
case t: Ident if !t.symbol.owner.isClass =>
checkGood.put(target, checkGood.getOrElse(target, Set.empty) + t.symbol)
true
case t: Apply if !t.symbol.owner.isClass =>
checkGood.put(target, checkGood.getOrElse(target, Set.empty) + t.symbol)
true
case t: Typed =>
checkNonNull(t.expr, target)
case _ => t.tpe.isNotNull
}
if (checkNonNull(vd.rhs, vd.symbol))
initializedVals += vd.symbol
}
case t: Tree =>
}
def transformer(implicit ctx: Context): Tree => Tree = {
def isNullLiteral(tree: Tree) = tree match {
case literal: Literal =>
literal.const.tag == NullTag
case _ => false
}
val transformation: Tree => Tree = {
case check @ Apply(Select(lhs, _), List(rhs)) =>
val sym = check.symbol
val eqOrNe = sym == defn.Object_eq || sym == defn.Object_ne
val nullLhs = isNullLiteral(lhs) && isGood(rhs.symbol)
val nullRhs = isNullLiteral(rhs) && isGood(lhs.symbol)
if (eqOrNe && (nullLhs || nullRhs)) {
def block(b: Boolean) = Block(List(lhs, rhs), Literal(Constant(b)))
if (sym == defn.Object_eq) block(false)
else if (sym == defn.Object_ne) block(true)
else check
} else check
case t => t
}
transformation
}
}
|
ihji/dotty | tests/neg/phantom-Eq.scala | /* This is a example of how to implement Eq using erasable phantom types.
*
* See also: ../pos/phantomEq.scala
*/
object PhantomEqTest {
import EqUtil._
"abc" === "abc"
1 === 4
1 === "abc" // error
"ghi" === 4 // error
0 === Nil // error
List(1, 2) === 1 // error
List(1, 2) === "" // error
}
object EqUtil extends Phantom {
type PhantomEq[-L, -R] <: this.Any
type PhantomEqEq[T] = PhantomEq[T, T]
implicit class EqualsDeco[T](val x: T) extends AnyVal {
def ===[U] (y: U)(implicit ce: PhantomEq[T, U]) = x.equals(y)
}
implicit def eqString: PhantomEqEq[String] = assume
implicit def eqInt: PhantomEqEq[Int] = assume
implicit def eqDouble: PhantomEqEq[Double] = assume
implicit def eqByteNum: PhantomEq[Byte, Number] = assume
implicit def eqNumByte: PhantomEq[Number, Byte] = assume
implicit def eqSeq[T, U](implicit eq: PhantomEq[T, U]): PhantomEq[Seq[T], Seq[U]] = assume
}
|
ihji/dotty | tests/run/phantom-4.scala | /* Run this test with
* `run tests/run/xyz.scala -Xprint-diff-del -Xprint:arrayConstructors,phantomTermErasure,phantomTypeErasure,erasure`
* to see the diffs after PhantomRefErasure, PhantomDeclErasure and Erasure.
*/
object Test {
import Boo._
def main(args: Array[String]): Unit = {
fun4(3, 4, boo[Blinky], boo[Pinky])
fun4(5, 6, boo[Inky], boo[Pinky])
fun4(7, 8, boo[Pinky], boo[Casper])
}
def fun4(n: Int, n2: Int, top: Blinky, bottom: Pinky): Unit = {
println("fun4")
}
}
object Boo extends Phantom {
type Blinky <: this.Any
type Inky <: Blinky
type Pinky <: Inky
type Casper = Pinky
def boo[B <: Blinky]: B = assume
}
|
ihji/dotty | compiler/src/dotty/tools/dotc/transform/localopt/ConstantFold.scala | package dotty.tools.dotc
package transform.localopt
import core.Contexts.Context
import core.Symbols._
import core.Types._
import typer.ConstFold
import ast.Trees._
import Simplify.desugarIdent
/** Various constant folding.
*
* - Starts/ends with the constant folding implemented in typer (ConstFold).
*
* - Join branches if they are "similar"
*
* - Regularize arithmetic and boolean expressions to have constants on the
* left, i.e. 6 * 2 * a * 5 => 60 * a
*
* - (if)-specific optimisations that propagate booleans and negation, and factor
* out (nested) ifs with equivalent branches with respect to isSimilar. For example:
* - if (b) exp else exp → b; exp
* - if (b1) e1 else if (b2) e1 else e2 → if (b1 || b2) e1 else e2
*
* - Constant propagation over pattern matching.
*
* @author DarkDimius, OlivierBlanvillain
*/
class ConstantFold(val simplifyPhase: Simplify) extends Optimisation {
import ast.tpd._
def visitor(implicit ctx: Context) = NoVisitor
def clear(): Unit = ()
def transformer(implicit ctx: Context): Tree => Tree = { x => preEval(x) match {
// TODO: include handling of isInstanceOf similar to one in IsInstanceOfEvaluator
// TODO: include methods such as Int.int2double(see ./tests/pos/harmonize.scala)
case If(cond1, thenp, elsep) if isSimilar(thenp, elsep) =>
Block(cond1 :: Nil, thenp)
case If(cond1, If(cond2, thenp2, elsep2), elsep1) if isSimilar(elsep1, elsep2) =>
If(cond1.select(defn.Boolean_&&).appliedTo(cond2), thenp2, elsep1)
case If(cond1, If(cond2, thenp2, elsep2), elsep1) if isSimilar(elsep1, thenp2) =>
If(cond1.select(defn.Boolean_!).ensureApplied.select(defn.Boolean_||).appliedTo(cond2), elsep1, elsep2)
case If(cond1, thenp1, If(cond2, thenp2, elsep2)) if isSimilar(thenp1, thenp2) =>
If(cond1.select(defn.Boolean_||).appliedTo(cond2), thenp1, elsep2)
case If(cond1, thenp1, If(cond2, thenp2, elsep2)) if isSimilar(thenp1, elsep2) =>
If(cond1.select(defn.Boolean_||).appliedTo(cond2.select(defn.Boolean_!).ensureApplied), thenp1, thenp2)
case If(t: Literal, thenp, elsep) =>
if (t.const.booleanValue) thenp
else elsep
case ift @ If(cond, thenp: Literal, elsep: Literal)
if isBool(ift.tpe) && thenp.const.booleanValue && !elsep.const.booleanValue =>
cond
// the lower two are disabled, as they may make the isSimilar rule not apply to a nested structure of ifs.
// see the example below:
// (b1, b2) match {
// case (true, true) => true
// case (false, false) => true
// case _ => false
// }
// case ift @ If(cond, thenp: Literal, elsep)
// if isBool(ift.tpe) && thenp.const.booleanValue =>
// if (thenp.const.booleanValue)
// cond.select(defn.Boolean_||).appliedTo(elsep)
// else // thenp is false, this tree is bigger then the original
// cond.select(defn.Boolean_!).ensureApplied.select(defn.Boolean_&&).appliedTo(elsep)
// case ift @ If(cond, thenp, elsep :Literal) if
// isBool(ift.tpe) && !elsep.const.booleanValue =>
// cond.select(defn.Boolean_&&).appliedTo(elsep)
// the other case isn't handled intentionally. See the previous case for an explanation
case If(t @ Select(recv, _), thenp, elsep) if t.symbol eq defn.Boolean_! =>
If(recv, elsep, thenp)
case If(t @ Apply(Select(recv, _), Nil), thenp, elsep) if t.symbol eq defn.Boolean_! =>
If(recv, elsep, thenp)
// TODO: similar trick for comparisons.
// TODO: handle comparison with min/max values
case Apply(meth1 @ Select(Apply(meth2 @ Select(rec, _), Nil), _), Nil)
if meth1.symbol == defn.Boolean_! && meth2.symbol == defn.Boolean_! =>
rec
case meth1 @ Select(meth2 @ Select(rec, _), _)
if meth1.symbol == defn.Boolean_! && meth2.symbol == defn.Boolean_! && !ctx.erasedTypes =>
rec
case t @ Apply(Select(lhs, _), List(rhs)) =>
val sym = t.symbol
(lhs, rhs) match {
case (lhs, Literal(_)) if !lhs.isInstanceOf[Literal] && simplifyPhase.CommutativePrimitiveOperations.contains(sym) =>
rhs.select(sym).appliedTo(lhs)
case (l, _) if (sym == defn.Boolean_&&) && isConst(l.tpe) =>
val const = asConst(l.tpe).value.booleanValue
if (const) Block(lhs :: Nil, rhs)
else l
case (l, x: Literal) if sym == defn.Boolean_== && isBool(l.tpe) && isBool(x.tpe) =>
if (x.const.booleanValue) l
else l.select(defn.Boolean_!).ensureApplied
case (l, x: Literal) if sym == defn.Boolean_!= && isBool(l.tpe) && isBool(x.tpe) =>
if (!x.const.booleanValue) l
else l.select(defn.Boolean_!).ensureApplied
case (x: Literal, l) if sym == defn.Boolean_== && isBool(l.tpe) && isBool(x.tpe) =>
if (x.const.booleanValue) l
else l.select(defn.Boolean_!).ensureApplied
case (x: Literal, l) if sym == defn.Boolean_!= && isBool(l.tpe) && isBool(x.tpe) =>
if (!x.const.booleanValue) l
else l.select(defn.Boolean_!).ensureApplied
case (l: Literal, _) if (sym == defn.Boolean_||) && isConst(l.tpe) =>
val const = asConst(l.tpe).value.booleanValue
if (l.const.booleanValue) l
else Block(lhs :: Nil, rhs)
// case (Literal(Constant(1)), _) if sym == defn.Int_* => rhs
// case (Literal(Constant(0)), _) if sym == defn.Int_+ => rhs
// case (Literal(Constant(1L)), _) if sym == defn.Long_* => rhs
// case (Literal(Constant(0L)), _) if sym == defn.Long_+ => rhs
// // TODO: same for float, double, short
// // TODO: empty string concat
// // TODO: distribute & reorder constants
// // TODO: merge subsequent casts
// case (_, Literal(Constant(1))) if sym == defn.Int_/ => lhs
// case (_, Literal(Constant(1L))) if sym == defn.Long_/ => lhs
// case (_, Literal(Constant(0))) if sym == defn.Int_/ =>
// Block(List(lhs),
// ref(defn.throwMethod).appliedTo(New(defn.ArithmeticExceptionClass.typeRef, defn.ArithmeticExceptionClass_stringConstructor, Literal(Constant("/ by zero")) :: Nil)))
// case (_, Literal(Constant(0L))) if sym == defn.Long_/ =>
// Block(List(lhs),
// ref(defn.throwMethod).appliedTo(New(defn.ArithmeticExceptionClass.typeRef, defn.ArithmeticExceptionClass_stringConstructor, Literal(Constant("/ by zero")) :: Nil)))
case _ => t
}
// This case can only be triggered when running Simplify before pattern matching:
// case t: Match
// if t.selector.tpe.isInstanceOf[ConstantType] &&
// t.cases.forall { x =>
// x.pat.tpe.isInstanceOf[ConstantType] || (isWildcardArg(x.pat) && x.guard.isEmpty)
// } =>
// val selectorValue = t.selector.tpe.asInstanceOf[ConstantType].value
// val better = t.cases.find(x => isWildcardArg(x.pat) || (x.pat.tpe.asInstanceOf[ConstantType].value eq selectorValue))
// if (better.nonEmpty) better.get.body
// else t
case t: Literal => t
case t: CaseDef => t
case t if !isPureExpr(t) => t
case t =>
val s = ConstFold.apply(t)
if ((s ne null) && s.tpe.isInstanceOf[ConstantType]) {
val constant = s.tpe.asInstanceOf[ConstantType].value
Literal(constant)
} else t
}
}
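// Illustration (not part of the compiler; assumes pure Boolean expressions c, c1, c2):
// the If rules above rewrite, for example,
//   if (c) x else x                          ~~>  { c; x }
//   if (c1) { if (c2) x else y } else y      ~~>  if (c1 && c2) x else y
//   if (!c) x else y                         ~~>  if (c) y else x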
def preEval(t: Tree)(implicit ctx: Context) = {
if (t.isInstanceOf[Literal] || t.isInstanceOf[CaseDef] || !isPureExpr(t)) t
else {
val s = ConstFold.apply(t)
if ((s ne null) && s.tpe.isInstanceOf[ConstantType]) {
val constant = s.tpe.asInstanceOf[ConstantType].value
Literal(constant)
} else t
}
}
def isSimilar(t1: Tree, t2: Tree)(implicit ctx: Context): Boolean = t1 match {
case t1: Apply =>
t2 match {
case t2: Apply =>
(t1.symbol == t2.symbol) &&
(t1.args zip t2.args).forall(x => isSimilar(x._1, x._2)) &&
isSimilar(t1.fun, t2.fun)
case _ => false
}
case t1: Ident =>
desugarIdent(t1) match {
case Some(t) =>
val t2i = t2 match {
case t2: Ident => desugarIdent(t2).getOrElse(t2)
case _ => t2
}
isSimilar(t, t2i)
case None => t1.symbol eq t2.symbol
}
case t1: Select => t2 match {
case t2: Select =>
(t1.symbol eq t2.symbol) &&
isSimilar(t1.qualifier, t2.qualifier)
case t2: Ident => desugarIdent(t2) match {
case Some(t2) => isSimilar(t1, t2)
case None => false
}
case _ => false
}
case t1: Literal => t2 match {
case t2: Literal =>
t1.const.tag == t2.const.tag &&
t1.const.value == t2.const.value
case _ => false
}
case _ => false
}
def isBool(tpe: Type)(implicit ctx: Context): Boolean = tpe.derivesFrom(defn.BooleanClass)
def isConst(tpe: Type)(implicit ctx: Context): Boolean = tpe.isInstanceOf[ConstantType]
def asConst(tpe: Type)(implicit ctx: Context): ConstantType = tpe.asInstanceOf[ConstantType]
}
|
ihji/dotty | tests/run/phantom-methods-14.scala | import scala.reflect.ClassTag
object Test {
def main(args: Array[String]): Unit = {
bar1(Foo.a)
bar2(Foo.a)(null)
}
def bar1(ev: Foo.A) = ()
def bar2(ev: Foo.A)(implicit c: ClassTag[Int]) = implicitly[ClassTag[Int]]
}
object Foo extends Phantom {
type A <: this.Any
def a: A = assume
}
|
ihji/dotty | tests/run/enum-approx.scala | <reponame>ihji/dotty
enum class Fun[-T, +U >: Null] {
def f: T => U = null
}
object Fun {
case Identity[T, U >: Null](override val f: T => U) extends Fun[T, U]
case ConstNull {
override def f = x => null
}
case ConstNullClass() {
override def f = x => null
}
case ConstNullSimple
}
object Test {
def main(args: Array[String]) = {
val x: Null = Fun.ConstNull.f("abc")
val y: Null = Fun.ConstNullClass().f("abc")
assert(Fun.ConstNullSimple.f == null)
}
}
|
ihji/dotty | compiler/src/dotty/tools/dotc/parsing/CharArrayReader.scala | <reponame>ihji/dotty<filename>compiler/src/dotty/tools/dotc/parsing/CharArrayReader.scala<gh_stars>0
package dotty.tools
package dotc
package parsing
import util.Chars._
abstract class CharArrayReader { self =>
val buf: Array[Char]
protected def startFrom = 0
/** Switch controlling whether unicode escapes should be decoded */
protected def decodeUni: Boolean = true
/** An error routine to call on bad unicode escapes \\uxxxx. */
protected def error(msg: String, offset: Int): Unit
/** the last read character */
var ch: Char = _
/** The offset one past the last read character */
var charOffset: Int = startFrom
/** The offset before the last read character */
var lastCharOffset: Int = startFrom
/** The start offset of the current line */
var lineStartOffset: Int = startFrom
/** The start offset of the line before the current one */
var lastLineStartOffset: Int = startFrom
private var lastUnicodeOffset = -1
/** Is last character a unicode escape \\uxxxx? */
def isUnicodeEscape = charOffset == lastUnicodeOffset
/** Advance one character, reducing CR;LF pairs to just LF */
final def nextChar(): Unit = {
val idx = charOffset
lastCharOffset = idx
if (idx >= buf.length) {
ch = SU
} else {
val c = buf(idx)
ch = c
charOffset = idx + 1
if (c == '\\') potentialUnicode()
else if (c < ' ') { skipCR(); potentialLineEnd() }
}
}
def getc() = { nextChar() ; ch }
/** Advance one character, leaving CR;LF pairs intact.
* This is for use in multi-line strings, so there are no
* "potential line ends" here.
*/
final def nextRawChar(): Unit = {
val idx = charOffset
lastCharOffset = idx
if (idx >= buf.length) {
ch = SU
} else {
val c = buf(charOffset)
ch = c
charOffset = idx + 1
if (c == '\\') potentialUnicode()
}
}
/** Interpret \\uxxxx escapes */
private def potentialUnicode(): Unit = {
def evenSlashPrefix: Boolean = {
var p = charOffset - 2
while (p >= 0 && buf(p) == '\\') p -= 1
(charOffset - p) % 2 == 0
}
def udigit: Int = {
if (charOffset >= buf.length) {
// Since the positioning code is very insistent about throwing exceptions,
// we have to decrement the position so our error message can be seen, since
// we are one past EOF. This happens with e.g. val x = \ u 1 <EOF>
error("incomplete unicode escape", charOffset - 1)
SU
}
else {
val d = digit2int(buf(charOffset), 16)
if (d >= 0) charOffset += 1
else error("error in unicode escape", charOffset)
d
}
}
if (charOffset < buf.length && buf(charOffset) == 'u' && decodeUni && evenSlashPrefix) {
do charOffset += 1
while (charOffset < buf.length && buf(charOffset) == 'u')
val code = udigit << 12 | udigit << 8 | udigit << 4 | udigit
lastUnicodeOffset = charOffset
ch = code.toChar
}
}
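// Example (illustration only): with decodeUni enabled, reading buffer contents
// \ u 0 0 4 1 leaves ch == 'A' and records lastUnicodeOffset, so that
// isUnicodeEscape is true immediately after the escape has been consumed.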
/** replace CR;LF by LF */
private def skipCR(): Unit = {
if (ch == CR)
if (charOffset < buf.length && buf(charOffset) == LF) {
charOffset += 1
ch = LF
}
}
/** Handle line ends */
private def potentialLineEnd(): Unit = {
if (ch == LF || ch == FF) {
lastLineStartOffset = lineStartOffset
lineStartOffset = charOffset
}
}
def isAtEnd = charOffset >= buf.length
/** A new reader that takes off at the current character position */
def lookaheadReader() = new CharArrayLookaheadReader
class CharArrayLookaheadReader extends CharArrayReader {
val buf = self.buf
charOffset = self.charOffset
ch = self.ch
override def decodeUni = self.decodeUni
def error(msg: String, offset: Int) = self.error(msg, offset)
}
}
|
ihji/dotty | tests/run/phantom-poly-4.scala | <filename>tests/run/phantom-poly-4.scala
/* Run this test with
* `run tests/run/xyz.scala -Xprint-diff-del -Xprint:arrayConstructors,phantomTermErasure,phantomTypeErasure,erasure`
* to see the diffs after PhantomRefErasure, PhantomDeclErasure and Erasure.
*/
object Test {
import Boo._
def main(args: Array[String]): Unit = {
polyfun4(boo[Blinky])
polyfun4(boo[Inky])
polyfun4(boo[Pinky])
}
def polyfun4[P >: BooNothing](p: P): Unit = {
println("polyfun4")
}
}
object Boo extends Phantom {
type BooNothing = Boo.Nothing
type Blinky <: this.Any
type Inky <: Blinky
type Pinky <: Inky
def boo[B <: Blinky]: B = assume
}
|
ihji/dotty | tests/neg/phantom-trait-4.scala | <filename>tests/neg/phantom-trait-4.scala
class Foo {
object Boo1 extends Phantom // error
def foo = {
object Boo2 extends Phantom // error
42
}
}
object Foo {
object Boo1 extends Phantom
def foo = {
object Boo2 extends Phantom // error
42
}
}
package foo {
object Boo1 extends Phantom
}
|
ihji/dotty | tests/pos/i2426.scala | class Foo @deprecated("foo", "2.11.0") (x: Int)
class Bar @deprecated(x: Int)
class Baz1 @deprecated(implicit c: C)
class Baz2 @deprecated()(implicit c: C)
class Baz3 @deprecated()()(implicit c: C)
object Test {
implicit val c: C = obj
new Baz1
new Baz2
new Baz3()
}
class D(implicit x: C)
class C
object obj extends C
class ann(x: C)(y: C, s: String) extends scala.annotation.Annotation
class Bam @ann(obj)(obj, "h")(n: String)
|
ihji/dotty | compiler/src/dotty/tools/dotc/transform/patmat/Space.scala | package dotty.tools.dotc
package transform
package patmat
import core._
import Types._
import Contexts._
import Flags._
import ast.Trees._
import ast.tpd
import Decorators._
import Symbols._
import StdNames._
import NameOps._
import Constants._
import reporting.diagnostic.messages._
import config.Printers.{ exhaustivity => debug }
/** Space logic for checking exhaustivity and unreachability of pattern matching
*
* Space can be thought of as a set of possible values. A type or a pattern
* both refer to spaces. The space of a type is the values that inhabit the
* type. The space of a pattern is the values that can be covered by the
* pattern.
*
* Space is recursively defined as follows:
*
* 1. `Empty` is a space
* 2. For a type T, `Typ(T)` is a space
* 3. A union of spaces `S1 | S2 | ...` is a space
* 4. For a case class Kon(x1: T1, x2: T2, .., xn: Tn), if S1, S2, ..., Sn
* are spaces, then `Kon(S1, S2, ..., Sn)` is a space.
* 5. `Fun(S1, S2, ..., Sn)` is an extractor space.
*
* For exhaustivity checking, the formulation in terms of spaces is as follows:
*
* Is the space Typ(T) a subspace of the union of space covered by all the patterns?
*
* The problem of unreachable patterns can be formulated as follows:
*
* Is the space covered by a pattern a subspace of the space covered by previous patterns?
*
* Assumption:
* (1) One case class cannot be inherited directly or indirectly by another
* case class.
* (2) Inheritance of a case class cannot be handled well by the algorithm.
*
*/
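/* Worked example (illustration only, not part of the algorithm): for
 *
 *   def f(x: Option[Int]) = x match { case Some(i) => i }
 *
 * the selector space is Typ(Option[Int]) and the pattern projects to
 * Kon(Some[Int], List(Typ(Int))). Typ(Option[Int]) decomposes into
 * Typ(Some[Int]) | Typ(None.type), so subtracting the pattern space leaves
 * Typ(None.type), which is reported as the uncovered case `None`.
 */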
/** space definition */
sealed trait Space
/** Empty space */
case object Empty extends Space
/** Space representing the set of all values of a type
*
* @param tp: the type this space represents
* @param decomposed: does the space result from decomposition? Used for pretty print
*
*/
case class Typ(tp: Type, decomposed: Boolean) extends Space
/** Space representing a constructor pattern */
case class Kon(tp: Type, params: List[Space]) extends Space
/** Space representing an extractor pattern */
case class Fun(tp: Type, fun: Type, params: List[Space]) extends Space
/** Union of spaces */
case class Or(spaces: List[Space]) extends Space
/** abstract space logic */
trait SpaceLogic {
/** Is `tp1` a subtype of `tp2`? */
def isSubType(tp1: Type, tp2: Type): Boolean
/** Is `tp1` the same type as `tp2`? */
def isEqualType(tp1: Type, tp2: Type): Boolean
/** Return a space containing the values of both types.
*
* The types should be atomic (non-decomposable) and unrelated (neither
* should be a subtype of the other).
*/
def intersectUnrelatedAtomicTypes(tp1: Type, tp2: Type): Space
/** Is the type `tp` decomposable? i.e. all values of the type can be covered
* by its decomposed types.
*
* Abstract sealed classes, OrTypes, Boolean and Java enums can be decomposed.
*/
def canDecompose(tp: Type): Boolean
/** Return term parameter types of the case class `tp` */
def signature(tp: Type): List[Type]
/** Get components of decomposable types */
def decompose(tp: Type): List[Space]
/** Display space in string format */
def show(sp: Space): String
/** Simplify space using the laws, there's no nested union after simplify
*
* @param aggressive if true and the Or space has fewer than 5 components, `simplify` will
* collapse `sp1 | sp2` to `sp1` if `sp2` is a subspace of `sp1`.
*
* This reduces noise in counterexamples.
*/
def simplify(space: Space, aggressive: Boolean = false): Space = space match {
case Kon(tp, spaces) =>
val sp = Kon(tp, spaces.map(simplify(_)))
if (sp.params.contains(Empty)) Empty
else sp
case Fun(tp, fun, spaces) =>
val sp = Fun(tp, fun, spaces.map(simplify(_)))
if (sp.params.contains(Empty)) Empty
else sp
case Or(spaces) =>
val set = spaces.map(simplify(_)).flatMap {
case Or(ss) => ss
case s => Seq(s)
} filter (_ != Empty)
if (set.isEmpty) Empty
else if (set.size == 1) set.toList(0)
else if (aggressive && spaces.size < 5) {
val res = set.map(sp => (sp, set.filter(_ ne sp))).find {
case (sp, sps) =>
isSubspace(sp, Or(sps))
}
if (res.isEmpty) Or(set)
else simplify(Or(res.get._2), aggressive)
}
else Or(set)
case Typ(tp, _) =>
if (canDecompose(tp) && decompose(tp).isEmpty) Empty
else space
case _ => space
}
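// For instance (sketch): simplify(Or(List(Empty, Or(List(s1, s2))))) flattens the
// nested union and drops Empty, yielding Or(List(s1, s2)); and a Kon with an
// Empty parameter collapses to Empty, since no value can inhabit it.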
/** Flatten space to get rid of `Or` for pretty print */
def flatten(space: Space): List[Space] = space match {
case Kon(tp, spaces) =>
val flats = spaces.map(flatten _)
flats.foldLeft(List[Kon]()) { (acc, flat) =>
if (acc.isEmpty) flat.map(s => Kon(tp, Nil :+ s))
else for (Kon(tp, ss) <- acc; s <- flat) yield Kon(tp, ss :+ s)
}
case Or(spaces) =>
spaces.flatMap(flatten _)
case _ => List(space)
}
/** Is `a` a subspace of `b`? Equivalent to `a - b == Empty`, but faster */
def isSubspace(a: Space, b: Space): Boolean = {
def tryDecompose1(tp: Type) = canDecompose(tp) && isSubspace(Or(decompose(tp)), b)
def tryDecompose2(tp: Type) = canDecompose(tp) && isSubspace(a, Or(decompose(tp)))
val res = (simplify(a), b) match {
case (Empty, _) => true
case (_, Empty) => false
case (Or(ss), _) =>
ss.forall(isSubspace(_, b))
case (Typ(tp1, _), Typ(tp2, _)) =>
isSubType(tp1, tp2)
case (Typ(tp1, _), Or(ss)) => // optimization: don't go to subtraction too early
ss.exists(isSubspace(a, _)) || tryDecompose1(tp1)
case (_, Or(_)) =>
simplify(minus(a, b)) == Empty
case (Typ(tp1, _), Kon(tp2, ss)) =>
isSubType(tp1, tp2) && isSubspace(Kon(tp2, signature(tp2).map(Typ(_, false))), b)
case (Kon(tp1, ss), Typ(tp2, _)) =>
isSubType(tp1, tp2)
case (Kon(tp1, ss1), Kon(tp2, ss2)) =>
isEqualType(tp1, tp2) && ss1.zip(ss2).forall((isSubspace _).tupled)
case (Fun(tp1, fun, ss), Typ(tp2, _)) =>
isSubType(tp1, tp2)
case (Typ(tp2, _), Fun(tp1, fun, ss)) =>
false // approximation: assume a type can never be fully matched by an extractor
case (Kon(_, _), Fun(_, _, _)) =>
false // approximation
case (Fun(_, _, _), Kon(_, _)) =>
false // approximation
case (Fun(_, fun1, ss1), Fun(_, fun2, ss2)) =>
isEqualType(fun1, fun2) && ss1.zip(ss2).forall((isSubspace _).tupled)
}
debug.println(s"${show(a)} < ${show(b)} = $res")
res
}
/** Intersection of two spaces */
def intersect(a: Space, b: Space): Space = {
def tryDecompose1(tp: Type) = intersect(Or(decompose(tp)), b)
def tryDecompose2(tp: Type) = intersect(a, Or(decompose(tp)))
val res: Space = (a, b) match {
case (Empty, _) | (_, Empty) => Empty
case (_, Or(ss)) => Or(ss.map(intersect(a, _)).filterConserve(_ ne Empty))
case (Or(ss), _) => Or(ss.map(intersect(_, b)).filterConserve(_ ne Empty))
case (Typ(tp1, _), Typ(tp2, _)) =>
if (isSubType(tp1, tp2)) a
else if (isSubType(tp2, tp1)) b
else if (canDecompose(tp1)) tryDecompose1(tp1)
else if (canDecompose(tp2)) tryDecompose2(tp2)
else intersectUnrelatedAtomicTypes(tp1, tp2)
case (Typ(tp1, _), Kon(tp2, ss)) =>
if (isSubType(tp2, tp1)) b
else if (isSubType(tp1, tp2)) a // problematic corner case: inheriting a case class
else if (canDecompose(tp1)) tryDecompose1(tp1)
else Empty
case (Kon(tp1, ss), Typ(tp2, _)) =>
if (isSubType(tp1, tp2)) a
else if (isSubType(tp2, tp1)) a // problematic corner case: inheriting a case class
else if (canDecompose(tp2)) tryDecompose2(tp2)
else Empty
case (Kon(tp1, ss1), Kon(tp2, ss2)) =>
if (!isEqualType(tp1, tp2)) Empty
else if (ss1.zip(ss2).exists(p => simplify(intersect(p._1, p._2)) == Empty)) Empty
else Kon(tp1, ss1.zip(ss2).map((intersect _).tupled))
case (Typ(tp1, _), Fun(tp2, _, _)) =>
if (isSubType(tp1, tp2) || isSubType(tp2, tp1)) b // prefer extractor space for better approximation
else if (canDecompose(tp1)) tryDecompose1(tp1)
else Empty
case (Fun(tp1, _, _), Typ(tp2, _)) =>
if (isSubType(tp1, tp2) || isSubType(tp2, tp1)) a
else if (canDecompose(tp2)) tryDecompose2(tp2)
else Empty
case (Fun(tp1, _, _), Kon(tp2, _)) =>
if (isSubType(tp1, tp2) || isSubType(tp2, tp1)) a
else Empty
case (Kon(tp1, _), Fun(tp2, _, _)) =>
if (isSubType(tp1, tp2) || isSubType(tp2, tp1)) b
else Empty
case (Fun(tp1, fun1, ss1), Fun(tp2, fun2, ss2)) =>
if (!isEqualType(fun1, fun2)) Empty
else if (ss1.zip(ss2).exists(p => simplify(intersect(p._1, p._2)) == Empty)) Empty
else Fun(tp1, fun1, ss1.zip(ss2).map((intersect _).tupled))
}
debug.println(s"${show(a)} & ${show(b)} = ${show(res)}")
res
}
/** The space of a not covered by b */
def minus(a: Space, b: Space): Space = {
def tryDecompose1(tp: Type) = minus(Or(decompose(tp)), b)
def tryDecompose2(tp: Type) = minus(a, Or(decompose(tp)))
val res = (a, b) match {
case (Empty, _) => Empty
case (_, Empty) => a
case (Typ(tp1, _), Typ(tp2, _)) =>
if (isSubType(tp1, tp2)) Empty
else if (canDecompose(tp1)) tryDecompose1(tp1)
else if (canDecompose(tp2)) tryDecompose2(tp2)
else a
case (Typ(tp1, _), Kon(tp2, ss)) =>
// corner case: inheriting a case class
// rationale: every instance of `tp1` is covered by `tp2(_)`
if (isSubType(tp1, tp2)) minus(Kon(tp2, signature(tp2).map(Typ(_, false))), b)
else if (canDecompose(tp1)) tryDecompose1(tp1)
else a
case (_, Or(ss)) =>
ss.foldLeft(a)(minus)
case (Or(ss), _) =>
Or(ss.map(minus(_, b)))
case (Kon(tp1, ss), Typ(tp2, _)) =>
// uncovered corner case: tp2 :< tp1
if (isSubType(tp1, tp2)) Empty
else if (simplify(a) == Empty) Empty
else if (canDecompose(tp2)) tryDecompose2(tp2)
else a
case (Kon(tp1, ss1), Kon(tp2, ss2)) =>
if (!isEqualType(tp1, tp2)) a
else if (ss1.zip(ss2).exists(p => simplify(intersect(p._1, p._2)) == Empty)) a
else if (ss1.zip(ss2).forall((isSubspace _).tupled)) Empty
else
// `(_, _, _) - (Some, None, _)` becomes `(None, _, _) | (_, Some, _) | (_, _, Empty)`
Or(ss1.zip(ss2).map((minus _).tupled).zip(0 to ss2.length - 1).map {
case (ri, i) => Kon(tp1, ss1.updated(i, ri))
})
case (Fun(tp1, _, _), Typ(tp2, _)) =>
if (isSubType(tp1, tp2)) Empty
else a
case (Typ(tp1, _), Fun(tp2, _, _)) =>
a // approximation
case (Fun(_, _, _), Kon(_, _)) =>
a
case (Kon(_, _), Fun(_, _, _)) =>
a
case (Fun(tp1, fun1, ss1), Fun(tp2, fun2, ss2)) =>
if (!isEqualType(fun1, fun2)) a
else if (ss1.zip(ss2).exists(p => simplify(intersect(p._1, p._2)) == Empty)) a
else if (ss1.zip(ss2).forall((isSubspace _).tupled)) Empty
else
// `(_, _, _) - (Some, None, _)` becomes `(None, _, _) | (_, Some, _) | (_, _, Empty)`
Or(ss1.zip(ss2).map((minus _).tupled).zip(0 to ss2.length - 1).map {
case (ri, i) => Fun(tp1, fun1, ss1.updated(i, ri))
})
}
debug.println(s"${show(a)} - ${show(b)} = ${show(res)}")
res
}
}
object SpaceEngine {
private sealed trait Implementability {
def show(implicit ctx: Context) = this match {
case SubclassOf(classSyms) => s"SubclassOf(${classSyms.map(_.show)})"
case other => other.toString
}
}
private case object ClassOrTrait extends Implementability
private case class SubclassOf(classSyms: List[Symbol]) extends Implementability
private case object Unimplementable extends Implementability
}
/** Scala implementation of space logic */
class SpaceEngine(implicit ctx: Context) extends SpaceLogic {
import SpaceEngine._
import tpd._
private val scalaSomeClass = ctx.requiredClass("scala.Some")
private val scalaSeqFactoryClass = ctx.requiredClass("scala.collection.generic.SeqFactory")
private val scalaListType = ctx.requiredClassRef("scala.collection.immutable.List")
private val scalaNilType = ctx.requiredModuleRef("scala.collection.immutable.Nil")
private val scalaConsType = ctx.requiredClassRef("scala.collection.immutable.::")
/** Checks if it's possible to create a trait/class which is a subtype of `tp`.
*
* - doesn't handle member collisions (will not declare a type unimplementable because of one)
* - expects that neither Any nor Object reach it
* (this is currently true due to both isSubType and And/Or type simplification)
*
* See [[intersectUnrelatedAtomicTypes]].
*/
private def implementability(tp: Type): Implementability = tp.dealias match {
case AndType(tp1, tp2) =>
(implementability(tp1), implementability(tp2)) match {
case (Unimplementable, _) | (_, Unimplementable) => Unimplementable
case (SubclassOf(classSyms1), SubclassOf(classSyms2)) =>
(for {
sym1 <- classSyms1
sym2 <- classSyms2
result <-
if (sym1 isSubClass sym2) List(sym1)
else if (sym2 isSubClass sym1) List(sym2)
else Nil
} yield result) match {
case Nil => Unimplementable
case lst => SubclassOf(lst)
}
case (ClassOrTrait, ClassOrTrait) => ClassOrTrait
case (SubclassOf(clss), _) => SubclassOf(clss)
case (_, SubclassOf(clss)) => SubclassOf(clss)
}
case OrType(tp1, tp2) =>
(implementability(tp1), implementability(tp2)) match {
case (ClassOrTrait, _) | (_, ClassOrTrait) => ClassOrTrait
case (SubclassOf(classSyms1), SubclassOf(classSyms2)) =>
SubclassOf(classSyms1 ::: classSyms2)
case (SubclassOf(classSyms), _) => SubclassOf(classSyms)
case (_, SubclassOf(classSyms)) => SubclassOf(classSyms)
case _ => Unimplementable
}
case _: SingletonType =>
// singleton types have no instantiable subtypes
Unimplementable
case tp: RefinedType =>
// refinement itself is not considered - it would at most make
// a type unimplementable because of a member collision
implementability(tp.parent)
case other =>
val classSym = other.classSymbol
if (classSym.exists) {
if (classSym is Final) Unimplementable
else if (classSym is Trait) ClassOrTrait
else SubclassOf(List(classSym))
} else {
// if no class symbol exists, conservatively say that anything
// can implement `tp`
ClassOrTrait
}
}
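// Illustration (hypothetical types): for an unrelated `trait A` and
// `final class B` (neither a subtype of the other), implementability(A & B)
// is Unimplementable, since B is final and no class can extend both; the
// intersection below then becomes Empty.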
override def intersectUnrelatedAtomicTypes(tp1: Type, tp2: Type) = {
val and = AndType(tp1, tp2)
// Precondition: !(tp1 <:< tp2) && !(tp2 <:< tp1)
// Then, no leaf of the and-type tree `and` is a subtype of `and`.
// Then, to create a value of type `and` you must instantiate a trait (class/module)
// which is a subtype of all the leaves of `and`.
val imp = implementability(and)
debug.println(s"atomic intersection: ${and.show} ~ ${imp.show}")
imp match {
case Unimplementable => Empty
case _ => Typ(and, true)
}
}
/** Return the space that represents the pattern `pat`
*/
def project(pat: Tree): Space = pat match {
case Literal(c) =>
if (c.value.isInstanceOf[Symbol])
Typ(c.value.asInstanceOf[Symbol].termRef, false)
else
Typ(ConstantType(c), false)
case _: BackquotedIdent => Typ(pat.tpe, false)
case Ident(_) | Select(_, _) =>
Typ(pat.tpe.stripAnnots, false)
case Alternative(trees) => Or(trees.map(project(_)))
case Bind(_, pat) => project(pat)
case UnApply(fun, _, pats) =>
if (pat.tpe.classSymbol.is(CaseClass))
Kon(pat.tpe.stripAnnots, pats.map(pat => project(pat)))
else if (fun.symbol.owner == scalaSeqFactoryClass && fun.symbol.name == nme.unapplySeq)
projectList(pats)
else if (fun.symbol.info.finalResultType.isRef(scalaSomeClass))
Kon(pat.tpe.stripAnnots, pats.map(pat => project(pat)))
else
Fun(pat.tpe.stripAnnots, fun.tpe, pats.map(pat => project(pat)))
case Typed(pat @ UnApply(_, _, _), _) => project(pat)
case Typed(expr, _) => Typ(expr.tpe.stripAnnots, true)
case _ =>
Empty
}
/** Space of the pattern: List(a, b, c: _*)
*/
def projectList(pats: List[Tree]): Space = {
if (pats.isEmpty) return Typ(scalaNilType, false)
val (items, zero) = if (pats.last.tpe.isRepeatedParam)
(pats.init, Typ(scalaListType.appliedTo(pats.head.tpe.widen), false))
else
(pats, Typ(scalaNilType, false))
items.foldRight[Space](zero) { (pat, acc) =>
Kon(scalaConsType.appliedTo(pats.head.tpe.widen), project(pat) :: acc :: Nil)
}
}
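// Example (illustration only): the pattern List(a, b) projects to
//   Kon(::[T], List(a', Kon(::[T], List(b', Typ(Nil.type)))))
// where a' and b' are the projections of the sub-patterns, mirroring the
// cons-cell structure of the list.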
/* Erase a type binding according to erasure semantics in pattern matching */
def erase(tp: Type): Type = {
def doErase(tp: Type): Type = tp match {
case tp: HKApply => erase(tp.superType)
case tp: RefinedType => erase(tp.parent)
case _ => tp
}
tp match {
case OrType(tp1, tp2) =>
OrType(erase(tp1), erase(tp2))
case AndType(tp1, tp2) =>
AndType(erase(tp1), erase(tp2))
case _ =>
val origin = doErase(tp)
if (origin =:= defn.ArrayType) tp else origin
}
}
/** Is `tp1` a subtype of `tp2`? */
def isSubType(tp1: Type, tp2: Type): Boolean = {
// `erase` is a workaround to make the following code pass the check:
//
// def f(e: Either[Int, String]) = e match {
// case Left(i) => i
// case Right(s) => 0
// }
//
// The problem is that when decomposing `Either[Int, String]`, `Type.wrapIfMember`
// only refines the type member inherited from `Either` -- it's complex to refine
// the type members in `Left` and `Right`.
//
// FIXME: remove this hack
val res = tp1 <:< erase(tp2)
debug.println(s"${tp1.show} <:< ${tp2.show} = $res")
res
}
def isEqualType(tp1: Type, tp2: Type): Boolean = tp1 =:= tp2
/** Parameter types of the case class type `tp` */
def signature(tp: Type): List[Type] = {
val ktor = tp.classSymbol.primaryConstructor.info
val meth = ktor match {
case ktor: PolyType =>
ktor.instantiate(tp.classSymbol.typeParams.map(_.typeRef)).asSeenFrom(tp, tp.classSymbol)
case _ => ktor
}
// refine path-dependent type in params. refer to t9672
meth.firstParamTypes.map(_.asSeenFrom(tp, tp.classSymbol))
}
/** Decompose a type into subspaces -- assume the type can be decomposed */
def decompose(tp: Type): List[Space] = {
val children = tp.classSymbol.annotations.filter(_.symbol == ctx.definitions.ChildAnnot).map { annot =>
// refer to definition of Annotation.makeChild
annot.tree match {
case Apply(TypeApply(_, List(tpTree)), _) => tpTree.symbol
}
}
debug.println(s"candidates for ${tp.show} : [${children.map(_.show).mkString(", ")}]")
tp.dealias match {
case AndType(tp1, tp2) =>
intersect(Typ(tp1, false), Typ(tp2, false)) match {
case Or(spaces) => spaces
case Empty => Nil
case space => List(space)
}
case OrType(tp1, tp2) => List(Typ(tp1, true), Typ(tp2, true))
case tp if tp.isRef(defn.BooleanClass) =>
List(
Typ(ConstantType(Constant(true)), true),
Typ(ConstantType(Constant(false)), true)
)
case tp if tp.classSymbol.is(Enum) =>
children.map(sym => Typ(sym.termRef, true))
case tp =>
val parts = children.map { sym =>
if (sym.is(ModuleClass))
refine(tp, sym.sourceModule.termRef)
else if (sym.isTerm)
refine(tp, sym.termRef)
else if (sym.info.typeParams.length > 0 || tp.isInstanceOf[TypeRef])
refine(tp, sym.typeRef)
else
sym.typeRef
} filter { tpe =>
// A child class may not always be a subtype of its parent:
// GADT & path-dependent types
val res = tpe <:< expose(tp)
if (!res) debug.println(s"unqualified child ousted: ${tpe.show} !< ${tp.show}")
res
}
debug.println(s"${tp.show} decomposes to [${parts.map(_.show).mkString(", ")}]")
parts.map(Typ(_, true))
}
}
/** Refine tp2 based on tp1
*
* E.g. if `tp1` is `Option[Int]`, `tp2` is `Some`, then return
* `Some[Int]`.
*
* If `tp1` is `path1.A`, `tp2` is `path2.B`, and `path1` is subtype of
* `path2`, then return `path1.B`.
*/
def refine(tp1: Type, tp2: Type): Type = (tp1, tp2) match {
case (tp1: RefinedType, _: TypeRef) => tp1.wrapIfMember(refine(tp1.parent, tp2))
case (tp1: HKApply, _) => refine(tp1.superType, tp2)
case (TypeRef(ref1: TypeProxy, _), tp2 @ TypeRef(ref2: TypeProxy, _)) =>
if (ref1.underlying <:< ref2.underlying) tp2.derivedSelect(ref1) else tp2
case (TypeRef(ref1: TypeProxy, _), tp2 @ TermRef(ref2: TypeProxy, _)) =>
if (ref1.underlying <:< ref2.underlying) tp2.derivedSelect(ref1) else tp2
case _ => tp2
}
/** Abstract sealed types, or-types, Boolean and Java enums can be decomposed */
def canDecompose(tp: Type): Boolean = {
val dealiasedTp = tp.dealias
val res = tp.classSymbol.is(allOf(Abstract, Sealed)) ||
tp.classSymbol.is(allOf(Trait, Sealed)) ||
dealiasedTp.isInstanceOf[OrType] ||
(dealiasedTp.isInstanceOf[AndType] && {
val and = dealiasedTp.asInstanceOf[AndType]
canDecompose(and.tp1) || canDecompose(and.tp2)
}) ||
tp.isRef(defn.BooleanClass) ||
tp.classSymbol.is(allOf(Enum, Sealed)) // Enum value doesn't have Sealed flag
debug.println(s"decomposable: ${tp.show} = $res")
res
}
/** Show friendly type name with current scope in mind
*
* E.g. C.this.B --> B if current owner is C
* C.this.x.T --> x.T if current owner is C
* X[T] --> X
* C --> C if current owner is C !!!
*
*/
def showType(tp: Type): String = {
val enclosingCls = ctx.owner.enclosingClass.asClass.classInfo.symbolicTypeRef
def isOmittable(sym: Symbol) =
sym.isEffectiveRoot || sym.isAnonymousClass || sym.name.isReplWrapperName ||
ctx.definitions.UnqualifiedOwnerTypes.exists(_.symbol == sym) ||
sym.showFullName.startsWith("scala.") ||
sym == enclosingCls.typeSymbol
def refinePrefix(tp: Type): String = tp match {
case NoPrefix => ""
case tp: NamedType if isOmittable(tp.symbol) => ""
case tp: ThisType => refinePrefix(tp.tref)
case tp: RefinedType => refinePrefix(tp.parent)
case tp: NamedType => tp.name.show.stripSuffix("$")
}
def refine(tp: Type): String = tp match {
case tp: RefinedType => refine(tp.parent)
case tp: ThisType => refine(tp.tref)
case tp: NamedType =>
val pre = refinePrefix(tp.prefix)
if (tp.name == tpnme.higherKinds) pre
else if (pre.isEmpty) tp.name.show.stripSuffix("$")
else pre + "." + tp.name.show.stripSuffix("$")
case _ => tp.show.stripSuffix("$")
}
val text = tp.stripAnnots match {
case tp: OrType => showType(tp.tp1) + " | " + showType(tp.tp2)
case tp => refine(tp)
}
if (text.isEmpty) enclosingCls.show.stripSuffix("$")
else text
}
/** Display spaces */
def show(s: Space): String = {
/** Does the companion object of the given symbol have a custom unapply? */
def hasCustomUnapply(sym: Symbol): Boolean = {
val companion = sym.companionModule
companion.findMember(nme.unapply, NoPrefix, excluded = Synthetic).exists ||
companion.findMember(nme.unapplySeq, NoPrefix, excluded = Synthetic).exists
}
def doShow(s: Space, mergeList: Boolean = false): String = s match {
case Empty => ""
case Typ(c: ConstantType, _) => c.value.show
case Typ(tp: TermRef, _) => tp.symbol.showName
case Typ(tp, decomposed) =>
val sym = tp.widen.classSymbol
if (ctx.definitions.isTupleType(tp))
signature(tp).map(_ => "_").mkString("(", ", ", ")")
else if (scalaListType.isRef(sym))
if (mergeList) "_*" else "_: List"
else if (scalaConsType.isRef(sym))
if (mergeList) "_" else "List(_)"
else if (tp.classSymbol.is(CaseClass) && !hasCustomUnapply(tp.classSymbol))
// use constructor syntax for case class
showType(tp) + signature(tp).map(_ => "_").mkString("(", ", ", ")")
else if (decomposed) "_: " + showType(tp)
else "_"
case Kon(tp, params) =>
if (ctx.definitions.isTupleType(tp))
"(" + params.map(doShow(_)).mkString(", ") + ")"
else if (tp.isRef(scalaConsType.symbol))
if (mergeList) params.map(doShow(_, mergeList)).mkString(", ")
else params.map(doShow(_, true)).filter(_ != "Nil").mkString("List(", ", ", ")")
else
showType(tp) + params.map(doShow(_)).mkString("(", ", ", ")")
case Fun(tp, fun, params) =>
showType(fun) + params.map(doShow(_)).mkString("(", ", ", ")")
case Or(_) =>
throw new Exception("incorrect flatten result " + s)
}
flatten(s).map(doShow(_, false)).distinct.mkString(", ")
}
def checkable(tree: Match): Boolean = {
def isCheckable(tp: Type): Boolean = tp match {
case AnnotatedType(tp, annot) =>
(ctx.definitions.UncheckedAnnot != annot.symbol) && isCheckable(tp)
case _ =>
// Possible to check everything, but be compatible with scalac by default
ctx.settings.YcheckAllPatmat.value ||
tp.typeSymbol.is(Sealed) ||
tp.isInstanceOf[OrType] ||
(tp.isInstanceOf[AndType] && {
val and = tp.asInstanceOf[AndType]
isCheckable(and.tp1) || isCheckable(and.tp2)
}) ||
tp.isRef(defn.BooleanClass) ||
tp.typeSymbol.is(Enum) ||
canDecompose(tp) ||
(defn.isTupleType(tp) && tp.dealias.argInfos.exists(isCheckable(_)))
}
val Match(sel, cases) = tree
val res = isCheckable(sel.tpe.widen.deAnonymize.dealiasKeepAnnots)
debug.println(s"checkable: ${sel.show} = $res")
res
}
/** Expose refined type to eliminate reference to type variables
*
* A = B M { type T = A } ~~> M { type T = B }
*
* A <: X :> Y M { type T = A } ~~> M { type T <: X :> Y }
*
* A <: X :> Y B <: U :> V M { type T <: A :> B } ~~> M { type T <: X :> V }
*
* A = X B = Y M { type T <: A :> B } ~~> M { type T <: X :> Y }
*/
def expose(tp: Type): Type = {
def follow(tp: Type, up: Boolean): Type = tp match {
case tp: TypeProxy =>
tp.underlying match {
case TypeBounds(lo, hi) =>
follow(if (up) hi else lo, up)
case _ =>
tp
}
case OrType(tp1, tp2) =>
OrType(follow(tp1, up), follow(tp2, up))
case AndType(tp1, tp2) =>
AndType(follow(tp1, up), follow(tp2, up))
}
tp match {
case tp: RefinedType =>
tp.refinedInfo match {
case tpa : TypeAlias =>
val hi = follow(tpa.alias, true)
val lo = follow(tpa.alias, false)
val refined = if (hi =:= lo)
tpa.derivedTypeAlias(hi)
else
tpa.derivedTypeBounds(lo, hi)
tp.derivedRefinedType(
expose(tp.parent),
tp.refinedName,
refined
)
case tpb @ TypeBounds(lo, hi) =>
tp.derivedRefinedType(
expose(tp.parent),
tp.refinedName,
tpb.derivedTypeBounds(follow(lo, false), follow(hi, true))
)
case _ =>
tp.derivedRefinedType(
expose(tp.parent),
tp.refinedName,
tp.refinedInfo
)
}
case _ => tp
}
}
def checkExhaustivity(_match: Match): Unit = {
val Match(sel, cases) = _match
val selTyp = sel.tpe.widen.deAnonymize.dealias
val patternSpace = cases.map({ x =>
val space = project(x.pat)
debug.println(s"${x.pat.show} ====> ${show(space)}")
space
}).reduce((a, b) => Or(List(a, b)))
val uncovered = simplify(minus(Typ(selTyp, true), patternSpace), aggressive = true)
if (uncovered != Empty)
ctx.warning(PatternMatchExhaustivity(show(uncovered)), sel.pos)
}
def checkRedundancy(_match: Match): Unit = {
val Match(sel, cases) = _match
// ignore selector type for now
// val selTyp = sel.tpe.widen.deAnonymize.dealias
if (cases.length == 1) return
// starts from the second, the first can't be redundant
(1 until cases.length).foreach { i =>
// in the redundancy check, treat a guard as false in order to approximate soundly
val prevs = cases.take(i).map { x =>
if (x.guard.isEmpty) project(x.pat)
else Empty
}.reduce((a, b) => Or(List(a, b)))
val curr = project(cases(i).pat)
debug.println(s"---------------reachable? ${show(curr)}")
debug.println(s"prev: ${show(prevs)}")
if (isSubspace(curr, prevs)) {
ctx.warning(MatchCaseUnreachable(), cases(i).body.pos)
}
}
}
}
|
ihji/dotty | tests/run/phantom-self-1.scala | object Test {
def main(args: Array[String]): Unit = {
Boo.any
Boo.any2
}
}
object Boo extends Phantom with T
trait T { self: Phantom =>
type X = self.Any
def any: X = self.assume
def any2: X = assume
}
|
ihji/dotty | tests/run/phantom-2.scala | <filename>tests/run/phantom-2.scala
/* Run this test with
* `run tests/run/xyz.scala -Xprint-diff-del -Xprint:arrayConstructors,phantomTermErasure,phantomTypeErasure,erasure`
* to see the diffs after PhantomRefErasure, PhantomDeclErasure and Erasure.
*/
object Test {
import Boo._
def main(args: Array[String]): Unit = {
fun2(Boo.nothig)
}
def fun2(bottom: BooNothing): Unit = {
println("fun2")
}
}
object Boo extends Phantom {
type BooNothing = this.Nothing
def nothig: BooNothing = assume
}
|
ihji/dotty | library/src/scala/Phantom.scala | /* Defined synthetically
package scala
trait Phantom {
/** Phantom.Any does not extend scala.Any */
protected /*final*/ trait Any
protected final trait Nothing extends this.Any
protected final def assume: this.Nothing
}
*/
|
ihji/dotty | tests/idempotency/Checker.scala | <filename>tests/idempotency/Checker.scala
object Test {
def main(args: Array[String]): Unit =
IdempotencyCheck.checkIdempotency("../out/idempotency")
}
|
ihji/dotty | tests/neg/phantom-multiversal.scala | <gh_stars>0
class BooFunDef1 {
import Universe1._
import UniverseA._
fun1(one, two)
fun1(one, b) // error
fun1(b, a) // error // error
funA(a, b)
funA(a, one) // error
funA(two, one) // error // error
funMulti(a, one, 42)
funMulti(a, b, 42) // error
funMulti(one, two, one) // error // error
def fun1(x: One, y: Two) = ???
def funA(k: A, l: B) = ???
def funMulti(k: A, x: One, i: Int) = ???
}
object Universe1 extends Phantom {
type One = this.Any
type Two <: One
def one: One = assume
def two: Two = assume
}
object UniverseA extends Phantom {
type A = this.Any
type B <: A
def a: A = assume
def b: B = assume
}
|
ihji/dotty | tests/neg/phantom-instanceOf-2.scala |
class phantomInstanceOf2 {
import Boo._
boo[Blinky].asInstanceOf[Any] // error
boo[Blinky].asInstanceOf[Nothing] // error
boo[Blinky].asInstanceOf[Blinky] // error
boo[Blinky].asInstanceOf[BooAny] // error
}
object Boo extends Phantom {
type BooAny <: this.Any
type Blinky <: this.Any
def boo[B <: this.Any]: B = assume
}
|
ihji/dotty | compiler/src/dotty/tools/dotc/transform/localopt/Jumpjump.scala | <filename>compiler/src/dotty/tools/dotc/transform/localopt/Jumpjump.scala
package dotty.tools.dotc
package transform.localopt
import core.TypeErasure
import core.Constants.Constant
import core.Contexts.Context
import core.Decorators._
import core.Symbols._
import ast.Trees._
import scala.collection.mutable
import config.Printers.simplify
import core.Flags._
/** Rewrites pairs of consecutive LabelDef jumps by jumping directly to the target.
*
* @author DarkDimius, OlivierBlanvillain
*/
class Jumpjump extends Optimisation {
import ast.tpd._
val defined = mutable.HashMap[Symbol, Symbol]()
def clear(): Unit = defined.clear()
def visitor(implicit ctx: Context): Tree => Unit = {
case defdef: DefDef if defdef.symbol.is(Label) =>
defdef.rhs match {
case Apply(t, args)
if t.symbol.is(Label) &&
TypeErasure.erasure(defdef.symbol.info.finalResultType).classSymbol ==
TypeErasure.erasure(t.symbol.info.finalResultType).classSymbol &&
args.size == defdef.vparamss.map(_.size).sum &&
args.zip(defdef.vparamss.flatten).forall(x => x._1.symbol eq x._2.symbol) &&
defdef.symbol != t.symbol =>
defined(defdef.symbol) = t.symbol
case _ =>
}
case _ =>
}
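// Sketch of the rewrite this enables (hypothetical labels): given
//   def label1(x: Int): Int = label2(x)
//   def label2(x: Int): Int = ...
// every call to label1 is redirected to label2 by the transformer below, and
// label1 itself is dropped as a pure forwarder.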
def transformer(implicit ctx: Context): Tree => Tree = {
case a: Apply if defined.contains(a.fun.symbol) =>
defined.get(a.symbol) match {
case None => a
case Some(fwd) =>
ref(fwd).appliedToArgs(a.args)
}
case a: DefDef if defined.contains(a.symbol) =>
simplify.println(s"Dropping ${a.symbol.showFullName} as forwarder to ${defined(a.symbol).showFullName}")
EmptyTree
case t => t
}
}
|
ihji/dotty | compiler/src/dotty/tools/dotc/transform/LinkScala2Impls.scala | <reponame>ihji/dotty
package dotty.tools.dotc
package transform
import core._
import TreeTransforms._
import Contexts.Context
import Flags._
import SymUtils._
import Symbols._
import SymDenotations._
import Types._
import Decorators._
import DenotTransformers._
import StdNames._
import NameOps._
import Phases._
import ast.untpd
import ast.Trees._
import NameKinds.ImplMethName
import collection.mutable
/** Rewrite calls
*
* super[M].f(args)
*
* where M is a Scala 2.11 trait implemented by the current class to
*
* M$class.f(this, args)
*
* provided the implementation class M$class defines a corresponding function `f`.
* If M is a Scala 2.12 or newer trait, rewrite to
*
* M.f(this, args)
*
* where f is a static member of M.
*/
class LinkScala2Impls extends MiniPhase with IdentityDenotTransformer { thisTransform =>
import ast.tpd._
override def phaseName: String = "linkScala2Impls"
override def changesMembers = true
val treeTransform = new Transform
override def runsAfterGroupsOf: Set[Class[_ <: Phase]] = Set(classOf[Mixin])
// As a side effect, this phase adds static members to traits, which can confuse Mixin;
// that's why it uses runsAfterGroupsOf.
class Transform extends TreeTransform {
def phase = thisTransform
/** Copy definitions from implementation class to trait itself */
private def augmentScala_2_12_Trait(mixin: ClassSymbol)(implicit ctx: Context): Unit = {
def newImpl(sym: TermSymbol): Symbol = sym.copy(
owner = mixin,
name = if (sym.isConstructor) sym.name else ImplMethName(sym.name)
)
for (sym <- mixin.implClass.info.decls)
newImpl(sym.asTerm).enteredAfter(thisTransform)
}
override def prepareForTemplate(impl: Template)(implicit ctx: Context) = {
val cls = impl.symbol.owner.asClass
for (mixin <- cls.mixins)
if (mixin.is(Scala_2_12_Trait, butNot = Scala_2_12_Augmented)) {
augmentScala_2_12_Trait(mixin)
mixin.setFlag(Scala_2_12_Augmented)
}
this
}
override def transformApply(app: Apply)(implicit ctx: Context, info: TransformerInfo) = {
def currentClass = ctx.owner.enclosingClass.asClass
app match {
case Apply(sel @ Select(Super(_, _), _), args)
if sel.symbol.owner.is(Scala2xTrait) && currentClass.mixins.contains(sel.symbol.owner) =>
val impl = implMethod(sel.symbol)
if (impl.exists) Apply(ref(impl), This(currentClass) :: args).withPos(app.pos)
else app // could have been an abstract method in a trait linked to from a super constructor
case _ =>
app
}
}
private def implMethod(meth: Symbol)(implicit ctx: Context): Symbol = {
val (implInfo, implName) =
if (meth.owner.is(Scala_2_12_Trait))
(meth.owner.info, ImplMethName(meth.name.asTermName))
else
(meth.owner.implClass.info, meth.name)
if (meth.isConstructor)
implInfo.decl(nme.TRAIT_CONSTRUCTOR).symbol
else
implInfo.decl(implName)
.suchThat(c => FullParameterization.memberSignature(c.info) == meth.signature)
.symbol
}
}
private val Scala2xTrait = allOf(Scala2x, Trait)
}
|
ihji/dotty | tests/run/phantom-methods-12.scala | /* Run this test with
* `run tests/run/xyz.scala -Xprint-diff-del -Xprint:arrayConstructors,phantomRefErasure,phantomErasure,erasure`
* to see the diffs after PhantomRefErasure, PhantomDeclErasure and Erasure.
*/
object Test {
import Boo._
def main(args: Array[String]): Unit = {
fun1(
{ println("x1"); boo },
{ println("x2"); boo }
)(
{ println("x3"); boo }
)(
{ println("x4"); boo },
{ println("x5"); boo }
)
fun2(
{ println("y1"); 1 },
{ println("y2"); 2 }
)(
{ println("y3"); boo }
)(
{ println("y4"); boo },
{ println("y5"); boo }
)
fun3(
{ println("z1"); boo },
{ println("z2"); boo }
)(
{ println("z3"); 4 }
)(
{ println("z4"); 5 },
{ println("z5"); 6 }
)
}
def fun1(x1: Inky, x2: Inky)(x3: Inky)(x4: Inky, x5: Inky) = {
println("fun1")
}
def fun2(x1: Int, x2: Int)(x3: Inky)(x4: Inky, x5: Inky) = {
println("fun2")
}
def fun3(x1: Inky, x2: Inky)(x3: Int)(x4: Int, x5: Int) = {
println("fun3")
}
}
object Boo extends Phantom {
type Inky <: this.Any
def boo: Inky = assume
}
|
ihji/dotty | compiler/test/dotty/tools/dotc/repl/TestREPL.scala | package dotty
package tools.dotc
package repl
import core.Contexts.Context
import collection.mutable
import java.io.{StringWriter, PrintStream}
import dotty.tools.io.{ PlainFile, Directory }
import org.junit.Test
/** A subclass of REPL used for testing.
* It takes a transcript of a REPL session in `script`. The transcript
* starts with the first input prompt `scala> ` and ends with `scala> :quit` and a newline.
* Invoking `process()` on the `TestREPL` runs all input lines and
* collects them, interleaved with REPL output, in a string writer `out`.
* Invoking `check()` checks that the collected output matches the original
* `script`.
*/
class TestREPL(script: String) extends REPL {
private val out = new StringWriter()
override lazy val config = new REPL.Config {
override val output = new NewLinePrintWriter(out)
override def context(ctx: Context) = {
val fresh = ctx.fresh
fresh.setSetting(ctx.settings.color, "never")
fresh.setSetting(ctx.settings.classpath, Jars.dottyReplDeps.mkString(":"))
fresh.initialize()(fresh)
fresh
}
override def input(in: Interpreter)(implicit ctx: Context) = new InteractiveReader {
val lines = script.lines.buffered
def readLine(prompt: String): String = {
val line = lines.next()
val buf = new StringBuilder
if (line.startsWith(prompt)) {
output.println(line)
buf append line.drop(prompt.length)
while (lines.hasNext && lines.head.startsWith(continuationPrompt)) {
val continued = lines.next()
output.println(continued)
buf append System.lineSeparator()
buf append continued.drop(continuationPrompt.length)
}
buf.toString
}
else readLine(prompt)
}
val interactive = false
}
}
def check() = {
out.close()
val printed = out.toString
val transcript = printed.drop(printed.indexOf(config.prompt))
if (transcript.toString.lines.toList != script.lines.toList) {
println("input differs from transcript (copy is repl.transcript):")
println(transcript)
val s = new PrintStream("repl.transcript")
s.print(transcript)
s.close()
assert(false)
}
}
}
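/* Example transcript shape accepted by TestREPL (illustration only; the exact
 * output format depends on the REPL version):
 *
 *   scala> 1 + 1
 *   res0: Int = 2
 *   scala> :quit
 */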
class REPLTests {
def replFile(prefix: String, fileName: String): Unit = {
val path = s"$prefix$fileName"
val f = new PlainFile(path)
val repl = new TestREPL(new String(f.toCharArray))
repl.process(Array[String]())
repl.check()
}
def replFiles(path: String): Unit = {
val dir = Directory(path)
val fileNames = dir.files.toArray.map(_.jfile.getName).filter(_ endsWith ".check")
for (name <- fileNames) {
println(s"testing $path$name")
replFile(path, name)
}
}
@Test def replAll = replFiles("../tests/repl/")
}
|
ihji/dotty | bot/src/dotty/tools/bot/model/Drone.scala | package dotty.tools
package bot
package model
import io.circe._
import io.circe.generic.auto._
import io.circe.syntax._
import org.http4s._
import org.http4s.circe._
import org.http4s.client.Client
import scalaz.concurrent.Task
import bot.util.HttpClientAux
object Drone {
import HttpClientAux._
case class Build(
number: Int,
event: String,
status: String,
commit: String,
author: String
)
private[this] val baseUrl = "http://dotty-ci.epfl.ch/api"
private def job(id: Int) =
s"$baseUrl/repos/lampepfl/dotty/builds/$id"
private def job(id: Int, subId: Int) =
s"$baseUrl/repos/lampepfl/dotty/builds/$id/$subId"
def stopBuild(id: Int, token: String)(implicit client: Client): Task[Boolean] = {
def resToBoolean(res: Response): Task[Boolean] = Task.now {
res.status.code >= 200 && res.status.code < 400
}
val responses = List(1, 2, 3, 4).map { subId =>
client.fetch(delete(job(id, subId)).withOauth2(token))(resToBoolean)
}
Task.gatherUnordered(responses).map(xs => xs.exists(_ == true))
}
def startBuild(id: Int, token: String)(implicit client: Client): Task[Build] =
client.expect(post(job(id)).withOauth2(token))(jsonOf[Build])
}
|
ihji/dotty | library/src/scala/Eq.scala | <reponame>ihji/dotty
package scala
import annotation.implicitNotFound
import scala.collection.{GenSeq, Set}
/** A marker trait indicating that values of type `L` can be compared to values of type `R`. */
@implicitNotFound("Values of types ${L} and ${R} cannot be compared with == or !=")
sealed trait Eq[-L, -R]
/** Besides being a companion object, this object
* can also be used as a value that's compatible with
* any instance of `Eq`.
*/
object Eq extends Eq[Any, Any] {
/** A fall-back "implicit" to compare values of any types.
* Even though this method is not declared implicit, the compiler will
* compute instances as solutions to `Eq[T, U]` queries if `T <: U` or `U <: T`
* or both `T` and `U` are Eq-free. A type `S` is Eq-free if there is no
* implicit instance of type `Eq[S, S]`.
*/
def eqAny[L, R]: Eq[L, R] = Eq
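// For example (sketch, with equality checking enabled): `1 == "abc"` is
// rejected, because neither Int <: String nor String <: Int holds and both
// types have Eq instances (so neither is Eq-free), hence no Eq[Int, String]
// instance can be computed.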
// Instances of `Eq` for common types
implicit def eqNumber : Eq[Number, Number] = Eq
implicit def eqString : Eq[String, String] = Eq
implicit def eqBoolean : Eq[Boolean, Boolean] = Eq
implicit def eqByte : Eq[Byte, Byte] = Eq
implicit def eqShort : Eq[Short, Short] = Eq
implicit def eqChar : Eq[Char, Char] = Eq
implicit def eqInt : Eq[Int, Int] = Eq
implicit def eqLong : Eq[Long, Long] = Eq
implicit def eqFloat : Eq[Float, Float] = Eq
implicit def eqDouble : Eq[Double, Double] = Eq
implicit def eqUnit : Eq[Unit, Unit] = Eq
// true asymmetry, modeling the (somewhat problematic) nature of equals on Proxies
implicit def eqProxy : Eq[Proxy, Any] = Eq
implicit def eqSeq[T, U](implicit eq: Eq[T, U]): Eq[GenSeq[T], GenSeq[U]] = Eq
implicit def eqSet[T, U](implicit eq: Eq[T, U]): Eq[Set[T], Set[U]] = Eq
implicit def eqByteNum : Eq[Byte, Number] = Eq
implicit def eqNumByte : Eq[Number, Byte] = Eq
implicit def eqCharNum : Eq[Char, Number] = Eq
implicit def eqNumChar : Eq[Number, Char] = Eq
implicit def eqShortNum : Eq[Short, Number] = Eq
implicit def eqNumShort : Eq[Number, Short] = Eq
implicit def eqIntNum : Eq[Int, Number] = Eq
implicit def eqNumInt : Eq[Number, Int] = Eq
implicit def eqLongNum : Eq[Long, Number] = Eq
implicit def eqNumLong : Eq[Number, Long] = Eq
implicit def eqFloatNum : Eq[Float, Number] = Eq
implicit def eqNumFloat : Eq[Number, Float] = Eq
implicit def eqDoubleNum: Eq[Double, Number] = Eq
implicit def eqNumDouble: Eq[Number, Double] = Eq
} |
ihji/dotty | tests/neg/phantom-class-type-parameters.scala | <reponame>ihji/dotty
import Boo._
object Test {
def main(args: Array[String]): Unit = {
val a = new Foo[BooAny](any)
foo(a.asInstanceOf[Foo[BooNothing]].x) // error
foo(a.asInstanceOf[Foo[BooNothing]].y) // error
a match {
case a: Foo[BooNothing] => a.x // error
}
val b = new Foo[BooNothing](a.asInstanceOf[Foo[BooNothing]].x) // error
b.asInstanceOf[Foo[BooAny]].z(any) // error
b match {
case b: Foo[BooAny] => b.z(any) // error
}
}
def foo(x: BooNothing) = println("foo")
}
class Foo[T <: BooAny](val x: T) {
def y: T = x
def z(z: T) = ()
}
object Boo extends Phantom {
type BooAny = this.Any
type BooNothing = this.Nothing
def any: BooAny = assume
}
|
ihji/dotty | tests/neg/phantom-multiversal-AndOr.scala | <gh_stars>0
class BooFunDef1 {
import Universe1._
import UniverseA._
def fun1(b: One | A) = ??? // error
def fun2(b: A | One) = ??? // error
def fun3(b: A | One | Any) = ??? // error // error
def fun4(b: A & One) = ??? // error
def fun5(b: One & A) = ??? // error
def fun6(b: A & One & Any) = ??? // error // error
}
object Universe1 extends Phantom {
type One <: this.Any
}
object UniverseA extends Phantom {
type A <: this.Any
}
|
ihji/dotty | compiler/src/dotty/tools/dotc/core/tasty/TastyUnpickler.scala | <reponame>ihji/dotty
package dotty.tools.dotc
package core
package tasty
import scala.collection.mutable
import TastyFormat._
import TastyBuffer.NameRef
import Names.{Name, TermName, termName, EmptyTermName}
import NameKinds._
import java.util.UUID
object TastyUnpickler {
class UnpickleException(msg: String) extends Exception(msg)
abstract class SectionUnpickler[R](val name: String) {
def unpickle(reader: TastyReader, nameAtRef: NameTable): R
}
class NameTable extends (NameRef => TermName) {
private val names = new mutable.ArrayBuffer[TermName]
def add(name: TermName) = names += name
def apply(ref: NameRef) = names(ref.index)
def contents: Iterable[TermName] = names
}
}
import TastyUnpickler._
class TastyUnpickler(reader: TastyReader) {
import reader._
def this(bytes: Array[Byte]) = this(new TastyReader(bytes))
private val sectionReader = new mutable.HashMap[String, TastyReader]
val nameAtRef = new NameTable
private def check(cond: Boolean, msg: => String) =
if (!cond) throw new UnpickleException(msg)
private def readName(): TermName = nameAtRef(readNameRef())
private def readString(): String = readName().toString
private def readNameContents(): TermName = {
val tag = readByte()
val length = readNat()
val start = currentAddr
val end = start + length
val result = tag match {
case UTF8 =>
goto(end)
termName(bytes, start.index, length)
case QUALIFIED | FLATTENED | EXPANDED | EXPANDPREFIX =>
qualifiedNameKindOfTag(tag)(readName(), readName().asSimpleName)
case UNIQUE =>
val separator = readName().toString
val num = readNat()
val originals = until(end)(readName())
val original = if (originals.isEmpty) EmptyTermName else originals.head
uniqueNameKindOfSeparator(separator)(original, num)
case DEFAULTGETTER | VARIANT | OUTERSELECT =>
numberedNameKindOfTag(tag)(readName(), readNat())
case SIGNED =>
val original = readName()
val result = readName().toTypeName
val params = until(end)(readName().toTypeName)
var sig = Signature(params, result)
if (sig == Signature.NotAMethod) sig = Signature.NotAMethod
SignedName(original, sig)
case _ =>
simpleNameKindOfTag(tag)(readName())
}
assert(currentAddr == end, s"bad name $result $start $currentAddr $end")
result
}
private def readHeader(): UUID = {
for (i <- 0 until header.length)
check(readByte() == header(i), "not a TASTy file")
val major = readNat()
val minor = readNat()
check(major == MajorVersion && minor <= MinorVersion,
s"""TASTy signature has wrong version.
| expected: $MajorVersion.$MinorVersion
| found : $major.$minor""".stripMargin)
new UUID(readUncompressedLong(), readUncompressedLong())
}
private val uuid = readHeader()
locally {
until(readEnd()) { nameAtRef.add(readNameContents()) }
while (!isAtEnd) {
val secName = readString()
val secEnd = readEnd()
sectionReader(secName) = new TastyReader(bytes, currentAddr.index, secEnd.index, currentAddr.index)
goto(secEnd)
}
}
def unpickle[R](sec: SectionUnpickler[R]): Option[R] =
for (reader <- sectionReader.get(sec.name)) yield
sec.unpickle(reader, nameAtRef)
}
|
ihji/dotty | tests/neg/customArgs/phantom-overload-2.scala | <filename>tests/neg/customArgs/phantom-overload-2.scala
class phantomOverload2 {
import Boo._
def foo1() = ???
def foo1(x: A) = ??? // error
def foo1(x1: B)(x2: N) = ??? // error
def foo2(x1: Int, x2: A) = ???
def foo2(x1: A)(x2: Int) = ??? // error
def foo2(x1: N)(x2: A)(x3: Int) = ??? // error
def foo3(x1: Int, x2: A) = ???
def foo3(x1: Int, x2: A)(x3: A) = ??? // error
}
object Boo extends Phantom {
type A <: this.Any
type B <: this.Any
type N = this.Nothing
}
|
ihji/dotty | tests/run/phantom-methods-11.scala | <reponame>ihji/dotty
/* Run this test with
* `run tests/run/xyz.scala -Xprint-diff-del -Xprint:arrayConstructors,phantomRefErasure,phantomErasure,erasure`
* to see the diffs after PhantomRefErasure, PhantomDeclErasure and Erasure.
*/
object Test {
import Boo._
def main(args: Array[String]): Unit = {
fun(
{ println("x1"); boo },
{ println("x2"); boo }
)(
{ println("x3"); boo }
)(
{ println("x4"); boo },
{ println("x5"); boo }
)
new Fun(
{ println("y1"); boo },
{ println("y2"); boo }
)(
{ println("y3"); boo }
)(
{ println("y4"); boo },
{ println("y5"); boo }
)
(new Fun2().fun)(
{ println("z1"); boo },
{ println("z2"); boo }
)(
{ println("z3"); boo }
)(
{ println("z4"); boo },
{ println("z5"); boo }
)
(new Fun2().fun2)(
{ println("w1"); boo },
{ println("w2"); boo }
)(
{ println("w3"); boo }
)(
{ println("w4"); boo },
{ println("w5"); boo }
)
}
def fun(x1: Inky, x2: Inky)(x3: Inky)(x4: Inky, x5: Inky) = {
println("fun")
}
class Fun(y1: Inky, y2: Inky)(y3: Inky)(y4: Inky, y5: Inky) {
println("Fun")
}
class Fun2 {
println("Fun2")
def fun(z1: Inky, z2: Inky)(z3: Inky)(z4: Inky, z5: Inky) = {
println("Fun2fun")
}
def fun2[T](z1: Inky, z2: Inky)(z3: Inky)(z4: Inky, z5: Inky) = {
println("Fun2fun2")
}
}
}
object Boo extends Phantom {
type Inky <: this.Any
def boo: Inky = assume
}
|
ihji/dotty | tests/run/switches.scala | <gh_stars>0
import annotation.switch
object Test extends App {
val x = 3
final val Y = 3
val x1 = x match {
case 0 => 0
case 1 => 1
case 2 => 2
case Y => 3
}
val x2 = (x: @switch) match {
case 0 => 0
case 1 | 2 => 2
case Y => 3
case _ => 4
}
val x3 = (x: @switch) match {
case '0' if x > 0 => 0
case '1' => 1
case '2' => 2
case '3' => 3
case x => 4
}
assert(x1 == 3)
assert(x2 == 3)
assert(x3 == 4)
}
|
ihji/dotty | compiler/test/dotty/tools/vulpix/TestConfiguration.scala | package dotty
package tools
package vulpix
object TestConfiguration {
implicit val defaultOutputDir: String = "../out/"
implicit class RichStringArray(val xs: Array[String]) extends AnyVal {
def and(args: String*): Array[String] = {
val argsArr: Array[String] = args.toArray
xs ++ argsArr
}
}
val noCheckOptions = Array(
"-pagewidth", "120",
"-color:never"
)
val checkOptions = Array(
"-Yno-deep-subtypes",
"-Yno-double-bindings",
"-Yforce-sbt-phases"
)
val classPath = {
val paths = Jars.dottyTestDeps map { p =>
val file = new java.io.File(p)
assert(
file.exists,
s"""|File "$p" couldn't be found. Run `packageAll` from build tool before
|testing.
|
|If running without sbt, test paths need to be set up via environment variables:
|
| - DOTTY_LIBRARY
| - DOTTY_COMPILER
| - DOTTY_INTERFACES
| - DOTTY_EXTRAS
|
|Where these all contain locations, except extras which is a colon
|separated list of jars.
|
|When compiling with Eclipse, you need the sbt-interfaces jar; put
|it in extras."""
)
file.getAbsolutePath
} mkString (":")
Array("-classpath", paths)
}
private val yCheckOptions = Array("-Ycheck:tailrec,resolveSuper,mixin,arrayConstructors,labelDef")
val defaultUnoptimised = noCheckOptions ++ checkOptions ++ yCheckOptions ++ classPath
val defaultOptimised = defaultUnoptimised :+ "-optimise"
val defaultOptions = defaultUnoptimised
val allowDeepSubtypes = defaultOptions diff Array("-Yno-deep-subtypes")
val allowDoubleBindings = defaultOptions diff Array("-Yno-double-bindings")
val picklingOptions = defaultUnoptimised ++ Array(
"-Xprint-types",
"-Ytest-pickler",
"-Yprintpos"
)
val scala2Mode = defaultOptions ++ Array("-language:Scala2")
val explicitUTF8 = defaultOptions ++ Array("-encoding", "UTF8")
val explicitUTF16 = defaultOptions ++ Array("-encoding", "UTF16")
}
|
ihji/dotty | tests/neg/phantom-trait-1.scala | <gh_stars>0
object Boo extends Phantom {
override val assume: this.Nothing = super.assume // error
}
|
ihji/dotty | compiler/src/dotty/tools/dotc/transform/localopt/Devalify.scala | <reponame>ihji/dotty
package dotty.tools.dotc
package transform.localopt
import core.Constants.Constant
import core.Contexts.Context
import core.Flags._
import core.Symbols._
import core.Types._
import ast.Trees._
import scala.collection.mutable
import config.Printers.simplify
import Simplify._
import transform.SymUtils._
/** Inline vals and remove vals that are aliases to other vals.
*
* The notion of alias used here is by-value, so "good" casts are ignored.
*
* This phase has to be careful not to eliminate vals that are parts of other types.
*
* @author DarkDimius, OlivierBlanvillain
*/
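// A minimal before/after sketch (hand-written illustration, not compiler
// output; all names are made up):
//
//   val xs = expensive()
//   val ys = xs          // `ys` is a by-value alias of `xs`
//   consume(ys)
//
// `ys` is dropped and its use is rewritten to read `xs` directly:
//
//   val xs = expensive()
//   consume(xs)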
class Devalify extends Optimisation {
import ast.tpd._
val timesUsed = mutable.HashMap[Symbol, Int]()
val timesUsedAsType = mutable.HashMap[Symbol, Int]()
val defined = mutable.HashSet[Symbol]()
val usedInInnerClass = mutable.HashMap[Symbol, Int]()
// Either a duplicate or a read through a series of immutable fields
val copies = mutable.HashMap[Symbol, Tree]()
def clear(): Unit = {
timesUsed.clear()
timesUsedAsType.clear()
defined.clear()
usedInInnerClass.clear()
copies.clear()
}
def visitType(tp: Type)(implicit ctx: Context): Unit = {
tp.foreachPart(x => x match {
case TermRef(NoPrefix, _) =>
val b4 = timesUsedAsType.getOrElseUpdate(x.termSymbol, 0)
timesUsedAsType.put(x.termSymbol, b4 + 1)
case _ =>
})
}
def doVisit(tree: Tree, used: mutable.HashMap[Symbol, Int])(implicit ctx: Context): Unit = tree match {
case valdef: ValDef if !valdef.symbol.is(Param | Mutable | Module | Lazy) &&
valdef.symbol.exists && !valdef.symbol.owner.isClass =>
defined += valdef.symbol
dropCasts(valdef.rhs) match {
case t: Tree if readingOnlyVals(t) =>
copies.put(valdef.symbol, valdef.rhs)
case _ =>
}
visitType(valdef.symbol.info)
case t: New =>
val normalized = t.tpt.tpe.normalizedPrefix
val symIfExists = normalized.termSymbol
val b4 = used.getOrElseUpdate(symIfExists, 0)
used.put(symIfExists, b4 + 1)
visitType(normalized)
case valdef: ValDef if valdef.symbol.exists && !valdef.symbol.owner.isClass &&
!valdef.symbol.is(Param | Module | Lazy) =>
// TODO: handle params after constructors. Start changing public signatures by eliminating unused arguments.
defined += valdef.symbol
case valdef: ValDef => visitType(valdef.symbol.info)
case t: DefDef => visitType(t.symbol.info)
case t: Typed => visitType(t.tpt.tpe)
case t: TypeApply => t.args.foreach(x => visitType(x.tpe))
case t: RefTree =>
val b4 = used.getOrElseUpdate(t.symbol, 0)
used.put(t.symbol, b4 + 1)
case _ =>
}
def visitor(implicit ctx: Context): Tree => Unit = { tree =>
def crossingClassBoundaries(t: Tree): Boolean = t match {
case _: New => true
case _: Template => true
case _ => false
}
// We shouldn't inline `This` nodes, which we approximate by not inlining
// anything across class boundaries. To do so, we visit every class a
// second time and record what's used in the usedInInnerClass Set.
if (crossingClassBoundaries(tree)) {
// Doing a foreachSubTree(tree) here would work, but would also
// be exponential for deeply nested classes. Instead we do a short
// circuit traversal that doesn't visit further nested classes.
val reVisitClass = new TreeAccumulator[Unit] {
def apply(u: Unit, t: Tree)(implicit ctx: Context): Unit = {
doVisit(t, usedInInnerClass)
if (!crossingClassBoundaries(t))
foldOver((), t)
}
}
reVisitClass.foldOver((), tree)
}
doVisit(tree, timesUsed)
}
def transformer(implicit ctx: Context): Tree => Tree = {
val valsToDrop = defined -- timesUsed.keySet -- timesUsedAsType.keySet
val copiesToReplaceAsDuplicates = copies.filter { x =>
val rhs = dropCasts(x._2)
rhs.isInstanceOf[Literal] || (!rhs.symbol.owner.isClass && !rhs.symbol.is(Method | Mutable))
} -- timesUsedAsType.keySet
// TODO: if a non-synthetic val is duplicate of a synthetic one, rename a synthetic one and drop synthetic flag?
val copiesToReplaceAsUsedOnce =
timesUsed.filter(x => x._2 == 1)
.flatMap(x => copies.get(x._1) match {
case Some(tr) => List((x._1, tr))
case None => Nil
}) -- timesUsedAsType.keySet
val replacements = copiesToReplaceAsDuplicates ++ copiesToReplaceAsUsedOnce -- usedInInnerClass.keySet
val deepReplacer = new TreeMap() {
override def transform(tree: Tree)(implicit ctx: Context): Tree = {
def loop(tree: Tree): Tree =
tree match {
case t: RefTree if replacements.contains(t.symbol) =>
loop(replacements(t.symbol))
case _ => tree
}
super.transform(loop(tree))
}
}
val transformation: Tree => Tree = {
case t: ValDef if valsToDrop.contains(t.symbol) =>
// TODO: Could emit a warning for non synthetic code? This valdef is
// probably something users would want to remove from source...
simplify.println(s"Dropping definition of ${t.symbol.showFullName} as not used")
t.rhs.changeOwner(t.symbol, t.symbol.owner)
case t: ValDef if replacements.contains(t.symbol) =>
simplify.println(s"Dropping definition of ${t.symbol.showFullName} as an alias")
EmptyTree
case t: New =>
val symIfExists = t.tpt.tpe.normalizedPrefix.termSymbol
if (replacements.contains(symIfExists)) {
val newPrefix = deepReplacer.transform(replacements(symIfExists))
val newTpt = t.tpt.tpe match {
case t: NamedType =>
t.derivedSelect(newPrefix.tpe)
}
New(newTpt)
}
else t
case t: RefTree if !t.symbol.is(Method | Param | Mutable) =>
if (replacements.contains(t.symbol))
deepReplacer.transform(replacements(t.symbol)).ensureConforms(t.tpe.widen)
else t
case tree => tree
}
transformation
}
def dropCasts(t: Tree)(implicit ctx: Context): Tree = t match {
// case TypeApply(aio@Select(rec, nm), _) if aio.symbol == defn.Any_asInstanceOf => dropCasts(rec)
case Typed(t, tpe) => t
case _ => t
}
def readingOnlyVals(t: Tree)(implicit ctx: Context): Boolean = dropCasts(t) match {
case Typed(exp, _) => readingOnlyVals(exp)
case TypeApply(fun @ Select(rec, _), List(tp)) =>
val isAsInstanceOf = fun.symbol == defn.Any_asInstanceOf && rec.tpe.derivesFrom(tp.tpe.classSymbol)
isAsInstanceOf && readingOnlyVals(rec)
case t @ Apply(Select(rec, _), Nil) =>
isImmutableAccessor(t) && readingOnlyVals(rec)
case t @ Select(rec, _) if t.symbol.is(Method) =>
isImmutableAccessor(t) && readingOnlyVals(rec)
case t @ Select(qual, _) if !isEffectivelyMutable(t) =>
readingOnlyVals(qual)
case t: Ident if !t.symbol.is(Mutable | Method) && !t.symbol.info.dealias.isInstanceOf[ExprType] =>
desugarIdent(t).forall(readingOnlyVals)
case t: This => true
// null => false, or the following fails devalify:
// trait I {
// def foo: Any = null
// }
// object Main {
// def main = {
// val s: I = null
// s.foo
// }
// }
case Literal(Constant(null)) => false
case t: Literal => true
case _ => false
}
}
|
ihji/dotty | tests/neg/reference-phantom-type-2.scala | object MyPhantoms extends Phantom {
type Inky <: this.Any
type Blinky <: this.Any
type Pinky <: Inky
type Clyde <: Pinky
def pinky: Pinky = assume
def clyde: Clyde = assume
}
import MyPhantoms._
object MyApp {
def run(phantom: Inky) = println("run")
def hide(phantom: Blinky) = println("run")
run(pinky)
run(clyde)
hide(null.asInstanceOf[Blinky]) // error
}
|
ihji/dotty | project/plugins.sbt | <reponame>ihji/dotty<filename>project/plugins.sbt
// Add personal SBT plugins for IDEs, etc to `local-plugins.sbt`
//
// e.g. addSbtPlugin("com.github.mpeltonen" % "sbt-idea" % "1.1.0")
// Scala IDE project file generator
addSbtPlugin("com.typesafe.sbteclipse" % "sbteclipse-plugin" % "5.1.0")
addSbtPlugin("org.scala-js" % "sbt-scalajs" % "0.6.14")
addSbtPlugin("com.eed3si9n" % "sbt-assembly" % "0.14.4")
addSbtPlugin("org.xerial.sbt" % "sbt-sonatype" % "1.1")
addSbtPlugin("com.jsuereth" % "sbt-pgp" % "1.0.0")
addSbtPlugin("org.xerial.sbt" % "sbt-pack" % "0.8.2")
|
ihji/dotty | tests/pos/phantom-in-value-class.scala |
object PhantomInValueClass {
import BooUtil._
new VC("ghi").foo(boo)
}
object BooUtil extends Phantom {
type Boo <: this.Any
def boo: Boo = assume
class VC[T](val x: T) extends AnyVal {
def foo(b: Boo) = println(x)
}
}
|
ihji/dotty | compiler/src/dotty/tools/dotc/transform/localopt/InlineLocalObjects.scala | <filename>compiler/src/dotty/tools/dotc/transform/localopt/InlineLocalObjects.scala
package dotty.tools.dotc
package transform.localopt
import core.Constants.Constant
import core.Contexts.Context
import core.Decorators._
import core.Names.Name
import core.NameKinds.LocalOptInlineLocalObj
import core.Types.Type
import core.StdNames._
import core.Symbols._
import core.Flags._
import ast.Trees._
import scala.collection.mutable
import transform.SymUtils._
import config.Printers.simplify
import Simplify._
/** Rewrite fields of local instances as vals.
*
* If a local instance does not escape the local scope, it will be removed
* later by DropNoEffects, thus implementing the equivalent of (local)
* multi-parameter value classes. The main motivation for this transformation
* is to get rid of the intermediate tuple objects sometimes created when
* pattern matching on Scala2 case classes.
*/
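// A hand-written sketch of the intent (illustrative, not compiler output):
//
//   val t = (a, b)        // local, non-escaping Tuple2
//   f(t._1, t._2)
//
// The getter calls are redirected to fresh locals, after which the tuple
// allocation is dead and can later be dropped by DropNoEffects:
//
//   val x1 = a; val x2 = b
//   val t = (x1, x2)      // no remaining getter calls on `t`
//   f(x1, x2)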
class InlineLocalObjects(val simplifyPhase: Simplify) extends Optimisation {
import ast.tpd._
// ValDefs whose rhs is a case class instantiation: potential candidates.
val candidates = mutable.HashSet[Symbol]()
// ValDefs whose lhs is used with `._1` (or any getter call).
val gettersCalled = mutable.HashSet[Symbol]()
// Map from class to new fields, initialised between visitor and transformer.
var newFieldsMapping: Map[Symbol, Map[Symbol, Symbol]] = null
// Outer key: ValDefs of the classes being torn apart (= candidates.intersect(gettersCalled)).
// Inner key: usages of getters of these classes.
// Inner value: new fields replacing these getters.
def clear(): Unit = {
candidates.clear()
gettersCalled.clear()
newFieldsMapping = null
}
def initNewFieldsMapping()(implicit ctx: Context): Unit =
if (newFieldsMapping == null) {
newFieldsMapping = candidates.intersect(gettersCalled).map { refVal =>
val accessors = refVal.info.classSymbol.caseAccessors.filter(_.isGetter)
val newLocals = accessors.map { x =>
val owner: Symbol = refVal.owner
val name: Name = LocalOptInlineLocalObj.fresh()
val flags: FlagSet = Synthetic
val info: Type = x.asSeenFrom(refVal.info).info.finalResultType.widenDealias
ctx.newSymbol(owner, name, flags, info)
}
(refVal, accessors.zip(newLocals).toMap)
}.toMap
}
// Pattern for candidates to this optimisation: ValDefs where the rhs is an
// immutable case class instantiation.
object NewCaseClassValDef {
def unapply(t: ValDef)(implicit ctx: Context): Option[(Tree, List[Tree])] =
t.rhs match {
case Apply(fun, args)
if t.symbol.info.classSymbol.is(CaseClass) && // is rhs a case class?
!t.symbol.is(Lazy | Mutable) && // is lhs a val?
!t.symbol.info.classSymbol.caseAccessors.exists(_.is(Mutable)) && // is the case class immutable?
fun.symbol.isConstructor && // is rhs a new?
t.tpe.widenDealias == t.symbol.info.finalResultType.widenDealias => // no case class inheritance or enums
Some((fun, args))
case _ => None
}
}
def visitor(implicit ctx: Context): Tree => Unit = {
case t @ NewCaseClassValDef(fun, args) =>
candidates += t.symbol
case t @ Select(qual, _) if isImmutableAccessor(t) =>
gettersCalled += qual.symbol
case _ =>
}
def transformer(implicit ctx: Context): Tree => Tree = {
initNewFieldsMapping();
{
case t @ NewCaseClassValDef(fun, args) if newFieldsMapping.contains(t.symbol) =>
val newFields = newFieldsMapping(t.symbol).values.toList
val newFieldsDefs = newFields.zip(args).map { case (nf, arg) =>
val rhs = arg.changeOwnerAfter(t.symbol, nf.symbol, simplifyPhase)
ValDef(nf.asTerm, rhs)
}
val recreate = cpy.ValDef(t)(rhs = fun.appliedToArgs(newFields.map(x => ref(x))))
simplify.println(s"Replacing ${t.symbol.fullName} with stack-allocated fields ($newFields)")
Thicket(newFieldsDefs :+ recreate)
case t @ Select(rec, _) if isImmutableAccessor(t) =>
newFieldsMapping.getOrElse(rec.symbol, Map.empty).get(t.symbol) match {
case None => t
case Some(newSym) => ref(newSym)
}
case t => t
}
}
}
|
ihji/dotty | tests/pos/harmonize.scala | <reponame>ihji/dotty<gh_stars>0
object Test {
def main(args: Array[String]) = {
val x = true
val n = 1
inline val nn = 2
val y = if (x) 'A' else n
val z: Int = y
val yy1 = n match {
case 1 => 'A'
case 2 => n
case 3 => 1.0
}
val zz1: AnyVal = yy1 // no widening
val yy2 = n match {
case 1 => 'A'
case 2 => nn
case 3 => 1.0f
}
val zz2: Float = yy2 // widening to Float
val yy3 = n match {
case 1 => 'A'
case 2 => 3L
case 3 => 1.0f
}
val zz3: Double = yy3 // widening to Double
val a = try {
'A'
} catch {
case ex: Exception => nn
case ex: Error => 3L
}
val b: Long = a
val xs = List(1.0, nn, 'c')
val ys: List[Double] = xs
}
inline val b = 33
def f(): Int = b + 1
val a1 = Array(b, 33, 'a')
val b1: Array[Int] = a1
val a2 = Array(b, 33, 'a', f())
val b2: Array[Int] = a2
val a3 = Array(1.0f, 'a', 0)
val b3: Array[Float] = a3
val a4 = Array(1.0f, 1L)
val b4: Array[Double] = a4
val a5 = Array(1.0f, 1L, f())
val b5: Array[AnyVal] = a5
val a6 = Array(1.0f, 1234567890)
val b6: Array[AnyVal] = a6
}
|
ihji/dotty | tests/neg/i1641.scala | <filename>tests/neg/i1641.scala
package bar { object bippy extends (Double => String) { def apply(x: Double): String = "Double" } }
package object println { def bippy(x: Int, y: Int, z: Int) = "(Int, Int, Int)" }
object Test {
def main(args: Array[String]): Unit = {
println(bar.bippy(5.5)) // error
println(bar.bippy(1, 2, 3)) // error
}
}
|
ihji/dotty | tests/pos/phantom-Eq2/Phantom-Eq_1.scala | <reponame>ihji/dotty<gh_stars>0
/* This is a version of ../pos/phantomEq.scala that tests phantom with separate compilation */
object EqUtil extends Phantom {
type PhantomEq[-L, -R] <: this.Any
type PhantomEqEq[T] = PhantomEq[T, T]
implicit class EqualsDeco[T](val x: T) extends AnyVal {
def ===[U] (y: U)(implicit ce: PhantomEq[T, U]) = x.equals(y)
}
implicit def eqString: PhantomEqEq[String] = assume
implicit def eqInt: PhantomEqEq[Int] = assume
implicit def eqDouble: PhantomEqEq[Double] = assume
implicit def eqByteNum: PhantomEq[Byte, Number] = assume
implicit def eqNumByte: PhantomEq[Number, Byte] = assume
implicit def eqSeq[T, U](implicit eq: PhantomEq[T, U]): PhantomEq[Seq[T], Seq[U]] = assume
}
|
ihji/dotty | tests/run/phantom-assume-1.scala |
object Test {
import Boo._
def main(args: Array[String]): Unit = {
Boo.assume1
Boo.assume2
Boo.assume3
}
}
object Boo extends Phantom {
type BooAny = this.Any
def assume1: BooAny = assume
def assume2: BooAny = this.assume
def assume3: BooAny = Boo.assume
}
|
ihji/dotty | tests/run/phantom-methods-8.scala | /* Run this test with
* `run tests/run/xyz.scala -Xprint-diff-del -Xprint:arrayConstructors,phantomRefErasure,phantomErasure,erasure`
* to see the diffs after PhantomRefErasure, PhantomDeclErasure and Erasure.
*/
object Test {
import Boo._
def main(args: Array[String]): Unit = {
pacFun4(inky)
}
def pacFun4(clyde: Inky) = {
println("pacFun4")
}
def inky: Inky = {
println("inky")
Boo.boo[Inky]
}
}
object Boo extends Phantom {
type Inky <: Boo.Any
def boo[B <: Boo.Any]: B = assume
}
|
ihji/dotty | tests/neg/t5729.scala | trait T[X]
object Test {
def join(in: Seq[T[_]]): Int = ???
def join[S](in: Seq[T[S]]): String = ???
join(null: Seq[T[_]]) // error: ambiguous
}
object C {
def join(in: Seq[List[_]]): Int = error("TODO")
def join[S](in: Seq[List[S]]): String = error("TODO")
join(Seq[List[Int]]()) // error: ambiguous
//
// ./a.scala:13: error: ambiguous reference to overloaded definition,
// both method join in object C of type [S](in: Seq[List[S]])String
// and method join in object C of type (in: Seq[List[_]])Int
// match argument types (Seq[List[Int]])
// join(Seq[List[Int]]())
// ^
// one error found
}
|
ihji/dotty | tests/neg/phantom-trait-2.scala | <gh_stars>0
object Boo1 extends Phantom {
class A extends this.Any // error
class B extends this.Nothing // error
}
|
ihji/dotty | compiler/src/dotty/tools/dotc/transform/LiftTry.scala | <filename>compiler/src/dotty/tools/dotc/transform/LiftTry.scala
package dotty.tools.dotc
package transform
import TreeTransforms._
import core.DenotTransformers._
import core.Symbols._
import core.Contexts._
import core.Types._
import core.Flags._
import core.Decorators._
import core.NameKinds.LiftedTreeName
import NonLocalReturns._
/** Lifts try's that might be executed on non-empty expression stacks
* to their own methods. I.e.
*
* try body catch handler
*
* is lifted to
*
* { def liftedTree$n() = try body catch handler; liftedTree$n() }
*/
class LiftTry extends MiniPhase with IdentityDenotTransformer { thisTransform =>
import ast.tpd._
/** the following two members override abstract members in Transform */
val phaseName: String = "liftTry"
val treeTransform = new Transform(needLift = false)
val liftingTransform = new Transform(needLift = true)
class Transform(needLift: Boolean) extends TreeTransform {
def phase = thisTransform
override def prepareForApply(tree: Apply)(implicit ctx: Context) =
if (tree.fun.symbol.is(Label)) this
else liftingTransform
override def prepareForValDef(tree: ValDef)(implicit ctx: Context) =
if (!tree.symbol.exists ||
tree.symbol.isSelfSym ||
tree.symbol.owner == ctx.owner.enclosingMethod) this
else liftingTransform
override def prepareForAssign(tree: Assign)(implicit ctx: Context) =
if (tree.lhs.symbol.maybeOwner == ctx.owner.enclosingMethod) this
else liftingTransform
override def prepareForReturn(tree: Return)(implicit ctx: Context) =
if (!isNonLocalReturn(tree)) this
else liftingTransform
override def prepareForTemplate(tree: Template)(implicit ctx: Context) =
treeTransform
override def transformTry(tree: Try)(implicit ctx: Context, info: TransformerInfo): Tree =
if (needLift) {
ctx.debuglog(i"lifting tree at ${tree.pos}, current owner = ${ctx.owner}")
val fn = ctx.newSymbol(
ctx.owner, LiftedTreeName.fresh(), Synthetic | Method,
MethodType(Nil, tree.tpe.widenIfUnstable), coord = tree.pos)
tree.changeOwnerAfter(ctx.owner, fn, thisTransform)
Block(DefDef(fn, tree) :: Nil, ref(fn).appliedToNone)
}
else tree
}
}
|
ihji/dotty | tests/pos/reference/phantom-types.scala | <filename>tests/pos/reference/phantom-types.scala
object MyPhantoms extends Phantom {
type Inky <: this.Any
type Blinky <: this.Any
type Pinky <: Inky
type Clyde <: Pinky
def pinky: Pinky = assume
def clyde: Clyde = assume
}
import MyPhantoms._
object MyApp {
def run(phantom: Inky) = println("run")
def hide(phantom: Blinky) = println("run")
run(pinky)
run(clyde)
}
object MyOtherPhantom extends Phantom {
type MyPhantom <: this.Any
def myPhantom: MyPhantom = assume
def f1(a: Int, b: MyPhantom, c: Int): Int = a + c
def f2 = {
f1(3, myPhantom, 2)
}
}
|
ihji/dotty | tests/pos/enum-List-control.scala | <reponame>ihji/dotty<filename>tests/pos/enum-List-control.scala
abstract sealed class List[T] extends Enum
object List {
final case class Cons[T](x: T, xs: List[T]) extends List[T] {
def enumTag = 0
}
final case class Nil[T]() extends List[T] {
def enumTag = 1
}
}
object Test {
import List._
val xs = Cons(1, Cons(2, Cons(3, Nil())))
def main(args: Array[String]) = println(xs)
}
|
ihji/dotty | compiler/src/dotty/tools/dotc/parsing/Parsers.scala | <reponame>ihji/dotty
package dotty.tools
package dotc
package parsing
import scala.collection.mutable.ListBuffer
import scala.collection.immutable.BitSet
import util.{ SourceFile, SourcePosition }
import Tokens._
import Scanners._
import MarkupParsers._
import core._
import Flags._
import Contexts._
import Names._
import NameKinds.WildcardParamName
import ast.{Positioned, Trees, untpd}
import ast.Trees._
import Decorators._
import StdNames._
import util.Positions._
import Constants._
import ScriptParsers._
import Comments._
import scala.annotation.{tailrec, switch}
import util.DotClass
import rewrite.Rewrites.patch
object Parsers {
import ast.untpd._
import reporting.diagnostic.Message
import reporting.diagnostic.messages._
case class OpInfo(operand: Tree, operator: Ident, offset: Offset)
class ParensCounters {
private var parCounts = new Array[Int](lastParen - firstParen)
def count(tok: Token) = parCounts(tok - firstParen)
def change(tok: Token, delta: Int) = parCounts(tok - firstParen) += delta
def nonePositive: Boolean = parCounts forall (_ <= 0)
}
@sharable object Location extends Enumeration {
val InParens, InBlock, InPattern, ElseWhere = Value
}
@sharable object ParamOwner extends Enumeration {
val Class, Type, TypeParam, Def = Value
}
private implicit class AddDeco(val buf: ListBuffer[Tree]) extends AnyVal {
def +++=(x: Tree) = x match {
case x: Thicket => buf ++= x.trees
case x => buf += x
}
}
/** The parse starting point depends on whether the source file is self-contained:
* if not, the AST will be supplemented.
*/
def parser(source: SourceFile)(implicit ctx: Context) =
if (source.isSelfContained) new Parser(source)
else new ScriptParser(source)
abstract class ParserCommon(val source: SourceFile)(implicit ctx: Context) extends DotClass {
val in: ScannerCommon
/* ------------- POSITIONS ------------------------------------------- */
/** Positions tree.
* If `t` does not have a position yet, set its position to the given one.
*/
def atPos[T <: Positioned](pos: Position)(t: T): T =
if (t.pos.isSourceDerived) t else t.withPos(pos)
def atPos[T <: Positioned](start: Offset, point: Offset, end: Offset)(t: T): T =
atPos(Position(start, end, point))(t)
/** If the last read offset is strictly greater than `start`, position tree
* to position spanning from `start` to last read offset, with given point.
* If the last offset is less than or equal to start, the tree `t` did not
* consume any source for its construction. In this case, don't position it yet,
* but wait for its position to be determined by `setChildPositions` when the
* parent node is positioned.
*/
def atPos[T <: Positioned](start: Offset, point: Offset)(t: T): T =
if (in.lastOffset > start) atPos(start, point, in.lastOffset)(t) else t
def atPos[T <: Positioned](start: Offset)(t: T): T =
atPos(start, start)(t)
def startOffset(t: Positioned): Int =
if (t.pos.exists) t.pos.start else in.offset
def pointOffset(t: Positioned): Int =
if (t.pos.exists) t.pos.point else in.offset
def endOffset(t: Positioned): Int =
if (t.pos.exists) t.pos.end else in.lastOffset
def nameStart: Offset =
if (in.token == BACKQUOTED_IDENT) in.offset + 1 else in.offset
def sourcePos(off: Int = in.offset): SourcePosition =
source atPos Position(off)
/* ------------- ERROR HANDLING ------------------------------------------- */
/** The offset where the last syntax error was reported, or if a skip to a
* safepoint occurred afterwards, the offset of the safe point.
*/
protected var lastErrorOffset : Int = -1
/** Issue an error at given offset if beyond last error offset
* and update lastErrorOffset.
*/
def syntaxError(msg: => Message, offset: Int = in.offset): Unit =
if (offset > lastErrorOffset) {
val length = if (in.name != null) in.name.show.length else 0
syntaxError(msg, Position(offset, offset + length))
lastErrorOffset = in.offset
}
/** Unconditionally issue an error at given position, without
* updating lastErrorOffset.
*/
def syntaxError(msg: => Message, pos: Position): Unit =
ctx.error(msg, source atPos pos)
}
class Parser(source: SourceFile)(implicit ctx: Context) extends ParserCommon(source) {
val in: Scanner = new Scanner(source)
val openParens = new ParensCounters
/** This is the general parse entry point.
* Overridden by ScriptParser
*/
def parse(): Tree = {
val t = compilationUnit()
accept(EOF)
t
}
/* -------------- TOKEN CLASSES ------------------------------------------- */
def isIdent = in.token == IDENTIFIER || in.token == BACKQUOTED_IDENT
def isIdent(name: Name) = in.token == IDENTIFIER && in.name == name
def isSimpleLiteral = simpleLiteralTokens contains in.token
def isLiteral = literalTokens contains in.token
def isNumericLit = numericLitTokens contains in.token
def isModifier = modifierTokens contains in.token
def isExprIntro = canStartExpressionTokens contains in.token
def isBindingIntro = canStartBindingTokens contains in.token
def isTemplateIntro = templateIntroTokens contains in.token
def isDclIntro = dclIntroTokens contains in.token
def isStatSeqEnd = in.token == RBRACE || in.token == EOF
def mustStartStat = mustStartStatTokens contains in.token
def isDefIntro(allowedMods: BitSet) =
in.token == AT || (allowedMods contains in.token) || (defIntroTokens contains in.token)
def isCaseIntro =
in.token == AT || (modifierTokensOrCase contains in.token)
def isStatSep: Boolean =
in.token == NEWLINE || in.token == NEWLINES || in.token == SEMI
/* ------------- ERROR HANDLING ------------------------------------------- */
/** The offset of the last time when a statement on a new line was definitely
* encountered in the current scope or an outer scope.
*/
private var lastStatOffset = -1
def setLastStatOffset() =
if (mustStartStat && in.isAfterLineEnd())
lastStatOffset = in.offset
/** Is offset1 less or equally indented than offset2?
* This is the case if the characters between the preceding end-of-line and offset1
* are a prefix of the characters between the preceding end-of-line and offset2.
*/
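// Illustrative example (hand-worked): for the lines "  a" and "    b",
// the characters before `a` are "  " and those before `b` are "    ";
// since "  " is a prefix of "    ", the start of `a` is less or equally
// indented than the start of `b`.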
def isLeqIndented(offset1: Int, offset2: Int): Boolean = {
def recur(idx1: Int, idx2: Int): Boolean =
idx1 == offset1 ||
idx2 < offset2 && source(idx1) == source(idx2) && recur(idx1 + 1, idx2 + 1)
recur(source.startOfLine(offset1), source.startOfLine(offset2))
}
/** Skip on error to next safe point.
* Safe points are:
* - Closing braces, provided they match an opening brace before the error point.
* - Closing parens and brackets, provided they match an opening parent or bracket
* before the error point and there are no intervening other kinds of parens.
* - Semicolons and newlines, provided there are no intervening braces.
* - Definite statement starts on new lines, provided they are not more indented
* than the last known statement start before the error point.
*/
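// Illustrative example (hand-written): in `{ val x = ; println(1) }` the
// parser reports an error at the `;`; since no braces were skipped up to
// that point, the `;` itself is a safe point and parsing resumes with
// `println(1)`.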
protected def skip(): Unit = {
val skippedParens = new ParensCounters
while (true) {
(in.token: @switch) match {
case EOF =>
return
case SEMI | NEWLINE | NEWLINES =>
if (skippedParens.count(LBRACE) == 0) return
case RBRACE =>
if (openParens.count(LBRACE) > 0 && skippedParens.count(LBRACE) == 0)
return
skippedParens.change(LBRACE, -1)
case RPAREN =>
if (openParens.count(LPAREN) > 0 && skippedParens.nonePositive)
return
skippedParens.change(LPAREN, -1)
case RBRACKET =>
if (openParens.count(LBRACKET) > 0 && skippedParens.nonePositive)
return
skippedParens.change(LBRACKET, -1)
case LBRACE =>
skippedParens.change(LBRACE, + 1)
case LPAREN =>
skippedParens.change(LPAREN, + 1)
case LBRACKET=>
skippedParens.change(LBRACKET, + 1)
case _ =>
if (mustStartStat &&
in.isAfterLineEnd() &&
isLeqIndented(in.offset, lastStatOffset max 0))
return
}
in.nextToken()
}
}
def warning(msg: => Message, sourcePos: SourcePosition) =
ctx.warning(msg, sourcePos)
def warning(msg: => Message, offset: Int = in.offset) =
ctx.warning(msg, source atPos Position(offset))
def deprecationWarning(msg: => Message, offset: Int = in.offset) =
ctx.deprecationWarning(msg, source atPos Position(offset))
/** Issue an error at current offset that input is incomplete */
def incompleteInputError(msg: => Message) =
ctx.incompleteInputError(msg, source atPos Position(in.offset))
/** If at end of file, issue an incompleteInputError.
* Otherwise issue a syntax error and skip to next safe point.
*/
def syntaxErrorOrIncomplete(msg: => Message) =
if (in.token == EOF) incompleteInputError(msg)
else {
syntaxError(msg)
skip()
lastErrorOffset = in.offset
} // DEBUG
/** Consume one token of the specified type, or
* signal an error if it is not there.
*
* @return The offset at the start of the token to accept
*/
def accept(token: Int): Int = {
val offset = in.offset
if (in.token != token) {
syntaxErrorOrIncomplete(ExpectedTokenButFound(token, in.token))
}
if (in.token == token) in.nextToken()
offset
}
/** semi = nl {nl} | `;'
* nl = `\n' // where allowed
*/
def acceptStatSep(): Unit = in.token match {
case NEWLINE | NEWLINES => in.nextToken()
case _ => accept(SEMI)
}
def acceptStatSepUnlessAtEnd(altEnd: Token = EOF) =
if (!isStatSeqEnd && in.token != altEnd) acceptStatSep()
def errorTermTree = atPos(in.offset) { Literal(Constant(null)) }
private var inFunReturnType = false
private def fromWithinReturnType[T](body: => T): T = {
val saved = inFunReturnType
try {
inFunReturnType = true
body
} finally inFunReturnType = saved
}
/** A placeholder for dummy arguments that should be re-parsed as parameters */
val ParamNotArg = EmptyTree
/** A flag indicating we are parsing in the annotations of a primary
* class constructor
*/
private var inClassConstrAnnots = false
private def fromWithinClassConstr[T](body: => T): T = {
val saved = inClassConstrAnnots
try {
inClassConstrAnnots = true
body
} finally {
inClassConstrAnnots = saved
if (lookaheadTokens.nonEmpty) {
in.insertTokens(lookaheadTokens.toList)
lookaheadTokens.clear()
}
}
}
/** Lookahead tokens for the case of annotations in class constructors.
* We store tokens in lookahead as long as they can form a valid prefix
* of a class parameter clause.
*/
private var lookaheadTokens = new ListBuffer[TokenData]
/** Copy current token to end of lookahead */
private def saveLookahead() = {
val lookahead = new TokenData{}
lookahead.copyFrom(in)
lookaheadTokens += lookahead
}
def migrationWarningOrError(msg: String, offset: Int = in.offset) =
if (in.isScala2Mode)
ctx.migrationWarning(msg, source atPos Position(offset))
else
syntaxError(msg, offset)
/* ---------- TREE CONSTRUCTION ------------------------------------------- */
/** Convert tree to formal parameter list
*/
def convertToParams(tree: Tree): List[ValDef] = tree match {
case Parens(t) => convertToParam(t) :: Nil
case Tuple(ts) => ts map (convertToParam(_))
case t => convertToParam(t) :: Nil
}
/** Convert tree to formal parameter
*/
def convertToParam(tree: Tree, mods: Modifiers = Modifiers(), expected: String = "formal parameter"): ValDef = tree match {
case Ident(name) =>
makeParameter(name.asTermName, TypeTree(), mods) withPos tree.pos
case Typed(Ident(name), tpt) =>
makeParameter(name.asTermName, tpt, mods) withPos tree.pos
case _ =>
syntaxError(s"not a legal $expected", tree.pos)
makeParameter(nme.ERROR, tree, mods)
}
/** Convert (qual)ident to type identifier
*/
def convertToTypeId(tree: Tree): Tree = tree match {
case id @ Ident(name) =>
cpy.Ident(id)(name.toTypeName)
case id @ Select(qual, name) =>
cpy.Select(id)(qual, name.toTypeName)
case _ =>
syntaxError(IdentifierExpected(tree.show), tree.pos)
tree
}
/* --------------- PLACEHOLDERS ------------------------------------------- */
/** The implicit parameters introduced by `_` in the current expression.
* Parameters appear in reverse order.
*/
var placeholderParams: List[ValDef] = Nil
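// Illustrative example (hand-written; the fresh names are made up): while
// parsing `_ + _`, two synthetic params are created and prepended, leaving
// placeholderParams == List(_$2, _$1); once the expression is complete the
// list is reversed to build the function (_$1, _$2) => _$1 + _$2.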
def checkNoEscapingPlaceholders[T](op: => T): T = {
val savedPlaceholderParams = placeholderParams
placeholderParams = Nil
try op
finally {
placeholderParams match {
case vd :: _ => syntaxError(UnboundPlaceholderParameter(), vd.pos)
case _ =>
}
placeholderParams = savedPlaceholderParams
}
}
def isWildcard(t: Tree): Boolean = t match {
case Ident(name1) => placeholderParams.nonEmpty && name1 == placeholderParams.head.name
case Typed(t1, _) => isWildcard(t1)
case Annotated(t1, _) => isWildcard(t1)
case Parens(t1) => isWildcard(t1)
case _ => false
}
/* -------------- XML ---------------------------------------------------- */
/** the markup parser */
lazy val xmlp = new MarkupParser(this, true)
object symbXMLBuilder extends SymbolicXMLBuilder(this, true) // DEBUG choices
def xmlLiteral() : Tree = xmlp.xLiteral
def xmlLiteralPattern() : Tree = xmlp.xLiteralPattern
/* -------- COMBINATORS -------------------------------------------------------- */
def enclosed[T](tok: Token, body: => T): T = {
accept(tok)
openParens.change(tok, 1)
try body
finally {
accept(tok + 1)
openParens.change(tok, -1)
}
}
def inParens[T](body: => T): T = enclosed(LPAREN, body)
def inBraces[T](body: => T): T = enclosed(LBRACE, body)
def inBrackets[T](body: => T): T = enclosed(LBRACKET, body)
def inDefScopeBraces[T](body: => T): T = {
val saved = lastStatOffset
try inBraces(body)
finally lastStatOffset = saved
}
/** part { `separator` part }
*/
def tokenSeparated[T](separator: Int, part: () => T): List[T] = {
val ts = new ListBuffer[T] += part()
while (in.token == separator) {
in.nextToken()
ts += part()
}
ts.toList
}
def commaSeparated[T](part: () => T): List[T] = tokenSeparated(COMMA, part)
/* --------- OPERAND/OPERATOR STACK --------------------------------------- */
var opStack: List[OpInfo] = Nil
def checkAssoc(offset: Token, op1: Name, op2: Name, op2LeftAssoc: Boolean): Unit =
if (isLeftAssoc(op1) != op2LeftAssoc)
syntaxError(MixedLeftAndRightAssociativeOps(op1, op2, op2LeftAssoc), offset)
def reduceStack(base: List[OpInfo], top: Tree, prec: Int, leftAssoc: Boolean, op2: Name): Tree = {
if (opStack != base && precedence(opStack.head.operator.name) == prec)
checkAssoc(opStack.head.offset, opStack.head.operator.name, op2, leftAssoc)
def recur(top: Tree): Tree = {
if (opStack == base) top
else {
val opInfo = opStack.head
val opPrec = precedence(opInfo.operator.name)
if (prec < opPrec || leftAssoc && prec == opPrec) {
opStack = opStack.tail
recur {
atPos(opInfo.operator.pos union opInfo.operand.pos union top.pos) {
InfixOp(opInfo.operand, opInfo.operator, top)
}
}
}
else top
}
}
recur(top)
}
/** operand { infixop operand} [postfixop],
* respecting rules of associativity and precedence.
* @param notAnOperator a name that does not count as an operator.
* @param maybePostfix postfix operators are allowed.
*/
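// Hand-worked illustration of the operand/operator stack: parsing `a + b * c`,
// `+` is pushed with operand `a`; `*` binds tighter, so it is pushed on top
// with operand `b`; the final reduceStack then folds the stack into
// InfixOp(a, +, InfixOp(b, *, c)).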
def infixOps(
first: Tree, canStartOperand: Token => Boolean, operand: () => Tree,
isType: Boolean = false,
notAnOperator: Name = nme.EMPTY,
maybePostfix: Boolean = false): Tree = {
val base = opStack
var top = first
while (isIdent && in.name != notAnOperator) {
val op = if (isType) typeIdent() else termIdent()
top = reduceStack(base, top, precedence(op.name), isLeftAssoc(op.name), op.name)
opStack = OpInfo(top, op, in.offset) :: opStack
newLineOptWhenFollowing(canStartOperand)
if (maybePostfix && !canStartOperand(in.token)) {
val topInfo = opStack.head
opStack = opStack.tail
val od = reduceStack(base, topInfo.operand, 0, true, in.name)
return atPos(startOffset(od), topInfo.offset) {
PostfixOp(od, topInfo.operator)
}
}
top = operand()
}
reduceStack(base, top, 0, true, in.name)
}
/* -------- IDENTIFIERS AND LITERALS ------------------------------------------- */
/** Accept identifier and return its name as a term name. */
def ident(): TermName =
if (isIdent) {
val name = in.name
in.nextToken()
name
} else {
syntaxErrorOrIncomplete(ExpectedTokenButFound(IDENTIFIER, in.token))
nme.ERROR
}
/** Accept identifier and return Ident with its name as a term name. */
def termIdent(): Ident = atPos(in.offset) {
makeIdent(in.token, ident())
}
/** Accept identifier and return Ident with its name as a type name. */
def typeIdent(): Ident = atPos(in.offset) {
makeIdent(in.token, ident().toTypeName)
}
private def makeIdent(tok: Token, name: Name) =
if (tok == BACKQUOTED_IDENT) BackquotedIdent(name)
else Ident(name)
def wildcardIdent(): Ident =
atPos(accept(USCORE)) { Ident(nme.WILDCARD) }
def termIdentOrWildcard(): Ident =
if (in.token == USCORE) wildcardIdent() else termIdent()
/** Accept identifier acting as a selector on given tree `t`. */
def selector(t: Tree): Tree =
atPos(startOffset(t), in.offset) { Select(t, ident()) }
/** Selectors ::= id { `.' id }
*
* Accept `.' separated identifiers acting as selectors on given tree `t`.
* @param finish An alternative parse in case the next token is not an identifier.
* If the alternative does not apply, its tree argument is returned unchanged.
*/
def selectors(t: Tree, finish: Tree => Tree): Tree = {
val t1 = finish(t)
if (t1 ne t) t1 else dotSelectors(selector(t), finish)
}
/** DotSelectors ::= { `.' id }
*
* Accept `.' separated identifiers acting as selectors on given tree `t`.
* @param finish An alternative parse in case the token following a `.' is not an identifier.
* If the alternative does not apply, its tree argument is returned unchanged.
*/
def dotSelectors(t: Tree, finish: Tree => Tree = id) =
if (in.token == DOT) { in.nextToken(); selectors(t, finish) }
else t
private val id: Tree => Tree = x => x
/** Path ::= StableId
* | [id `.'] this
*
* @param thisOK If true, the path can end with the keyword `this`.
* If false, another selection is required after the `this`.
* @param finish An alternative parse in case the token following a `.' is not an identifier.
* If the alternative does not apply, its tree argument is returned unchanged.
*/
def path(thisOK: Boolean, finish: Tree => Tree = id): Tree = {
val start = in.offset
def handleThis(qual: Ident) = {
in.nextToken()
val t = atPos(start) { This(qual) }
if (!thisOK && in.token != DOT) syntaxError(DanglingThisInPath(), t.pos)
dotSelectors(t, finish)
}
def handleSuper(qual: Ident) = {
in.nextToken()
val mix = mixinQualifierOpt()
val t = atPos(start) { Super(This(qual), mix) }
accept(DOT)
dotSelectors(selector(t), finish)
}
if (in.token == THIS) handleThis(EmptyTypeIdent)
else if (in.token == SUPER) handleSuper(EmptyTypeIdent)
else {
val t = termIdent()
if (in.token == DOT) {
def qual = cpy.Ident(t)(t.name.toTypeName)
in.nextToken()
if (in.token == THIS) handleThis(qual)
else if (in.token == SUPER) handleSuper(qual)
else selectors(t, finish)
}
else t
}
}
/** MixinQualifier ::= `[' id `]'
*/
def mixinQualifierOpt(): Ident =
if (in.token == LBRACKET) inBrackets(atPos(in.offset) { typeIdent() })
else EmptyTypeIdent
/** StableId ::= id
* | Path `.' id
* | [id '.'] super [`[' id `]']`.' id
*/
def stableId(): Tree =
path(thisOK = false)
/** QualId ::= id {`.' id}
*/
def qualId(): Tree =
dotSelectors(termIdent())
/** SimpleExpr ::= literal
* | symbol
* | null
* @param negOffset The offset of a preceding `-' sign, if any.
* If the literal is not negated, negOffset = in.offset.
*/
def literal(negOffset: Int = in.offset, inPattern: Boolean = false): Tree = {
def finish(value: Any): Tree = {
val t = atPos(negOffset) { Literal(Constant(value)) }
in.nextToken()
t
}
val isNegated = negOffset < in.offset
atPos(negOffset) {
if (in.token == SYMBOLLIT) atPos(in.skipToken()) { SymbolLit(in.strVal) }
else if (in.token == INTERPOLATIONID) interpolatedString(inPattern)
else finish(in.token match {
case CHARLIT => in.charVal
case INTLIT => in.intVal(isNegated).toInt
case LONGLIT => in.intVal(isNegated)
case FLOATLIT => in.floatVal(isNegated).toFloat
case DOUBLELIT => in.floatVal(isNegated)
case STRINGLIT | STRINGPART => in.strVal
case TRUE => true
case FALSE => false
case NULL => null
case _ =>
syntaxErrorOrIncomplete(IllegalLiteral())
null
})
}
}
private def interpolatedString(inPattern: Boolean = false): Tree = atPos(in.offset) {
val segmentBuf = new ListBuffer[Tree]
val interpolator = in.name
in.nextToken()
while (in.token == STRINGPART) {
segmentBuf += Thicket(
literal(inPattern = inPattern),
atPos(in.offset) {
if (in.token == IDENTIFIER)
termIdent()
else if (in.token == USCORE && inPattern) {
in.nextToken()
Ident(nme.WILDCARD)
}
else if (in.token == THIS) {
in.nextToken()
This(EmptyTypeIdent)
}
else if (in.token == LBRACE)
if (inPattern) Block(Nil, inBraces(pattern()))
else expr()
else {
ctx.error(InterpolatedStringError(), source atPos Position(in.offset))
EmptyTree
}
})
}
if (in.token == STRINGLIT) segmentBuf += literal(inPattern = inPattern)
InterpolatedString(interpolator, segmentBuf.toList)
}
/* ------------- NEW LINES ------------------------------------------------- */
def newLineOpt(): Unit = {
if (in.token == NEWLINE) in.nextToken()
}
def newLinesOpt(): Unit = {
if (in.token == NEWLINE || in.token == NEWLINES)
in.nextToken()
}
def newLineOptWhenFollowedBy(token: Int): Unit = {
// note: next is defined here because current == NEWLINE
if (in.token == NEWLINE && in.next.token == token) newLineOpt()
}
def newLineOptWhenFollowing(p: Int => Boolean): Unit = {
// note: next is defined here because current == NEWLINE
if (in.token == NEWLINE && p(in.next.token)) newLineOpt()
}
/* ------------- TYPES ------------------------------------------------------ */
/** Same as [[typ]], but if this results in a wildcard it emits a syntax error and
* returns a tree for type `Any` instead.
*/
def toplevelTyp(): Tree = {
val t = typ()
findWildcardType(t) match {
case Some(wildcardPos) =>
syntaxError(UnboundWildcardType(), wildcardPos)
scalaAny
case None => t
}
}
/** Type ::= [`implicit'] FunArgTypes `=>' Type
* | HkTypeParamClause `->' Type
* | InfixType
* FunArgTypes ::= InfixType
* | `(' [ FunArgType {`,' FunArgType } ] `)'
*/
def typ(): Tree = {
val start = in.offset
val isImplicit = in.token == IMPLICIT
if (isImplicit) in.nextToken()
def functionRest(params: List[Tree]): Tree =
atPos(start, accept(ARROW)) {
val t = typ()
if (isImplicit) new ImplicitFunction(params, t) else Function(params, t)
}
val t =
if (in.token == LPAREN) {
in.nextToken()
if (in.token == RPAREN) {
in.nextToken()
functionRest(Nil)
}
else {
openParens.change(LPAREN, 1)
val ts = commaSeparated(funArgType)
openParens.change(LPAREN, -1)
accept(RPAREN)
if (isImplicit || in.token == ARROW) functionRest(ts)
else {
for (t <- ts)
if (t.isInstanceOf[ByNameTypeTree])
syntaxError(ByNameParameterNotSupported())
val tuple = atPos(start) { makeTupleOrParens(ts) }
infixTypeRest(
refinedTypeRest(
withTypeRest(
annotTypeRest(
simpleTypeRest(tuple)))))
}
}
}
else if (in.token == LBRACKET) {
val start = in.offset
val tparams = typeParamClause(ParamOwner.TypeParam)
if (in.token == ARROW)
atPos(start, in.skipToken())(LambdaTypeTree(tparams, typ()))
else { accept(ARROW); typ() }
}
else infixType()
in.token match {
case ARROW => functionRest(t :: Nil)
case FORSOME => syntaxError(ExistentialTypesNoLongerSupported()); t
case _ =>
if (isImplicit && !t.isInstanceOf[ImplicitFunction])
syntaxError("Types with implicit keyword can only be function types", Position(start, start + nme.IMPLICITkw.asSimpleName.length))
t
}
}
/** InfixType ::= RefinedType {id [nl] refinedType}
*/
def infixType(): Tree = infixTypeRest(refinedType())
def infixTypeRest(t: Tree): Tree =
infixOps(t, canStartTypeTokens, refinedType, isType = true, notAnOperator = nme.raw.STAR)
/** RefinedType ::= WithType {Annotation | [nl] Refinement}
*/
val refinedType: () => Tree = () => refinedTypeRest(withType())
def refinedTypeRest(t: Tree): Tree = {
newLineOptWhenFollowedBy(LBRACE)
if (in.token == LBRACE) refinedTypeRest(atPos(startOffset(t)) { RefinedTypeTree(t, refinement()) })
else t
}
/** WithType ::= AnnotType {`with' AnnotType} (deprecated)
*/
def withType(): Tree = withTypeRest(annotType())
def withTypeRest(t: Tree): Tree =
if (in.token == WITH) {
if (ctx.settings.strict.value)
deprecationWarning(DeprecatedWithOperator())
in.nextToken()
AndTypeTree(t, withType())
}
else t
/** AnnotType ::= SimpleType {Annotation}
*/
def annotType(): Tree = annotTypeRest(simpleType())
def annotTypeRest(t: Tree): Tree =
if (in.token == AT) annotTypeRest(atPos(startOffset(t)) { Annotated(t, annot()) })
else t
/** SimpleType ::= SimpleType TypeArgs
* | SimpleType `#' id
* | StableId
* | Path `.' type
* | `(' ArgTypes `)'
* | `_' TypeBounds
* | Refinement
* | Literal
*/
def simpleType(): Tree = simpleTypeRest {
if (in.token == LPAREN)
atPos(in.offset) {
makeTupleOrParens(inParens(argTypes(namedOK = false, wildOK = true)))
}
else if (in.token == LBRACE)
atPos(in.offset) { RefinedTypeTree(EmptyTree, refinement()) }
else if (isSimpleLiteral) { SingletonTypeTree(literal()) }
else if (in.token == USCORE) {
val start = in.skipToken()
typeBounds().withPos(Position(start, in.lastOffset, start))
}
else path(thisOK = false, handleSingletonType) match {
case r @ SingletonTypeTree(_) => r
case r => convertToTypeId(r)
}
}
val handleSingletonType: Tree => Tree = t =>
if (in.token == TYPE) {
in.nextToken()
atPos(startOffset(t)) { SingletonTypeTree(t) }
} else t
private def simpleTypeRest(t: Tree): Tree = in.token match {
case HASH => simpleTypeRest(typeProjection(t))
case LBRACKET => simpleTypeRest(atPos(startOffset(t)) {
AppliedTypeTree(t, typeArgs(namedOK = false, wildOK = true)) })
case _ => t
}
private def typeProjection(t: Tree): Tree = {
accept(HASH)
val id = typeIdent()
atPos(startOffset(t), startOffset(id)) { Select(t, id.name) }
}
/** NamedTypeArg ::= id `=' Type
*/
val namedTypeArg = () => {
val name = ident()
accept(EQUALS)
NamedArg(name.toTypeName, typ())
}
/** ArgTypes ::= Type {`,' Type}
* | NamedTypeArg {`,' NamedTypeArg}
*/
def argTypes(namedOK: Boolean, wildOK: Boolean) = {
def otherArgs(first: Tree, arg: () => Tree): List[Tree] = {
val rest =
if (in.token == COMMA) {
in.nextToken()
commaSeparated(arg)
}
else Nil
first :: rest
}
def typParser() = if (wildOK) typ() else toplevelTyp()
if (namedOK && in.token == IDENTIFIER)
typParser() match {
case Ident(name) if in.token == EQUALS =>
in.nextToken()
otherArgs(NamedArg(name, typ()), namedTypeArg)
case firstArg =>
otherArgs(firstArg, typ)
}
else commaSeparated(typParser)
}
/** FunArgType ::= Type | `=>' Type
*/
val funArgType = () =>
if (in.token == ARROW) atPos(in.skipToken()) { ByNameTypeTree(typ()) }
else typ()
/** ParamType ::= [`=>'] ParamValueType
*/
def paramType(): Tree =
if (in.token == ARROW) atPos(in.skipToken()) { ByNameTypeTree(paramValueType()) }
else paramValueType()
/** ParamValueType ::= Type [`*']
*/
def paramValueType(): Tree = {
val t = toplevelTyp()
if (isIdent(nme.raw.STAR)) {
in.nextToken()
atPos(startOffset(t)) { PostfixOp(t, Ident(tpnme.raw.STAR)) }
} else t
}
/** TypeArgs ::= `[' Type {`,' Type} `]'
* NamedTypeArgs ::= `[' NamedTypeArg {`,' NamedTypeArg} `]'
*/
def typeArgs(namedOK: Boolean, wildOK: Boolean): List[Tree] = inBrackets(argTypes(namedOK, wildOK))
/** Refinement ::= `{' RefineStatSeq `}'
*/
def refinement(): List[Tree] = inBraces(refineStatSeq())
/** TypeBounds ::= [`>:' Type] [`<:' Type]
*/
def typeBounds(): TypeBoundsTree =
atPos(in.offset) { TypeBoundsTree(bound(SUPERTYPE), bound(SUBTYPE)) }
private def bound(tok: Int): Tree =
if (in.token == tok) { in.nextToken(); toplevelTyp() }
else EmptyTree
/** TypeParamBounds ::= TypeBounds {`<%' Type} {`:' Type}
*/
def typeParamBounds(pname: TypeName): Tree = {
val t = typeBounds()
val cbs = contextBounds(pname)
if (cbs.isEmpty) t
else atPos((t.pos union cbs.head.pos).start) { ContextBounds(t, cbs) }
}
def contextBounds(pname: TypeName): List[Tree] = in.token match {
case COLON =>
atPos(in.skipToken()) {
AppliedTypeTree(toplevelTyp(), Ident(pname))
} :: contextBounds(pname)
case VIEWBOUND =>
deprecationWarning("view bounds `<%' are deprecated, use a context bound `:' instead")
atPos(in.skipToken()) {
Function(Ident(pname) :: Nil, toplevelTyp())
} :: contextBounds(pname)
case _ =>
Nil
}
def typedOpt(): Tree =
if (in.token == COLON) { in.nextToken(); toplevelTyp() }
else TypeTree()
def typeDependingOn(location: Location.Value): Tree =
if (location == Location.InParens) typ()
else if (location == Location.InPattern) refinedType()
else infixType()
/** Checks whether `t` is a wildcard type.
* If it is, returns the [[Position]] where the wildcard occurs.
*/
@tailrec
private final def findWildcardType(t: Tree): Option[Position] = t match {
case TypeBoundsTree(_, _) => Some(t.pos)
case Parens(t1) => findWildcardType(t1)
case Annotated(t1, _) => findWildcardType(t1)
case _ => None
}
/* ----------- EXPRESSIONS ------------------------------------------------ */
/** EqualsExpr ::= `=' Expr
*/
def equalsExpr(): Tree = {
accept(EQUALS)
expr()
}
def condExpr(altToken: Token): Tree = {
if (in.token == LPAREN) {
val t = atPos(in.offset) { Parens(inParens(exprInParens())) }
if (in.token == altToken) in.nextToken()
t
} else {
val t = expr()
accept(altToken)
t
}
}
/** Expr ::= [`implicit'] FunParams `=>' Expr
* | Expr1
* FunParams ::= Bindings
* | id
* | `_'
* ExprInParens ::= PostfixExpr `:' Type
* | Expr
* BlockResult ::= [`implicit'] FunParams `=>' Block
* | Expr1
* Expr1 ::= `if' `(' Expr `)' {nl} Expr [[semi] else Expr]
* | `if' Expr `then' Expr [[semi] else Expr]
* | `while' `(' Expr `)' {nl} Expr
* | `while' Expr `do' Expr
* | `do' Expr [semi] `while' Expr
* | `try' Expr Catches [`finally' Expr]
* | `try' Expr [`finally' Expr]
* | `throw' Expr
* | `return' [Expr]
* | ForExpr
* | [SimpleExpr `.'] id `=' Expr
* | SimpleExpr1 ArgumentExprs `=' Expr
* | PostfixExpr [Ascription]
* | PostfixExpr `match' `{' CaseClauses `}'
* Bindings ::= `(' [Binding {`,' Binding}] `)'
* Binding ::= (id | `_') [`:' Type]
* Ascription ::= `:' CompoundType
* | `:' Annotation {Annotation}
* | `:' `_' `*'
*/
val exprInParens = () => expr(Location.InParens)
def expr(): Tree = expr(Location.ElseWhere)
def expr(location: Location.Value): Tree = {
val start = in.offset
if (in.token == IMPLICIT)
implicitClosure(start, location, implicitMods())
else {
val saved = placeholderParams
placeholderParams = Nil
def wrapPlaceholders(t: Tree) = try
if (placeholderParams.isEmpty) t
else new WildcardFunction(placeholderParams.reverse, t)
finally placeholderParams = saved
val t = expr1(location)
if (in.token == ARROW) {
placeholderParams = Nil // don't interpret `_' to the left of `=>` as placeholder
wrapPlaceholders(closureRest(start, location, convertToParams(t)))
}
else if (isWildcard(t)) {
placeholderParams = placeholderParams ::: saved
t
}
else wrapPlaceholders(t)
}
}
def expr1(location: Location.Value = Location.ElseWhere): Tree = in.token match {
case IF =>
atPos(in.skipToken()) {
val cond = condExpr(THEN)
newLinesOpt()
val thenp = expr()
val elsep = if (in.token == ELSE) { in.nextToken(); expr() }
else EmptyTree
If(cond, thenp, elsep)
}
case WHILE =>
atPos(in.skipToken()) {
val cond = condExpr(DO)
newLinesOpt()
val body = expr()
WhileDo(cond, body)
}
case DO =>
atPos(in.skipToken()) {
val body = expr()
if (isStatSep) in.nextToken()
accept(WHILE)
val cond = expr()
DoWhile(body, cond)
}
case TRY =>
val tryOffset = in.offset
atPos(in.skipToken()) {
val body = expr()
val (handler, handlerStart) =
if (in.token == CATCH) {
val pos = in.offset
in.nextToken()
(expr(), pos)
} else (EmptyTree, -1)
handler match {
case Block(Nil, EmptyTree) =>
assert(handlerStart != -1)
syntaxError(
EmptyCatchBlock(body),
Position(handlerStart, endOffset(handler))
)
case _ =>
}
val finalizer =
if (in.token == FINALLY) { accept(FINALLY); expr() }
else {
if (handler.isEmpty) warning(
EmptyCatchAndFinallyBlock(body),
source atPos Position(tryOffset, endOffset(body))
)
EmptyTree
}
ParsedTry(body, handler, finalizer)
}
case THROW =>
atPos(in.skipToken()) { Throw(expr()) }
case RETURN =>
atPos(in.skipToken()) { Return(if (isExprIntro) expr() else EmptyTree, EmptyTree) }
case FOR =>
forExpr()
case _ =>
expr1Rest(postfixExpr(), location)
}
def expr1Rest(t: Tree, location: Location.Value) = in.token match {
case EQUALS =>
t match {
case Ident(_) | Select(_, _) | Apply(_, _) =>
atPos(startOffset(t), in.skipToken()) { Assign(t, expr()) }
case _ =>
t
}
case COLON =>
ascription(t, location)
case MATCH =>
atPos(startOffset(t), in.skipToken()) {
inBraces(Match(t, caseClauses()))
}
case _ =>
t
}
def ascription(t: Tree, location: Location.Value) = atPos(startOffset(t), in.skipToken()) {
in.token match {
case USCORE =>
val uscoreStart = in.skipToken()
if (isIdent(nme.raw.STAR)) {
in.nextToken()
if (in.token != RPAREN) syntaxError(SeqWildcardPatternPos(), uscoreStart)
Typed(t, atPos(uscoreStart) { Ident(tpnme.WILDCARD_STAR) })
} else {
syntaxErrorOrIncomplete(IncorrectRepeatedParameterSyntax())
t
}
case AT if location != Location.InPattern =>
(t /: annotations())(Annotated)
case _ =>
val tpt = typeDependingOn(location)
if (isWildcard(t) && location != Location.InPattern) {
val vd :: rest = placeholderParams
placeholderParams =
cpy.ValDef(vd)(tpt = tpt).withPos(vd.pos union tpt.pos) :: rest
}
Typed(t, tpt)
}
}
/** FunParams ::= Bindings
* | id
* | `_'
* Bindings ::= `(' [Binding {`,' Binding}] `)'
*/
def funParams(mods: Modifiers, location: Location.Value): List[Tree] =
if (in.token == LPAREN)
inParens(if (in.token == RPAREN) Nil else commaSeparated(() => binding(mods)))
else {
val start = in.offset
val name = bindingName()
val t =
if (in.token == COLON && location == Location.InBlock) {
if (ctx.settings.strict.value)
// Don't error in non-strict mode, as the alternative syntax "implicit (x: T) => ... "
// is not supported by Scala2.x
migrationWarningOrError(s"This syntax is no longer supported; parameter needs to be enclosed in (...)")
in.nextToken()
val t = infixType()
if (false && in.isScala2Mode) {
patch(source, Position(start), "(")
patch(source, Position(in.lastOffset), ")")
}
t
}
else TypeTree()
(atPos(start) { makeParameter(name, t, mods) }) :: Nil
}
/** Binding ::= (id | `_') [`:' Type]
*/
def binding(mods: Modifiers): Tree =
atPos(in.offset) { makeParameter(bindingName(), typedOpt(), mods) }
def bindingName(): TermName =
if (in.token == USCORE) {
in.nextToken()
WildcardParamName.fresh()
}
else ident()
/** Expr ::= implicit id `=>' Expr
* BlockResult ::= implicit id [`:' InfixType] `=>' Block // Scala2 only
*/
def implicitClosure(start: Int, location: Location.Value, implicitMods: Modifiers): Tree =
closureRest(start, location, funParams(implicitMods, location))
def closureRest(start: Int, location: Location.Value, params: List[Tree]): Tree =
atPos(start, in.offset) {
accept(ARROW)
Function(params, if (location == Location.InBlock) block() else expr())
}
/** PostfixExpr ::= InfixExpr [id [nl]]
* InfixExpr ::= PrefixExpr
* | InfixExpr id [nl] InfixExpr
*/
def postfixExpr(): Tree =
infixOps(prefixExpr(), canStartExpressionTokens, prefixExpr, maybePostfix = true)
/** PrefixExpr ::= [`-' | `+' | `~' | `!'] SimpleExpr
*/
val prefixExpr = () =>
if (isIdent && nme.raw.isUnary(in.name)) {
val start = in.offset
val op = termIdent()
if (op.name == nme.raw.MINUS && isNumericLit)
simpleExprRest(literal(start), canApply = true)
else
atPos(start) { PrefixOp(op, simpleExpr()) }
}
else simpleExpr()
/** SimpleExpr ::= new Template
* | BlockExpr
* | SimpleExpr1 [`_']
* SimpleExpr1 ::= literal
* | xmlLiteral
* | Path
* | `(' [ExprsInParens] `)'
* | SimpleExpr `.' id
* | SimpleExpr (TypeArgs | NamedTypeArgs)
* | SimpleExpr1 ArgumentExprs
*/
def simpleExpr(): Tree = {
var canApply = true
val t = in.token match {
case XMLSTART =>
xmlLiteral()
case IDENTIFIER | BACKQUOTED_IDENT | THIS | SUPER =>
path(thisOK = true)
case USCORE =>
val start = in.skipToken()
val pname = WildcardParamName.fresh()
val param = ValDef(pname, TypeTree(), EmptyTree).withFlags(SyntheticTermParam)
.withPos(Position(start))
placeholderParams = param :: placeholderParams
atPos(start) { Ident(pname) }
case LPAREN =>
atPos(in.offset) { makeTupleOrParens(inParens(exprsInParensOpt())) }
case LBRACE =>
canApply = false
blockExpr()
case NEW =>
canApply = false
val start = in.skipToken()
val (impl, missingBody) = template(emptyConstructor)
impl.parents match {
case parent :: Nil if missingBody =>
if (parent.isType) ensureApplied(wrapNew(parent)) else parent
case _ =>
New(impl.withPos(Position(start, in.lastOffset)))
}
case _ =>
if (isLiteral) literal()
else {
syntaxErrorOrIncomplete(IllegalStartSimpleExpr(tokenString(in.token)))
errorTermTree
}
}
simpleExprRest(t, canApply)
}
def simpleExprRest(t: Tree, canApply: Boolean = true): Tree = {
if (canApply) newLineOptWhenFollowedBy(LBRACE)
in.token match {
case DOT =>
in.nextToken()
simpleExprRest(selector(t), canApply = true)
case LBRACKET =>
val tapp = atPos(startOffset(t), in.offset) { TypeApply(t, typeArgs(namedOK = true, wildOK = false)) }
simpleExprRest(tapp, canApply = true)
case LPAREN | LBRACE if canApply =>
val app = atPos(startOffset(t), in.offset) { Apply(t, argumentExprs()) }
simpleExprRest(app, canApply = true)
case USCORE =>
atPos(startOffset(t), in.skipToken()) { PostfixOp(t, Ident(nme.WILDCARD)) }
case _ =>
t
}
}
/** ExprsInParens ::= ExprInParens {`,' ExprInParens}
*/
def exprsInParensOpt(): List[Tree] =
if (in.token == RPAREN) Nil else commaSeparated(exprInParens)
/** ParArgumentExprs ::= `(' [ExprsInParens] `)'
* | `(' [ExprsInParens `,'] PostfixExpr `:' `_' `*' ')'
*
* Special treatment for arguments of primary class constructor
* annotations. All empty argument lists `(` `)` following the first
* get represented as `List(ParamNotArg)` instead of `Nil`, indicating that
* the token sequence should be interpreted as an empty parameter clause
* instead. `ParamNotArg` can also be produced when parsing the first
* argument (see `classConstrAnnotExpr`).
*
* The method affects `lookaheadTokens` as a side effect.
* If the argument list parses as `List(ParamNotArg)`, `lookaheadTokens`
* contains the tokens that need to be replayed to parse the parameter clause.
* Otherwise, `lookaheadTokens` is empty.
*/
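// Illustrative example (hand-written): in `class C @ann(x: Int)`, the tokens
// `(x: Int)` could be annotation arguments or the primary constructor's
// parameter clause. Seeing the identifier `x` followed by `:` makes
// classConstrAnnotExpr answer ParamNotArg, so the saved lookahead tokens are
// replayed and re-parsed as a parameter clause.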
def parArgumentExprs(first: Boolean = false): List[Tree] = {
if (inClassConstrAnnots) {
assert(lookaheadTokens.isEmpty)
saveLookahead()
accept(LPAREN)
val args =
if (in.token == RPAREN)
if (first) Nil // first () counts as annotation argument
else ParamNotArg :: Nil
else {
openParens.change(LPAREN, +1)
try commaSeparated(argumentExpr)
finally openParens.change(LPAREN, -1)
}
if (args == ParamNotArg :: Nil)
in.adjustSepRegions(RPAREN) // simulate `)` without requiring it
else {
lookaheadTokens.clear()
accept(RPAREN)
}
args
}
else
inParens(if (in.token == RPAREN) Nil else commaSeparated(argumentExpr))
}
/** ArgumentExprs ::= ParArgumentExprs
* | [nl] BlockExpr
*/
def argumentExprs(): List[Tree] =
if (in.token == LBRACE) blockExpr() :: Nil else parArgumentExprs()
val argumentExpr = () => {
val arg =
if (inClassConstrAnnots && lookaheadTokens.nonEmpty) classConstrAnnotExpr()
else exprInParens()
arg match {
case arg @ Assign(Ident(id), rhs) => cpy.NamedArg(arg)(id, rhs)
case arg => arg
}
}
/** Handle first argument of an argument list to an annotation of
* a primary class constructor. If the current token either cannot
* start an expression or is an identifier and is followed by `:`,
* stop parsing the rest of the expression and return `EmptyTree`,
* indicating that we should re-parse the expression as a parameter clause.
* Otherwise parse as normal.
*/
def classConstrAnnotExpr() = {
if (in.token == IDENTIFIER) {
saveLookahead()
postfixExpr() match {
case Ident(_) if in.token == COLON => ParamNotArg
case t => expr1Rest(t, Location.InParens)
}
}
else if (isExprIntro) exprInParens()
else ParamNotArg
}
/** ArgumentExprss ::= {ArgumentExprs}
*/
def argumentExprss(fn: Tree): Tree = {
newLineOptWhenFollowedBy(LBRACE)
if (in.token == LPAREN || in.token == LBRACE) argumentExprss(Apply(fn, argumentExprs()))
else fn
}
/** ParArgumentExprss ::= {ParArgumentExprs}
*
* Special treatment for arguments of primary class constructor
* annotations. If an argument list returns `List(ParamNotArg)`
* ignore it, and return prefix parsed before that list instead.
*/
def parArgumentExprss(fn: Tree): Tree =
if (in.token == LPAREN) {
val args = parArgumentExprs(first = !fn.isInstanceOf[Trees.Apply[_]])
if (inClassConstrAnnots && args == ParamNotArg :: Nil) fn
else parArgumentExprss(Apply(fn, args))
}
else fn
/** BlockExpr ::= `{' (CaseClauses | Block) `}'
*/
def blockExpr(): Tree = atPos(in.offset) {
inDefScopeBraces {
if (in.token == CASE) Match(EmptyTree, caseClauses())
else block()
}
}
/** Block ::= BlockStatSeq
* @note The returned tree does not carry a source position.
*/
def block(): Tree = {
val stats = blockStatSeq()
def isExpr(stat: Tree) = !(stat.isDef || stat.isInstanceOf[Import])
if (stats.nonEmpty && isExpr(stats.last)) Block(stats.init, stats.last)
else Block(stats, EmptyTree)
}
/** Guard ::= if PostfixExpr
*/
def guard(): Tree =
if (in.token == IF) { in.nextToken(); postfixExpr() }
else EmptyTree
/** Enumerators ::= Generator {semi Enumerator | Guard}
*/
def enumerators(): List[Tree] = generator() :: enumeratorsRest()
def enumeratorsRest(): List[Tree] =
if (isStatSep) { in.nextToken(); enumerator() :: enumeratorsRest() }
else if (in.token == IF) guard() :: enumeratorsRest()
else Nil
/** Enumerator ::= Generator
* | Guard
* | Pattern1 `=' Expr
*/
def enumerator(): Tree =
if (in.token == IF) guard()
else {
val pat = pattern1()
if (in.token == EQUALS) atPos(startOffset(pat), in.skipToken()) { GenAlias(pat, expr()) }
else generatorRest(pat)
}
/** Generator ::= Pattern `<-' Expr
*/
def generator(): Tree = generatorRest(pattern1())
def generatorRest(pat: Tree) =
atPos(startOffset(pat), accept(LARROW)) { GenFrom(pat, expr()) }
/** ForExpr ::= `for' (`(' Enumerators `)' | `{' Enumerators `}')
* {nl} [`yield'] Expr
* | `for' Enumerators (`do' Expr | `yield' Expr)
*/
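// Illustrative inputs (not part of the original source; `xs` and `f` are
// made-up names) for the three accepted shapes:
//
//   for (x <- xs) yield f(x)   // parenthesized enumerators -> ForYield
//   for { x <- xs } f(x)       // braced enumerators, `yield` optional
//   for x <- xs do f(x)        // unwrapped enumerators: `do' or `yield' is
//                              // required, else YieldOrDoExpectedInForComprehension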
def forExpr(): Tree = atPos(in.skipToken()) {
var wrappedEnums = true
val enums =
if (in.token == LBRACE) inBraces(enumerators())
else if (in.token == LPAREN) {
val lparenOffset = in.skipToken()
openParens.change(LPAREN, 1)
val pats = patternsOpt()
val pat =
if (in.token == RPAREN || pats.length > 1) {
wrappedEnums = false
accept(RPAREN)
openParens.change(LPAREN, -1)
atPos(lparenOffset) { makeTupleOrParens(pats) } // note: alternatives `|' need to be weeded out by typer.
}
else pats.head
val res = generatorRest(pat) :: enumeratorsRest()
if (wrappedEnums) {
accept(RPAREN)
openParens.change(LPAREN, -1)
}
res
} else {
wrappedEnums = false
enumerators()
}
newLinesOpt()
if (in.token == YIELD) { in.nextToken(); ForYield(enums, expr()) }
else if (in.token == DO) { in.nextToken(); ForDo(enums, expr()) }
else {
if (!wrappedEnums) syntaxErrorOrIncomplete(YieldOrDoExpectedInForComprehension())
ForDo(enums, expr())
}
}
/** CaseClauses ::= CaseClause {CaseClause}
*/
def caseClauses(): List[CaseDef] = {
val buf = new ListBuffer[CaseDef]
buf += caseClause()
while (in.token == CASE) buf += caseClause()
buf.toList
}
/** CaseClause ::= case Pattern [Guard] `=>' Block
*/
def caseClause(): CaseDef = atPos(in.offset) {
accept(CASE)
CaseDef(pattern(), guard(), atPos(accept(ARROW)) { block() })
}
/* -------- PATTERNS ------------------------------------------- */
/** Pattern ::= Pattern1 { `|' Pattern1 }
*/
val pattern = () => {
val pat = pattern1()
if (isIdent(nme.raw.BAR))
atPos(startOffset(pat)) { Alternative(pat :: patternAlts()) }
else pat
}
def patternAlts(): List[Tree] =
if (isIdent(nme.raw.BAR)) { in.nextToken(); pattern1() :: patternAlts() }
else Nil
/** Pattern1 ::= PatVar Ascription
* | Pattern2
*/
def pattern1(): Tree = {
val p = pattern2()
if (isVarPattern(p) && in.token == COLON) ascription(p, Location.InPattern)
else p
}
/** Pattern2 ::= [varid `@'] InfixPattern
*/
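// Scala 2 compatibility sketch (not part of the original source; `xs` is a
// made-up name): the match below rewrites the retired vararg-binder forms,
//
//   case xs @ _* => ...   // migration warning, read as `xs : _*`
//   case _* => ...        // migration warning, read as `_ : _*`
//
// producing the equivalent Typed(...) trees in both cases.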
val pattern2 = () => infixPattern() match {
case p @ Ident(name) if isVarPattern(p) && in.token == AT =>
val offset = in.skipToken()
// compatibility for Scala2 `x @ _*` syntax
infixPattern() match {
case pt @ Ident(tpnme.WILDCARD_STAR) =>
migrationWarningOrError("The syntax `x @ _*' is no longer supported; use `x : _*' instead", startOffset(p))
atPos(startOffset(p), offset) { Typed(p, pt) }
case p =>
atPos(startOffset(p), offset) { Bind(name, p) }
}
case p @ Ident(tpnme.WILDCARD_STAR) =>
// compatibility for Scala2 `_*` syntax
migrationWarningOrError("The syntax `_*' is no longer supported; use `x : _*' instead", startOffset(p))
atPos(startOffset(p)) { Typed(Ident(nme.WILDCARD), p) }
case p =>
p
}
/** InfixPattern ::= SimplePattern {id [nl] SimplePattern}
*/
def infixPattern(): Tree =
infixOps(simplePattern(), canStartExpressionTokens, simplePattern, notAnOperator = nme.raw.BAR)
/** SimplePattern ::= PatVar
* | Literal
* | XmlPattern
* | `(' [Patterns] `)'
* | SimplePattern1 [TypeArgs] [ArgumentPatterns]
* SimplePattern1 ::= Path
* | `{' Block `}'
* | SimplePattern1 `.' id
* PatVar ::= id
* | `_'
*/
val simplePattern = () => in.token match {
case IDENTIFIER | BACKQUOTED_IDENT | THIS =>
path(thisOK = true) match {
case id @ Ident(nme.raw.MINUS) if isNumericLit => literal(startOffset(id))
case t => simplePatternRest(t)
}
case USCORE =>
val wildIdent = wildcardIdent()
// compatibility for Scala2 `x @ _*` and `_*` syntax
// `x: _*' is parsed in `ascription'
if (isIdent(nme.raw.STAR)) {
in.nextToken()
if (in.token != RPAREN) syntaxError(SeqWildcardPatternPos(), wildIdent.pos)
atPos(wildIdent.pos) { Ident(tpnme.WILDCARD_STAR) }
} else wildIdent
case LPAREN =>
atPos(in.offset) { makeTupleOrParens(inParens(patternsOpt())) }
case LBRACE =>
dotSelectors(blockExpr())
case XMLSTART =>
xmlLiteralPattern()
case _ =>
if (isLiteral) literal(inPattern = true)
else {
syntaxErrorOrIncomplete(IllegalStartOfSimplePattern())
errorTermTree
}
}
def simplePatternRest(t: Tree): Tree = {
var p = t
if (in.token == LBRACKET)
p = atPos(startOffset(t), in.offset) { TypeApply(p, typeArgs(namedOK = false, wildOK = false)) }
if (in.token == LPAREN)
p = atPos(startOffset(t), in.offset) { Apply(p, argumentPatterns()) }
p
}
/** Patterns ::= Pattern {`,' Pattern}
*/
def patterns() = commaSeparated(pattern)
def patternsOpt(): List[Tree] =
if (in.token == RPAREN) Nil else patterns()
/** ArgumentPatterns ::= `(' [Patterns] `)'
* | `(' [Patterns `,'] Pattern2 `:' `_' `*' `)'
*/
def argumentPatterns(): List[Tree] =
inParens(patternsOpt())
/* -------- MODIFIERS and ANNOTATIONS ------------------------------------------- */
private def modOfToken(tok: Int): Mod = tok match {
case ABSTRACT => Mod.Abstract()
case FINAL => Mod.Final()
case IMPLICIT => Mod.Implicit()
case INLINE => Mod.Inline()
case LAZY => Mod.Lazy()
case OVERRIDE => Mod.Override()
case PRIVATE => Mod.Private()
case PROTECTED => Mod.Protected()
case SEALED => Mod.Sealed()
}
/** Drop `private' modifier when followed by a qualifier.
* Contract `abstract' and `override' to ABSOVERRIDE
*/
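// Illustrative effect (not part of the original source; `p`, `f` and `g` are
// made-up names):
//
//   private[p] def f = ...        // Private flag dropped, privateWithin stays p
//   abstract override def g = ... // Abstract | Override contracted to AbsOverride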
private def normalize(mods: Modifiers): Modifiers =
if ((mods is Private) && mods.hasPrivateWithin)
normalize(mods &~ Private)
else if (mods is AbstractAndOverride)
normalize(addFlag(mods &~ (Abstract | Override), AbsOverride))
else
mods
private def addModifier(mods: Modifiers): Modifiers = {
val tok = in.token
val mod = atPos(in.skipToken()) { modOfToken(tok) }
if (mods is mod.flags) syntaxError(RepeatedModifier(mod.flags.toString))
addMod(mods, mod)
}
private def compatible(flags1: FlagSet, flags2: FlagSet): Boolean = (
flags1.isEmpty
|| flags2.isEmpty
|| flags1.isTermFlags && flags2.isTermFlags
|| flags1.isTypeFlags && flags2.isTypeFlags
)
def addFlag(mods: Modifiers, flag: FlagSet): Modifiers = {
def getPrintableTypeFromFlagSet =
Map(Trait -> "trait", Method -> "method", Mutable -> "variable").get(flag)
if (compatible(mods.flags, flag)) mods | flag
else {
syntaxError(ModifiersNotAllowed(mods.flags, getPrintableTypeFromFlagSet))
Modifiers(flag)
}
}
/** Always add the syntactic `mod`, but check and conditionally add semantic `mod.flags`
*/
def addMod(mods: Modifiers, mod: Mod): Modifiers =
addFlag(mods, mod.flags).withAddedMod(mod)
/** AccessQualifier ::= "[" (id | this) "]"
*/
def accessQualifierOpt(mods: Modifiers): Modifiers =
if (in.token == LBRACKET) {
if ((mods is Local) || mods.hasPrivateWithin)
syntaxError("duplicate private/protected qualifier")
inBrackets {
if (in.token == THIS) { in.nextToken(); mods | Local }
else mods.withPrivateWithin(ident().toTypeName)
}
} else mods
/** {Annotation} {Modifier}
* Modifiers ::= {Modifier}
* LocalModifiers ::= {LocalModifier}
* AccessModifier ::= (private | protected) [AccessQualifier]
* Modifier ::= LocalModifier
* | AccessModifier
* | override
* LocalModifier ::= abstract | final | sealed | implicit | lazy
*/
def modifiers(allowed: BitSet = modifierTokens, start: Modifiers = Modifiers()): Modifiers = {
@tailrec
def loop(mods: Modifiers): Modifiers = {
if (allowed contains in.token) {
val isAccessMod = accessModifierTokens contains in.token
val mods1 = addModifier(mods)
loop(if (isAccessMod) accessQualifierOpt(mods1) else mods1)
} else if (in.token == NEWLINE && (mods.hasFlags || mods.hasAnnotations)) {
in.nextToken()
loop(mods)
} else {
mods
}
}
normalize(loop(start))
}
def implicitMods(): Modifiers =
addMod(EmptyModifiers, atPos(accept(IMPLICIT)) { Mod.Implicit() })
/** Wrap annotation or constructor in New(...).<init> */
def wrapNew(tpt: Tree) = Select(New(tpt), nme.CONSTRUCTOR)
/** Adjust start of annotation or constructor to position of preceding @ or new */
def adjustStart(start: Offset)(tree: Tree): Tree = {
val tree1 = tree match {
case Apply(fn, args) => cpy.Apply(tree)(adjustStart(start)(fn), args)
case Select(qual, name) => cpy.Select(tree)(adjustStart(start)(qual), name)
case _ => tree
}
if (tree1.pos.exists && start < tree1.pos.start)
tree1.withPos(tree1.pos.withStart(start))
else tree1
}
/** Annotation ::= `@' SimpleType {ParArgumentExprs}
*/
def annot() =
adjustStart(accept(AT)) {
if (in.token == INLINE) in.token = BACKQUOTED_IDENT // allow for now
ensureApplied(parArgumentExprss(wrapNew(simpleType())))
}
def annotations(skipNewLines: Boolean = false): List[Tree] = {
if (skipNewLines) newLineOptWhenFollowedBy(AT)
if (in.token == AT) annot() :: annotations(skipNewLines)
else Nil
}
def annotsAsMods(skipNewLines: Boolean = false): Modifiers =
Modifiers() withAnnotations annotations(skipNewLines)
def defAnnotsMods(allowed: BitSet): Modifiers =
modifiers(allowed, annotsAsMods(skipNewLines = true))
/* -------- PARAMETERS ------------------------------------------- */
/** ClsTypeParamClause::= `[' ClsTypeParam {`,' ClsTypeParam} `]'
* ClsTypeParam ::= {Annotation} [`+' | `-']
* id [HkTypeParamClause] TypeParamBounds
*
* DefTypeParamClause::= `[' DefTypeParam {`,' DefTypeParam} `]'
* DefTypeParam ::= {Annotation} id [HkTypeParamClause] TypeParamBounds
*
* TypTypeParamClause::= `[' TypTypeParam {`,' TypTypeParam} `]'
* TypTypeParam ::= {Annotation} id [HkTypeParamClause] TypeBounds
*
* HkTypeParamClause ::= `[' HkTypeParam {`,' HkTypeParam} `]'
* HkTypeParam ::= {Annotation} [`+' | `-'] (id [HkTypeParamClause] | `_') TypeBounds
*/
def typeParamClause(ownerKind: ParamOwner.Value): List[TypeDef] = inBrackets {
def typeParam(): TypeDef = {
val isConcreteOwner = ownerKind == ParamOwner.Class || ownerKind == ParamOwner.Def
val start = in.offset
val mods = atPos(start) {
annotsAsMods() | {
if (ownerKind == ParamOwner.Class) Param | PrivateLocal
else Param
} | {
if (ownerKind != ParamOwner.Def)
if (isIdent(nme.raw.PLUS)) { in.nextToken(); Covariant }
else if (isIdent(nme.raw.MINUS)) { in.nextToken(); Contravariant }
else EmptyFlags
else EmptyFlags
}
}
atPos(start, nameStart) {
val name =
if (isConcreteOwner || in.token != USCORE) ident().toTypeName
else {
in.nextToken()
WildcardParamName.fresh().toTypeName
}
val hkparams = typeParamClauseOpt(ParamOwner.TypeParam)
val bounds =
if (isConcreteOwner) typeParamBounds(name)
else typeBounds()
TypeDef(name, lambdaAbstract(hkparams, bounds)).withMods(mods)
}
}
commaSeparated(typeParam)
}
def typeParamClauseOpt(ownerKind: ParamOwner.Value): List[TypeDef] =
if (in.token == LBRACKET) typeParamClause(ownerKind) else Nil
/** ClsParamClauses ::= {ClsParamClause} [[nl] `(' `implicit' ClsParams `)']
* ClsParamClause ::= [nl] `(' [ClsParams] ')'
* ClsParams ::= ClsParam {`,' ClsParam}
* ClsParam ::= {Annotation} [{Modifier} (`val' | `var') | `inline'] Param
* DefParamClauses ::= {DefParamClause} [[nl] `(' `implicit' DefParams `)']
* DefParamClause ::= [nl] `(' [DefParams] ')'
* DefParams ::= DefParam {`,' DefParam}
* DefParam ::= {Annotation} [`inline'] Param
* Param ::= id `:' ParamType [`=' Expr]
*/
def paramClauses(owner: Name, ofCaseClass: Boolean = false): List[List[ValDef]] = {
var imods: Modifiers = EmptyModifiers
var implicitOffset = -1 // use once
var firstClauseOfCaseClass = ofCaseClass
def param(): ValDef = {
val start = in.offset
var mods = annotsAsMods()
if (owner.isTypeName) {
mods = modifiers(start = mods) | ParamAccessor
mods =
atPos(start, in.offset) {
if (in.token == VAL) {
val mod = atPos(in.skipToken()) { Mod.Val() }
mods.withAddedMod(mod)
} else if (in.token == VAR) {
val mod = atPos(in.skipToken()) { Mod.Var() }
addMod(mods, mod)
} else {
if (!(mods.flags &~ (ParamAccessor | Inline)).isEmpty)
syntaxError("`val' or `var' expected")
if (firstClauseOfCaseClass) mods else mods | PrivateLocal
}
}
}
else {
if (in.token == INLINE) mods = addModifier(mods)
mods = atPos(start) { mods | Param }
}
atPos(start, nameStart) {
val name = ident()
val tpt =
if (ctx.settings.YmethodInfer.value && owner.isTermName && in.token != COLON) {
TypeTree() // XX-METHOD-INFER
} else {
accept(COLON)
if (in.token == ARROW && owner.isTypeName && !(mods is Local))
syntaxError(VarValParametersMayNotBeCallByName(name, mods is Mutable))
paramType()
}
val default =
if (in.token == EQUALS) { in.nextToken(); expr() }
else EmptyTree
if (implicitOffset >= 0) {
mods = mods.withPos(mods.pos.union(Position(implicitOffset, implicitOffset)))
implicitOffset = -1
}
for (imod <- imods.mods) mods = addMod(mods, imod)
ValDef(name, tpt, default).withMods(mods)
}
}
def paramClause(): List[ValDef] = inParens {
if (in.token == RPAREN) Nil
else {
if (in.token == IMPLICIT) {
implicitOffset = in.offset
imods = implicitMods()
}
commaSeparated(param)
}
}
def clauses(): List[List[ValDef]] = {
newLineOptWhenFollowedBy(LPAREN)
if (in.token == LPAREN)
paramClause() :: {
firstClauseOfCaseClass = false
if (imods.hasFlags) Nil else clauses()
}
else Nil
}
val start = in.offset
val result = clauses()
if (owner == nme.CONSTRUCTOR && (result.isEmpty || (result.head take 1 exists (_.mods is Implicit)))) {
in.token match {
case LBRACKET => syntaxError("no type parameters allowed here")
case EOF => incompleteInputError(AuxConstructorNeedsNonImplicitParameter())
case _ => syntaxError(AuxConstructorNeedsNonImplicitParameter(), start)
}
}
val listOfErrors = checkVarArgsRules(result)
listOfErrors.foreach { vparam =>
syntaxError(VarArgsParamMustComeLast(), vparam.tpt.pos)
}
result
}
/* -------- DEFS ------------------------------------------- */
/** Import ::= import ImportExpr {`,' ImportExpr}
*/
def importClause(): List[Tree] = {
val offset = accept(IMPORT)
commaSeparated(importExpr) match {
case t :: rest =>
// The first import should start at the position of the keyword.
t.withPos(t.pos.withStart(offset)) :: rest
case nil => nil
}
}
/** ImportExpr ::= StableId `.' (id | `_' | ImportSelectors)
*/
val importExpr = () => path(thisOK = false, handleImport) match {
case imp: Import =>
imp
case sel @ Select(qual, name) =>
val selector = atPos(pointOffset(sel)) { Ident(name) }
cpy.Import(sel)(qual, selector :: Nil)
case t =>
accept(DOT)
Import(t, Ident(nme.WILDCARD) :: Nil)
}
val handleImport = { tree: Tree =>
if (in.token == USCORE) Import(tree, importSelector() :: Nil)
else if (in.token == LBRACE) Import(tree, inBraces(importSelectors()))
else tree
}
/** ImportSelectors ::= `{' {ImportSelector `,'} (ImportSelector | `_') `}'
*/
def importSelectors(): List[Tree] =
if (in.token == RBRACE) Nil
else {
val sel = importSelector()
sel :: {
if (!isWildcardArg(sel) && in.token == COMMA) {
in.nextToken()
importSelectors()
}
else Nil
}
}
/** ImportSelector ::= id [`=>' id | `=>' `_']
*/
def importSelector(): Tree = {
val from = termIdentOrWildcard()
if (from.name != nme.WILDCARD && in.token == ARROW)
atPos(startOffset(from), in.skipToken()) {
Thicket(from, termIdentOrWildcard())
}
else from
}
def posMods(start: Int, mods: Modifiers) = {
val mods1 = atPos(start)(mods)
in.nextToken()
mods1
}
/** Def ::= val PatDef
* | var VarDef
* | def DefDef
* | type {nl} TypeDcl
* | TmplDef
* Dcl ::= val ValDcl
* | var VarDcl
* | def DefDcl
* | type {nl} TypeDcl
* EnumCase ::= `case' (EnumClassDef | ObjectDef)
*/
def defOrDcl(start: Int, mods: Modifiers): Tree = in.token match {
case VAL =>
val mod = atPos(in.skipToken()) { Mod.Val() }
val mods1 = mods.withAddedMod(mod)
patDefOrDcl(start, mods1)
case VAR =>
val mod = atPos(in.skipToken()) { Mod.Var() }
val mod1 = addMod(mods, mod)
patDefOrDcl(start, mod1)
case DEF =>
defDefOrDcl(start, posMods(start, mods))
case TYPE =>
typeDefOrDcl(start, posMods(start, mods))
case CASE =>
enumCase(start, mods)
case _ =>
tmplDef(start, mods)
}
/** PatDef ::= Pattern2 {`,' Pattern2} [`:' Type] `=' Expr
* VarDef ::= PatDef | id {`,' id} `:' Type `=' `_'
* ValDcl ::= id {`,' id} `:' Type
* VarDcl ::= id {`,' id} `:' Type
*/
def patDefOrDcl(start: Offset, mods: Modifiers): Tree = atPos(start, nameStart) {
val lhs = commaSeparated(pattern2)
val tpt = typedOpt()
val rhs =
if (tpt.isEmpty || in.token == EQUALS) {
accept(EQUALS)
if (in.token == USCORE && !tpt.isEmpty && (mods is Mutable) &&
(lhs.toList forall (_.isInstanceOf[Ident]))) {
wildcardIdent()
} else {
expr()
}
} else EmptyTree
lhs match {
case (id @ Ident(name: TermName)) :: Nil => {
ValDef(name, tpt, rhs).withMods(mods).setComment(in.getDocComment(start))
} case _ =>
PatDef(mods, lhs, tpt, rhs)
}
}
private def checkVarArgsRules(vparamss: List[List[untpd.ValDef]]): List[untpd.ValDef] = {
def isVarArgs(tpt: Trees.Tree[Untyped]): Boolean = tpt match {
case PostfixOp(_, op) if op.name == tpnme.raw.STAR => true
case _ => false
}
vparamss.flatMap { params =>
if (params.nonEmpty) {
params.init.filter(valDef => isVarArgs(valDef.tpt))
} else List()
}
}
/** DefDef ::= DefSig (`:' Type [`=' Expr] | "=" Expr)
* | this ParamClause ParamClauses `=' ConstrExpr
* DefDcl ::= DefSig `:' Type
* DefSig ::= id [DefTypeParamClause] ParamClauses
*/
def defDefOrDcl(start: Offset, mods: Modifiers): Tree = atPos(start, nameStart) {
def scala2ProcedureSyntax(resultTypeStr: String) = {
val toInsert =
if (in.token == LBRACE) s"$resultTypeStr ="
else ": Unit " // trailing space ensures that `def f()def g()` works.
in.testScala2Mode(s"Procedure syntax no longer supported; `$toInsert' should be inserted here") && {
patch(source, Position(in.lastOffset), toInsert)
true
}
}
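// Illustrative rewrite (not part of the original source; `f` is a made-up
// name) performed by the helper above when Scala 2 mode accepts the migration:
//
//   def f() { ... }   is patched to   def f(): Unit = { ... }
//   def f()           is patched to   def f(): Unit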
if (in.token == THIS) {
in.nextToken()
val vparamss = paramClauses(nme.CONSTRUCTOR)
if (in.isScala2Mode) newLineOptWhenFollowedBy(LBRACE)
val rhs = {
if (!(in.token == LBRACE && scala2ProcedureSyntax(""))) accept(EQUALS)
atPos(in.offset) { constrExpr() }
}
makeConstructor(Nil, vparamss, rhs).withMods(mods)
} else {
val mods1 = addFlag(mods, Method)
val name = ident()
val tparams = typeParamClauseOpt(ParamOwner.Def)
val vparamss = paramClauses(name)
var tpt = fromWithinReturnType(typedOpt())
if (in.isScala2Mode) newLineOptWhenFollowedBy(LBRACE)
val rhs =
if (in.token == EQUALS) {
in.nextToken()
expr()
}
else if (!tpt.isEmpty)
EmptyTree
else if (scala2ProcedureSyntax(": Unit")) {
tpt = scalaUnit
if (in.token == LBRACE) expr()
else EmptyTree
}
else {
if (!isExprIntro) syntaxError(MissingReturnType(), in.lastOffset)
accept(EQUALS)
expr()
}
DefDef(name, tparams, vparamss, tpt, rhs).withMods(mods1).setComment(in.getDocComment(start))
}
}
/** ConstrExpr ::= SelfInvocation
* | ConstrBlock
*/
def constrExpr(): Tree =
if (in.token == LBRACE) constrBlock()
else Block(selfInvocation() :: Nil, Literal(Constant(())))
/** SelfInvocation ::= this ArgumentExprs {ArgumentExprs}
*/
def selfInvocation(): Tree =
atPos(accept(THIS)) {
newLineOptWhenFollowedBy(LBRACE)
argumentExprss(Apply(Ident(nme.CONSTRUCTOR), argumentExprs()))
}
/** ConstrBlock ::= `{' SelfInvocation {semi BlockStat} `}'
*/
def constrBlock(): Tree =
atPos(in.skipToken()) {
val stats = selfInvocation() :: {
if (isStatSep) { in.nextToken(); blockStatSeq() }
else Nil
}
accept(RBRACE)
Block(stats, Literal(Constant(())))
}
/** TypeDef ::= type id [TypeParamClause] `=' Type
* TypeDcl ::= type id [TypeParamClause] TypeBounds
*/
def typeDefOrDcl(start: Offset, mods: Modifiers): Tree = {
newLinesOpt()
atPos(start, nameStart) {
val name = ident().toTypeName
val tparams = typeParamClauseOpt(ParamOwner.Type)
in.token match {
case EQUALS =>
in.nextToken()
TypeDef(name, lambdaAbstract(tparams, typ())).withMods(mods).setComment(in.getDocComment(start))
case SUPERTYPE | SUBTYPE | SEMI | NEWLINE | NEWLINES | COMMA | RBRACE | EOF =>
TypeDef(name, lambdaAbstract(tparams, typeBounds())).withMods(mods).setComment(in.getDocComment(start))
case _ =>
syntaxErrorOrIncomplete("`=', `>:', or `<:' expected")
EmptyTree
}
}
}
/** TmplDef ::= ([`case' | `enum'] `class' | `trait') ClassDef
* | [`case'] `object' ObjectDef
* | `enum' EnumDef
*/
def tmplDef(start: Int, mods: Modifiers): Tree = {
in.token match {
case TRAIT =>
classDef(start, posMods(start, addFlag(mods, Trait)))
case CLASS =>
classDef(start, posMods(start, mods))
case CASECLASS =>
classDef(start, posMods(start, mods | Case))
case OBJECT =>
objectDef(start, posMods(start, mods | Module))
case CASEOBJECT =>
objectDef(start, posMods(start, mods | Case | Module))
case ENUM =>
val enumMod = atPos(in.skipToken()) { Mod.Enum() }
if (in.token == CLASS) tmplDef(start, addMod(mods, enumMod))
else enumDef(start, mods, enumMod)
case _ =>
syntaxErrorOrIncomplete("expected start of definition")
EmptyTree
}
}
/** ClassDef ::= id ClassConstr TemplateOpt
*/
def classDef(start: Offset, mods: Modifiers): TypeDef = atPos(start, nameStart) {
classDefRest(start, mods, ident().toTypeName)
}
def classDefRest(start: Offset, mods: Modifiers, name: TypeName): TypeDef = {
val constr = classConstr(name, isCaseClass = mods is Case)
val templ = templateOpt(constr)
TypeDef(name, templ).withMods(mods).setComment(in.getDocComment(start))
}
/** ClassConstr ::= [ClsTypeParamClause] [ConstrMods] ClsParamClauses
*/
def classConstr(owner: Name, isCaseClass: Boolean = false): DefDef = atPos(in.lastOffset) {
val tparams = typeParamClauseOpt(ParamOwner.Class)
val cmods = fromWithinClassConstr(constrModsOpt(owner))
val vparamss = paramClauses(owner, isCaseClass)
makeConstructor(tparams, vparamss).withMods(cmods)
}
/** ConstrMods ::= {Annotation} [AccessModifier]
*/
def constrModsOpt(owner: Name): Modifiers =
modifiers(accessModifierTokens, annotsAsMods())
/** ObjectDef ::= id TemplateOpt
*/
def objectDef(start: Offset, mods: Modifiers): ModuleDef = atPos(start, nameStart) {
objectDefRest(start, mods, ident())
}
def objectDefRest(start: Offset, mods: Modifiers, name: TermName): ModuleDef = {
val template = templateOpt(emptyConstructor)
ModuleDef(name, template).withMods(mods).setComment(in.getDocComment(start))
}
/** id ClassConstr [`extends' [ConstrApps]]
* [nl] `{' EnumCaseStats `}'
*/
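// Desugaring sketch (not part of the original source; `Color`, `Red` and
// `Green` are made-up names): an input such as
//
//   enum Color { case Red, Green }
//
// produces a Thicket of two trees, roughly a `class Color` carrying the
// Mod.Enum modifier and a companion `object Color` whose body holds the
// parsed enum cases.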
def enumDef(start: Offset, mods: Modifiers, enumMod: Mod): Thicket = {
val point = nameStart
val modName = ident()
val clsName = modName.toTypeName
val constr = classConstr(clsName)
val parents =
if (in.token == EXTENDS) {
in.nextToken();
newLineOptWhenFollowedBy(LBRACE)
if (in.token == LBRACE) Nil else tokenSeparated(WITH, constrApp)
}
else Nil
val clsDef = atPos(start, point) {
TypeDef(clsName, Template(constr, parents, EmptyValDef, Nil))
.withMods(addMod(mods, enumMod)).setComment(in.getDocComment(start))
}
newLineOptWhenFollowedBy(LBRACE)
val modDef = atPos(in.offset) {
val body = inBraces(enumCaseStats())
ModuleDef(modName, Template(emptyConstructor, Nil, EmptyValDef, body))
.withMods(mods)
}
Thicket(clsDef :: modDef :: Nil)
}
/** EnumCaseStats = EnumCaseStat {semi EnumCaseStat} */
def enumCaseStats(): List[DefTree] = {
val cases = new ListBuffer[DefTree] += enumCaseStat()
var exitOnError = false
while (!isStatSeqEnd && !exitOnError) {
acceptStatSep()
if (isCaseIntro)
cases += enumCaseStat()
else if (!isStatSep) {
exitOnError = mustStartStat
syntaxErrorOrIncomplete("illegal start of case")
}
}
cases.toList
}
/** EnumCaseStat = {Annotation [nl]} {Modifier} EnumCase */
def enumCaseStat(): DefTree =
enumCase(in.offset, defAnnotsMods(modifierTokens))
/** EnumCase = `case' (EnumClassDef | ObjectDef) */
def enumCase(start: Offset, mods: Modifiers): DefTree = {
val mods1 = mods.withAddedMod(atPos(in.offset)(Mod.EnumCase())) | Case
accept(CASE)
atPos(start, nameStart) {
val id = termIdent()
if (in.token == LBRACKET || in.token == LPAREN)
classDefRest(start, mods1, id.name.toTypeName)
else if (in.token == COMMA) {
in.nextToken()
val ids = commaSeparated(termIdent)
PatDef(mods1, id :: ids, TypeTree(), EmptyTree)
}
else
objectDefRest(start, mods1, id.name.asTermName)
}
}
/* -------- TEMPLATES ------------------------------------------- */
/** ConstrApp ::= SimpleType {ParArgumentExprs}
*/
val constrApp = () => {
val t = annotType()
if (in.token == LPAREN) parArgumentExprss(wrapNew(t))
else t
}
/** Template ::= ConstrApps [TemplateBody] | TemplateBody
* ConstrApps ::= ConstrApp {`with' ConstrApp}
*
* @return a pair consisting of the template, and a boolean which indicates
* whether the template is missing a body (i.e. it has no {...} part).
*/
def template(constr: DefDef): (Template, Boolean) = {
newLineOptWhenFollowedBy(LBRACE)
if (in.token == LBRACE) (templateBodyOpt(constr, Nil), false)
else {
val parents = tokenSeparated(WITH, constrApp)
newLineOptWhenFollowedBy(LBRACE)
val missingBody = in.token != LBRACE
(templateBodyOpt(constr, parents), missingBody)
}
}
/** TemplateOpt = [`extends' Template | TemplateBody]
*/
def templateOpt(constr: DefDef): Template =
if (in.token == EXTENDS) { in.nextToken(); template(constr)._1 }
else {
newLineOptWhenFollowedBy(LBRACE)
if (in.token == LBRACE) template(constr)._1
else Template(constr, Nil, EmptyValDef, Nil)
}
/** TemplateBody ::= [nl] `{' TemplateStatSeq `}'
*/
def templateBodyOpt(constr: DefDef, parents: List[Tree]) = {
val (self, stats) =
if (in.token == LBRACE) templateBody() else (EmptyValDef, Nil)
Template(constr, parents, self, stats)
}
def templateBody(): (ValDef, List[Tree]) = {
val r = inDefScopeBraces { templateStatSeq() }
if (in.token == WITH) {
syntaxError(EarlyDefinitionsNotSupported())
in.nextToken()
template(emptyConstructor)
}
r
}
/* -------- STATSEQS ------------------------------------------- */
/** Create a tree representing a packaging */
def makePackaging(start: Int, pkg: Tree, stats: List[Tree]): PackageDef = pkg match {
case x: RefTree => atPos(start, pointOffset(pkg))(PackageDef(x, stats))
}
/** Packaging ::= package QualId [nl] `{' TopStatSeq `}'
*/
def packaging(start: Int): Tree = {
val pkg = qualId()
newLineOptWhenFollowedBy(LBRACE)
val stats = inDefScopeBraces(topStatSeq())
makePackaging(start, pkg, stats)
}
/** TopStatSeq ::= TopStat {semi TopStat}
* TopStat ::= Annotations Modifiers TmplDef
* | Packaging
* | package object objectDef
* | Import
* |
*/
def topStatSeq(): List[Tree] = {
val stats = new ListBuffer[Tree]
while (!isStatSeqEnd) {
setLastStatOffset()
if (in.token == PACKAGE) {
val start = in.skipToken()
if (in.token == OBJECT)
stats += objectDef(start, atPos(start, in.skipToken()) { Modifiers(Package) })
else stats += packaging(start)
}
else if (in.token == IMPORT)
stats ++= importClause()
else if (in.token == AT || isTemplateIntro || isModifier)
stats +++= tmplDef(in.offset, defAnnotsMods(modifierTokens))
else if (!isStatSep) {
if (in.token == CASE)
syntaxErrorOrIncomplete(OnlyCaseClassOrCaseObjectAllowed())
else
syntaxErrorOrIncomplete(ExpectedClassOrObjectDef())
if (mustStartStat) // do parse all definitions even if they are probably local (i.e. a "}" has been forgotten)
defOrDcl(in.offset, defAnnotsMods(modifierTokens))
}
acceptStatSepUnlessAtEnd()
}
stats.toList
}
/** TemplateStatSeq ::= [id [`:' Type] `=>'] TemplateStat {semi TemplateStat}
* TemplateStat ::= Import
* | Annotations Modifiers Def
* | Annotations Modifiers Dcl
* | EnumCaseStat
* | Expr1
* |
*/
def templateStatSeq(): (ValDef, List[Tree]) = checkNoEscapingPlaceholders {
var self: ValDef = EmptyValDef
val stats = new ListBuffer[Tree]
if (isExprIntro) {
val first = expr1()
if (in.token == ARROW) {
first match {
case Typed(tree @ This(EmptyTypeIdent), tpt) =>
self = makeSelfDef(nme.WILDCARD, tpt).withPos(first.pos)
case _ =>
val ValDef(name, tpt, _) = convertToParam(first, expected = "self type clause")
if (name != nme.ERROR)
self = makeSelfDef(name, tpt).withPos(first.pos)
}
in.nextToken()
} else {
stats += first
acceptStatSepUnlessAtEnd()
}
}
var exitOnError = false
while (!isStatSeqEnd && !exitOnError) {
setLastStatOffset()
if (in.token == IMPORT)
stats ++= importClause()
else if (isExprIntro)
stats += expr1()
else if (isDefIntro(modifierTokensOrCase))
stats +++= defOrDcl(in.offset, defAnnotsMods(modifierTokens))
else if (!isStatSep) {
exitOnError = mustStartStat
syntaxErrorOrIncomplete("illegal start of definition")
}
acceptStatSepUnlessAtEnd()
}
(self, if (stats.isEmpty) List(EmptyTree) else stats.toList)
}
/** RefineStatSeq ::= RefineStat {semi RefineStat}
* RefineStat ::= Dcl
* |
* (in reality we admit Defs and filter them out afterwards)
*/
def refineStatSeq(): List[Tree] = {
val stats = new ListBuffer[Tree]
while (!isStatSeqEnd) {
if (isDclIntro) {
stats += defOrDcl(in.offset, Modifiers())
} else if (!isStatSep) {
syntaxErrorOrIncomplete(
"illegal start of declaration" +
(if (inFunReturnType) " (possible cause: missing `=' in front of current method body)"
else ""))
}
acceptStatSepUnlessAtEnd()
}
stats.toList
}
def localDef(start: Int, implicitMods: Modifiers = EmptyModifiers): Tree = {
var mods = defAnnotsMods(localModifierTokens)
for (imod <- implicitMods.mods) mods = addMod(mods, imod)
defOrDcl(start, mods)
}
/** BlockStatSeq ::= { BlockStat semi } [ResultExpr]
* BlockStat ::= Import
* | Annotations [implicit] [lazy] Def
* | Annotations LocalModifiers TmplDef
* | Expr1
* |
*/
def blockStatSeq(): List[Tree] = checkNoEscapingPlaceholders {
val stats = new ListBuffer[Tree]
var exitOnError = false
while (!isStatSeqEnd && in.token != CASE && !exitOnError) {
setLastStatOffset()
if (in.token == IMPORT)
stats ++= importClause()
else if (isExprIntro)
stats += expr(Location.InBlock)
else if (isDefIntro(localModifierTokens))
if (in.token == IMPLICIT) {
val start = in.offset
val imods = implicitMods()
if (isBindingIntro) stats += implicitClosure(start, Location.InBlock, imods)
else stats +++= localDef(start, imods)
} else {
stats +++= localDef(in.offset)
}
else if (!isStatSep && (in.token != CASE)) {
exitOnError = mustStartStat
val addendum = if (isModifier) " (no modifiers allowed here)" else ""
syntaxErrorOrIncomplete("illegal start of statement" + addendum)
}
acceptStatSepUnlessAtEnd(CASE)
}
stats.toList
}
/** CompilationUnit ::= {package QualId semi} TopStatSeq
*/
def compilationUnit(): Tree = checkNoEscapingPlaceholders {
def topstats(): List[Tree] = {
val ts = new ListBuffer[Tree]
while (in.token == SEMI) in.nextToken()
val start = in.offset
if (in.token == PACKAGE) {
in.nextToken()
if (in.token == OBJECT) {
ts += objectDef(start, atPos(start, in.skipToken()) { Modifiers(Package) })
if (in.token != EOF) {
acceptStatSep()
ts ++= topStatSeq()
}
} else {
val pkg = qualId()
newLineOptWhenFollowedBy(LBRACE)
if (in.token == EOF)
ts += makePackaging(start, pkg, List())
else if (in.token == LBRACE) {
ts += inDefScopeBraces(makePackaging(start, pkg, topStatSeq()))
acceptStatSepUnlessAtEnd()
ts ++= topStatSeq()
}
else {
acceptStatSep()
ts += makePackaging(start, pkg, topstats())
}
}
}
else
ts ++= topStatSeq()
ts.toList
}
topstats() match {
case List(stat @ PackageDef(_, _)) => stat
case Nil => EmptyTree // without this case we'd get package defs without positions
case stats => PackageDef(Ident(nme.EMPTY_PACKAGE), stats)
}
}
}
class OutlineParser(source: SourceFile)(implicit ctx: Context) extends Parser(source) {
def skipBraces[T](body: T): T = {
accept(LBRACE)
var openBraces = 1
while (in.token != EOF && openBraces > 0) {
if (in.token == XMLSTART) xmlLiteral()
else {
if (in.token == LBRACE) openBraces += 1
else if (in.token == RBRACE) openBraces -= 1
in.nextToken()
}
}
body
}
override def blockExpr(): Tree = skipBraces(EmptyTree)
override def templateBody() = skipBraces((EmptyValDef, List(EmptyTree)))
}
}
|
ihji/dotty | tests/run/phantom-lazy-val.scala |
/* Run this test with
* `run tests/run/xyz.scala -Xprint-diff-del -Xprint:arrayConstructors,phantomTermErasure,phantomTypeErasure,erasure`
* to see the diffs after PhantomRefErasure, PhantomDeclErasure and Erasure.
*/
object Test {
import Boo._
def main(args: Array[String]): Unit = {
println(1)
foo
println(2)
foo
}
lazy val foo = {
println("foo")
any
}
}
object Boo extends Phantom {
type BooAny = this.Any
def any: BooAny = assume
}
|
ihji/dotty | tests/run/phantom-val-2.scala |
object Test {
def main(args: Array[String]): Unit = {
val f = new Foo
println(1)
f.foo
f.foo
assert(!f.getClass.getDeclaredFields.exists(_.getName.startsWith("foo")), "field foo not erased")
}
}
class Foo {
import Boo._
val foo = {
println("foo")
any
}
}
object Boo extends Phantom {
type BooAny = this.Any
def any: BooAny = assume
}
|
ihji/dotty | tests/neg/i2771.scala |
trait A { type L[X] }
trait B { type L }
trait C { type M <: A }
trait D { type M >: B }
object Test {
def test(x: C with D): Unit = {
def f(y: x.M)(z: y.L[y.L]) = z // error: y.L has wrong kind
f(new B { type L[F[_]] = F[F] })(1) // error: F has wrong kind
}
type LB[F[_]]
type LL[F[_]] <: LB[F] // ok
def foo[X[_] <: Any]() = ()
foo[Int]() // an error would be raised later, during PostTyper.
def bar[X, Y]() = ()
bar[List, Int]() // error: List has wrong kind
bar[Y = List, X = Int]() // error: List has wrong kind
}
|
ihji/dotty | tests/neg/phantom-expr.scala |
class Foo {
import Boo._
import Boo1._
type Blinky <: BooAny
type Inky <: BooAny
val blinky = Boo.boo[Blinky]
val inky = Boo.boo[Inky]
val b = true
def fooIf1 =
if (b) blinky
else "" // error
def fooIf2 =
if (b) ""
else blinky // error
def fooIf3 =
if (b) boo1
else blinky // error
def fooMatch1 = blinky match { // error
case _: Blinky => ()
}
def fooMatch2 = 1 match { case 1 => 2
case _ => blinky // error
}
def fooMatch3 = 1 match {
case 1 => boo1
case _ => blinky // error
}
def fooTry =
try 1
catch { case ex: Exception => blinky // error
}
}
object Boo extends Phantom {
type BooAny = this.Any
def boo[B <: BooAny]: B = assume
}
object Boo1 extends Phantom {
type Boo1Any = this.Any
def boo1: Boo1Any = assume
}
|
ihji/dotty | sbt-dotty/sbt-test/sbt-dotty/example-project/build.sbt |
scalaVersion := sys.props("plugin.scalaVersion")
libraryDependencies += ("org.scala-lang.modules" %% "scala-xml" % "1.0.1").withDottyCompat()
|
ihji/dotty | compiler/src/dotty/tools/dotc/typer/TypeAssigner.scala |
package dotty.tools
package dotc
package typer
import core._
import ast._
import Scopes._, Contexts._, Constants._, Types._, Symbols._, Names._, Flags._, Decorators._
import ErrorReporting._, Annotations._, Denotations._, SymDenotations._, StdNames._, TypeErasure._
import TypeApplications.AppliedType
import util.Positions._
import config.Printers.typr
import ast.Trees._
import NameOps._
import collection.mutable
import reporting.diagnostic.Message
import reporting.diagnostic.messages._
import Checking.{preCheckKind, preCheckKinds, checkNoPrivateLeaks}
trait TypeAssigner {
import tpd._
/** The qualifying class of a this or super with prefix `qual` (which might be empty).
* @param packageOK The qualifier may refer to a package.
*/
def qualifyingClass(tree: untpd.Tree, qual: Name, packageOK: Boolean)(implicit ctx: Context): Symbol = {
def qualifies(sym: Symbol) =
sym.isClass && (
qual.isEmpty ||
sym.name == qual ||
sym.is(Module) && sym.name.stripModuleClassSuffix == qual)
ctx.outersIterator.map(_.owner).find(qualifies) match {
case Some(c) if packageOK || !(c is Package) =>
c
case _ =>
ctx.error(
if (qual.isEmpty) tree.show + " can be used only in a class, object, or template"
else qual.show + " is not an enclosing class", tree.pos)
NoSymbol
}
}
/** An upper approximation of the given type `tp` that does not refer to any symbol in `symsToAvoid`.
* Approximation steps are:
*
* - follow aliases and upper bounds if the original refers to a forbidden symbol
* - widen termrefs that refer to a forbidden symbol
* - replace ClassInfos of forbidden classes by the intersection of their parents, refined by all
* non-private fields, methods, and type members.
* - if the prefix of a class refers to a forbidden symbol, first try to replace the prefix,
* if this is not possible, replace the ClassInfo as above.
* - drop refinements referring to a forbidden symbol.
*/
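// Illustrative example (not part of the original source; `A` and `C` are
// made-up names): when typing a block such as
//
//   { class C extends A { def x = 1 }; new C }
//
// the local symbol C may not escape, so the block's type is approximated by
// C's parents refined with its non-private members, roughly `A { def x: Int }`.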
def avoid(tp: Type, symsToAvoid: => List[Symbol])(implicit ctx: Context): Type = {
val widenMap = new TypeMap {
lazy val forbidden = symsToAvoid.toSet
def toAvoid(tp: Type): Boolean =
// TODO: measure the cost of using `existsPart`, and if necessary replace it
// by a `TypeAccumulator` where we have set `stopAtStatic = true`.
tp existsPart {
case tp: TermRef => forbidden.contains(tp.symbol) || toAvoid(tp.underlying)
case tp: TypeRef => forbidden.contains(tp.symbol)
case tp: ThisType => forbidden.contains(tp.cls)
case _ => false
}
def apply(tp: Type): Type = tp match {
case tp: TermRef
if toAvoid(tp) && (variance > 0 || tp.info.widenExpr <:< tp) =>
// Can happen if `x: y.type`, then `x.type =:= y.type`, hence we can widen `x.type`
// to y.type in all contexts, not just covariant ones.
apply(tp.info.widenExpr)
case tp: TypeRef if toAvoid(tp) =>
tp.info match {
case TypeAlias(ref) =>
apply(ref)
case info: ClassInfo if variance > 0 =>
if (!(forbidden contains tp.symbol)) {
val prefix = apply(tp.prefix)
val tp1 = tp.derivedSelect(prefix)
if (tp1.typeSymbol.exists)
return tp1
}
val parentType = info.parentsWithArgs.reduceLeft(ctx.typeComparer.andType(_, _))
def addRefinement(parent: Type, decl: Symbol) = {
val inherited =
parentType.findMember(decl.name, info.cls.thisType, Private)
.suchThat(decl.matches(_))
val inheritedInfo = inherited.info
if (inheritedInfo.exists && decl.info <:< inheritedInfo && !(inheritedInfo <:< decl.info)) {
val r = RefinedType(parent, decl.name, decl.info)
typr.println(i"add ref $parent $decl --> " + r)
r
}
else
parent
}
val refinableDecls = info.decls.filter(
sym => !(sym.is(TypeParamAccessor | Private) || sym.isConstructor))
val fullType = (parentType /: refinableDecls)(addRefinement)
apply(fullType)
case TypeBounds(lo, hi) if variance > 0 =>
apply(hi)
case _ =>
mapOver(tp)
}
case tp @ HKApply(tycon, args) if toAvoid(tycon) =>
apply(tp.superType)
case tp @ AppliedType(tycon, args) if toAvoid(tycon) =>
val base = apply(tycon)
var args = tp.baseArgInfos(base.typeSymbol)
if (base.typeParams.length != args.length)
args = base.typeParams.map(_.paramInfo)
apply(base.appliedTo(args))
case tp @ RefinedType(parent, name, rinfo) if variance > 0 =>
val parent1 = apply(tp.parent)
val refinedInfo1 = apply(rinfo)
if (toAvoid(refinedInfo1)) {
typr.println(s"dropping refinement from $tp")
if (name.isTypeName) tp.derivedRefinedType(parent1, name, TypeBounds.empty)
else parent1
} else {
tp.derivedRefinedType(parent1, name, refinedInfo1)
}
case tp: TypeVar if ctx.typerState.constraint.contains(tp) =>
val lo = ctx.typerState.constraint.fullLowerBound(tp.origin)
val lo1 = avoid(lo, symsToAvoid)
if (lo1 ne lo) lo1 else tp
case _ =>
mapOver(tp)
}
}
widenMap(tp)
}
def avoidingType(expr: Tree, bindings: List[Tree])(implicit ctx: Context): Type =
avoid(expr.tpe, localSyms(bindings).filter(_.isTerm))
def avoidPrivateLeaks(sym: Symbol, pos: Position)(implicit ctx: Context): Type =
if (!sym.is(SyntheticOrPrivate) && sym.owner.isClass) checkNoPrivateLeaks(sym, pos)
else sym.info
def seqToRepeated(tree: Tree)(implicit ctx: Context): Tree =
Typed(tree, TypeTree(tree.tpe.widen.translateParameterized(defn.SeqClass, defn.RepeatedParamClass)))
/** A denotation exists really if it exists and does not point to a stale symbol. */
final def reallyExists(denot: Denotation)(implicit ctx: Context): Boolean = try
denot match {
case denot: SymDenotation =>
denot.exists && !denot.isAbsent
case denot: SingleDenotation =>
val sym = denot.symbol
(sym eq NoSymbol) || reallyExists(sym.denot)
case _ =>
true
}
catch {
case ex: StaleSymbol => false
}
/** If `tpe` is a named type, check that its denotation is accessible in the
* current context. Return the type with those alternatives as denotations
* which are accessible.
*
* Also performs the following normalizations on the type `tpe`.
* (1) parameter accessors are always dereferenced.
* (2) if the owner of the denotation is a package object, it is assured
* that the package object shows up as the prefix.
*/
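// Note (not part of the original source): one subtlety handled below is that
// a lookup may first find an inaccessible private member; in that case the
// code retries via `pre.nonPrivateMember(name)` so an inherited non-private
// member with the same name and signature can still be selected.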
def ensureAccessible(tpe: Type, superAccess: Boolean, pos: Position)(implicit ctx: Context): Type = {
def test(tpe: Type, firstTry: Boolean): Type = tpe match {
case tpe: NamedType =>
val pre = tpe.prefix
val name = tpe.name
val d = tpe.denot.accessibleFrom(pre, superAccess)
if (!d.exists) {
// it could be that we found an inaccessible private member, but there is
// an inherited non-private member with the same name and signature.
val d2 = pre.nonPrivateMember(name)
if (reallyExists(d2) && firstTry)
test(tpe.shadowed.withDenot(d2), false)
else if (pre.derivesFrom(defn.DynamicClass)) {
TryDynamicCallType
} else {
val alts = tpe.denot.alternatives.map(_.symbol).filter(_.exists)
val what = alts match {
case Nil =>
name.toString
case sym :: Nil =>
if (sym.owner == pre.typeSymbol) sym.show else sym.showLocated
case _ =>
em"none of the overloaded alternatives named $name"
}
val where = if (ctx.owner.exists) s" from ${ctx.owner.enclosingClass}" else ""
val whyNot = new StringBuffer
alts foreach (_.isAccessibleFrom(pre, superAccess, whyNot))
if (tpe.isError) tpe
else errorType(ex"$what cannot be accessed as a member of $pre$where.$whyNot", pos)
}
}
else if (d.symbol is TypeParamAccessor)
ensureAccessible(d.info.bounds.hi, superAccess, pos)
else
ctx.makePackageObjPrefixExplicit(tpe withDenot d)
case _ =>
tpe
}
test(tpe, true)
}
/** The type of a selection with `name` of a tree with type `site`.
*/
def selectionType(site: Type, name: Name, pos: Position)(implicit ctx: Context): Type = {
val mbr = site.member(name)
if (reallyExists(mbr)) site.select(name, mbr)
else if (site.derivesFrom(defn.DynamicClass) && !Dynamic.isDynamicMethod(name)) {
TryDynamicCallType
} else {
if (site.isErroneous || name.toTermName == nme.ERROR) UnspecifiedErrorType
else {
def kind = if (name.isTypeName) "type" else "value"
def addendum =
if (site.derivesFrom(defn.DynamicClass)) "\npossible cause: maybe a wrong Dynamic method signature?"
else ""
errorType(
if (name == nme.CONSTRUCTOR) ex"$site does not have a constructor"
else NotAMember(site, name, kind),
pos)
}
}
}
/** The selection type, which is additionally checked for accessibility.
*/
def accessibleSelectionType(tree: untpd.RefTree, qual1: Tree)(implicit ctx: Context): Type = {
val ownType = selectionType(qual1.tpe.widenIfUnstable, tree.name, tree.pos)
ensureAccessible(ownType, qual1.isInstanceOf[Super], tree.pos)
}
/** Type assignment method. Each method takes as parameters
* - an untpd.Tree to which it assigns a type,
* - typed child trees it needs to access to compute that type,
* - any further information it needs to access to compute that type.
*/
def assignType(tree: untpd.Ident, tp: Type)(implicit ctx: Context) =
tree.withType(tp)
def assignType(tree: untpd.Select, qual: Tree)(implicit ctx: Context): Select = {
def qualType = qual.tpe.widen
def arrayElemType = {
val JavaArrayType(elemtp) = qualType
elemtp
}
val p = nme.primitive
val tp = tree.name match {
case p.arrayApply => MethodType(defn.IntType :: Nil, arrayElemType)
case p.arrayUpdate => MethodType(defn.IntType :: arrayElemType :: Nil, defn.UnitType)
case p.arrayLength => MethodType(Nil, defn.IntType)
// Note that we do not need to handle calls to Array[T]#clone() specially:
// The JLS section 10.7 says "The return type of the clone method of an array type
// T[] is T[]", but the actual return type at the bytecode level is Object which
// is casted to T[] by javac. Since the return type of Array[T]#clone() is Array[T],
// this is exactly what Erasure will do.
case _ => accessibleSelectionType(tree, qual)
}
tree.withType(tp)
}
def assignType(tree: untpd.New, tpt: Tree)(implicit ctx: Context) =
tree.withType(tpt.tpe)
def assignType(tree: untpd.Literal)(implicit ctx: Context) =
tree.withType {
val value = tree.const
value.tag match {
case UnitTag => defn.UnitType
case NullTag => defn.NullType
case _ => if (ctx.erasedTypes) value.tpe else ConstantType(value)
}
}
def assignType(tree: untpd.This)(implicit ctx: Context) = {
val cls = qualifyingClass(tree, tree.qual.name, packageOK = false)
tree.withType(
if (cls.isClass) cls.thisType
else errorType("not a legal qualifying class for this", tree.pos))
}
def assignType(tree: untpd.Super, qual: Tree, inConstrCall: Boolean, mixinClass: Symbol = NoSymbol)(implicit ctx: Context) = {
val mix = tree.mix
qual.tpe match {
case err: ErrorType => untpd.cpy.Super(tree)(qual, mix).withType(err)
case qtype @ ThisType(_) =>
val cls = qtype.cls
def findMixinSuper(site: Type): Type = site.parents filter (_.name == mix.name) match {
case p :: Nil =>
p
case Nil =>
errorType(SuperQualMustBeParent(mix, cls), tree.pos)
case p :: q :: _ =>
errorType("ambiguous parent class qualifier", tree.pos)
}
val owntype =
if (mixinClass.exists) mixinClass.typeRef
else if (!mix.isEmpty) findMixinSuper(cls.info)
else if (inConstrCall || ctx.erasedTypes) cls.info.firstParent
else {
val ps = cls.classInfo.parentsWithArgs
if (ps.isEmpty) defn.AnyType else ps.reduceLeft((x: Type, y: Type) => x & y)
}
tree.withType(SuperType(cls.thisType, owntype))
}
}
/** Substitute argument type `argType` for parameter `pref` in type `tp`,
* skolemizing the argument type if it is not stable and `pref` occurs in `tp`.
*/
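// Illustrative example (not part of the original source; `f`, `A` and `arg`
// are made-up names): for a dependent method `def f(a: A): a.T`, an unstable
// argument in `f(arg)` cannot appear in the result type directly, so the
// parameter is substituted with a fresh skolem of the argument's widened type.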
def safeSubstParam(tp: Type, pref: ParamRef, argType: Type)(implicit ctx: Context) = {
val tp1 = tp.substParam(pref, argType)
if ((tp1 eq tp) || argType.isStable) tp1
else tp.substParam(pref, SkolemType(argType.widen))
}
def assignType(tree: untpd.Apply, fn: Tree, args: List[Tree])(implicit ctx: Context) = {
val ownType = fn.tpe.widen match {
case fntpe: MethodType =>
def safeSubstParams(tp: Type, params: List[ParamRef], args: List[Tree]): Type = params match {
case param :: params1 =>
val tp1 = safeSubstParam(tp, param, args.head.tpe)
safeSubstParams(tp1, params1, args.tail)
case Nil =>
tp
}
if (sameLength(fntpe.paramInfos, args) || ctx.phase.prev.relaxedTyping)
if (fntpe.isDependent) safeSubstParams(fntpe.resultType, fntpe.paramRefs, args)
else fntpe.resultType
else
errorType(i"wrong number of arguments for $fntpe: ${fn.tpe}, expected: ${fntpe.paramInfos.length}, found: ${args.length}", tree.pos)
case t =>
errorType(err.takesNoParamsStr(fn, ""), tree.pos)
}
tree.withType(ownType)
}
def assignType(tree: untpd.TypeApply, fn: Tree, args: List[Tree])(implicit ctx: Context) = {
val ownType = fn.tpe.widen match {
case pt: TypeLambda =>
val paramNames = pt.paramNames
if (hasNamedArg(args)) {
val paramBoundsByName = paramNames.zip(pt.paramInfos).toMap
// Type arguments which are specified by name (immutable after this first loop)
val namedArgMap = new mutable.HashMap[Name, Type]
for (NamedArg(name, arg) <- args)
if (namedArgMap.contains(name))
ctx.error("duplicate name", arg.pos)
else if (!paramNames.contains(name))
ctx.error(s"undefined parameter name, required: ${paramNames.mkString(" or ")}", arg.pos)
else
namedArgMap(name) = preCheckKind(arg, paramBoundsByName(name.asTypeName)).tpe
// Holds indexes of non-named typed arguments in paramNames
val gapBuf = new mutable.ListBuffer[Int]
def nextPoly(idx: Int) = {
val newIndex = gapBuf.length
gapBuf += idx
// Re-index unassigned type arguments that remain after transformation
TypeParamRef(pt, newIndex)
}
// Type parameters after naming assignment, conserving paramNames order
val normArgs: List[Type] = paramNames.zipWithIndex.map { case (pname, idx) =>
namedArgMap.getOrElse(pname, nextPoly(idx))
}
val transform = new TypeMap {
def apply(t: Type) = t match {
case TypeParamRef(`pt`, idx) => normArgs(idx)
case _ => mapOver(t)
}
}
val resultType1 = transform(pt.resultType)
if (gapBuf.isEmpty) resultType1
else {
val gaps = gapBuf.toList
pt.derivedLambdaType(
gaps.map(paramNames),
gaps.map(idx => transform(pt.paramInfos(idx)).bounds),
resultType1)
}
}
else {
val argTypes = preCheckKinds(args, pt.paramInfos).tpes
if (sameLength(argTypes, paramNames) || ctx.phase.prev.relaxedTyping) pt.instantiate(argTypes)
else wrongNumberOfTypeArgs(fn.tpe, pt.typeParams, args, tree.pos)
}
case _ =>
errorType(err.takesNoParamsStr(fn, "type "), tree.pos)
}
tree.withType(ownType)
}
def assignType(tree: untpd.Typed, tpt: Tree)(implicit ctx: Context) =
tree.withType(tpt.tpe)
def assignType(tree: untpd.NamedArg, arg: Tree)(implicit ctx: Context) =
tree.withType(arg.tpe)
def assignType(tree: untpd.Assign)(implicit ctx: Context) =
tree.withType(defn.UnitType)
def assignType(tree: untpd.Block, stats: List[Tree], expr: Tree)(implicit ctx: Context) =
tree.withType(avoidingType(expr, stats))
def assignType(tree: untpd.Inlined, bindings: List[Tree], expansion: Tree)(implicit ctx: Context) =
tree.withType(avoidingType(expansion, bindings))
def assignType(tree: untpd.If, thenp: Tree, elsep: Tree)(implicit ctx: Context) =
tree.withType(lubInSameUniverse(thenp :: elsep :: Nil, "branches of an if/else"))
def assignType(tree: untpd.Closure, meth: Tree, target: Tree)(implicit ctx: Context) =
tree.withType(
if (target.isEmpty) meth.tpe.widen.toFunctionType(tree.env.length)
else target.tpe)
def assignType(tree: untpd.CaseDef, body: Tree)(implicit ctx: Context) =
tree.withType(body.tpe)
def assignType(tree: untpd.Match, cases: List[CaseDef])(implicit ctx: Context) = {
if (tree.selector.typeOpt.isPhantom)
ctx.error("cannot pattern match on values of a phantom type", tree.selector.pos)
tree.withType(lubInSameUniverse(cases, "branches of a match"))
}
def assignType(tree: untpd.Return)(implicit ctx: Context) =
tree.withType(defn.NothingType)
def assignType(tree: untpd.Try, expr: Tree, cases: List[CaseDef])(implicit ctx: Context) =
if (cases.isEmpty) tree.withType(expr.tpe)
else tree.withType(lubInSameUniverse(expr :: cases, "branches of a try"))
def assignType(tree: untpd.SeqLiteral, elems: List[Tree], elemtpt: Tree)(implicit ctx: Context) = {
val ownType = tree match {
case tree: untpd.JavaSeqLiteral => defn.ArrayOf(elemtpt.tpe)
case _ => if (ctx.erasedTypes) defn.SeqType else defn.SeqType.appliedTo(elemtpt.tpe)
}
tree.withType(ownType)
}
def assignType(tree: untpd.SingletonTypeTree, ref: Tree)(implicit ctx: Context) =
tree.withType(ref.tpe)
def assignType(tree: untpd.AndTypeTree, left: Tree, right: Tree)(implicit ctx: Context) =
tree.withType(inSameUniverse(_ & _, left.tpe, right, "an `&`"))
def assignType(tree: untpd.OrTypeTree, left: Tree, right: Tree)(implicit ctx: Context) =
tree.withType(inSameUniverse(_ | _, left.tpe, right, "an `|`"))
/** Assign type of RefinedType.
* Refinements are typed as if they were members of refinement class `refineCls`.
*/
def assignType(tree: untpd.RefinedTypeTree, parent: Tree, refinements: List[Tree], refineCls: ClassSymbol)(implicit ctx: Context) = {
def addRefinement(parent: Type, refinement: Tree): Type = {
val rsym = refinement.symbol
val rinfo = if (rsym is Accessor) rsym.info.resultType else rsym.info
if (rinfo.isError) rinfo
else if (!rinfo.exists) parent // can happen after failure in self type definition
else RefinedType(parent, rsym.name, rinfo)
}
val refined = (parent.tpe /: refinements)(addRefinement)
tree.withType(RecType.closeOver(rt => refined.substThis(refineCls, RecThis(rt))))
}
def assignType(tree: untpd.AppliedTypeTree, tycon: Tree, args: List[Tree])(implicit ctx: Context) = {
assert(!hasNamedArg(args))
val tparams = tycon.tpe.typeParams
val ownType =
if (sameLength(tparams, args)) tycon.tpe.appliedTo(args.tpes)
else wrongNumberOfTypeArgs(tycon.tpe, tparams, args, tree.pos)
tree.withType(ownType)
}
def assignType(tree: untpd.LambdaTypeTree, tparamDefs: List[TypeDef], body: Tree)(implicit ctx: Context) =
tree.withType(HKTypeLambda.fromParams(tparamDefs.map(_.symbol.asType), body.tpe))
def assignType(tree: untpd.ByNameTypeTree, result: Tree)(implicit ctx: Context) =
tree.withType(ExprType(result.tpe))
def assignType(tree: untpd.TypeBoundsTree, lo: Tree, hi: Tree)(implicit ctx: Context) =
tree.withType(
if (lo eq hi) TypeAlias(lo.tpe)
else inSameUniverse(TypeBounds(_, _), lo.tpe, hi, "type bounds"))
def assignType(tree: untpd.Bind, sym: Symbol)(implicit ctx: Context) =
tree.withType(NamedType.withFixedSym(NoPrefix, sym))
def assignType(tree: untpd.Alternative, trees: List[Tree])(implicit ctx: Context) =
tree.withType(ctx.typeComparer.lub(trees.tpes))
def assignType(tree: untpd.UnApply, proto: Type)(implicit ctx: Context) =
tree.withType(proto)
def assignType(tree: untpd.ValDef, sym: Symbol)(implicit ctx: Context) =
tree.withType(if (sym.exists) assertExists(symbolicIfNeeded(sym).orElse(sym.valRef)) else NoType)
def assignType(tree: untpd.DefDef, sym: Symbol)(implicit ctx: Context) =
tree.withType(symbolicIfNeeded(sym).orElse(sym.termRefWithSig))
def assignType(tree: untpd.TypeDef, sym: Symbol)(implicit ctx: Context) =
tree.withType(symbolicIfNeeded(sym).orElse(sym.typeRef))
private def symbolicIfNeeded(sym: Symbol)(implicit ctx: Context) = {
val owner = sym.owner
owner.infoOrCompleter match {
case info: ClassInfo if info.givenSelfType.exists =>
// In that case a simple typeRef/termRefWithSig could return a member of
// the self type, not the symbol itself. To avoid this, we make the reference
// symbolic. In general it seems to be faster to keep the non-symbolic
// reference, since there is less pressure on the uniqueness tables that way
// and less work to update all the different references. That's why symbolic references
// are only used if necessary.
NamedType.withFixedSym(owner.thisType, sym)
case _ => NoType
}
}
def assertExists(tp: Type) = { assert(tp != NoType); tp }
def assignType(tree: untpd.Import, sym: Symbol)(implicit ctx: Context) =
tree.withType(sym.nonMemberTermRef)
def assignType(tree: untpd.Annotated, arg: Tree, annot: Tree)(implicit ctx: Context) =
tree.withType(AnnotatedType(arg.tpe.widen, Annotation(annot)))
def assignType(tree: untpd.PackageDef, pid: Tree)(implicit ctx: Context) =
tree.withType(pid.symbol.valRef)
/** Ensure that `tree2`'s type is in the same universe as `tree1`. If that's the case, return
* `op` applied to both types.
* If not, issue an error and return `tree1`'s type.
*/
private def inSameUniverse(op: (Type, Type) => Type, tp1: Type, tree2: Tree, relationship: => String)(implicit ctx: Context): Type =
if (tp1.topType == tree2.tpe.topType)
op(tp1, tree2.tpe)
else {
ctx.error(ex"$tp1 and ${tree2.tpe} are in different universes. They cannot be combined in $relationship", tree2.pos)
tp1
}
private def lubInSameUniverse(trees: List[Tree], relationship: => String)(implicit ctx: Context): Type =
trees match {
case first :: rest => (first.tpe /: rest)(inSameUniverse(_ | _, _, _, relationship))
case Nil => defn.NothingType
}
}
object TypeAssigner extends TypeAssigner
|
ihji/dotty | compiler/src/dotty/tools/dotc/transform/PhantomArgLift.scala |
package dotty.tools.dotc.transform
import dotty.tools.dotc.ast.tpd
import dotty.tools.dotc.core.Contexts._
import dotty.tools.dotc.core.NameKinds._
import dotty.tools.dotc.core.Types._
import dotty.tools.dotc.transform.TreeTransforms.{MiniPhaseTransform, TransformerInfo}
import dotty.tools.dotc.typer.EtaExpansion
import scala.collection.mutable.ListBuffer
/** This phase extracts the arguments of phantom type before the application, to avoid losing any
 * effects in the argument trees. This trivializes the removal of the parameters in the Erasure phase.
*
* `f(x1,...)(y1,...)...(...)` with at least one phantom argument
*
* -->
*
* `val ev$f = f` // if `f` is some expression that needs evaluation
* `val ev$x1 = x1`
* ...
* `val ev$y1 = y1`
* ...
* `ev$f(ev$x1,...)(ev$y1,...)...(...)`
*
*/
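/* A concrete sketch of the lift (hypothetical user code, assuming a `Phantom`
 * object `Boo` like the one used in the test suite):
 *
 *   def f(x: Int)(y: Boo.BooAny): Int = x
 *   f({ println("x"); 1 })({ println("y"); Boo.any })
 *
 * becomes, conceptually,
 *
 *   val ev$x1 = { println("x"); 1 }
 *   val ev$y1 = { println("y"); Boo.any }
 *   f(ev$x1)(ev$y1)
 *
 * (`f` itself is a stable reference here, so no `ev$f` is needed); the side
 * effect of the phantom argument thus survives the argument's later erasure.
 */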
class PhantomArgLift extends MiniPhaseTransform {
import tpd._
override def phaseName: String = "phantomArgLift"
/** Check what the phase achieves, to be called at any point after it is finished. */
override def checkPostCondition(tree: Tree)(implicit ctx: Context): Unit = tree match {
case tree: Apply =>
tree.args.foreach { arg =>
assert(!arg.tpe.isPhantom || isPureExpr(arg))
}
case _ =>
}
/* Tree transform */
override def transformApply(tree: Apply)(implicit ctx: Context, info: TransformerInfo): Tree = tree.tpe.widen match {
case _: MethodType => tree // Do the transformation higher in the tree if needed
case _ =>
if (!hasImpurePhantomArgs(tree)) tree
else {
val buffer = ListBuffer.empty[Tree]
val app = EtaExpansion.liftApp(buffer, tree)
if (buffer.isEmpty) app
else Block(buffer.result(), app)
}
}
/* private methods */
/** Returns true if at least one of the arguments is an impure phantom.
 * Inner applies are also checked in case of multiple parameter lists.
*/
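// e.g. (hypothetical): in `f({ println("eff"); boo[Inky] })` the phantom
// argument has a side effect, so it is not a pure expression and the
// surrounding application must be lifted.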
private def hasImpurePhantomArgs(tree: Apply)(implicit ctx: Context): Boolean = {
tree.args.exists(arg => arg.tpe.isPhantom && !isPureExpr(arg)) || {
tree.fun match {
case fun: Apply => hasImpurePhantomArgs(fun)
case _ => false
}
}
}
}
|
ihji/dotty | compiler/test/dotty/Jars.scala |
package dotty
/** Jars used when compiling tests; normally set from the sbt build */
object Jars {
/** Dotty library Jar */
val dottyLib: String = sys.env.get("DOTTY_LIB")
.getOrElse(Properties.dottyLib)
/** Dotty Compiler Jar */
val dottyCompiler: String = sys.env.get("DOTTY_COMPILER")
.getOrElse(Properties.dottyCompiler)
/** Dotty Interfaces Jar */
val dottyInterfaces: String = sys.env.get("DOTTY_INTERFACE")
.getOrElse(Properties.dottyInterfaces)
/** Dotty extras classpath from env or properties */
val dottyExtras: List[String] = sys.env.get("DOTTY_EXTRAS")
.map(_.split(":").toList).getOrElse(Properties.dottyExtras)
/** Dotty REPL dependencies */
val dottyReplDeps: List[String] = dottyLib :: dottyExtras
/** Dotty test dependencies */
val dottyTestDeps: List[String] =
dottyLib :: dottyCompiler :: dottyInterfaces :: dottyExtras
def scalaLibrary: String = sys.env.get("DOTTY_SCALA_LIBRARY")
.getOrElse(findJarFromRuntime("scala-library-2."))
/** Gets the Scala 2.* library at runtime. Note that doing this is unsafe
 * unless you know that the library will be on the classpath of the running
 * application. It is currently safe to call this function if the tests are
 * run by sbt.
*/
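// Hypothetical usage, assuming sbt put the library on the runtime classpath:
// val scalaLib = findJarFromRuntime("scala-library-2.")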
private def findJarFromRuntime(partialName: String) = {
val urls = ClassLoader.getSystemClassLoader.asInstanceOf[java.net.URLClassLoader].getURLs.map(_.getFile.toString)
urls.find(_.contains(partialName)).getOrElse {
throw new java.io.FileNotFoundException(
s"""Unable to locate $partialName on classpath:\n${urls.toList.mkString("\n")}"""
)
}
}
}
|
ihji/dotty | tests/run/phantom-decls-2.scala |
/* Run this test with
* `run tests/run/xyz.scala -Xprint-diff-del -Xprint:arrayConstructors,phantomTermErasure,phantomTypeErasure,erasure`
* to see the diffs after PhantomRefErasure, PhantomDeclErasure and Erasure.
*/
object Test {
import Boo._
def main(args: Array[String]): Unit = {
new Boo2().polyfun1(boo[Blinky])
new Boo2().polyfun1(boo[Inky])
new Boo2().polyfun1(boo[Pinky])
}
class Boo2 {
println("Boo2")
type Boo3 = BooAny
def polyfun1(p2: Boo3): Unit = {
println("Boo2.polyfun1")
}
}
}
object Boo extends Phantom {
type BooAny = this.Any
type Blinky <: this.Any
type Inky <: Blinky
type Pinky <: Inky
type Casper = Pinky
def boo[B <: this.Any]: B = assume
}
|
ihji/dotty | tests/neg/phantom-bottom.scala |
class BooFunDef1 {
import Boo._
def fun0(x: Foo): x.Y = Boo.nothing
def fun1(x: Foo): x.Y = ??? // error
def fun2(x: Foo): x.Y = null // error
def fun3(x: Foo): x.Y = Boo2.nothing // error
}
class Foo {
type Y <: Boo.BooAny
}
object Boo extends Phantom {
type BooAny = this.Any
type BooNothing = this.Nothing
def nothing: BooNothing = assume
}
object Boo2 extends Phantom {
type BooNothing2 = this.Nothing
def nothing: BooNothing2 = assume
} |
ihji/dotty | compiler/src/dotty/tools/dotc/transform/ElimJavaPackages.scala | package dotty.tools.dotc.transform
import dotty.tools.dotc.ast.tpd._
import dotty.tools.dotc.core.Contexts.Context
import dotty.tools.dotc.core.Flags._
import dotty.tools.dotc.core.Types.{Type, TypeRef}
import dotty.tools.dotc.transform.TreeTransforms.{MiniPhaseTransform, TransformerInfo}
/**
* Eliminates syntactic references to Java packages, so that there's no chance
* they accidentally end up in the backend.
*/
class ElimJavaPackages extends MiniPhaseTransform {
override def phaseName: String = "elimJavaPackages"
override def transformSelect(tree: Select)(implicit ctx: Context, info: TransformerInfo): Tree = {
if (isJavaPackage(tree)) {
assert(tree.tpe.isInstanceOf[TypeRef], s"Expected tree with type TypeRef, but got ${tree.tpe.show}")
Ident(tree.tpe.asInstanceOf[TypeRef])
} else {
tree
}
}
override def checkPostCondition(tree: Tree)(implicit ctx: Context): Unit = {
tree match {
case tree: Select =>
assert(!isJavaPackage(tree), s"Unexpected reference to Java package in ${tree.show}")
case _ => ()
}
}
/**
* Is the given tree a syntactic reference to a Java package?
*/
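// Illustration: for a Select such as `java.lang.Object`, the prefix of its
// TypeRef is the Java-defined package `java.lang`, so the Select is replaced
// above by an Ident carrying the same TypeRef.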
private def isJavaPackage(tree: Select)(implicit ctx: Context): Boolean = {
tree.tpe match {
case TypeRef(prefix, _) =>
val flags = prefix.termSymbol.flags
// Testing for each flag separately is more efficient than using FlagConjunction.
flags.is(Package) && flags.is(JavaDefined)
case _ => false
}
}
}
|
ihji/dotty | tests/run/phantom-lazy-val-2.scala |
object Test {
def main(args: Array[String]): Unit = {
val f = new Foo
println(1)
f.foo
println(2)
f.foo
// TODO: Erase
// Currently not erasing fields for lazy vals
assert(f.getClass.getDeclaredFields.exists(_.getName.startsWith("foo")), "Field foo erased. Optimized accidentally?")
}
}
class Foo {
import Boo._
lazy val foo = {
println("foo")
any
}
}
object Boo extends Phantom {
type BooAny = this.Any
def any: BooAny = assume
}
|
ihji/dotty | compiler/src/dotty/tools/dotc/transform/TailRec.scala | package dotty.tools.dotc
package transform
import ast.Trees._
import ast.{TreeTypeMap, tpd}
import core._
import Contexts.Context
import Decorators._
import DenotTransformers.DenotTransformer
import Denotations.SingleDenotation
import Symbols._
import Types._
import NameKinds.TailLabelName
import TreeTransforms.{MiniPhaseTransform, TransformerInfo}
/**
* A Tail Rec Transformer
* @author <NAME>, <NAME>,
* ported and heavily modified for dotty by <NAME>
* @version 1.1
*
* What it does:
* <p>
* Finds method calls in tail-position and replaces them with jumps.
* A call is in a tail-position if it is the last instruction to be
* executed in the body of a method. This is done by recursing over
* the trees that may contain calls in tail-position (trees that can't
* contain such calls are not transformed). However, they are not that
* many.
* </p>
* <p>
* Self-recursive calls in tail-position are replaced by jumps to a
* label at the beginning of the method. As the JVM provides no way to
* jump from a method to another one, non-recursive calls in
* tail-position are not optimized.
* </p>
* <p>
* A method call is self-recursive if it calls the current method and
* the method is final (otherwise, it could
* be a call to an overridden method in a subclass).
*
 * Recursive calls on a different instance are optimized. Since 'this'
 * is not a local variable, it is added as a label parameter.
* </p>
* <p>
* This phase has been moved before pattern matching to catch more
* of the common cases of tail recursive functions. This means that
* more cases should be taken into account (like nested function, and
* pattern cases).
* </p>
* <p>
* If a method contains self-recursive calls, a label is added to at
* the beginning of its body and the calls are replaced by jumps to
* that label.
* </p>
* <p>
*
 * In scalac, if the method had type parameters, the call had to contain the same
 * parameters as type arguments. This is no longer the case in dotc.
 * In scalac this phase is named tailCall, but since it only provides an optimization
 * for self-recursive functions, it is renamed to tailrec here.
* </p>
*/
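/* A minimal conceptual sketch of the rewrite (hypothetical user code):
 *
 *   final def sum(xs: List[Int], acc: Int): Int = xs match {
 *     case Nil     => acc
 *     case y :: ys => sum(ys, acc + y)            // self-recursive, tail position
 *   }
 *
 * is rewritten to, roughly,
 *
 *   final def sum(xs: List[Int], acc: Int): Int = {
 *     def tailLabel1$(xs: List[Int], acc: Int): Int = xs match {
 *       case Nil     => acc
 *       case y :: ys => tailLabel1$(ys, acc + y)  // becomes a jump
 *     }
 *     tailLabel1$(xs, acc)
 *   }
 *
 * where the label method is compiled down to a jump back to the start of the
 * body. The label name and exact shape are illustrative, not the literal output.
 */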
class TailRec extends MiniPhaseTransform with DenotTransformer with FullParameterization { thisTransform =>
import TailRec._
import dotty.tools.dotc.ast.tpd._
override def transform(ref: SingleDenotation)(implicit ctx: Context): SingleDenotation = ref
override def phaseName: String = "tailrec"
override def treeTransformPhase = thisTransform // TODO Make sure tailrec runs at next phase.
final val labelFlags = Flags.Synthetic | Flags.Label
/** Symbols of methods that have @tailrec annotations inside */
private val methodsWithInnerAnnots = new collection.mutable.HashSet[Symbol]()
override def transformUnit(tree: Tree)(implicit ctx: Context, info: TransformerInfo): Tree = {
methodsWithInnerAnnots.clear()
tree
}
override def transformTyped(tree: Typed)(implicit ctx: Context, info: TransformerInfo): Tree = {
if (tree.tpt.tpe.hasAnnotation(defn.TailrecAnnot))
methodsWithInnerAnnots += ctx.owner.enclosingMethod
tree
}
private def mkLabel(method: Symbol, abstractOverClass: Boolean)(implicit ctx: Context): TermSymbol = {
val name = TailLabelName.fresh()
if (method.owner.isClass)
ctx.newSymbol(method, name.toTermName, labelFlags, fullyParameterizedType(method.info, method.enclosingClass.asClass, abstractOverClass, liftThisType = false))
else ctx.newSymbol(method, name.toTermName, labelFlags, method.info)
}
override def transformDefDef(tree: tpd.DefDef)(implicit ctx: Context, info: TransformerInfo): tpd.Tree = {
val sym = tree.symbol
tree match {
case dd@DefDef(name, tparams, vparamss0, tpt, _)
if (sym.isEffectivelyFinal) && !((sym is Flags.Accessor) || (dd.rhs eq EmptyTree) || (sym is Flags.Label)) =>
val mandatory = sym.hasAnnotation(defn.TailrecAnnot)
atGroupEnd { implicit ctx: Context =>
cpy.DefDef(dd)(rhs = {
val defIsTopLevel = sym.owner.isClass
val origMeth = sym
val label = mkLabel(sym, abstractOverClass = defIsTopLevel)
val owner = ctx.owner.enclosingClass.asClass
val thisTpe = owner.thisType.widen
var rewrote = false
// Note: this could be split into two separate transforms (in different groups):
// the first one would collect info about which transformations and rewritings
// should be applied, and the second one would actually apply them. Currently
// this speculatively transforms the tree and throws away the result in many cases.
val rhsSemiTransformed = {
val transformer = new TailRecElimination(origMeth, dd.tparams, owner, thisTpe, mandatory, label, abstractOverClass = defIsTopLevel)
val rhs = atGroupEnd(implicit ctx => transformer.transform(dd.rhs))
rewrote = transformer.rewrote
rhs
}
if (rewrote) {
val dummyDefDef = cpy.DefDef(tree)(rhs = rhsSemiTransformed)
if (tree.symbol.owner.isClass) {
val labelDef = fullyParameterizedDef(label, dummyDefDef, abstractOverClass = defIsTopLevel)
val call = forwarder(label, dd, abstractOverClass = defIsTopLevel, liftThisType = true)
Block(List(labelDef), call)
} else { // inner method. Tail recursion does not change `this`
val labelDef = polyDefDef(label, trefs => vrefss => {
val origMeth = tree.symbol
val origTParams = tree.tparams.map(_.symbol)
val origVParams = tree.vparamss.flatten map (_.symbol)
new TreeTypeMap(
typeMap = identity(_)
.substDealias(origTParams, trefs)
.subst(origVParams, vrefss.flatten.map(_.tpe)),
oldOwners = origMeth :: Nil,
newOwners = label :: Nil
).transform(rhsSemiTransformed)
})
val callIntoLabel = (
if (dd.tparams.isEmpty) ref(label)
else ref(label).appliedToTypes(dd.tparams.map(_.tpe))
).appliedToArgss(vparamss0.map(_.map(x=> ref(x.symbol))))
Block(List(labelDef), callIntoLabel)
}} else {
if (mandatory) ctx.error(
"TailRec optimisation not applicable, method not tail recursive",
// FIXME: want to report this error on `dd.namePos`, but
// because of extension method getting a weird pos, it is
// better to report on symbol so there's no overlap
sym.pos
)
dd.rhs
}
})
}
case d: DefDef if d.symbol.hasAnnotation(defn.TailrecAnnot) || methodsWithInnerAnnots.contains(d.symbol) =>
ctx.error("TailRec optimisation not applicable, method is neither private nor final so can be overridden", sym.pos)
d
case d if d.symbol.hasAnnotation(defn.TailrecAnnot) || methodsWithInnerAnnots.contains(d.symbol) =>
ctx.error("TailRec optimisation not applicable, not a method", sym.pos)
d
case _ => tree
}
}
class TailRecElimination(method: Symbol, methTparams: List[Tree], enclosingClass: Symbol, thisType: Type, isMandatory: Boolean, label: Symbol, abstractOverClass: Boolean) extends tpd.TreeMap {
import dotty.tools.dotc.ast.tpd._
var rewrote = false
private val defaultReason = "it contains a recursive call not in tail position"
private var ctx: TailContext = yesTailContext
/** Rewrite this tree to contain no tail recursive calls */
def transform(tree: Tree, nctx: TailContext)(implicit c: Context): Tree = {
if (ctx == nctx) transform(tree)
else {
val saved = ctx
ctx = nctx
try transform(tree)
finally this.ctx = saved
}
}
def yesTailTransform(tree: Tree)(implicit c: Context): Tree =
transform(tree, yesTailContext)
def noTailTransform(tree: Tree)(implicit c: Context): Tree =
transform(tree, noTailContext)
def noTailTransforms[Tr <: Tree](trees: List[Tr])(implicit c: Context): List[Tr] =
trees.map(noTailTransform).asInstanceOf[List[Tr]]
override def transform(tree: Tree)(implicit c: Context): Tree = {
/* A possibly polymorphic apply to be considered for tail call transformation. */
def rewriteApply(tree: Tree, sym: Symbol, required: Boolean = false): Tree = {
def receiverArgumentsAndSymbol(t: Tree, accArgs: List[List[Tree]] = Nil, accT: List[Tree] = Nil):
(Tree, Tree, List[List[Tree]], List[Tree], Symbol) = t match {
case TypeApply(fun, targs) if fun.symbol eq t.symbol => receiverArgumentsAndSymbol(fun, accArgs, targs)
case Apply(fn, args) if fn.symbol == t.symbol => receiverArgumentsAndSymbol(fn, args :: accArgs, accT)
case Select(qual, _) => (qual, t, accArgs, accT, t.symbol)
case x: This => (x, x, accArgs, accT, x.symbol)
case x: Ident if x.symbol eq method => (EmptyTree, x, accArgs, accT, x.symbol)
case x => (x, x, accArgs, accT, x.symbol)
}
val (prefix, call, arguments, typeArguments, symbol) = receiverArgumentsAndSymbol(tree)
val hasConformingTargs = (typeArguments zip methTparams).forall{x => x._1.tpe <:< x._2.tpe}
val targs = typeArguments.map(noTailTransform)
val argumentss = arguments.map(noTailTransforms)
val isRecursiveCall = (method eq sym)
val recvWiden = prefix.tpe.widenDealias
def continue = {
val method = noTailTransform(call)
val methodWithTargs = if (targs.nonEmpty) TypeApply(method, targs) else method
if (methodWithTargs.tpe.widen.isParameterless) methodWithTargs
else argumentss.foldLeft(methodWithTargs) {
// case (method, args) => Apply(method, args) // Dotty deviation no auto-detupling yet. Interesting that one can do it in Scala2!
(method, args) => Apply(method, args)
}
}
def fail(reason: String) = {
if (isMandatory || required) c.error(s"Cannot rewrite recursive call: $reason", tree.pos)
else c.debuglog("Cannot rewrite recursive call at: " + tree.pos + " because: " + reason)
continue
}
if (isRecursiveCall) {
if (ctx.tailPos) {
val receiverIsSame = enclosingClass.typeRef.widenDealias =:= recvWiden
val receiverIsThis = prefix.tpe =:= thisType || prefix.tpe.widen =:= thisType
def rewriteTailCall(recv: Tree): Tree = {
c.debuglog("Rewriting tail recursive call: " + tree.pos)
rewrote = true
val receiver = noTailTransform(recv)
val callTargs: List[tpd.Tree] =
if (abstractOverClass) {
val classTypeArgs = recv.tpe.baseTypeWithArgs(enclosingClass).argInfos
targs ::: classTypeArgs.map(x => ref(x.typeSymbol))
} else targs
val method = if (callTargs.nonEmpty) TypeApply(Ident(label.termRef), callTargs) else Ident(label.termRef)
val thisPassed =
if (this.method.owner.isClass)
method.appliedTo(receiver.ensureConforms(method.tpe.widen.firstParamTypes.head))
else method
val res =
if (thisPassed.tpe.widen.isParameterless) thisPassed
else argumentss.foldLeft(thisPassed) {
(met, ar) => Apply(met, ar) // Dotty deviation no auto-detupling yet.
}
res
}
if (!hasConformingTargs) fail("it changes type arguments on a polymorphic recursive call")
else {
val recv = noTailTransform(prefix)
if (recv eq EmptyTree) rewriteTailCall(This(enclosingClass.asClass))
else if (receiverIsSame || receiverIsThis) rewriteTailCall(recv)
else fail("it changes type of 'this' on a polymorphic recursive call")
}
}
else fail(defaultReason)
} else {
val receiverIsSuper = (method.name eq sym.name) && enclosingClass.typeRef.widen <:< recvWiden
if (receiverIsSuper) fail("it contains a recursive call targeting a supertype")
else continue
}
}
def rewriteTry(tree: Try): Try = {
if (tree.finalizer eq EmptyTree) {
// SI-1672 Catches are in tail position when there is no finalizer
tpd.cpy.Try(tree)(
noTailTransform(tree.expr),
transformSub(tree.cases),
EmptyTree
)
}
else {
tpd.cpy.Try(tree)(
noTailTransform(tree.expr),
noTailTransforms(tree.cases),
noTailTransform(tree.finalizer)
)
}
}
val res: Tree = tree match {
case Ident(qual) =>
val sym = tree.symbol
if (sym == method && ctx.tailPos) rewriteApply(tree, sym)
else tree
case tree: Select =>
val sym = tree.symbol
if (sym == method && ctx.tailPos) rewriteApply(tree, sym)
else tpd.cpy.Select(tree)(noTailTransform(tree.qualifier), tree.name)
case Apply(fun, args) =>
val meth = fun.symbol
if (meth == defn.Boolean_|| || meth == defn.Boolean_&&)
tpd.cpy.Apply(tree)(fun, transform(args))
else
rewriteApply(tree, meth)
case TypeApply(fun, targs) =>
val meth = fun.symbol
rewriteApply(tree, meth)
case tree@Block(stats, expr) =>
tpd.cpy.Block(tree)(
noTailTransforms(stats),
transform(expr)
)
case tree @ Typed(t: Apply, tpt) if tpt.tpe.hasAnnotation(defn.TailrecAnnot) =>
tpd.Typed(rewriteApply(t, t.fun.symbol, required = true), tpt)
case tree@If(cond, thenp, elsep) =>
tpd.cpy.If(tree)(
noTailTransform(cond),
transform(thenp),
transform(elsep)
)
case tree@CaseDef(_, _, body) =>
cpy.CaseDef(tree)(body = transform(body))
case tree@Match(selector, cases) =>
tpd.cpy.Match(tree)(
noTailTransform(selector),
transformSub(cases)
)
case tree: Try =>
rewriteTry(tree)
case Alternative(_) | Bind(_, _) =>
assert(false, "We should never have gotten inside a pattern")
tree
case t @ DefDef(_, _, _, _, _) =>
t // TODO: could improve to handle label-flagged DefDefs whose calls are in tail position
case ValDef(_, _, _) | EmptyTree | Super(_, _) | This(_) |
Literal(_) | TypeTree() | TypeDef(_, _) =>
tree
case Return(expr, from) =>
tpd.cpy.Return(tree)(noTailTransform(expr), from)
case _ =>
super.transform(tree)
}
res
}
}
/** If references to original `target` from fully parameterized method `derived` should be
* rewired to some fully parameterized method, that method symbol,
* otherwise NoSymbol.
*/
override protected def rewiredTarget(target: Symbol, derived: Symbol)(implicit ctx: Context): Symbol = NoSymbol
}
object TailRec {
final class TailContext(val tailPos: Boolean) extends AnyVal
final val noTailContext = new TailContext(false)
final val yesTailContext = new TailContext(true)
}
|