Skip to content

Commit 392ea9d

Browse files
committed
Added Regression Metrics for multi-output regression
1 parent 6c1e7ea commit 392ea9d

File tree

2 files changed

+137
-2
lines changed

2 files changed

+137
-2
lines changed

dynaml-core/src/main/scala-2.11/io/github/mandar2812/dynaml/evaluation/RegressionMetrics.scala

Lines changed: 120 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -19,10 +19,11 @@ under the License.
1919
package io.github.mandar2812.dynaml.evaluation
2020

2121
import breeze.linalg.DenseVector
22+
import breeze.numerics.{abs, log, sqrt}
2223
import io.github.mandar2812.dynaml.utils
23-
import org.apache.log4j.{Priority, Logger}
24-
24+
import org.apache.log4j.{Logger, Priority}
2525
import com.quantifind.charts.Highcharts._
26+
import io.github.mandar2812.dynaml.utils.square
2627

2728
/**
2829
* Class implementing the calculation
@@ -138,3 +139,120 @@ object RegressionMetrics {
138139
(scoresAndLabels.map(_._2).max - scoresAndLabels.map(_._2).min)
139140

140141
}
142+
143+
/**
  * Evaluation metrics for multi-output (vector-valued) regression.
  *
  * Each metric is computed independently per output dimension, so every
  * member below is a [[DenseVector]] of length `num_outputs`.
  *
  * @param scoresAndLabels List of (prediction, target) pairs; both vectors
  *                        must have the same dimensionality.
  * @param len Number of data points in `scoresAndLabels`.
  */
class MultiRegressionMetrics(override protected val scoresAndLabels: List[(DenseVector[Double], DenseVector[Double])],
                             val len: Int)
  extends Metrics[DenseVector[Double]] {
  private val logger = Logger.getLogger(this.getClass)

  // Output dimensionality, taken from the first target vector.
  // NOTE(review): assumes scoresAndLabels is non-empty — head throws on Nil.
  val num_outputs: Int = scoresAndLabels.head._2.length

  // Vector of ones, used as the "+1" offset term in the RMSLE computation.
  val onesVec = DenseVector.ones[Double](num_outputs)

  // Sample size replicated per output dimension, for elementwise division.
  val length: DenseVector[Double] = DenseVector.fill(num_outputs)(len)

  // Root mean square error per output: sqrt(sum((pred - target)^2) / n).
  val rmse: DenseVector[Double] = sqrt(scoresAndLabels.map((p) =>
    square(p._1-p._2)).reduce((a: DenseVector[Double],b:DenseVector[Double]) => a+b):/length)

  // Mean absolute error per output: sum(|pred - target|) / n.
  val mae: DenseVector[Double] = scoresAndLabels.map((p) =>
    abs(p._1 - p._2)).reduce((a: DenseVector[Double],b:DenseVector[Double]) => a+b):/length

  // Root mean square log error per output, computed on log(1 + |value|).
  // NOTE(review): uses abs() on both predictions and targets, so negative
  // values are folded into the positive range — confirm this is intended
  // rather than requiring non-negative data.
  val rmsle: DenseVector[Double] = sqrt(scoresAndLabels.map((p) =>
    square(log(onesVec + abs(p._1)) - log(abs(p._2) + onesVec)))
    .reduce((a: DenseVector[Double],b:DenseVector[Double]) => a+b):/length)

  // Coefficient of determination (R^2), one entry per output dimension.
  val Rsq: DenseVector[Double] = MultiRegressionMetrics.computeRsq(scoresAndLabels, length)

  // Pearson correlation between predictions and targets, per output.
  val corr: DenseVector[Double] = MultiRegressionMetrics.computeCorr(scoresAndLabels, length)

  // Mean squared error per output (sum of squared residuals / n).
  val predictionEfficiency = scoresAndLabels.map((p) =>
    square(p._1 - p._2)).reduce((a: DenseVector[Double],b:DenseVector[Double]) => a+b)/length

  // Model yield per output, delegated to the companion object (which in turn
  // reuses the scalar RegressionMetrics.computeYield per dimension).
  val modelYield: DenseVector[Double] = MultiRegressionMetrics.computeYield(scoresAndLabels, length)

  // Sample standard deviation of the residuals per output (n - 1 denominator).
  // NOTE(review): relies on utils.getStats returning (mean, sum-of-squared-
  // deviations) as its second element — confirm against the utils source.
  val sigma: DenseVector[Double] =
    sqrt(utils.getStats(this.residuals().map(_._1))._2/(length - 1.0))

  /** Residual pairs: (target - prediction, prediction) for each data point. */
  def residuals() = this.scoresAndLabels.map((s) => (s._2 - s._1, s._1))

  /** Accessor for the underlying (prediction, target) pairs. */
  def scores_and_labels() = this.scoresAndLabels

  /** Log a per-dimension summary of all computed metrics. */
  override def print(): Unit = {
    logger.info("Regression Model Performance: "+name)
    logger.info("============================")
    logger.info("MAE: \n" + mae)
    logger.info("RMSE: \n" + rmse)
    logger.info("RMSLE: \n" + rmsle)
    logger.info("R^2: \n" + Rsq)
    logger.info("Corr. Coefficient: \n" + corr)
    logger.info("Model Yield: \n"+modelYield)
    logger.info("Std Dev of Residuals: \n" + sigma)
  }

  // Key performance indicators bundled into a single vector.
  // NOTE(review): this builds a DenseVector *of* DenseVectors — verify this
  // matches the kpi() signature declared on the Metrics trait.
  override def kpi() = DenseVector(mae, rmse, Rsq, corr)

  /** Plot generation is currently disabled for the multi-output case. */
  override def generatePlots(): Unit = {
    //logger.info("Generating Plot of Residuals")
    //generateResidualPlot()
    //generateFitPlot()
  }

}
203+
204+
/**
  * Companion object holding the per-dimension statistical helpers used by
  * [[MultiRegressionMetrics]]. All helpers operate elementwise, returning one
  * value per output dimension.
  */
object MultiRegressionMetrics {
  /**
    * Coefficient of determination R^2 per output dimension:
    * 1 - SSres / SStot, computed elementwise.
    *
    * @param scoresAndLabels (prediction, target) pairs.
    * @param size Sample size replicated per dimension (for elementwise division).
    * @return Vector of R^2 values, one per output dimension.
    */
  def computeRsq(scoresAndLabels: Iterable[(DenseVector[Double], DenseVector[Double])],
                 size: DenseVector[Double]): DenseVector[Double] = {

    // NOTE(review): head throws on an empty iterable — callers must pass data.
    val num_outputs = scoresAndLabels.head._2.length
    // Per-dimension mean of the targets.
    val mean: DenseVector[Double] =
      scoresAndLabels.map{_._2}.reduce((a: DenseVector[Double],b:DenseVector[Double]) => a+b):/size

    // Accumulators: residual sum of squares and total sum of squares.
    var SSres = DenseVector.zeros[Double](num_outputs)
    var SStot = DenseVector.zeros[Double](num_outputs)

    scoresAndLabels.foreach((couple) => {
      // :+= mutates the accumulator in place, elementwise.
      SSres :+= square(couple._2 - couple._1)
      SStot :+= square(couple._2 - mean)
    })

    DenseVector.ones[Double](num_outputs) - (SSres:/SStot)
  }

  /**
    * Pearson correlation coefficient between predictions and targets,
    * computed independently for each output dimension.
    *
    * @param scoresAndLabels (prediction, target) pairs.
    * @param size Sample size replicated per dimension.
    * @return Vector of correlation coefficients, one per dimension.
    */
  def computeCorr(scoresAndLabels: Iterable[(DenseVector[Double], DenseVector[Double])],
                  size: DenseVector[Double]): DenseVector[Double] = {

    val num_outputs = scoresAndLabels.head._2.length

    // Per-dimension means of targets and predictions.
    val meanLabel: DenseVector[Double] = scoresAndLabels.map{_._2}
      .reduce((a: DenseVector[Double],b:DenseVector[Double]) => a+b):/size

    val meanScore = scoresAndLabels.map{_._1}
      .reduce((a: DenseVector[Double],b:DenseVector[Double]) => a+b):/size

    // Sums of squared deviations and of cross-deviations, per dimension.
    var SSLabel = DenseVector.zeros[Double](num_outputs)
    var SSPred = DenseVector.zeros[Double](num_outputs)
    var SSLabelPred = DenseVector.zeros[Double](num_outputs)

    scoresAndLabels.foreach((couple) => {
      SSLabel :+= square(couple._2 - meanLabel)
      SSPred :+= square(couple._1 - meanScore)
      // :* is elementwise multiplication.
      SSLabelPred :+= (couple._1 - meanScore) :* (couple._2 - meanLabel)
    })

    // corr = cov / (sd_pred * sd_label), elementwise.
    SSLabelPred:/(sqrt(SSPred):*sqrt(SSLabel))
  }

  /**
    * Model yield per output dimension, delegating each dimension to the
    * scalar [[RegressionMetrics.computeYield]].
    *
    * @param scoresAndLabels (prediction, target) pairs.
    * @param size Sample size replicated per dimension; only size(0) is used,
    *             since every entry holds the same count.
    * @return Vector of yield values, one per dimension.
    */
  def computeYield(scoresAndLabels: Iterable[(DenseVector[Double], DenseVector[Double])],
                   size: DenseVector[Double]): DenseVector[Double] = {
    val num_outputs = scoresAndLabels.head._2.length
    DenseVector.tabulate[Double](num_outputs)(dimension => {
      //for each dimension, calculate the model yield
      RegressionMetrics.computeYield(
        scoresAndLabels.map(c => (c._1(dimension), c._2(dimension))),
        size(0).toInt)
    })
  }
}
258+
Lines changed: 17 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,17 @@
1+
package io.github.mandar2812.dynaml.utils
2+
3+
import breeze.generic.UFunc
4+
import breeze.linalg.DenseVector
5+
6+
/**
7+
* Created by mandar on 17/6/16.
8+
*/
9+
/**
  * Elementwise squaring exposed as a breeze [[UFunc]].
  *
  * Provides two implementations: one for a plain `Double` scalar and one
  * that maps the operation over every entry of a `DenseVector[Double]`.
  */
object square extends UFunc {
  /** Scalar case: square a single `Double`. */
  implicit object implDouble extends Impl[Double, Double] {
    def apply(a: Double): Double = math.pow(a, 2.0)
  }

  /** Vector case: square each entry of a `DenseVector[Double]`. */
  implicit object implDV extends Impl[DenseVector[Double], DenseVector[Double]] {
    def apply(a: DenseVector[Double]): DenseVector[Double] =
      a.map(math.pow(_, 2.0))
  }
}

0 commit comments

Comments
 (0)