Commit 0ea6a635 authored by Zied SELLAMI's avatar Zied SELLAMI
Browse files

Initial commit

parent 48273af6
......@@ -5,6 +5,7 @@ import org.apache.spark.sql.SparkSession
object DataReader {
def writeFile(fileName: String, content: String)={
new File(fileName).getParentFile.mkdirs()
val pw = new PrintWriter(new File(fileName))
pw.write(content)
pw.close
......
......@@ -44,12 +44,12 @@ object Runner {
def main(args: Array[String]) ={
require(args.length == 4, "USAGE: projectName milestoneName privateToken tmpDir")
require(args.length == 5, "USAGE: projectName milestoneName privateToken tmpDir chartOutputPath")
implicit val sparkSession = SparkSession
.builder()
.appName("SparkSession for Burndownchart")
// .master("local[*]")
.master("local[*]")
.getOrCreate()
import sparkSession.implicits._
......@@ -59,6 +59,7 @@ object Runner {
implicit val privateToken = args(2)
val dataDir = args(3) +"/burndownchart"
val chartPath = args(4)
val title = s"Project Name: $projectName - Milestone: $milestoneName"
......@@ -119,26 +120,27 @@ object Runner {
val operationsHistory = addRemoveOperationsHistory.union(closedOperationsHistory).union(inferedAddOperations).sort(desc("id"),desc("timestamp"))
operationsHistory.show(50, false)
//Calculate the initial score of the Milestone
val initialScore = extractInitialScore(issuesDF)
val initialScore = processInitialScore(startDate, operationsHistory, issuesDF)
//building ideal and actual graph
//building ideal graph
val idealGraphValues = buildIdealLineGraphValues(startDate, dueDate, initialScore)
idealGraphValues.foreach(println)
//build actual graph
val actualGraphValues = buildActualLineGraphValues(startDate, operationsHistory, initialScore, idealGraphValues)
actualGraphValues.foreach(println)
//converting and saving graphs as html file
val html = GraphBuilder.getHTMLGraphCode(idealGraphValues, actualGraphValues , title)
DataReader.writeFile(dataDir+"/chart.html", html)
DataReader.writeFile(chartPath, html)
println(s"Burndown chart saved on ${dataDir}/chart.html")
println(s"Burndown chart saved on ${chartPath}")
}
......@@ -177,7 +179,6 @@ object Runner {
tmp_total
}
//https://ci.linagora.com/linagora/lgs/openpaas/linagora.esn.chat/milestones/11
val operationsValue = operationsHistory
.filter($"date" > startDate)
.select($"date", $"value")
......@@ -231,79 +232,28 @@ object Runner {
dateBefore
}
/** Computes the milestone's initial score at `startDate`.
  *
  * The initial score is the sum of all estimation operations recorded on or
  * before the milestone start date, plus the score of issues that have no
  * recorded operations history yet (see [[processScoreWithoutNotes]]).
  *
  * @param startDate         milestone start date; operations strictly after it are ignored
  * @param operationsHistory per-issue estimation operations (add/remove/close)
  * @param issuesDF          all issues of the milestone
  * @return the initial score; operations contribute 0 when none predate `startDate`
  */
private def processInitialScore(startDate: Date, operationsHistory: Dataset[Operation], issuesDF: Dataset[Issue])(implicit sparkSession: SparkSession): Float = {
  import sparkSession.implicits._
  // Score of issues that never appear in the operations history.
  val scoreWithoutHistory = processScoreWithoutNotes(issuesDF, operationsHistory)
  // Daily totals of estimation changes, newest first (displayed for debugging).
  val operationsValue = operationsHistory
    .select($"date", $"value")
    .groupBy($"date")
    .sum("value")
    .sort(desc("date"))
  operationsValue.show()
  // Sum of all operations recorded on or before the start date.
  // NOTE: sum(...) over zero matching rows yields a single row holding null;
  // the previous getAs[Double] call NPE'd (null unboxing) in that case,
  // so check for null before converting.
  val estimationRow = operationsValue
    .filter($"date" <= startDate)
    .select(sum("sum(value)").alias("estimation"))
    .head()
  val historicalScore =
    if (estimationRow.isNullAt(0)) 0f else estimationRow.getDouble(0).toFloat
  val initialScore = historicalScore + scoreWithoutHistory
  println(s"Score without history $scoreWithoutHistory")
  println(s"Initial score $initialScore")
  initialScore
}
private def processCurrentScore(issuesDF: Dataset[Issue])(implicit sparkSession: SparkSession): Float = {
private def extractInitialScore(issuesDF: Dataset[Issue])(implicit sparkSession: SparkSession): Float = {
import sparkSession.implicits._
val extractScoreFromLabels: Seq[String] => Float ={
labels => (labels.map(value => if(Try(value.toFloat).isSuccess) {value.toFloat} else -1f) ++ Seq(0f)).filter(_ >= 0f).reduce((x, y) => x+y)
labels => (labels.map(label => {
if(label.startsWith("/")){
val value = label.replace("/","")
value.toFloat
} else -1f
}) ++ Seq(0f)).filter(_ >= 0f).reduce((x, y) => x+y)
}
val computeIssueScore = udf(extractScoreFromLabels)
issuesDF
.filter(_.state.equals("closed") == false)
.withColumn("issue_score",computeIssueScore($"labels"))
.select(sum("issue_score").alias("score"))
.head().getAs[Double]("score").toFloat
.select(sum("issue_score"))
.head().getAs[Double]("sum(issue_score)").toFloat
}
/** Sums the estimated score of open issues that have no entry in the
  * operations history (i.e. issues whose work has not started yet).
  *
  * An issue's score is the sum of its labels that parse as a non-negative
  * Float; non-numeric labels are ignored.
  *
  * @param issuesDF          all issues of the milestone
  * @param operationsHistory per-issue estimation operations; its `id`s identify started issues
  * @return total score of issues absent from the history; 0 when there are none
  */
private def processScoreWithoutNotes(issuesDF: Dataset[Issue], operationsHistory: Dataset[Operation])(implicit sparkSession: SparkSession): Float = {
  import sparkSession.implicits._
  // A label contributes its numeric value when it parses as a non-negative
  // Float; anything else is dropped. Empty label lists sum to 0.
  val extractScoreFromLabels: Seq[String] => Float = labels =>
    labels.flatMap(label => Try(label.toFloat).toOption).filter(_ >= 0f).sum
  val computeIssueScore = udf(extractScoreFromLabels)
  // Collected as a Set for O(1) membership tests (was Array.contains, O(n) per issue).
  val issuesWithHistory = operationsHistory.map(_.id).distinct().collect().toSet
  val unstartedIssues = issuesDF.filter(issue => !issuesWithHistory.contains(issue.iid))
  // NOTE: sum(...) over zero rows yields a single row holding null; the
  // previous getAs[Double] call NPE'd (null unboxing) when every issue had
  // history, so check for null before converting.
  val scoreRow = unstartedIssues
    .withColumn("issue_score", computeIssueScore($"labels"))
    .select(sum("issue_score").alias("score"))
    .head()
  if (scoreRow.isNullAt(0)) 0f else scoreRow.getDouble(0).toFloat
}
private def processAddRemoveNoteOperation(note: Note, labelsScoreMap: Map[Long, Float], issuesIdIid: Map[Long, Long]): Seq[Operation] = {
val addedRegex = "added (~[\\d+\\s*]+)".r
......@@ -335,7 +285,7 @@ object Runner {
private def processClosedNoteOperation(note: Note, labelsScoreMap: Map[Long, Float], issuesIdIid: Map[Long, Long], operationsHistory: Seq[Operation]): Seq[Operation] = {
val closedValue = if(note.body.equals("closed")){
/* val closedValue = if(note.body.equals("closed")){
val value = operationsHistory.filter(operation =>
(operation.id == issuesIdIid.get(note.noteable_id).get) &&
......@@ -353,7 +303,9 @@ object Runner {
Seq(Operation(issuesIdIid.get(note.noteable_id).get, Date.valueOf(note.created_at.toLocalDateTime.toLocalDate), note.created_at, CLOSED, closedValue))
}else{
Seq()
}
}*/
Seq()
}
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment